diff --git a/antialiasing.py b/antialiasing.py index f44141e..fb49435 100644 --- a/antialiasing.py +++ b/antialiasing.py @@ -5,8 +5,10 @@ def regularize_gabornet( - model, kernel_size, target, fn, method, factor, gauss_stddevs=1.0, gauss_factor=0.5 + model, horizon, factor, target="gabor", fn="l2_relu", method="together", gauss_stddevs=1.0 ): + """Regularize a FlexNet. + """ # if method != "summed": # raise NotImplementedError() @@ -35,7 +37,7 @@ def regularize_gabornet( else: flexconv_freqs = magnet_freqs - nyquist_freq = torch.ones_like(flexconv_freqs) * nyquist_frequency(kernel_size) + nyquist_freq = torch.ones_like(flexconv_freqs) * nyquist_frequency(horizon) elif method == "together" and target == "gabor": if masks: @@ -44,7 +46,7 @@ def regularize_gabornet( flexconv_freqs = magnet_freqs # Divide Nyquist frequency by amount of filters in each layer - nyquist_freq = torch.ones_like(flexconv_freqs) * nyquist_frequency(kernel_size) + nyquist_freq = torch.ones_like(flexconv_freqs) * nyquist_frequency(horizon) nyquist_freq = nyquist_freq / nyquist_freq.shape[1] elif method in ["together", "together+mask"] and target == "gabor+mask": @@ -59,14 +61,9 @@ def regularize_gabornet( raise NotImplementedError() # Divide Nyquist frequency by amount of filters in each layer - nyquist_freq = torch.ones_like(flexconv_freqs) * nyquist_frequency(kernel_size) + nyquist_freq = torch.ones_like(flexconv_freqs) * nyquist_frequency(horizon) nyquist_freq = nyquist_freq / nyquist_freq.shape[1] - # if method == "distributed": - # # Further divide Nyquist freq between sines and gausses - # nyquist_freq[:, :, 0] = nyquist_freq[:, :, 0] * (1.0 - gauss_factor) - # nyquist_freq[:, :, 1] = nyquist_freq[:, :, 1] * gauss_factor - if fn == "l2_relu": # L2 ReLU return factor * l2_relu(flexconv_freqs, nyquist_freq) diff --git a/cfg/config.yaml b/cfg/config.yaml index 65b5cf8..1d22106 100644 --- a/cfg/config.yaml +++ b/cfg/config.yaml @@ -121,16 +121,9 @@ profile: time: False summary: [0, ] summary_depth: 5 - - - - - - - - - - - - - +testcase: + load: False + save: False + epochs: 1 + batches: 20 + path: "" \ No newline at end of file diff --git a/ckconv/nn/ckconv.py b/ckconv/nn/ckconv.py index de544b8..faa42d3 100644 --- a/ckconv/nn/ckconv.py +++ b/ckconv/nn/ckconv.py @@ -18,62 +18,54 @@ def __init__( self, in_channels: int, out_channels: int, - kernel_config: OmegaConf, - conv_config: OmegaConf, + horizon: int, + kernel_type = "MAGNet", + kernel_dim_linear = 2, + kernel_no_hidden = 32, + kernel_no_layers = 3, + kernel_activ_function = "ReLU", + kernel_norm = "BatchNorm", + kernel_omega_0 = 0.0, + kernel_learn_omega_0 = False, + kernel_weight_norm = False, + kernel_steerable = False, + kernel_init_spatial_value = 1.0, + kernel_bias_init = None, + kernel_input_scale = 25.6, + kernel_sampling_rate_norm = 1.0, + conv_use_fft = False, + conv_bias = True, + conv_padding = "same", + conv_stride = 1, ): """ Continuous Kernel Convolution. :param in_channels: Number of channels in the input signal :param out_channels: Number of channels produced by the convolution - :param kernel_config: OmegaConf with settings for the kernel generator. - :param type: Identifier for the type of kernel generator to use. - :param dim_linear: Dimensionality of the input signal, e.g. 2 for images. - :param no_hidden: Amount of hidden channels to use. - :param activ_function: Activation function for type=MLP. - :param norm: Normalization function for type=MLP. - :param weight_norm: Weight normalization, for type=[MLP, SIREN, nSIREN]. 
- :param no_layers: Amount of layers to use in kernel generator. - :param omega_0: Initial value for omega_0, for type=SIREN. - :param learn_omega_0: Whether to learn omega_0, for type=SIREN. - :param steerable: Whether to learn steerable kernels, for type=MAGNet. - :param init_spatial_value: Initial mu for gabor filters, for type=[GaborNet, MAGNet]. - :param bias_init: Bias init strategy, for all types but type=MLP. - :param input_scale: Scaling factor for linear functions, for type=[GaborNet, MAGNet]. - :param sampling_rate_norm: Kernel scaling factor for sampling rate normalization. - :param conv_config: OmegaConf with settings for the convolutional operator. - :param use_fft: Whether to use FFT implementation of convolution. - :param horizon: Maximum kernel size. Recommended to be odd and cover the entire image. - :param bias: Whether to use bias in kernel generator. TODO(rjbruin): move to kernel_config. - :param padding: Padding strategy for convolution. - :param stride: Stride applied in convolution. + :param horizon: Maximum kernel size. Recommended to be odd and cover the entire image. + :param kernel_type: Identifier for the type of kernel generator to use. + :param kernel_dim_linear: Dimensionality of the input signal, e.g. 2 for images. + :param kernel_no_hidden: Amount of hidden channels to use. + :param kernel_activ_function: Activation function for type=MLP. + :param kernel_norm: Normalization function for type=MLP. + :param kernel_weight_norm: Weight normalization, for type=[MLP, SIREN, nSIREN]. + :param kernel_no_layers: Amount of layers to use in kernel generator. + :param kernel_omega_0: Initial value for omega_0, for type=SIREN. + :param kernel_learn_omega_0: Whether to learn omega_0, for type=SIREN. + :param kernel_steerable: Whether to learn steerable kernels, for type=MAGNet. + :param kernel_init_spatial_value: Initial mu for gabor filters, for type=[GaborNet, MAGNet]. + :param kernel_bias_init: Bias init strategy, for all types but type=MLP. + :param kernel_input_scale: Scaling factor for linear functions, for type=[GaborNet, MAGNet]. + :param kernel_sampling_rate_norm: Kernel scaling factor for sampling rate normalization. + :param conv_use_fft: Whether to use FFT implementation of convolution. + :param conv_bias: Whether to use bias in kernel generator. TODO(rjbruin): move to kernel_config. + :param conv_padding: Padding strategy for convolution. + :param conv_stride: Stride applied in convolution. """ super().__init__() - # Unpack values from kernel_config - kernel_type = kernel_config.type - kernel_dim_linear = kernel_config.dim_linear - kernel_hidden_channels = kernel_config.no_hidden - kernel_activ_function = kernel_config.activ_function - kernel_norm = kernel_config.norm - kernel_weight_norm = kernel_config.weight_norm - kernel_no_layers = kernel_config.no_layers - kernel_omega_0 = kernel_config.omega_0 - kernel_learn_omega_0 = kernel_config.learn_omega_0 - kernel_steerable = kernel_config.steerable - kernel_init_spatial_value = kernel_config.init_spatial_value - kernel_bias_init = kernel_config.bias_init - kernel_input_scale = kernel_config.input_scale - kernel_sampling_rate_norm = kernel_config.sampling_rate_norm - - # Unpack values from conv_config - use_fftconv = conv_config.use_fft - horizon = conv_config.horizon - bias = conv_config.bias - padding = conv_config.padding - stride = conv_config.stride - # Since kernels are defined between [-1, 1] if values are bigger than one, they are modified. 
if kernel_init_spatial_value > 1.0: kernel_init_spatial_value = 1.0 @@ -87,21 +79,21 @@ def __init__( self.Kernel = ckconv.nn.ck.MLP( dim_linear=kernel_dim_linear, out_channels=out_channels * in_channels, - hidden_channels=kernel_hidden_channels, + hidden_channels=kernel_no_hidden, activation_function=kernel_activ_function, norm_type=kernel_norm, weight_norm=kernel_weight_norm, no_layers=kernel_no_layers, - bias=bias, + bias=conv_bias, ) if kernel_type == "SIREN": self.Kernel = ckconv.nn.ck.SIREN( dim_linear=kernel_dim_linear, out_channels=out_channels * in_channels, - hidden_channels=kernel_hidden_channels, + hidden_channels=kernel_no_hidden, weight_norm=kernel_weight_norm, no_layers=kernel_no_layers, - bias=bias, + bias=conv_bias, bias_init=kernel_bias_init, omega_0=kernel_omega_0, learn_omega_0=kernel_learn_omega_0, @@ -110,10 +102,10 @@ def __init__( self.Kernel = ckconv.nn.ck.nSIREN( dim_linear=kernel_dim_linear, out_channels=out_channels * in_channels, - hidden_channels=kernel_hidden_channels, + hidden_channels=kernel_no_hidden, weight_norm=kernel_weight_norm, no_layers=kernel_no_layers, - bias=bias, + bias=conv_bias, bias_init=kernel_bias_init, omega_0=kernel_omega_0, learn_omega_0=kernel_learn_omega_0, @@ -122,18 +114,18 @@ def __init__( self.Kernel = ckconv.nn.ck.FourierNet( dim_linear=kernel_dim_linear, out_channels=out_channels * in_channels, - hidden_channels=kernel_hidden_channels, + hidden_channels=kernel_no_hidden, no_layers=kernel_no_layers, - bias=bias, + bias=conv_bias, bias_init=kernel_bias_init, ) elif kernel_type == "Gabor": self.Kernel = ckconv.nn.ck.GaborNet( dim_linear=kernel_dim_linear, out_channels=out_channels * in_channels, - hidden_channels=kernel_hidden_channels, + hidden_channels=kernel_no_hidden, no_layers=kernel_no_layers, - bias=bias, + bias=conv_bias, bias_init=kernel_bias_init, init_spatial_value=kernel_init_spatial_value, input_scale=kernel_input_scale, @@ -142,16 +134,16 @@ def __init__( self.Kernel = ckconv.nn.ck.MAGNet( dim_linear=kernel_dim_linear, out_channels=out_channels * in_channels, - hidden_channels=kernel_hidden_channels, + hidden_channels=kernel_no_hidden, no_layers=kernel_no_layers, steerable=kernel_steerable, - bias=bias, + bias=conv_bias, bias_init=kernel_bias_init, init_spatial_value=kernel_init_spatial_value, input_scale=kernel_input_scale, ) - if bias: + if conv_bias: self.bias = torch.nn.Parameter(torch.Tensor(out_channels)) self.bias.data.fill_(value=0.0) else: @@ -160,12 +152,12 @@ def __init__( # Save arguments in self # --------------------- # Non-persistent values - self.padding = padding - self.stride = stride + self.padding = conv_padding + self.stride = conv_stride self.rel_positions = None self.kernel_dim_linear = kernel_dim_linear self.horizon = horizon - self.use_fftconv = use_fftconv + self.use_fftconv = conv_use_fft self.kernel_sampling_rate_norm = kernel_sampling_rate_norm # Variable placeholders @@ -174,7 +166,7 @@ def __init__( # Define convolution type conv_type = "conv" - if use_fftconv: + if conv_use_fft: conv_type = "fft" + conv_type if kernel_dim_linear == 1: conv_type = "causal_" + conv_type @@ -228,53 +220,63 @@ def __init__( self, in_channels: int, out_channels: int, - kernel_config: OmegaConf, - conv_config: OmegaConf, - mask_config: OmegaConf, + horizon: int, + kernel_type = "MAGNet", + kernel_dim_linear = 2, + kernel_no_hidden = 32, + kernel_no_layers = 3, + kernel_activ_function = "ReLU", + kernel_norm = "BatchNorm", + kernel_omega_0 = 0.0, + kernel_learn_omega_0 = False, + kernel_weight_norm = False, + 
kernel_steerable = False, + kernel_init_spatial_value = 1.0, + kernel_bias_init = None, + kernel_input_scale = 25.6, + kernel_sampling_rate_norm = 1.0, + conv_use_fft = False, + conv_bias = True, + conv_padding = "same", + conv_stride = 1, + mask_use = True, + mask_type = "gaussian", + mask_init_value = 0.075, + mask_temperature = 15.0, + mask_dynamic_cropping = True, + mask_threshold = 0.1, ): """ Flexible Size Continuous Kernel Convolution. :param in_channels: Number of channels in the input signal :param out_channels: Number of channels produced by the convolution - :param kernel_config: OmegaConf with settings for the kernel generator. - :param type: Identifier for the type of kernel generator to use. - :param dim_linear: Dimensionality of the input signal, e.g. 2 for images. - :param no_hidden: Amount of hidden channels to use. - :param activ_function: Activation function for type=MLP. - :param norm: Normalization function for type=MLP. - :param weight_norm: Weight normalization, for type=[MLP, SIREN, nSIREN]. - :param no_layers: Amount of layers to use in kernel generator. - :param omega_0: Initial value for omega_0, for type=SIREN. - :param learn_omega_0: Whether to learn omega_0, for type=SIREN. - :param steerable: Whether to learn steerable kernels, for type=MAGNet. - :param init_spatial_value: Initial mu for gabor filters, for type=[GaborNet, MAGNet]. - :param bias_init: Bias init strategy, for all types but type=MLP. - :param input_scale: Scaling factor for linear functions, for type=[GaborNet, MAGNet]. - :param sampling_rate_norm: Kernel scaling factor for sampling rate normalization. - :param conv_config: OmegaConf with settings for the convolutional operator. - :param use_fft: Whether to use FFT implementation of convolution. - :param horizon: Maximum kernel size. Recommended to be odd and cover the entire image. - :param bias: Whether to use bias in kernel generator. TODO(rjbruin): move to kernel_config. - :param padding: Padding strategy for convolution. - :param stride: Stride applied in convolution. - :param mask_config: OmegaConf with settings for the FlexConv Gaussian mask. - :param use: Whether to apply Gaussian mask. - :param type: Type of mask. Recommended to use "gaussian". - :param init_value: Initial value for the size of the kernel. - :param temperature: Temperature of the sigmoid function, for type=sigmoid. - :param dynamic_cropping: Whether to crop away pixels below the threshold. - :param threshold: Threshold for cropping pixels. Recommended to be 15.0. + :param horizon: Maximum kernel size. Recommended to be odd and cover the entire image. + :param kernel_type: Identifier for the type of kernel generator to use. + :param kernel_dim_linear: Dimensionality of the input signal, e.g. 2 for images. + :param kernel_no_hidden: Amount of hidden channels to use. + :param kernel_activ_function: Activation function for type=MLP. + :param kernel_norm: Normalization function for type=MLP. + :param kernel_weight_norm: Weight normalization, for type=[MLP, SIREN, nSIREN]. + :param kernel_no_layers: Amount of layers to use in kernel generator. + :param kernel_omega_0: Initial value for omega_0, for type=SIREN. + :param kernel_learn_omega_0: Whether to learn omega_0, for type=SIREN. + :param kernel_steerable: Whether to learn steerable kernels, for type=MAGNet. + :param kernel_init_spatial_value: Initial mu for gabor filters, for type=[GaborNet, MAGNet]. + :param kernel_bias_init: Bias init strategy, for all types but type=MLP. 
+ :param kernel_input_scale: Scaling factor for linear functions, for type=[GaborNet, MAGNet]. + :param kernel_sampling_rate_norm: Kernel scaling factor for sampling rate normalization. + :param conv_use_fft: Whether to use FFT implementation of convolution. + :param conv_bias: Whether to use bias in kernel generator. TODO(rjbruin): move to kernel_config. + :param conv_padding: Padding strategy for convolution. + :param conv_stride: Stride applied in convolution. + :param mask_use: Whether to apply Gaussian mask. + :param mask_type: Type of mask. Recommended to use "gaussian". + :param mask_init_value: Initial value for the size of the kernel. + :param mask_temperature: Temperature of the sigmoid function, for type=sigmoid. + :param mask_dynamic_cropping: Whether to crop away pixels below the threshold. + :param mask_threshold: Threshold for cropping pixels. Recommended to be 15.0. """ - # Unpack mask_config values: - mask_use = mask_config.use - mask_type = mask_config.type - mask_init_value = mask_config.init_value - mask_temperature = mask_config.temperature - mask_dynamic_cropping = mask_config.dynamic_cropping - mask_threshold = mask_config.threshold - - # conv_cache = conv_config.cache """ Initialise init_spatial_value: @@ -295,14 +297,31 @@ def __init__( # Modify the kernel_config if required if init_spatial_value != mask_init_value: - kernel_config.init_spatial_value = init_spatial_value + kernel_init_spatial_value = init_spatial_value # Super super().__init__( - in_channels=in_channels, - out_channels=out_channels, - kernel_config=kernel_config, - conv_config=conv_config, + in_channels, + out_channels, + horizon, + kernel_type, + kernel_dim_linear, + kernel_no_hidden, + kernel_no_layers, + kernel_activ_function, + kernel_norm, + kernel_omega_0, + kernel_learn_omega_0, + kernel_weight_norm, + kernel_steerable, + kernel_init_spatial_value, + kernel_bias_init, + kernel_input_scale, + kernel_sampling_rate_norm, + conv_use_fft, + conv_bias, + conv_padding, + conv_stride, ) # Define convolution types diff --git a/demo/flexconv.ipynb b/demo/flexconv.ipynb new file mode 100644 index 0000000..a5c990e --- /dev/null +++ b/demo/flexconv.ipynb @@ -0,0 +1,573 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "%load_ext autoreload" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# FlexConv demo\n", + "\n", + "In this notebook, we illustrate the construction of a simple FlexNet, proposed in our work [FlexConv: Continuous Kernel Convolutions with Differentiable Kernel Sizes](https://arxiv.org/abs/2110.08059). The code provided here is a simplified version of the main code. For baseline comparisons please refer to the main code.\n", + "\n", + "In particular, we will:\n", + "* Show how to instantiate a FlexConv layer, from the `ckconv` package.\n", + "* Show how to instantiate a FlexNet network, from the `models` package.\n", + "* Show how to regularize a FlexConv/FlexNet against aliasing.\n", + "\n", + "Let's go! First, we import some packages:" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "%autoreload\n", + "\n", + "# Append .. 
to path\n", + "import os,sys\n", + "ckconv_source = os.path.join(os.getcwd(), '..')\n", + "if ckconv_source not in sys.path:\n", + " sys.path.append(ckconv_source)\n", + " \n", + "import numpy as np\n", + "import torch\n", + "from torch.nn.utils import weight_norm\n", + "from omegaconf import OmegaConf\n", + "\n", + "import ckconv.nn as cknn\n", + "from models import Img_ResNet\n", + "\n", + "from matplotlib import pyplot as plt\n", + "from PIL import Image" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## FlexConv\n", + "\n", + "The FlexConv module can be used in place of a regular convolutional module in any architecture. The only required argument that is novel w.r.t. to vanilla convolutions is the `horizon` value, which is the largest kernel size the FlexConv can learn. The horizon should be an odd integer, so that the learned kernel is centered on a pixel, instead of on a subpixel. We recommend it to be the image size.\n", + "\n", + "The init signature of FlexConv is quite long, as it takes many arguments. Almost all arguments however are provided a default value, which the user does not need to think about.\n", + "\n", + "```python\n", + "\"\"\"\n", + "Flexible Size Continuous Kernel Convolution.\n", + "\n", + ":param in_channels: Number of channels in the input signal\n", + ":param out_channels: Number of channels produced by the convolution\n", + ":param horizon: Maximum kernel size. Recommended to be odd and cover the entire conv_image.\n", + ":param kernel_type: Identifier for the type of kernel generator to use.\n", + ":param kernel_dim_linear: Dimensionality of the input signal, e.g. 2 for images.\n", + ":param kernel_no_hidden: Amount of hidden channels to use.\n", + ":param kernel_activ_function: Activation function for type=MLP.\n", + ":param kernel_norm: Normalization function for type=MLP.\n", + ":param kernel_weight_norm: Weight normalization, for type=[MLP, SIREN, nSIREN].\n", + ":param kernel_no_layers: Amount of layers to use in kernel generator.\n", + ":param kernel_omega_0: Initial value for omega_0, for type=SIREN.\n", + ":param kernel_learn_omega_0: Whether to learn omega_0, for type=SIREN.\n", + ":param kernel_steerable: Whether to learn steerable kernels, for type=MAGNet.\n", + ":param kernel_init_spatial_value: Initial mu for gabor filters, for type=[GaborNet, MAGNet].\n", + ":param kernel_bias_init: Bias init strategy, for all types but type=MLP.\n", + ":param kernel_input_scale: Scaling factor for linear functions, for type=[GaborNet, MAGNet].\n", + ":param kernel_sampling_rate_norm: Kernel scaling factor for sampling rate normalization.\n", + ":param conv_use_fft: Whether to use FFT implementation of convolution.\n", + ":param conv_bias: Whether to use bias in kernel generator. TODO(rjbruin): move to kernel_config.\n", + ":param conv_padding: Padding strategy for convolution.\n", + ":param conv_stride: Stride applied in convolution.\n", + ":param mask_use: Whether to apply Gaussian mask.\n", + ":param mask_type: Type of mask. Recommended to use \"gaussian\".\n", + ":param mask_init_value: Initial value for the size of the kernel.\n", + ":param mask_temperature: Temperature of the sigmoid function, for type=sigmoid.\n", + ":param mask_dynamic_cropping: Whether to crop away pixels below the threshold.\n", + ":param mask_threshold: Threshold for cropping pixels. 
Recommended to be 15.0.\n", + "\"\"\"\n", + "```" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "# Override default config values\n", + "in_channels = 3\n", + "out_channels = 3\n", + "horizon = 33\n", + "\n", + "flexconv_example = cknn.FlexConv(\n", + " in_channels,\n", + " out_channels,\n", + " horizon,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's convolve a random CIFAR-10 image with our randomly initialized FlexConv:" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAA2cAAADlCAYAAADX248rAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/d3fzzAAAACXBIWXMAAAsTAAALEwEAmpwYAABZ1klEQVR4nO29e5Rl113f+f2dc1916139bkmtltqSLFsGG+RXbMC8gnEINiQkwCQ4GSbOmgkrsEKYeJHJa2Yy8TCEZGWFYcVgYhMYAuFlk/iBbPzA4NiW5YcsyXq11FK/H/Wu+z5nzx9VTtr+fX/dVdVV1bdK389aWt361e5999l7//Y5+967P2UpJQghhBBCCCGEuLlkN7sBQgghhBBCCCG0ORNCCCGEEEKIoUCbMyGEEEIIIYQYArQ5E0IIIYQQQoghQJszIYQQQgghhBgCtDkTQgghhBBCiCFAmzMhxAsCM/unZvbrN7sdX4+ZfczM/qeb3Q4htgLlmRDbj/Jsb6PN2RBjZs+a2Xdt82sMZYILsRnM7EfN7EEzWzazc2b2ATN7/c1u12Yxs/vM7ENmdtnM9EspxVCwB/PsrWb2OTNbNLPTZvZzZla52e0SL2z2YJ79sJk9bmYLZnbRzN5jZhM3u13DiDZnQog9gZn9PQD/GsD/BeAQgGMA/l8Ab76JzbpR+gB+G8CP3+yGCAHs2TxrAvgpAPsBvBrAdwL4+zezQeKFzR7Nsz8F8LqU0iSAOwFUAPyfN7dJw4k2Z7sAM/sbZvZJM/t5M5szs2fM7Huv+vnHzOxfmNln1t6ReK+Zzaz97A1mdvrr6nvWzL7LzN4I4GcB/NW1d2a+uLNXJsTWYGaTAP53AH8npfR7KaWVlFI/pfSHKaWfuapozcx+zcyWzOwRM7v/qjrebmZPr/3sUTP7gat+tp4c/D/M7E/X/v0fmdn+q37+GjP7MzObN7Mvmtkb1nNdKaXHU0rvAvDI5ntHiK1hD+fZL6WU/iSl1EspnQHwGwBet+mOEuIG2MN59nxK6fJVoQLAizbcQS8AtDnbPbwawONYfWfv5wC8y8zsqp//GID/EcBRAAMA/+Z6FaaUPojVd2V+K6U0llL6xi1vtRA7w2sBNAD8/nXKfT+A/whgCsD7APzbq372NIBvATAJ4J8B+HUzO3LVz6+Xgz8K4G8COAighrV33s3sFgD/BavvEM6sxX/XzA5s9CKFuMm8UPLsW6E3RMTNY8/mmZm93swWACwB+EtY/XRQfB3anO0eTqWUfjmlVAB4D4AjWP2o+6v8h5TSl1NKKwD+EYC/Ymb5zWioEDeBfQAup5QG1yn3yZTS+9fy6D8A+G9vSKSU/lNK6WxKqUwp/RaAJwG86qp/e70c/PcppSdSSm2sfhXx5Wvxvwbg/WuvW6aUHgDwIIA3bf5yhbgp7Pk8M7O/CeB+AD+/kX8nxBayZ/MspfTJta813grg/wHw7Hr+3QsNbc52D+e/+peUUmvtr2NX/fz5q/5+CkAVq++ICPFC4AqA/Xb9Q/znr/p7C0Djq//GzH7MzL6w9lWNeQD34Wtz6Ho5+PV1f/VntwP4oa/Wu1b367F6MxRiN7Gn88zM3gLgHQC+9+u+fiXETrKn82ztNc8A+CBWP/kTX4dsRHuH2676+zGsigQuA1jB6mFnAMDap2lXf/wsA5zYC3wKQAfAWwD8zkb/sZndDuCXsSoC+FRKqTCzLwCwa/7D9fE8Vj/Z/ltbUJcQN5M9m2e2egb7lwH8hZTSw1vQHiE2y57Ns6+jAuDEFtSz59AnZ3uHv2ZmLzGzJlYPkv7O2sfVT2D13ZS/YGZVAP8bgPpV/+4CgONmprkgdi0ppQUA/xjAL5rZW8ysaWZVM/teM/u5dVQxitU3Ki4B/+2rTfdtUfN+HcBfNLPvMbPczBq2Kuq59Xr/0FZpYPU7/1j7t/Xr/DMhtoU9nGffgVUJyF9KKX1mi9ojxKbYw3n2P5jZsbX72u0A/jmAj2xRu/YUeiDfO/wHAO/G6kfRDQB/F/hvSf6/APgVAGew+kna1fbG/7T25xUze2inGivEVpNS+gUAfw+rb0Bcwuo7fD8B4A/W8W8fBfAvsfqO5QUAL8Oq9ncr2vU8VvXHP3tVu34G61t/bwfQxn+XE7SxeohbiJvCHs2zf4RVccL7bdVcvGxmH9iKdgmxGfZonr0EwJ8BWF5rz+MA9I0SgqWkb7XtdszsYwB+PaX0Kze7LUIIIYQQQojNoU/OhBBCCCGEEGII0OZMCCGEEEIIIYYAfa1RCCGEEEIIIYYAfXImhBBCCCGEEEPADW3OzOyNZva4mT1lZm/fqkYJIb4W5ZoQ24/yTIjtR3kmxLXZ9Nca136Z8RMAvhuravbPAviRNYUnJa9WU7VBfkVPKlxoauYgraNeb9D4yuK8i2VFn5bNypLGrfBxK4L+CfotlVF5X3ct2BtbtUbj7Yr//YEp43XMz8/TeEnaVxpvcxZcylhzzMWm983QsmfOnKbxgrxmo1qlZY0PF4rg2vt9P+4l6X8A4a/g3shvaixTeTmldOD6JTfHRnOtljXSSObHKLH5XeW/h74c4XOwJHMwIkVv/ZB4iqrd6K/MZOO5Fb92MyDr8XjeI33djyayX/8AAJXchfrjfLwsqCLvknYEa1TKgo6K+o9cjgVra7Re8tfjL7jYOT9UeQYAtXwkjVQnXbys+XEqqxubiGzds6gfo3WMxTdYRwiZL1HOp2BM2fVkJHcAAIMBj5N1LZpDqc7vManqGx7lqw2C9m0FUf6QePzsduPju4S5bcu1zeTZvpksHbvN51SfXFMr8TFeGIzQeCILXC3jC2rGFr0AVu+14hYMULaBgYvqYC85KP39BQDqGX9uHs26LpYHrxffMvxPLgzGadlK8O
[… remainder of the base64-encoded PNG cell output omitted; the rendered figure shows the input image and the three FlexConv output channels ('Input', 'Channel 1'–'Channel 3') …]
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "fig, axs = plt.subplots(1, 4, figsize=(15, 5))\n", + "\n", + "x_pyplot = np.asarray(Image.open('truck5.png')) / 255.\n", + "axs[0].imshow(x_pyplot)\n", + "axs[0].set_title('Input')\n", + "\n", + "x_net = torch.tensor(x_pyplot, dtype=torch.float32).permute(2, 0, 1)\n", + "x_net = x_net.reshape(1, 3, x_pyplot.shape[0], x_pyplot.shape[1])\n", + "y_net = flexconv_example(x_net)[0]\n", + "y_pyplot = y_net.detach().numpy()\n", + "axs[1].imshow(y_pyplot[0])\n", + "axs[1].set_title('Channel 1')\n", + "axs[2].imshow(y_pyplot[1])\n", + "axs[2].set_title('Channel 2')\n", + "axs[3].imshow(y_pyplot[2])\n", + "axs[3].set_title('Channel 3')\n", + "pass" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## FlexNet\n", + "\n", + "We will show how to construct FlexNets, which are residual networks that use FlexConvs. These classes take `OmegaConf` objects as arguments, which integrates with our experimentation framework. We realize this makes them harder to use, though we anticipate that most users will want to use FlexConv modules in their own network definitions rather than using our network definitions. For the former purpose,the FlexConv and CKConv modules are written in a more accessible, reusable API.\n", + "\n", + "We load the default configuration that we use for our experiments, and overwrite it with the settings we used in our CIFAR-10 FlexNet-5 experiments." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Block 0/5\n", + "Block 1/5\n", + "Block 2/5\n", + "Block 3/5\n", + "Block 4/5\n" + ] + } + ], + "source": [ + "config = OmegaConf.load('../cfg/config.yaml')\n", + "\n", + "config.net.type = \"ResNet\"\n", + "config.net.no_hidden = 22\n", + "config.net.no_blocks = 5\n", + "config.net.dropout = 0.2\n", + "config.net.dropout_in = 0.0\n", + "config.net.norm = \"BatchNorm\"\n", + "config.net.nonlinearity = \"ReLU\"\n", + "config.net.block_width_factors = [1.0, 1, 1.5, 2, 2.0, 2]\n", + "config.kernel.input_scale = 25.6\n", + "config.kernel.type = \"MAGNet\"\n", + "config.kernel.no_hidden = 32\n", + "config.kernel.no_layers = 3\n", + "config.kernel.dim_linear = 2\n", + "config.kernel.init_spatial_value = 1.0\n", + "config.kernel.bias_init = None\n", + "config.kernel.input_scale = 25.6\n", + "config.conv.type = \"FlexConv\"\n", + "config.conv.bias = True\n", + "config.conv.horizon = 33\n", + "config.mask.use = True\n", + "config.mask.type = \"gaussian\"\n", + "config.mask.temperature = 15.0\n", + "config.mask.init_value = 0.075\n", + "config.mask.dynamic_cropping = True\n", + "config.mask.threshold = 0.1\n", + "\n", + "network = Img_ResNet(\n", + " 3,\n", + " 10,\n", + " net_config=config.net,\n", + " kernel_config=config.kernel,\n", + " conv_config=config.conv,\n", + " mask_config=config.mask,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Voila! We have constructed a FlexNet. 
We let now a batch of CIFAR-10 images go through the network:" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Batch shape: torch.Size([10, 3, 32, 32])\n", + "Output shape: torch.Size([10, 10])\n" + ] + } + ], + "source": [ + "batch_size = 10\n", + "in_channels = 3\n", + "image_np = np.asarray(Image.open('truck5.png')) / 255.\n", + "batch = torch.tensor(image_np, dtype=torch.float32).unsqueeze(0).repeat(batch_size, 1, 1, 1).permute(0, 3, 1, 2)\n", + "print(f\"Batch shape: \", batch.shape)\n", + "\n", + "out = network(batch)\n", + "print(f\"Output shape: \", out.shape)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now we look at some of the kernels at initialization of the network:" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAABRMAAAEQCAYAAAAj5OOUAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/d3fzzAAAACXBIWXMAAAsTAAALEwEAmpwYAABAqklEQVR4nO3deZxkdXnv8e9Tvc7KrMAwMywioqLiMoIRFa64gEjQ3LhgVDAmqIkJ5pooRpNoolFzr4o3ahSXOxhRNEoEFUVFXEFkREBhZBWYYYZt9q3Xeu4f5wzW9PRMn6pz6lSdpz/v16tf00vVqafqU9VV8+tTVebuAgAAAAAAAICp1Do9AAAAAAAAAIBqYDERAAAAAAAAQCYsJgIAAAAAAADIhMVEAAAAAAAAAJmwmAgAAAAAAAAgExYTAQAAAAAAAGQSejHRzFaa2XvbtO2TzGxtO7aNfaNpPDSNh6bx0DQemsZCz3hoGg9Nq4+G8dC0dW1ZTDSzu83seQ1fv9LMNpnZie04vXaYeB66mZn9i5n92szGzOzdbToNmpbEzA40sy+Z2Toz22JmPzOz49twOjQtkZldZWYPmdlWM7vRzM5ow2nQtAPM7EQz83Y8EKFpudJZd5nZ9vTju206DZqWyMzONbPfmdkOM1ttZo8pcNv0LImZHdpw29z94Wb21oJPh6YlMrMnm9lP0se8a83sH9twGjQtkZk908x+YWbbzOwmM3tWAdukYYmmWl8ws1eZ2T3p/erXzWxBC6dB0xLtr6mZLTGzy9L1Bzezw7Nss+17JprZWZI+Luk0d/9Rk8ftbc9U1bSfy+MOSW+T9K2S5qBpQfZxecyWdJ2kp0laIOlCSd8ys9ltnIOmBdnP5XGupCXuPlfSOZK+YGZL2jgHTQuyv8vDzPokfVTStSXMQdOCTHF5nO7us9OPF7R5DpoWZF+Xh5n9maTXSzpNyf3riyU93KYZ6FmQyS4Pd7+34bY5W9ITJdUlfa2Nc9C0IPu5PL4o6cdKHvOeKOlNZvaHbZyDpgWZ7PJIF5Uuk/S/Jc2T9G+SvmFm8ws8XRoWpJX1BTM7RtKnJL1G0kGSdkr6RM45aFqQFteM6pK+I+l/NnNabV1MNLNzJH1I0gvd/er0eweY2WfNbL2Z3Wdm7zWznvRnZ1uyF9ZHzGyjpHdbstvpx83sW+lfN641syMbTuOxZvY9M9toZrea2cszznakmf3AzDaY2cNmdpGZzUt/9p+SDlXyi2+7mb0tw/bOM7M70xlvMbOXpt8fSGd7YsNhD7Rkz4fF6dcvNrMbzGyzmV1tZk9qOOzdZvZ2M7tJ0o59PLi60N2/LWlblvOeB03b39Td73L3D7v7encfd/cLJPVLOjrL5dAsmpZ2O73J3cd2fympT9LyLJdDs2haTtPUWyV9V9Jvs5z/VtG01KaloGn7m5pZTdI/Sfobd7/FE3e6+8Ysl0Mz6NmR2+hrJf3Y3e/Ocjk0i6alNT1c0kXpY947Jf1U0jFZLodm0bSUps+U9IC7/1fa9AuSHpL0R1kuhwzni4adX1/4E0nfcPcfu/t2Sf8g6Y/MbE6Wy2mS80nTDjd19wfc/RNKdmjKzt0L/5B0t5K/Ej4g6dgJP/u6kpXsWZIOlPQLSW9If3a2pDFJfyWpV9IMSSslbZR0XPq9iyRdnB5+lqQ1kl6X/uypSv7afEz685WS3ruPGR8t6fmSBiQtVvIXsfMnnIfn7ec8niRpbcPXL5N0iJIF2ldI2qFkLyQpWan/YMNhz1VyA1Q684OSjpfUI+ms9LQHGua4Qcmiw4wpLvcvSHo3TeM0TQ//ZElDkg6gabWbSvpm2tKV/PWnRtPqNpV0mKTblOzttM/zTNNKNb07vawfUrJIfOy+5qZp9zdV8iDf022ukfQ7Se9Rgb976dnRx0d3Sjqb22i1m0r6V0kfUPJH1qMlrZX0dJpWs6mk0yXdMuF7t0v6CA2r0XDCTHutL0i6VNLbJ3xvu6Sn0bSaTRt+1qvkcdPhmRrmuVFPccXYml7Rag3fP0jScOMZlHSmpKsarhj3TtjWSkmfafj6RZJ+m37+Ckk/mXD4T0n6p6muGJPM/BJJv2r1ijHJz2+QdEb6+fHpFbiWfr1K0svTz/9D0r9MOO6tkk5smONPM56Hdi8m0rT8pnMl/VrSO2gapmmfpFOV7ClD0wo3TS/nVzR7nmna1U1PUPKgdKakd0i6X9I8mlazqZI9ZFzJU3rmKdkD6jZJf07P6vWccLxnK/nP6+yiWtK0M02V3E7vULI44JLeQ9PqNpW0UNLm9HLsU7LoUZf0KRpWo+GE4022mHilpDdO+N59kk6iaTWbNvysqcXEdj7N+Y2SHiPpM2Zm6fcOU/JLZX26e+ZmJSEPbDjemkm2dX/D5zuV7AWye3vH795Wur0/kXTwVMOlu41enO42u1XJhboo87nbe3uvbdjtdLOkJ+zenrtfq2TV+UQze6ySFe7LGs7DWyech+VKVqx3m+wy6QSaltjUzGZI+oakn7v7+1s9H1Ogacm3U3cf9WQX8xdae14TiKYlNDWz0yXNcfcvtzp7E2ha0u3U3X/m7rvcfWf6e3ezkkWLotG0nK
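As a recap of the constructor refactor in `ckconv/nn/ckconv.py`: `CKConv` and `FlexConv` no longer take `kernel_config`/`conv_config`/`mask_config` OmegaConf objects; every setting is now a flat keyword argument with a default. A minimal sketch of overriding a few of those defaults directly — the keyword names come from the new signature in the diff, but the chosen values are purely illustrative, not recommended settings:

```python
import torch
import ckconv.nn as cknn

# Only in_channels, out_channels and horizon are required; everything else
# falls back to the defaults of the new flattened signature.
flexconv = cknn.FlexConv(
    in_channels=3,
    out_channels=16,
    horizon=33,              # largest learnable kernel size; odd, roughly the image size
    kernel_no_hidden=64,     # hidden channels of the (default MAGNet) kernel generator
    conv_use_fft=True,       # use the FFT-based convolution implementation
    mask_init_value=0.1,     # initial size of the Gaussian mask
)

x = torch.randn(2, 3, 32, 32)
y = flexconv(x)              # with conv_padding="same" (the default), y is (2, 16, 32, 32)
print(y.shape)
```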
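The third item in the notebook's overview, regularizing a FlexConv/FlexNet against aliasing, maps onto the updated `regularize_gabornet` in `antialiasing.py`: the model, the `horizon`, and the regularization `factor` are now positional, with `target`, `fn`, and `method` as keywords. Below is a training-step sketch under the assumption that the function returns a scalar tensor that can simply be added to the task loss; the network, optimizer, data names, and `reg_factor` value are placeholders.

```python
import torch.nn.functional as F

from antialiasing import regularize_gabornet


def training_step(network, images, labels, optimizer, horizon=33, reg_factor=0.1):
    """One optimization step with the anti-aliasing penalty added to the loss.

    Assumes regularize_gabornet returns a scalar tensor; target/fn/method are
    the defaults of the new signature.
    """
    optimizer.zero_grad()
    logits = network(images)
    task_loss = F.cross_entropy(logits, labels)

    # Penalize learned kernel frequencies above the Nyquist frequency implied
    # by the horizon (the regularizer derives it from nyquist_frequency(horizon)).
    aliasing_loss = regularize_gabornet(
        network,      # the FlexNet to regularize
        horizon,      # kernel horizon, replacing the old kernel_size argument
        reg_factor,   # scaling factor applied inside regularize_gabornet
        target="gabor",
        fn="l2_relu",
        method="together",
    )

    loss = task_loss + aliasing_loss
    loss.backward()
    optimizer.step()
    return loss.item()
```

Whether the penalty is added to the loss or only logged is up to the training loop; the call itself depends only on the new positional order `(model, horizon, factor)`.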
a70n//zd03e/J02E8p+U9GkehZ/mPesyR9zZOn2LUDTcu5L12g5Jka/yxpMD3uC83sL1o9L/tB0xKauvsGSWdI+l9K9jg7RdL3lexxmhcNu2N9YbuSHV4azVVrL7lG0+5o2pJ2vl7Qg5JOlvQjJbtsvknJGRyWtMh//9phE3kTp7FG0o/c/fktzPf+9LSe5O4bzOwlkj7WyhxmdpikTys5v9e4+7iZ3SDJGg52oaRXK7mSf9XdhxrOw/vc/X37OYlmLpN2omlJTc1sQMnu3fdJekPWuVtA087dTnslHTnloZpH03KanixphZntfuBygKRxM3uiu5+R9TxkRNPO3U59wmkXhablNL1V0kgz87aIniXeRi35Y+vLJL0069wtoGk5TR8ladzdP59+vdbMLlay4J/rDR0mQdOSbqeevIHG09NZepW8JMGHss6/HzTsjvWFmyUd2zDro5Q8Dfi2FrZF0+5o2pK2vgGLu6+T9FxJp5jZR9x9vZLXIPqQmc01s5olL2p5Yosn8U1JjzGz15hZX/rxdDN7XIbjzlGyqr7ZzJZK+rsJP39AyR1cFrOUxHtIkszsdUpWmRv9p5IHPa+W9PmG739a0hvN7HhLzDKz06yJFzBNz/egkp69ZjZo6QuUFo2me2hLU0veHfarSvaoeK271zPO3BKa7qFdTR9rZqea2Yz0/L9a0nOU3HEWjqZ7aNfv3n9Q8pfUJ6cfl6XbfF3G4zeFpnto1+30UDM7wcz60/vRv1Py1+KfZZy9KTTdQ1uauvtOSV+W9DYzm2NmyyT9uZLLplD03EPbHvOmXqpkr+GrmjxeU2i6h3Y1vS05SXtVenkerOQpiTdmPH5TaLqHdv7f9CnpeZ8r6f8oeZrnFVmPvz803EOn1hcuknS6mT3bzGYp2bP4EndvZc9Emu6pY2tG6c8G0i8H0q/3q62LiZLk7muUXDn+2Mzer+Sd1/ol3SJpk5JFkyUtbnubpBdIeqWkdUpWcD+o318I+/MeJS9kuUXJa+lcMuHn75f0Lkt2I/3bKea4RclfW65RcoV6oib858Pd10q6XskV6CcN31+l5IHtx5RcHncoeR2AZnxaycLTmZLemX7+mia3kRlNHzlMu5o+U9KLlVwOmy15d6jtZtaOp9pJomnDYdrV1CS9W8lf3x5S8oK6r3D365vYRlNo+shh2tLU3be5+/27P5T83t3hbXiX2IbTpKnaejudo+Q1aTYp2Sv8FEmnevKUrbag6SOHaedjpDcr+Y/AunSGL0r6XJPbyISejxymnT2l5CnOn3f3tu+FQdNHDtOu+9KtSt7l92/S498g6TeS9rf3TS40feQw7bydvk3JG1ysUXJZFroXMQ0fOUxH1hfc/WYlT0++SMn/beZIyvXSBDR95DCdXDPapeTxkiT9Vr9/qZh9shLuh5Eys89JWufu7+r0LCgGTeOhaTw0jYem8dA0FnrGQ9N4aFp9NIynSk3b+ZqJaGBmhyv5y9tTOjwKCkLTeGgaD03joWk8NI2FnvHQNB6aVh8N46la07Y/zRmSmf2Lkl32/7e7/67T8yA/msZD03hoGg9N46FpLPSMh6bx0LT6aBhPFZvyNGcAAAAAAAAAmbBnIgAAAAAAAIBMWEwEAAAAAAAAkEmpb8AyZ0GvL16a5R24923Uixl508iM3NuY0zdcwCTSjNpIruM/dN+wtm0cs0KGadLCBTU/dHm+Jg+N528hSVtHB3NvY1H/9qkPlMEBtdFcx793zZg2bKx3pGm/DfigZuXbyKximtb78l8EPcP1AiaRfNdQruMPaYdGfLj0pkX0tP6+YoapFfD3KyvmIvSh/L+/t/nGh919cQHjNKWQpn0FNS0ix9h4ARuRvJ7/tr5NmzrSdMGCmi/PeV+6oaD70nHPfzud27OrgEmkOTmvX3evGdXDG8c7cl+6aEGPH7483+3s4fFiHvNuG8//+GhWT77Hqrst6sn3u/feNWMda9rXP8sHB+fn2ka9vyOjt1XPSL6XzBoa2qSRkR2VfcxrvT2FzOID/bm3YUXdn47k+3/MkO/QiA+V3nTegh4/ZFm+Hg+Ozi1kluF6/t/fhw5sLGASqYD/TunGm0Y78vioiPvSO4eLabpzJP9j5wNnbitgEmlxzvtSSfrVTSO5m5a6mLh46YDee8kxubaxbjTfnfhul6x9cu5tPOegO/IPIulJM9fkOv67/ujmQuZoxaHLe/WDbx+YaxsXbH5CIbNc+eBjc2/jT5f9tIBJpBfOvD/X8Z976oOFzNGKQc3S8XZyvo086UmFzLLr4AL+A3TX1gImkeo3/TbX8a/1KwuZo1lF9Ow9eFkhs/icmfm3UcSCpCS/Pf/rGn9v6KJ7ChilacU0XVrMMD35e9QfLubBcn3Hjtzb+L5/tSNNly/v1RWXL8q1jQu3FPN7d9Nozj8mSTrlgJsKmER6Ts67gONemO/xVR6HL+/TL65YnmsbK7fme3y12w825X98dPwBxbwW/J8dcFeu459wyvpC5mjF4OB8rTj+zbm2sX1p/gUjSVIRL3lf0FLPnHvz/ad21aqPFzNIC4q4P+2Zt6CQWcaPyv9Yq+fhYhYq6vfcl+v4Px/9TiFzNOuQZT36wjcOzrWNf38g5/+BUndvz3+9+NijLy5gEungAta7Fy1b15HHR0Xcl/7xnc8rZJYb1uS/jb752B/mH0TSOQfclnsbc5bem7spT3MGAAAAAAAAkAmLiQAAAAAAAAAyYTERAAAAAAAAQCa5FhPN7BQzu9XM7jCz84oaCp1D03hoGg9N46FpLPSMh6bx0DQemsZD03hoGkfLi4lm1iPp45JOlfR4SWea2eOLGgzlo2k8NI2HpvHQNBZ6xkPTeGgaD03joWk8NI0lz56Jx0m6w93vcvcRSRdLOqOYsdAhNI2HpvHQNB6axkLPeGgaD03joWk8NI2HpoHkWUxcKmlNw9dr0++humgaD03joWk8NI2FnvHQNB6axkPTeGgaD00DybOYaJN8z/c6kNk5ZrbKzFZt2ziW4+RQgqabPryhXsJYyKHppqMaLmEs5DBlU3pWDk1jafr37gbuS7td000f2jBewljIofnHR6M7ShgLOfCYN56mHh9t2sh9aQU01ZT70u6WZzFxraTlDV8vk7Ru4oHc/QJ3X+HuK+Ys6M1xcihB000XLeQNwbtc0037NFDacGjJlE3pWTk0jaXp37sLuS/tdk03Xbywp7Th0JLmHx/1zSptOLSEx7zxNPX4aP4C7ksroKmm3Jd2tzy3uOskHWVmR5hZv6RXSrqsmLHQITSNh6bx0DQemsZCz3hoGg9N46FpPDSNh6aBtLyroLuPmdmbJV0hqUfS59z95sImQ+loGg9N46FpPDSNhZ7x0DQemsZD03hoGg9NY8n1vGN3v1zS5QXNgi5A03hoGg9N46FpLPSMh6bx0DQemsZD03hoGgcvLAAAAAAAAAAgExYTAQAAAAAAAGRS6tsrD9X7dOvQklzbuOb1Ty1klo0vnJt7G//6l5cUMIn01+uenuv428fvLGSOVuz0mm4cmZFrG5/4+f8oZJaDf
5D/6vz2Z72igEkkPe/LuY6+pb6pmDlaYP196j14Wa5t3PVWL2SWq59xfu5tPPU75+YfRNLjPnhEruPbvf2FzNH06RbQ84EXLJ/6QBlsekL+68WsNcX8DeyQvgLeHe76/JtohQ0MqOfwI3NtY+OKxYXM0juUv+nojKUFTCIt/NGa/BspYBOt2F7v00925Xt89IXPvLCQWfq35G/6lcefUMAk0gfPuCjX8TfVO/cukBvGe/X5rYtybeNfrjutkFkO+8/8l8PP/8cxBUwiDb7kv3Mdf+P4hkLmaIX3mXYt7su1jdpoMY+P5n7p57m3UXvSYwuYRBqflfMdkevFXCatsIF+9S47PNc27jzrkEJmGT5kNPc2Ztw7u4BJpMMuzdfUbuvMu2RvGp+lr2zO9//qmz71xEJm2Xmw5d7Gi+756wImkT747P8qYCt7vTF6KUZ9XOvHtufaxs3ff0whs9jRO3Nvo8/GC5hE+uTmIn5/35t7C+yZCAAAAAAAACATFhMBAAAAAAAAZMJiIgAAAAAAAIBMWEwEAAAAAAAAkAmLiQAAAAAAAAAyYTERAAAAAAAAQCYsJgIAAAAAAADIhMVEAAAAAAAAAJmwmAgAAAAAAAAgExYTAQAAAAAAAGTCYiIAAAAAAACATFhMBAAAAAAAAJAJi4kAAAAAAAAAMmExEQAAAAAAAEAmLCYCAAAAAAAAyITFRAAAAAAAAACZsJgIAAAAAAAAIJPeMk9sbs8uPW/Ob3Jt46qDTihkluXvuzr3Nk77wukFTCL99n2Lch1/y9CPCpmjFbvq/fr10PJc25j3q/5CZpn7xfxNtxz5zAImkdaOLMx1/FEv9aa5B+/t1fjB83NtY2y0p5BZ5vfMzL2NFxyb73fObrce9YRcx68/0Jmm9YE+DR11UK5tnPW/Li9klr+af0/ubTzx2lcVMIk0ft1AIdvphPEZPdr++Hy/Y05+288KmeW9B/469zZeffdJ+QeRdPfw0fk3sib/JlqxbXxQV215XK5tLPv62kJmGV98QO5tDGzL/7tbkr574jG5jr91/O5C5mjFlvEZunzDk3JtY84vZhQyS9938z8+WjJwXAGTSA+clu/6NerFPL5oiUu1Mc+1iee885pCRpn/zh25t/GJn84tYBLpcR96ONfxbWy8kDlaMTarTxufcXCubXz77H8rZJYj+mbn3saTPvQXBUwi1Qf7ch3fa1bIHM0a85o2jM7KtY1Fv9pSyCz1G27JvY0tr35GAZNIO0+o7mPePuvRkt58t41Z9+X7vb3bwvfckHsb/+cTp+YfRNLfnvTtQraTF3smAgAAAAAAAMiExUQAAAAAAAAAmbCYCAAAAAAAACATFhMBAAAAAAAAZNLyYqKZLTezq8xstZndbGbnFjkYykfTeGgaD03joWk8NI2FnvHQNB6axkPTeGgaS563Fx2T9FZ3v97M5kj6pZl9z93zv3UROoWm8dA0HprGQ9N4aBoLPeOhaTw0jYem8dA0kJb3THT39e5+ffr5NkmrJS0tajCUj6bx0DQemsZD03hoGgs946FpPDSNh6bx0DSWQl4z0cwOl/QUSdcWsT10Hk3joWk8NI2HpvHQNBZ6xkPTeGgaD03joWn15V5MNLPZkr4m6S3uvnWSn59jZqvMbNXmDfW8J4cSNNN0+6aR8gdE05ppOjq2o/wB0bT9Nd2j5yg9qyJr07FhmlZF1qa7Ng11ZkA0pZn70mGaVkJTj4+Gt5c/IJrWTNOxIe5PqyDrfenQpuHODIimZW360IbxzgyITHItJppZn5IrwUXufslkh3H3C9x9hbuvmLeQN4/uds02nT2/v9wB0bRmm/b1zip3QDRtqqZ79OyjZxU007R3gKZV0EzTGfMHyx8QTWn2vnSApl2v6cdHA7PLHRBNa7Zp7yD3p92umfvSwfkD5Q+IpjXTdPHCnvIHRGZ53s3ZJH1W0mp3/3BxI6FTaBoPTeOhaTw0jYemsdAzHprGQ9N4aBoPTWPJs6vgCZJeI+m5ZnZD+vGiguZCZ9A0HprGQ9N4aBoPTWOhZzw0jYem8dA0HpoG0tvqEd39p5KswFnQYTSNh6bx0DQemsZD01joGQ9N46FpPDSNh6ax8CKGAAAAAAAAADJhMREAAAAAAABAJiwmAgAAAAAAAMik5ddMbO3EXIt7duXaxpf+4yOFzPLMK8/NvY0Dr+ovYBLp7556Wa7jf3Dm1kLmaEWP6ppTy9d0x7O3FzLL/f3PzL2Nx7/gtgImkZ43+5Zcx/9czss0F5PqvTn/znDvjEJGeeHqF+fext3XLC9gEunIezbkOn5tZLyQOZo+3dFxDTyQ7zb28a+eVsgsHzliKPc2bEMxv3f777u/kO10gtWl2rDn2saXfrOikFn+8ITrc2/jjs2LCphEGtxVL2Q7nTBQG9OjZjyUaxtXvPGphczyqPOuyb2Nrc/Jf38sScsHN+U6fr915vfu7tNePiPf/L942nAxw/xV/h47luf7nbPbYQMP5zp+f22skDla4SaNDeR7qa8vrTq+kFl+d9qnc2/jSze+oIBJpKHD5uc6fn19qf8d3UNtzDW4Md/viRevekMhs/zzE7+Rexu1kQIGkdRz17pcx7fhggZp0oKeHTpz4bW5tvGmM55SyCyDz8r/e3fL0cXchz118N5CttMJm+s1fX3H7Fzb2HhsMY8PF8+Zk3sbcw/ZVsAk0vqReYVsJy/2TAQAAAAAAACQCYuJAAAAAAAAADJhMREAAAAAAABAJiwmAgAAAAAAAMiExUQAAAAAAAAAmbCYCAAAAAAAACATFhMBAAAAAAAAZMJiIgAAAAAAAIBMWEwEAAAAAAAAkAmLiQAAAAAAAAAyYTERAAAAAAAAQCYsJgIAAAAAAADIhMVEAAAAAAAAAJmwmAgAAAAAAAAgExYTAQAAAAAAAGTCYiIAAAAAAACATFhMBAAAAAAAAJBJb5knNqqa7h+flWsbt9cHC5mlp7+eexvzz763gEmk8Zxrui4rZI5WzOsZ0Rmz1+TaxsIVXylkls1PyXfdkqRnDt5TwCTSEX2zcx1/pnVwnX/HLtm1v8m1iSNHHlvIKFtuWpZ7G4dsHCtgEsnvXptvAyOjhczRLB8a1vjNt+baxhH/VUxP7+vJv5HacP5tSBr7XTG39U6o7RzWzF/cmWsbS/sfXcgsb/j5ubm3MWdtMbfRWdcXc5/cCQt7dul1B+T7vXvDifl/X0rS7d8+Mvc2njb/1wVMIp09b1Wu43+1Z0chc7RiSd92vevAq3Nt49kn5PvdvdutK5bk3sbRA+sLmEQ6dea2XMc/v9aZ+1JJ6hka17xb8s0/+76BQmZ5xo/emHsbA+NewCRS39aRXMe3ejFztKK2c0Qzfnl3rm0srh1eyCwfq78i9zaW3lzM/eDYwxtyHd99vJA5mjVo4zq6b2uubbzjzGL+X1qE02cV03N+z8xCttMJc2vjesGMjbm2cdxTby9kljX/tTj3Nl61NN/jmt1eP++G3Nt4f/4x2DMRAAAAAAAAQDYsJgIAAAAAAADIhMVEAAAAAAAAAJmwmAgAAAAA
AAAgk9yLiWbWY2a/MrNvFjEQOo+m8dA0HprGQs94aBoPTeOhaTw0jYemsdAzjiL2TDxX0uoCtoPuQdN4aBoPTWOhZzw0jYem8dA0HprGQ9NY6BlErsVEM1sm6TRJnylmHHQaTeOhaTw0jYWe8dA0HprGQ9N4aBoPTWOhZyx590w8X9LbJNXzj4Iucb5oGs35omk054umkZwvekZzvmgazfmiaTTni6bRnC+aRnO+aBrJ+aJnGC0vJprZiyU96O6/nOJw55jZKjNbtWXDeKsnhxK00vThDfwe6GatNB3VcEnToRVZmtKzOlq5jY7Uh0qaDq1opekG7ku7Go+P4mnp8dHYzpKmQytauz/dVdJ0aEWzj3k3bOT3bjdr7b6U9aNulmfPxBMk/aGZ3S3pYknPNbMvTDyQu1/g7ivcfcUBC3tynBxK0HTTRQt5Q/Au13TTPg2UPSOaM2VTelZK07fR/tpg2TOiOU03Xch9abfj8VE8zT8+6p1Z9oxoTgv3pzPKnhHNaeox78IF/N7tci3cl7J+1M1avsW5+zvcfZm7Hy7plZJ+4O6vLmwylI6m8dA0HprGQs94aBoPTeOhaTw0jYemsdAzHpbvAQAAAAAAAGTSW8RG3P2Hkn5YxLbQHWgaD03joWks9IyHpvHQNB6axkPTeGgaCz1jYM9EAAAAAAAAAJmwmAgAAAAAAAAgExYTAQAAAAAAAGRi7l7eiZk9JOmeKQ62SNLDJYxThG6Z9TB3X9yJE87QtFsuo6y6ZV6aFqObZu1IU37vthVNi9FNs9K0GN0yazffl0rdczll0S2zdnPTbrmMsuqWeWlajG6alfvSYnTTrDQtRjfNmrtpqYuJWZjZKndf0ek5sqjSrJ1StcuoavN2QpUuoyrN2klVupyqNGsnVelyqtKsnVSly6lKs3ZSlS6nKs3aKVW7jKo2bydU6TKq0qydVKXLqUqzdlKVLqcqzZoFT3MGAAAAAAAAkAmLiQAAAAAAAAAy6cbFxAs6PUATqjRrp1TtMqravJ1QpcuoSrN2UpUupyrN2klVupyqNGsnVelyqtKsnVSly6lKs3ZK1S6jqs3bCVW6jKo0aydV6XKq0qydVKXLqUqzTqnrXjMRAAAAAAAAQHfqxj0TAQAAAAAAAHShjiwmmtkpZnarmd1hZudN8nMzs/+b/vwmM3tqh+ZcbmZXmdlqM7vZzM6d5DAnmdkWM7sh/fjHTszaaTSNh6bx0DQemsZSlZ7pLDTNoCpN6ZkdTeOhaTw0jYemXcjdS/2Q1CPpTkmPktQv6UZJj59wmBdJ+rYkk/QMSdeWPWc6xxJJT00/nyPptklmPUnSNzsxX7d80DTeB03jfdA03gdNY31UqSdN4zWlJ02n6wdN433QNN4HTbvzoxN7Jh4n6Q53v8vdRyRdLOmMCYc5Q9LnPfFzSfPMbEnZg7r7ene/Pv18m6TVkpaWPUcF0DQemsZD03hoGktleko0zagyTemZGU3joWk8NI2Hpl2oE4uJSyWtafh6rfa+cLMcplRmdrikp0i6dpIf/4GZ3Whm3zazY8qdrCvQNB6axkPTeGgaSyV7SjTdj0o2ped+0TQemsZD03ho2oV6O3CaNsn3Jr6ldJbDlMbMZkv6mqS3uPvWCT++XtJh7r7dzF4k6euSjip5xE6jaTw0jYem8dA0lsr1lGg6hco1peeUaBoPTeOhaTw07UKd2DNxraTlDV8vk7SuhcOUwsz6lFwJLnL3Syb+3N23uvv29PPLJfWZ2aKSx+w0msZD03hoGg9NY6lUT4mmGVSqKT0zoWk8NI2HpvHQtAt1YjHxOklHmdkRZtYv6ZWSLptwmMskvdYSz5C0xd3Xlz2omZmkz0pa7e4f3sdhDk4PJzM7TslluqG8KbsCTeOhaTw0jYemsVSmp0TTjCrTlJ6Z0TQemsZD03ho2oVKf5qzu4+Z2ZslXaHkXXk+5+43m9kb059/UtLlSt6N5w5JOyW9ruw5UydIeo2kX5vZDen3/l7SodIjs/6xpDeZ2ZikXZJe6e4dfcpR2WgaD03joWk8NI2lYj0lmk6pYk3pmQFN46FpPDSNh6bdySo4MwAAAAAAAIAO6MTTnAEAAAAAAABUEIuJAAAAAAAAADJhMREAAAAAAABAJiwmAgAAAAAAAMiExUQAAAAAAAAAmbCYCAAAAAAAACATFhMBAAAAAAAAZMJiIgAAAAAAAIBMWEwEAAAAAAAAkAmLiQAAAAAAAAAyYTERAAAAAAAAQCYsJgIAAAAAAADIhMVEAAAAAAAAAJmwmAgAAAAAAAAgExYTAQAAAAAAAGTCYiIAAAAAAACATFhMBAAAAAAAAJAJi4kAAAAAAAAAMmExEQAAAAAAAEAmLCYCAAAAAAAAyITFRAAAAAAAAACZdNViopn9iZl9t9NzNDKzs83sp52eo6poGg9NY6FnPDSNh6bx0DQemsZD0+qjYSz07C6lLyaa2bPM7Goz22JmG83sZ2b2dEly94vc/QUlz+Nm9ugyT7NVZnaBmd1qZnUzO7vT8+xG09aY2WPM7FIzeyi93K4ws6M7PZdE01aZ2aL0stpgZpvN7BozO6EL5qJnTmZ2Vjr3n3V6FommeaSz7jCz7enHZzo9k0TTPMysx8zea2brzGybmf3KzOZ1wVw0bYGZPbvh9rn7w83sf3bBbDRtkZk918yuN7OtZnaXmZ3T6ZkkmuZhZqeb2W/S2+jVZvb4Ds1BwxbZFOsLZvY3ZnZ/etl+zswGSpiJni3aX08ze4Il6w0Pm5kXcXqlLiaa2VxJ35T075IWSFoq6T2Shsuco9uZWe8+fnSjpL+QdH2J4+wXTbPZR9N5ki6TdLSkgyT9QtKlJY41KZpms4+m2yX9qaTFkuZL+qCkb+znNt129Mxmf43MbL6kd0i6ubyJ9o2m2UxxuzvW3WenHx1fIKZpNvtp+h5Jz5T0B5LmSnqNpKGy5poMTbOZrKm7/6Th9jlb0ouV3L9+p/QBG9A0m8mamlmfpP+W9ClJB0h6haQPm9mxJY83cS6aZrCPpkdJukjSG5X8n+Ybki4r+zEvDbNpZX3BzF4o6TxJJ0s6XNKjlFy2bUPPbFpcLxqV9BVJry9sEHcv7UPSCkmb9/PzsyX9NP38bUoeOOz+GJW0Mv3ZAZI+K2m9pPskvVdSzz62eZykayRtTg//MUn96c9+LMkl7UhP4xX7myn9+qOS1kjaKumXkp6dfv9gSTslLWw47NMkPSSpL/36TyWtlrRJ0hWSDms4rEv6S0m3S/rdFJfjTyWdXWY7mra3aXr4BelxFk51WJp2f1Mlf6w5PT3OgfSsbk9Jn1Ryx/xDSX/WqZY0LaZpephHd7ojTYtpquQPN9slHdnpjjRt2+Oj/yfp/9G0uk2V/NHcJc1s+N51ks6kaWWbvlnStxq+rknaJelkGlaj4YSZ9lpfkPRFSf/a8PXJku6nZzV7Nvzs0ZK8kF7tvDJMMvhcSRskXSjpVEnz9xei4fvLJa2T9KL0668r+cvWLEkHKtmj6w37OM2nSXqGpF4lK+qrJb1lQpR9/qdikivHqyU
tTLf3Vkn3SxpMf3a5pDc1HPYjkv49/fwlku6Q9Lj0uO+SdPWEOb6nZEFpRqtXjrI/aFpM04btradp9ZtKuknSSHr4T9Ozuj2VPMBYpeRB8g/VHYuJNM3X1NPL4X5Jl0g6nKbVbSrpOUr+A/D29DRvk/SXNK1u0wkzzZS0TdJJNK12UyULE38pqUfJXsQPSlpO02o2lfRXki5v+LpHyR7h59KwGg0nzDTZYuKNalg8k7RIbd7xhZ7tXy9SVRcT0+EfJ2mlpLWSxpQ8zfOgfV05JM1QsqL79vTrg5Ts5jqj4TBnSroq4+m/RdJ/t3rlmOTnm5Q8XUpKdtn/Wfp5T3rFOS79+tuSXt9wvJqSlenDGuZ4bsbz0DWLiTQtrOkyJX816ehfaGlaaNPB9DyfRc9q9ky3t0rSH6Rf/1BdsJhI03y3USWLT/1Knpb1MUm/kdRL02o2lfSq9DCfTS+TJyn5K//zaVrNphNO8zWSfifJOt2Tprl/954u6YH0chuT9Oed7knTXL97H6tkb62TlNyn/oOkuqR30LAaDSec5mSLiXdKOqXh6750m4fTs3o9G35W2GJi6W/A4u6r3f1sd18m6QmSDpF0/n6O8llJt7r7B9OvD1NyRV5vyRscbFay6nzgZEe25E0uvpm+cOhWSf+qZFW9JWb2VjNbnb4g6GYlu9Du3t6lkh5vZo+S9HxJW9z9Fw1zf7Rh5o2STMnrAOy2ptW5Oomm+Zqa2WJJ35X0CXf/Uqvno0g0zX87dfehtOd5nX5NIHq23PMvJN3k7te0Onu70LT126i7/9jdR9x9s6RzJR2h5IFrR9G05aa70n//2d13uftNki6W9KJWz0tRaFrIY96zJH3e0/8BdRpNW2tqZo+V9GVJr1Wy8HSMpLeZ2Wmtnpei0LS1pu7+WyW3z48peWroIkm3KFkAKhUN27a+sF3JnoK77f58W45tTome1VkvKn0xsVH6S2ilkivJXszsPCVvTtH4IpFrlKw0L3L3eenHXHc/Zh8n8x+SfivpKHefK+nvlURpmpk9W8nTaF6uZJfbeZK27N6euw8peVHLP1Hyl9T/nDD3GxpmnufuM9z96obDdMUDpTxo2lxTS97Y4buSLnP397VyHtqNprlvp31KXrC4K9CzqZ4nS3pp+uDifiVv8PAhM/tYK+elXWia+zbqrZ6XdqFpU01vynCYjqNp87dTM1uuZK+nz7dyHtqNpk01fYKS/+xf4e51d79V0reUPI2xa9C0udupu3/V3Z/g7gsl/ZOSxZDrWjkvRaFhoesLN0tq3CHiWEkPuPuGHNtsCj27e72o7Hdzfmy6Urss/Xq5kl1Ofz7JYU+V9NeSXuLuu//qLHdfr2Tx5UNmNtfMamZ2pJmduI+TnaPkxS+3p38Ve9OEnz+g7P/Rn6NkV9uHJPWa2T9qz9V6KXnAc7akP5T0hYbvf1LSO8zsmPT8HWBmL8t4ukqP029mg0qujH1mNmhmHV0QpmnrTS15t6orlOzqfF7W47UbTXM1fYaZPSu9rc4ws7cr2dX+2qzbKBo9c/3ePVvJHmtPTj9WKXlHuXc2sY3C0TTXbfQYM3uymfWY2WxJH1LyEhOrs26jHWjaelN3v1PSTyS908wGzOxxSp5G9M2s22gHmuZ7zJt6jZLXi7qzheMWjqa5mv5K0lFm9lxLHKnkXbpvbGIbhaNp7v+bPi29P12sZM+vb6SLP6WhYVvXFz4v6fVm9nhLdoB5l5KFvbahZ/t6pr97B5XsHa70ZwPNbH+isheitkk6XtK1ZrZDyZXiN0pemHKiV0haLGm1mW1PPz6Z/mz3LvK3KHkO+lclLdnHaf6tktfT2Sbp00p2sW/0bkkXWrI76cunmP8KJc9lv03SPUpeZHaPXU3d/WdKXi/iene/u+H7/y3pg5IutmT32d+o+b/GfVfJ03meKemC9PPnNLmNotG09aYvlfR0Sa9ruDy2m9mhTWyjHWjaetMBSR9X8sLB9yl5mt1p7r6uiW0UjZ4t9nT3ze5+/+4PJW+qs9Xdt2TdRpvQtPXb6EHp7Fsl3aXkhbZf7O6jTWyjHWia7/HRmUr2iNmgZG+nf3D3K5vcRtFomq+plJz3C1s4XrvQtPX70zuVvEvp/1Xy+/dHkr6m5OmJnUTTfLfTjyp5A6xb03//vMnjF4GGbVpfcPfvSPo3SVels92jZA/UdqJn+9aLDku/vjn9epeS227LzLvjJUhCMbMfSPqiu3+m07OgGDSNh6ax0DMemsZD03hoGg9N46Fp9dEwlig9WUwsmJk9Xclbdi9397a+OCnKQdN4aBoLPeOhaTw0jYem8dA0HppWHw1jidSzo6+3F42ZXSjp+5LeUvUrBhI0jYemsdAzHprGQ9N4aBoPTeOhafXRMJZoPXPtmWhmpyh5rYQeSZ9x9w8UNRg6g6bx0DQemsZD01joGQ9N46FpPDSNh6bx0DSOlhcTzaxHyQtLPl/SWiVvA3+mu99S3HgoE03joWk8NI2HprHQMx6axkPTeGgaD03joWkseZ7mfJykO9z9LncfkXSxpDOKGQsdQtN4aBoPTeOhaSz0jIem8dA0HprGQ9N4aBpIb47jLtWeb3O9VsnbeO9Tvw34oGblOElMZkg7NOLDVsCmaNolaBpPp5rSs322adPD7r64gE3RtEsU1JTfu12C+9J4aBoPTePhMW88POaNp4imeRYTJ/sFsddzps3sHEnnSNKgZup4OznHSWIy1/qVRW2Kpl2CpvGU2ZSe5fi+f/WegjZF0y5RUFN+73YJ7kvjoWk8NI2Hx7zx8Jg3niKa5nma81pJyxu+XiZp3cQDufsF7r7C3Vf0aSDHyaEENI2HpvFM2ZSelUPTWPi9Gw9N46FpPDSNh8dH8dA0kDyLiddJOsrMjjCzfkmvlHRZMWOhQ2gaD03joWk8NI2FnvHQNB6axkPTeGgaD00Daflpzu4+ZmZvlnSFkrf1/py731zYZCgdTeOhaTw0jYemsdAzHprGQ9N4aBoPTeOhaSx5XjNR7n65pMsLmgVdgKbx0DQemsZD01joGQ9N46FpPDSNh6bx0DSOPE9zBgAAAAAAADCNsJgIAAAAAAAAIBMWEwEAAAAAAABkwmIiAAAAAAAAgExYTAQAAAAAAACQCYuJAAAAAAAAADJhMREAAAAAAABAJiwmAgAAAAAAAMiExUQAAAAAAAAAmbCYCAAAAAAAACATFhMBAAAAAAAAZMJiIgAAAAAAAIBMWEwEAAAAAAAAkAmLiQAAAAAAAAAyYTERAAAAAAAAQCYsJgIAAAAAAADIhMVEAAAAAAAAAJlMuZhoZp8zswfN7DcN31tgZt8zs9vTf+e3d0wUiabx0DQemsZD03hoGgs946FpPDSNh6bx0HR6yLJn4kpJp0z43nmSrnT3oyRdmX6N6lgpmkazUjSNZqVoGs1K0TSalaJpJCtFz2hWiqbRrBRNo1kpmkazUjQNb8rFRHf/saSNE759hqQL088vlPSSYsdCO9E0HprGQ9N4aBoPTWOhZzw0jYem8dA0HppOD62+ZuJB7r5ektJ/DyxuJH
QITeOhaTw0jYem8dA0FnrGQ9N4aBoPTeOhaTC97T4BMztH0jmSNKiZ7T45lICm8dA0FnrGQ9N4aBoPTeOhaTw0jYWe8dC0OlrdM/EBM1siSem/D+7rgO5+gbuvcPcVfRpo8eRQAprGQ9N4MjWlZ6XQNB6axsJ9aTw0jYem8XBfGg9Ng2l1MfEySWeln58l6dJixkEH0TQemsZD03hoGg9NY6FnPDSNh6bx0DQemgYz5WKimX1J0jWSjjaztWb2ekkfkPR8M7td0vPTr1ERNI2HpvHQNB6axkPTWOgZD03joWk8NI2HptPDlK+Z6O5n7uNHJxc8C0pC03hoGg9N46FpPDSNhZ7x0DQemsZD03hoOj20/Q1YKsWs4fOa5PXff+1e/jzIj6bx0DQWesZD03hoGg9N46FpPDSNhZ7xTPOmLCamVwDr6ZF6etJvmVSrSfW6fPeVYHxcPj6efD4NrhiVRtN4aBoLPeOhaTw0jYem8dA0HprGQs94aPqI6b2YaJasIEtST09yhTBL/q2ZVHdpfFxyl0vJ15KketgrROXRNB6axkLPeGgaD03joWk8NI2HprHQMx6a7mH6LiamVwTr65WZyQYHZIODyepyX+8jK8s2PJJcGYaG5DYsd5ePjinqFaLSaBoPTWOhZzw0jYem8dA0HprGQ9NY6BkPTfcyPRcTG64ItRmDUm+vbO4cjR8wS+qtaWxWn+r9NdVG6urdMSqN1dWzZYe0dZtsbEx1DYW9QlQWTeOhaSz0jIem8dA0HprGQ9N4aBoLPeOh6aSm52JiysySK0Jvr3ywX+Oz+uR9NY3O6tX4oKlnKNmFtTZaV224X7azV54eL85VIBaaxkPTWOgZD03joWk8NI2HpvHQNBZ6xkPTPU3LxcTdL5ZpgwOyuXPkg/3adegB2ra0V+MDpuGF0tgsV+9208DGXvUMu+as7dUMSTY0Io2NyTx5PryPjXX67EA0jYimsdAzHprGQ9N4aBoPTeOhaSz0jIemk5t+i4lmj7xYpg0OavyAWRqf1adtS3u19ShpbOa4Bg/ZoUPmbteDW2dry7pZ6t1Zk3mf+rbPVG1nn3p27pKNjCary+kLbKKDaBoPTWOhZzw0jYem8dA0HprGQ9NY6BkPTfdp+i0m7pZeKdRbk/fVND5gGps5Lp81rkVzdujQORs1Vq/pvpmDGpM0PlBTva+mWm8tfc68dfocYCKaxkPTWOgZD03joWk8NI2HpvHQNBZ6xkPTvUzLxUTb/fbdfb0am9Wn0dm9Gl4oDR6yQ4vm7NBLlt6o42feoavnHKVLdaw2bp+poQfnanR2cnH19Pclxx8fD/nc9yqiaTw0jYWe8dA0HprGQ9N4aBoPTWOhZzw0ndz0W0y0WvK23TWTajXV+9NV5VmuQ+Zu16FzNur4mXfohMGaxnWnbpy7XL21utbNnKPxAVPPyJ7HRxegaTw0jYWe8dA0HprGQ9N4aBoPTWOhZzw03afpt5jodalel+ou1euqjdTVM1RT73bTg1tna6xe09VzjtK47tS1O4/U3VsXaMO2WerdYeoZdtVG9jw+ugBN46FpLPSMh6bx0DQemsZD03hoGgs946HpPk2/xURJnr6Tjg2PqHfHqGTSwMZebVk3S/fNHNSlOlY3zl2uu7cu0H1rF8h29GreRlff9rHk8COj8vHxZDvoCjSNh6ax0DMemsZD03hoGg9N46FpLPSMh6aTm5aLiZKSd9Bxl8bqstG6eoZdvTtrGpO0cftM9dbq2rBtlmxHr3p3mXqGpdpoXRqr//646C40jYemsdAzHprGQ9N4aBoPTeOhaSz0jIeme5l+i4npqrJL8qEh9WzZodpwv+as7ZV5n8YHahp6cK7WzZyj3h2meRtdPcPS7PtG1btxh2x4VD40JB8fD/W23pVG03hoGgs946FpPDSNh6bx0DQemsZCz3houk/TbzFRSkLWXW7D0tZtsp29miGpb/tM1ftqGp3dm7xY5nCya2pttJ5cETZtlY+NyYeG5aNjyfPn0RVoGg9NY6FnPDSNh6bx0DQemsZD01joGQ9NJzctFxN3c3fZ2Jhckg2NqLazT7Xe5B12ekZqqo3Uk+e4j9WTFeWxMWlsLNxz3SOhaTw0jYWe8dA0HprGQ9N4aBoPTWOhZzw03dOUi4lmtlzS5yUdLKku6QJ3/6iZLZD0ZUmHS7pb0svdfVP7Ri2Qu6S6fHRMdQ3JzKSxMfXs3CWZqae/L3nb7npdGhmV3JNdU4eG5e6/X1Wu6JWCpjTt1JxNmcZN6Rmrp0RTmtK0CmhK007N2RSa0jRQU3rG6inRNGLTfcmyZ+KYpLe6+/VmNkfSL83se5LOlnSlu3/AzM6TdJ6kt7dv1II1XCFckrnLRkYlM1lPj1SzZFfW9HntPj6eXAmkCFcEmtK0GqZvU3rG6inRlKZVQVOa0rT70ZSmcZrSM1ZPiaYRm05qysVEd18vaX36+TYzWy1pqaQzJJ2UHuxCST9Ula4M0iNXCEmPvKjm7s93ryw/skvq+Pjvn+Ne8SsCTWnagRFbNw2b0jNWT4mmNKVpFdCUph0YsXU0pWmApvSM1VOiacSm+9LUayaa2eGSniLpWkkHpVcUuft6Mzuw+PFKkIb1sbEkuKRJUwe9AtA0HprGQs94aBoPTeOhaTw0jYemsdAzHprGlnkx0cxmS/qapLe4+1Yzy3q8cySdI0mDmtnKjOWZBsEb0TQemsZCz3hoGg9N46FpPDSNh6ax0DMemsZXy3IgM+tTckW4yN0vSb/9gJktSX++RNKDkx3X3S9w9xXuvqJPA0XMjALQNB6axkLPeGgaD03joWk8NI2HprHQMx6aTg9TLiZasoT8WUmr3f3DDT+6TNJZ6ednSbq0+PHQDjSNh6ax0DMemsZD03hoGg9N46FpLPSMh6bTR5anOZ8g6TWSfm1mN6Tf+3tJH5D0FTN7vaR7Jb2sLROiHWgaD01joWc8NI2HpvHQNB6axkPTWOgZD02niSzv5vxTSft6gvvJxY6DMtA0HprGQs94aBoPTeOhaTw0jYemsdAzHppOH5leMxEAAAAAAAAAWEwEAAAAAAAAkAmLiQAAAAAAAAAyYTERAAAAAAAAQCYsJgIAAAAAAADIhMVEAAAAAAAAAJmwmAgAAAAAAAAgExYTAQAAAAAAAGTCYiIAAAAAAACATFhMBAAAAAAAAJAJi4kAAAAAAAAAMmExEQAAAAAAAEAmLCYCAAAAAAAAyITFRAAAAAAAAACZsJgIAAAAAAAAIBMWEwEAAAAAAABkwmIiAAAAAAAAgExYTAQAAAAAAACQyZSLiWY2aGa/MLMbzexmM3tP+v0FZvY9M7s9/Xd++8dFEWgaD01joWc8NI2HpvHQNB6axkPTWOgZD02njyx7Jg5Leq67HyvpyZJOMbNnSDpP0pXufpSkK9OvUQ00jYemsdAzHprGQ9N4aBoPTeOhaSz0jIem08SUi4me2J5+2Zd+uKQzJF2Yfv9CSS9px4AoHk3joWks9IyHpvHQNB6axkPTeGgaCz3joen0kek1E82sx8xukPSgpO+5+
7WSDnL39ZKU/nvgPo57jpmtMrNVoxouaGzkRdN4aBoLPeOhaTw0jYem8dA0HprGQs94aDo9ZFpMdPdxd3+ypGWSjjOzJ2Q9AXe/wN1XuPuKPg20OCaKRtN4aBoLPeOhaTw0jYem8dA0HprGQs94aDo9NPVuzu6+WdIPJZ0i6QEzWyJJ6b8PFj0c2o+m8dA0FnrGQ9N4aBoPTeOhaTw0jYWe8dA0tizv5rzYzOaln8+Q9DxJv5V0maSz0oOdJenSNs2IgtE0HprGQs94aBoPTeOhaTw0jYemsdAzHppOH70ZDrNE0oVm1qNk8fEr7v5NM7tG0lfM7PWS7pX0sjbOiWLRNB6axkLPeGgaD03joWk8NI2HprHQMx6aThNTLia6+02SnjLJ9zdIOrkdQ6G9aBoPTWOhZzw0jYem8dA0HprGQ9NY6BkPTaePpl4zEQAAAAAAAMD0Ze5e3omZPSRph6SHSzvRcixSZ8/TYe6+uBMnHLRpp3tKNC3atG2a9rxH3XEZFKkbzg9Ni9UN56eTTaP93pU635T70uLRNFbTTveUaFq0aduUx0dtRdNidcP5yd201MVESTKzVe6+otQTbbOI56kZ0c5/tPPTimiXQbTz04pol0G089OKaJdBtPPTrIjnP+J5akbE8x/xPDUj2vmPdn5aEe0yiHZ+WhHtMoh2floR7TKIcn54mjMAAAAAAACATFhMBAAAAAAAAJBJJxYTL+jAabZbxPPUjGjnP9r5aUW0yyDa+WlFtMsg2vlpRbTLINr5aVbE8x/xPDUj4vmPeJ6aEe38Rzs/rYh2GUQ7P62IdhlEOz+tiHYZhDg/pb9mIgAAAAAAAIBq4mnOAAAAAAAAADIpdTHRzE4xs1vN7A4zO6/M0y6CmS03s6vMbLWZ3Wxm56bfX2Bm3zOz29N/53d61rLQNJaq95RoOlHVm9JzbzSNh6bx0DQemsZS9Z4STSeqelN67o2m3a20pzmbWY+k2yQ9X9JaSddJOtPdbyllgAKY2RJJS9z9ejObI+mXkl4i6WxJG939A+mVfL67v71zk5aDprFE6CnRtFGEpvTcE03joWk8NI2HprFE6CnRtFGEpvTcE027X5l7Jh4n6Q53v8vdRyRdLOmMEk8/N3df7+7Xp59vk7Ra0lIl5+PC9GAXKrmCTAc0jaXyPSWaTlD5pvTcC03joWk8NI2HprFUvqdE0wkq35See6FplytzMXGppDUNX69Nv1dJZna4pKdIulbSQe6+XkquMJIO7OBoZaJpLKF6SjRVsKb0lETTiGgaD03joWksoXpKNFWwpvSURNOuV+Ziok3yvUq+lbSZzZb0NUlvcfetnZ6ng2gaS5ieEk1TYZrS8xE0jYem8dA0HprGEqanRNNUmKb0fARNu1yZi4lrJS1v+HqZpHUlnn4hzKxPyRXhIne/JP32A+nz4Xc/L/7BTs1XMprGEqKnRNMGIZrScw80jYem8dA0HprGEqKnRNMGIZrScw807XJlLiZeJ+koMzvCzPolvVLSZSWefm5mZpI+K2m1u3+44UeXSTor/fwsSZeWPVuH0DSWyveUaDpB5ZvScy80jYem8dA0HprGUvmeEk0nqHxTeu6Fpl2utHdzliQze5Gk8yX1SPqcu7+vtBMvgJk9S9JPJP1aUj399t8red77VyQdKuleSS9z940dGbJkNI2l6j0lmk5U9ab03BtN46FpPDSNh6axVL2nRNOJqt6UnnujaXcrdTERAAAAAAAAQHWV+TRnAAAAAAAAABXGYiIAAAAAAACATFhMBAAAAAAAAJAJi4kAAAAAAAAAMmExEQAAAAAAAEAmLCYCAAAAAAAAyITFRAAAAAAAAACZsJgIAAAAAAAAIJP/D8lrQCR9UAtpAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "gaussian = lambda x, mu, sigma: 1./(sigma * np.sqrt(2 * np.pi)) * np.exp( - (x - mu)**2 / (2 * sigma**2))\n", + "x = np.linspace(-1, 1, 33)\n", + "\n", + "figsize = (18, 4)\n", + "fig, axs = plt.subplots(nrows=2, ncols=11,figsize=figsize)\n", + "\n", + "counter = 0\n", + "for m in network.modules():\n", + " if isinstance(m, cknn.FlexConv):\n", + " axs[0,counter].set_title('Kernel at layer {}'.format(counter + 1))\n", + " axs[0,counter].imshow(m.conv_kernel[0,0].detach().cpu().numpy())\n", + " \n", + " # Plot gaussian mask by outer product of 1D gaussians\n", + " x_mu = m.mask_params[0,0].detach().numpy()\n", + " x_sigma = m.mask_params[0,1].detach().numpy()\n", + " y_mu = m.mask_params[1,0].detach().numpy()\n", + " y_sigma = m.mask_params[1,1].detach().numpy()\n", + " axs[1,counter].set_title('Size at layer {}'.format(counter + 1))\n", + " gauss = np.outer(gaussian(x, x_mu, x_sigma), gaussian(x, y_mu, y_sigma))\n", + " axs[1,counter].imshow(gauss)\n", + " \n", + " counter = counter + 1\n", + " \n", + "plt.tight_layout()\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As you can see, all filters are quite small at initialization, as the gaussian masks are initialized to have a low variance. If we were to train this FlexNet-5 model on CIFAR-10, we would find that the kernel sizes increase with depth, as in the middle row of Fig. 6 from the paper, reproduced here:\n", + "\n", + "![](kernel_sizes_flexnet.png)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Finally, we look at the number of parameters and the dimension of the filters of the network:" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Convolutional Kernel Sizes:\n", + "torch.Size([22, 3, 5, 5])\n", + "torch.Size([22, 22, 5, 5])\n", + "torch.Size([22, 22, 5, 5])\n", + "torch.Size([33, 22, 5, 5])\n", + "torch.Size([33, 33, 5, 5])\n", + "torch.Size([33, 33, 5, 5])\n", + "torch.Size([33, 33, 5, 5])\n", + "torch.Size([44, 33, 5, 5])\n", + "torch.Size([44, 44, 5, 5])\n", + "torch.Size([44, 44, 5, 5])\n", + "torch.Size([44, 44, 5, 5])\n", + "Number of parameters: 454046\n" + ] + } + ], + "source": [ + "# ------------------------------\n", + "# Parameter counter\n", + "def num_params(model):\n", + " return sum(p.numel() for p in model.parameters() if p.requires_grad)\n", + "# -----------------------------\n", + "\n", + "print('Convolutional Kernel Sizes:')\n", + "for m in network.modules():\n", + " if isinstance(m, cknn.FlexConv):\n", + " print(m.conv_kernel.shape)\n", + " elif isinstance(m, torch.nn.Linear):\n", + " print(m.weight.shape)\n", + " \n", + "print('Number of parameters: {}'.format(num_params(network)))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Aliasing regularization\n", + "\n", + "To use FlexNets across different resolutions one needs to train with an anti-aliasing regularization term. The module `antialiasing.py` provides the necessary methods. \n", + "\n", + "## Using pre-defined method for whole network\n", + "\n", + "The method `regularize_gabornet()` applies regularization to a network with FlexConv modules that all have the same horizon. 
Internally, it supports a couple different settings for applying the regularization over multiple modules and filters, but using the default settings, which we also used in our experiments, should be sufficient. Specifically, the default setting uses the following algorithm, in parallel for all FlexConv modules in the network:\n", + "\n", + "1. We compute the frequency response $f^+_{\\textrm{MAGNet},l}$ of each layer $l \\in L$ of the MAGNet, as well as the frequency response of the FlexConv gaussian mask $f^+_{w_\\textrm{gauss}}$ for the whole MAGNet.\n", + "2. We distribute $f^+_{w_\\textrm{gauss}}$ uniformly over all MAGNet layer frequency responses $f^+_{\\textrm{MAGNet},l}$:\n", + "\n", + "$\\hat{f}^+_{\\textrm{MAGNet},l} = f^+_{\\textrm{MAGNet},l} + \\frac{f^+_{w_\\textrm{gauss}}}{L}$\n", + "\n", + "3. We then regularize each MAGNet layer $\\hat{f}^+_{\\textrm{MAGNet},l}$ against a uniform part of the Nyquist frequency:\n", + "\n", + "$\\mathcal{L}_{\\mathrm{HF}} = \\sum_l^L ||\\max\\{\\hat{f}^+_{\\textrm{MAGNet},l}, \\frac{1}{L} f_{\\mathrm{Nyq}}(k)\\} - \\frac{1}{L} f_{\\mathrm{Nyq}}(k)||^2 $\n", + "\n", + "This implementation of the regularization ensures that all layers of the MAGNet get an independent regularization signal, instead of making the regularization term of individual layers dependent on each other. Notably, it is slightly different from the \"naive\" implementation of the regularization in Eq. 25 of our paper.\n", + "\n", + "This is the init signature of `regularize_gabornet()`:\n", + "\n", + "```\n", + "regularize_gabornet()\n", + "\n", + ":param model: Model definition to be regularized (torch.nn.Module).\n", + ":param horizon: Integer horizon used in the FlexConvs of this network.\n", + ":param factor: Lambda weight for applying the regularization term to the loss.\n", + ":param target=\"gabor\": Which parts of the FlexConv to regularize. Use \"gabor+mask\" for including the gaussian mask, and \"gabor\" for excluding it.\n", + ":param fn=\"l2_relu\": Function applied over the excess frequency content to regularize it.\n", + ":param method=\"together\": How to collect the different excess frequencies for regularization.\n", + ":param gauss_stddevs=1.0: Standard deviations used to compute the effect of the gaussian term in the Gabor filters.\n", + "```" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor(433.4299, grad_fn=)\n" + ] + } + ], + "source": [ + "from antialiasing import regularize_gabornet\n", + "\n", + "\n", + "regularization_term = regularize_gabornet(network, config.conv.horizon, 0.1, target=\"gabor+mask\")\n", + "print(regularization_term)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Manually\n", + "\n", + "`regularize_gabornet()` is a custom piece of code, that supports many more features than reported in our paper. Here, we show the minimal code for reproducing the proposed regularization method, using the \"together\" strategy. 
We use the helper methods in `antialiasing.py`.\n", + "\n", + "First, we use the method `gabor_layer_frequencies()` to get the maximum frequency response for a single FlexConv module:" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Frequencies for each layer of the MAGNet:\n", + "tensor([20.0273, 10.9657, 8.2731, 3.4877], grad_fn=)\n", + "Frequency of the FlexConv gaussian mask:\n", + "tensor(2.1221, grad_fn=)\n" + ] + } + ], + "source": [ + "from antialiasing import gabor_layer_frequencies, nyquist_frequency, l2_relu\n", + "\n", + "\n", + "module = network.cconv1\n", + "magnet_layer_frequencies, mask_frequency = gabor_layer_frequencies(module, \"gabor+mask\", \"together\", gauss_stddevs=1.0)\n", + "\n", + "print(\"Frequencies for each layer of the MAGNet:\")\n", + "print(magnet_layer_frequencies)\n", + "\n", + "print(\"Frequency of the FlexConv gaussian mask:\")\n", + "print(mask_frequency)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Then, we distribute the mask frequency over the MAGNet layer frequencies." + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Frequencies for each layer of the MAGNet, after including uniformly distributed mask frequency:\n", + "tensor([20.5578, 11.4962, 8.8036, 4.0182], grad_fn=)\n" + ] + } + ], + "source": [ + "# Distribute mask frequency equally over all magnet layer frequencies\n", + "n_filters = magnet_layer_frequencies.shape[0]\n", + "mask_freqs = mask_frequency.unsqueeze(0).repeat([n_filters]) / torch.tensor(\n", + " n_filters, dtype=torch.float32\n", + ")\n", + "flexconv_freqs = magnet_layer_frequencies + mask_freqs\n", + "\n", + "print(\"Frequencies for each layer of the MAGNet, after including uniformly distributed mask frequency:\")\n", + "print(flexconv_freqs)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now, we compute the Nyquist frequency for the kernel size (\"horizon\") we are training with. 
We divide by the amount of layers, as per the \"together\" regularization strategy:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Compute Nyquist frequency\n", + "nyquist_freq = nyquist_frequency(config.conv.horizon)\n", + "print(\"\\nNyquist frequency:\")\n", + "print(nyquist_freq)\n", + "# We uniformly spread the Nyquist frequency over all layers of the MAGNet,\n", + "# following the \"together\" method\n", + "print(\"\\nNyquist frequency, per layer:\")\n", + "print(nyquist_freq / n_filters)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Finally, we compute the regularization term by taking the difference between the MAGNet frequencies and the Nyquist frequencies, and compute the L2 norm:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Compute regularization term\n", + "frequencies_to_reg = flexconv_freqs - (nyquist_freq / n_filters)\n", + "print(\"\\nAmount of frequency to remove, per layer:\")\n", + "print(frequencies_to_reg)\n", + "print(\"\\nRegularization term = L2(freqs - nyquist):\")\n", + "print(l2_relu(flexconv_freqs, nyquist_freq / n_filters))" + ] + } + ], + "metadata": { + "interpreter": { + "hash": "c07c5c8510eb4113ecf49d62ba29a0bcaa1ec6bcd4eeac2966e4ed4a88286269" + }, + "kernelspec": { + "display_name": "Python 3.8.5 64-bit ('flexconv': conda)", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.5" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/demo/kernel_sizes_flexnet.pdf b/demo/kernel_sizes_flexnet.pdf new file mode 100644 index 0000000..d53cf10 Binary files /dev/null and b/demo/kernel_sizes_flexnet.pdf differ diff --git a/demo/kernel_sizes_flexnet.png b/demo/kernel_sizes_flexnet.png new file mode 100644 index 0000000..c2e6b55 Binary files /dev/null and b/demo/kernel_sizes_flexnet.png differ diff --git a/demo/truck5.png b/demo/truck5.png new file mode 100644 index 0000000..e032415 Binary files /dev/null and b/demo/truck5.png differ diff --git a/models/ckresnet.py b/models/ckresnet.py index 382b1ae..41685fa 100644 --- a/models/ckresnet.py +++ b/models/ckresnet.py @@ -80,37 +80,100 @@ def __init__( nonlinearity = net_config.nonlinearity # Unpack dim_linear - dim_linear = kernel_config.dim_linear - scale_sigma = kernel_config.srf.scale + kernel_scale_sigma = kernel_config.srf.scale # Unpack conv_type conv_type = conv_config.type + # Unpack kernel_config + kernel_type = kernel_config.type + kernel_dim_linear = kernel_config.dim_linear + kernel_no_hidden = kernel_config.no_hidden + kernel_no_layers = kernel_config.no_layers + kernel_activ_function = kernel_config.activ_function + kernel_norm = kernel_config.norm + kernel_omega_0 = kernel_config.omega_0 + kernel_learn_omega_0 = kernel_config.learn_omega_0 + kernel_weight_norm = kernel_config.weight_norm + kernel_steerable = kernel_config.steerable + kernel_init_spatial_value = kernel_config.init_spatial_value + kernel_bias_init = kernel_config.bias_init + kernel_input_scale = kernel_config.input_scale + kernel_sampling_rate_norm = kernel_config.sampling_rate_norm + # Define Convolution Type: # ------------------------- # Unpack other conv_config values in case normal convolutions are used. 
+ conv_use_fft = conv_config.use_fft conv_horizon = conv_config.horizon conv_padding = conv_config.padding conv_stride = conv_config.stride conv_bias = conv_config.bias + # Unpack mask_config + mask_use = mask_config.use + mask_type = mask_config.type + mask_init_value = mask_config.init_value + mask_temperature = mask_config.temperature + mask_dynamic_cropping = mask_config.dynamic_cropping + mask_threshold = mask_config.threshold + # Define partials for types of convs if conv_type == "CKConv": ConvType = partial( ckconv.nn.CKConv, - kernel_config=kernel_config, - conv_config=conv_config, + horizon=conv_horizon, + kernel_type=kernel_type, + kernel_dim_linear=kernel_dim_linear, + kernel_no_hidden=kernel_no_hidden, + kernel_no_layers=kernel_no_layers, + kernel_activ_function=kernel_activ_function, + kernel_norm=kernel_norm, + kernel_omega_0=kernel_omega_0, + kernel_learn_omega_0=kernel_learn_omega_0, + kernel_weight_norm=kernel_weight_norm, + kernel_steerable=kernel_steerable, + kernel_init_spatial_value=kernel_init_spatial_value, + kernel_bias_init=kernel_bias_init, + kernel_input_scale=kernel_input_scale, + kernel_sampling_rate_norm=kernel_sampling_rate_norm, + conv_use_fft=conv_use_fft, + conv_bias=conv_bias, + conv_padding=conv_padding, + conv_stride=1, ) elif conv_type == "FlexConv": ConvType = partial( ckconv.nn.FlexConv, - kernel_config=kernel_config, - conv_config=conv_config, - mask_config=mask_config, + horizon=conv_horizon, + kernel_type=kernel_type, + kernel_dim_linear=kernel_dim_linear, + kernel_no_hidden=kernel_no_hidden, + kernel_no_layers=kernel_no_layers, + kernel_activ_function=kernel_activ_function, + kernel_norm=kernel_norm, + kernel_omega_0=kernel_omega_0, + kernel_learn_omega_0=kernel_learn_omega_0, + kernel_weight_norm=kernel_weight_norm, + kernel_steerable=kernel_steerable, + kernel_init_spatial_value=kernel_init_spatial_value, + kernel_bias_init=kernel_bias_init, + kernel_input_scale=kernel_input_scale, + kernel_sampling_rate_norm=kernel_sampling_rate_norm, + conv_use_fft=conv_use_fft, + conv_bias=conv_bias, + conv_padding=conv_padding, + conv_stride=conv_stride, + mask_use=mask_use, + mask_type=mask_type, + mask_init_value=mask_init_value, + mask_temperature=mask_temperature, + mask_dynamic_cropping=mask_dynamic_cropping, + mask_threshold=mask_threshold, ) elif conv_type == "Conv": ConvType = partial( - getattr(torch.nn, f"Conv{dim_linear}d"), + getattr(torch.nn, f"Conv{kernel_dim_linear}d"), kernel_size=int(conv_horizon), padding=conv_padding, stride=conv_stride, @@ -123,7 +186,7 @@ def __init__( init_order=4.0, init_scale=0.0, use_cuda=True, # NOTE(rjbruin): hardcoded for now - scale_sigma=scale_sigma, + scale_sigma=kernel_scale_sigma, ) else: raise NotImplementedError(f"conv_type = {conv_type}") @@ -131,7 +194,7 @@ def __init__( # Define NormType NormType = { - "BatchNorm": getattr(torch.nn, f"BatchNorm{dim_linear}d"), + "BatchNorm": getattr(torch.nn, f"BatchNorm{kernel_dim_linear}d"), "LayerNorm": ckconv.nn.LayerNorm, }[norm] @@ -140,7 +203,7 @@ def __init__( ] # Define LinearType - LinearType = getattr(ckconv.nn, f"Linear{dim_linear}d") + LinearType = getattr(ckconv.nn, f"Linear{kernel_dim_linear}d") # Create Input Layers self.cconv1 = ConvType(in_channels=in_channels, out_channels=hidden_channels) @@ -215,7 +278,7 @@ def __init__( # ------------------------- # Save variables in self - self.dim_linear = dim_linear + self.dim_linear = kernel_dim_linear def forward(self, x): # First layers diff --git a/models/cktcn.py b/models/cktcn.py index 382f259..681d85f 
100644 --- a/models/cktcn.py +++ b/models/cktcn.py @@ -16,6 +16,7 @@ def __init__( in_channels: int, out_channels: int, ConvType: Union[CKConv, FlexConv, Conv1d, Conv2d], + NonlinearType: torch.nn.Module, NormType: torch.nn.Module, LinearType: torch.nn.Module, dropout: float, @@ -44,6 +45,7 @@ def __init__( in_channels=in_channels, out_channels=out_channels, ConvType=ConvType, + NonlinearType=NonlinearType, NormType=NormType, LinearType=LinearType, dropout=dropout, @@ -51,8 +53,8 @@ def __init__( def forward(self, x): shortcut = self.shortcut(x) - out = self.dp(torch.relu(self.norm1(self.cconv1(x)))) - out = torch.relu(self.dp(torch.relu(self.norm2(self.cconv2(out)))) + shortcut) + out = self.dp(self.nonlinear(self.norm1(self.cconv1(x)))) + out = self.nonlinear(self.dp(self.nonlinear(self.norm2(self.cconv2(out)))) + shortcut) return out @@ -74,38 +76,98 @@ def __init__( norm = net_config.norm dropout = net_config.dropout block_width_factors = net_config.block_width_factors + nonlinearity = net_config.nonlinearity - # Unpack dim_linear - dim_linear = kernel_config.dim_linear - - # Unpack conv_type - conv_type = conv_config.type + # Unpack kernel_config + kernel_type = kernel_config.type + kernel_dim_linear = kernel_config.dim_linear + kernel_no_hidden = kernel_config.no_hidden + kernel_no_layers = kernel_config.no_layers + kernel_activ_function = kernel_config.activ_function + kernel_norm = kernel_config.norm + kernel_omega_0 = kernel_config.omega_0 + kernel_learn_omega_0 = kernel_config.learn_omega_0 + kernel_weight_norm = kernel_config.weight_norm + kernel_steerable = kernel_config.steerable + kernel_init_spatial_value = kernel_config.init_spatial_value + kernel_bias_init = kernel_config.bias_init + kernel_input_scale = kernel_config.input_scale + kernel_sampling_rate_norm = kernel_config.sampling_rate_norm # Define Convolution Type: # ------------------------- # Unpack other conv_config values in case normal convolutions are used. 
+ conv_type = conv_config.type conv_horizon = conv_config.horizon conv_padding = conv_config.padding conv_stride = conv_config.stride conv_bias = conv_config.bias + conv_use_fft = conv_config.use_fft + + # Unpack mask_config + mask_use = mask_config.use + mask_type = mask_config.type + mask_init_value = mask_config.init_value + mask_temperature = mask_config.temperature + mask_dynamic_cropping = mask_config.dynamic_cropping + mask_threshold = mask_config.threshold # Define partials for types of convs if conv_type == "CKConv": ConvType = partial( ckconv.nn.CKConv, - kernel_config=kernel_config, - conv_config=conv_config, + horizon=conv_horizon, + kernel_type=kernel_type, + kernel_dim_linear=kernel_dim_linear, + kernel_no_hidden=kernel_no_hidden, + kernel_no_layers=kernel_no_layers, + kernel_activ_function=kernel_activ_function, + kernel_norm=kernel_norm, + kernel_omega_0=kernel_omega_0, + kernel_learn_omega_0=kernel_learn_omega_0, + kernel_weight_norm=kernel_weight_norm, + kernel_steerable=kernel_steerable, + kernel_init_spatial_value=kernel_init_spatial_value, + kernel_bias_init=kernel_bias_init, + kernel_input_scale=kernel_input_scale, + kernel_sampling_rate_norm=kernel_sampling_rate_norm, + conv_use_fft=conv_use_fft, + conv_padding=conv_padding, + conv_stride=conv_stride, + conv_bias=conv_bias, ) elif conv_type == "FlexConv": ConvType = partial( ckconv.nn.FlexConv, - kernel_config=kernel_config, - conv_config=conv_config, - mask_config=mask_config, + horizon=conv_horizon, + kernel_type=kernel_type, + kernel_dim_linear=kernel_dim_linear, + kernel_no_hidden=kernel_no_hidden, + kernel_no_layers=kernel_no_layers, + kernel_activ_function=kernel_activ_function, + kernel_norm=kernel_norm, + kernel_omega_0=kernel_omega_0, + kernel_learn_omega_0=kernel_learn_omega_0, + kernel_weight_norm=kernel_weight_norm, + kernel_steerable=kernel_steerable, + kernel_init_spatial_value=kernel_init_spatial_value, + kernel_bias_init=kernel_bias_init, + kernel_input_scale=kernel_input_scale, + kernel_sampling_rate_norm=kernel_sampling_rate_norm, + conv_use_fft=conv_use_fft, + conv_padding=conv_padding, + conv_stride=conv_stride, + conv_bias=conv_bias, + mask_use=mask_use, + mask_type=mask_type, + mask_init_value=mask_init_value, + mask_temperature=mask_temperature, + mask_dynamic_cropping=mask_dynamic_cropping, + mask_threshold=mask_threshold, ) elif conv_type == "Conv": ConvType = partial( - getattr(torch.nn, f"Conv{dim_linear}d"), + getattr(torch.nn, f"Conv{kernel_dim_linear}d"), kernel_size=int(conv_horizon), padding=conv_padding, stride=conv_stride, @@ -117,12 +179,16 @@ def __init__( # Define NormType NormType = { - "BatchNorm": getattr(torch.nn, f"BatchNorm{dim_linear}d"), + "BatchNorm": getattr(torch.nn, f"BatchNorm{kernel_dim_linear}d"), "LayerNorm": ckconv.nn.LayerNorm, }[norm] + NonlinearType = {"ReLU": torch.nn.ReLU, "LeakyReLU": torch.nn.LeakyReLU}[ + nonlinearity + ] + # Define LinearType - LinearType = getattr(ckconv.nn, f"Linear{dim_linear}d") + LinearType = getattr(ckconv.nn, f"Linear{kernel_dim_linear}d") # Create Blocks # ------------------------- @@ -159,6 +225,7 @@ def __init__( in_channels=input_ch, out_channels=hidden_ch, ConvType=ConvType, + NonlinearType=NonlinearType, NormType=NormType, LinearType=LinearType, dropout=dropout, diff --git a/testcases/c10-fn5-reg.npy b/testcases/c10-fn5-reg.npy new file mode 100644 index 0000000..e95ff7f Binary files /dev/null and b/testcases/c10-fn5-reg.npy differ diff --git a/testcases/c10-fn5.npy b/testcases/c10-fn5.npy new file mode 100644 index 
0000000..979f3da Binary files /dev/null and b/testcases/c10-fn5.npy differ diff --git a/testcases/mnist-fn7-randomsettings.npy b/testcases/mnist-fn7-randomsettings.npy new file mode 100644 index 0000000..aff78c9 Binary files /dev/null and b/testcases/mnist-fn7-randomsettings.npy differ diff --git a/testcases/scifar-tcn-flexconv.npy b/testcases/scifar-tcn-flexconv.npy new file mode 100644 index 0000000..3926e95 Binary files /dev/null and b/testcases/scifar-tcn-flexconv.npy differ diff --git a/testcases/smnist-tcn-ckconv.npy b/testcases/smnist-tcn-ckconv.npy new file mode 100644 index 0000000..8ea7247 Binary files /dev/null and b/testcases/smnist-tcn-ckconv.npy differ diff --git a/testcases/testcase_settings.txt b/testcases/testcase_settings.txt new file mode 100644 index 0000000..034a2a8 --- /dev/null +++ b/testcases/testcase_settings.txt @@ -0,0 +1,180 @@ +c10-fn5 +"debug=True", +"device=cpu", +"train.batch_size=2", +"conv.horizon=33", +"train.augment=resnet", +"net.block_width_factors=[1.0, 1, 1.5, 2, 2.0, 2]", +"net.no_blocks=5", +"net.no_hidden=22", +"conv.type=FlexConv", +"dataset=CIFAR10", +"net.dropout=0.2", +"net.dropout_in=0", +"mask.dynamic_cropping=True", +"train.epochs=350", +"kernel.input_scale=25.6", +"kernel.type=MAGNet", +"kernel.no_hidden=32", +"kernel.no_layers=3", +"train.lr=0.01", +"train.scheduler_params.warmup_epochs=5", +"train.mask_params_lr_factor=0.1", +"mask.init_value=0.075", +"mask.temperature=15.0", +"mask.type=gaussian", +"net.type=ResNet", +"net.norm=BatchNorm", +"train.optimizer=Adam", +"kernel.regularize=False", +"train.scheduler=cosine", +"train.weight_decay=0", +"wandb.project=ckconv_vision", +"summary=[64, 3, 32, 32]", +"seed=0", +"testcase.load=True", +"testcase.path=./testcases/c10-fn5.npy", + +c10-fn5-reg +"debug=True", +"device=cpu", +"train.batch_size=2", +"train.augment=resnet", +"net.block_width_factors=[1.0, 2, 1.5, 3, 2.0, 2]", +"conv.type=FlexConv", +"dataset=CIFAR10", +"net.dropout=0.2", +"net.dropout_in=0", +"mask.dynamic_cropping=True", +"train.epochs=100", +"cross_res.finetune_epochs=100", +"kernel.input_scale=25.6", +"kernel.type=MAGNet", +"kernel.no_hidden=32", +"kernel.no_layers=3", +"train.lr=0.01", +"train.scheduler_params.warmup_epochs=5", +"train.mask_params_lr_factor=0.1", +"mask.init_value=0.075", +"mask.temperature=15.0", +"mask.type=gaussian", +"net.type=ResNet", +"net.no_blocks=7", +"net.no_hidden=24", +"net.norm=BatchNorm", +"train.optimizer=Adam", +"kernel.regularize=True", +"kernel.regularize_params.factor=0.1", +"train.scheduler=cosine", +"cross_res.source_res=16", +"cross_res.target_res=32", +"wandb.dir=/tmp/wandb", +"train.weight_decay=0", +"wandb.project=ckconv_vision", +"comment=c10-cres-fixalias-gabortog-16x32-check", +"kernel.regularize_params.target=gabor", +"kernel.regularize_params.method=together", +"kernel.regularize_params.gauss_stddevs=2.0", +"seed=0", +"testcase.load=True", +"testcase.path=./testcases/c10-fn5-reg.npy", + + +"debug=True", +"device=cpu", +"conv.type=CKConv", +"train.batch_size=2", +"dataset=sMNIST", +"train.batch_size=64", +"net.dropout=0.1", +"net.dropout_in=0", +"train.epochs=200", +"kernel.type=Gabor", +"kernel.no_hidden=32", +"kernel.no_layers=3", +"train.lr=0.001", +"dataset_params.mfcc=False", +"net.type=TCN", +"net.no_blocks=2", +"net.no_hidden=30", +"net.norm=BatchNorm", +"train.optimizer=Adam", +"dataset_params.permuted=False", +"train.scheduler_params.decay_factor=5", +"train.scheduler_params.patience=20", +"train.scheduler=plateau", +"conv.use_fft=True", +"train.weight_decay=0.0001", 
+"seed=0", +"testcase.load=True", +"testcase.path=./testcases/smnist-tcn-ckconv.npy", + +"debug=True", +"device=cpu", +"conv.type=FlexConv", +"mask.type=gaussian", +"mask.temperature=15.0", +"train.mask_params_lr_factor=0.1", +"mask.init_value=0.075", +"train.batch_size=2", +"dataset=sCIFAR10", +"net.dropout=0.1", +"net.dropout_in=0", +"train.epochs=200", +"kernel.type=MAGNet", +"kernel.no_hidden=32", +"kernel.no_layers=3", +"train.lr=0.001", +"dataset_params.mfcc=False", +"net.type=TCN", +"net.no_blocks=2", +"net.no_hidden=30", +"net.norm=BatchNorm", +"train.optimizer=Adam", +"dataset_params.permuted=False", +"train.scheduler_params.decay_factor=5", +"train.scheduler_params.patience=20", +"train.scheduler=plateau", +"conv.use_fft=True", +"train.weight_decay=0.0001", +"seed=0", +"testcase.load=True", +"testcase.path=./testcases/scifar-tcn-flexconv.npy", + + +"debug=True", +"device=cpu", +"conv.horizon=29", +"train.batch_size=2", +"train.augment=standard", +"net.block_width_factors=[1.0, 2, 1.5, 1, 2.0, 2]", +"conv.type=CKConv", +"dataset=MNIST", +"net.dropout=0.2", +"net.dropout_in=0", +"mask.dynamic_cropping=True", +"train.epochs=100", +"cross_res.finetune_epochs=100", +"kernel.input_scale=25.6", +"kernel.type=MAGNet", +"kernel.no_hidden=34", +"kernel.no_layers=2", +"train.lr=0.01", +"train.scheduler_params.warmup_epochs=3", +"train.mask_params_lr_factor=0.1", +"mask.init_value=0.05", +"mask.temperature=12.0", +"mask.type=gaussian", +"net.type=ResNet", +"net.no_blocks=5", +"net.no_hidden=20", +"net.norm=BatchNorm", +"train.optimizer=Adam", +"train.scheduler=none", +"wandb.dir=/tmp/wandb", +"train.weight_decay=0.001", +"wandb.project=ckconv_vision", +"comment=c10-cres-fixalias-gabortog-16x32-check", +"seed=0", +"testcase.save=True", +"testcase.path=./testcases/mnist-fn7-randomsettings.npy", \ No newline at end of file diff --git a/trainer.py b/trainer.py index 9bf1b4e..a5e48ea 100644 --- a/trainer.py +++ b/trainer.py @@ -17,7 +17,6 @@ import probspec_routines as ps_routines from tester import test import ckconv -import timer from torchmetrics import Accuracy import antialiasing from optim import construct_optimizer, construct_scheduler, CLASSES_DATASET @@ -113,13 +112,6 @@ def train( epoch_start=epoch_start, ) - # save model both locally and on wandb - if cfg.debug: - torch.save( - model.state_dict(), - os.path.join(hydra.utils.get_original_cwd(), "saved/model.pt"), - ) - save_to_wandb(model, optimizer, lr_scheduler, cfg, name="final_model") return model, optimizer, lr_scheduler @@ -159,6 +151,9 @@ def classification_train( # Training parameters epochs = cfg.train.epochs + # Testcases: override epochs + if cfg.testcase.load or cfg.testcase.save: + epochs = cfg.testcase.epochs device = cfg.device criterion = criterion().to(device) @@ -176,6 +171,9 @@ def classification_train( epochs_no_improvement = 0 max_epochs_no_improvement = 100 + if cfg.testcase.save or cfg.testcase.load: + testcase_losses = [] + # iterate over epochs for epoch in range(epoch_start, epochs + epoch_start): print("Epoch {}/{}".format(epoch + 1, epochs + epoch_start)) @@ -258,12 +256,11 @@ def classification_train( gabor_reg = antialiasing.regularize_gabornet( model, cfg.kernel.regularize_params.res, + cfg.kernel.regularize_params.factor, cfg.kernel.regularize_params.target, cfg.kernel.regularize_params.fn, cfg.kernel.regularize_params.method, - cfg.kernel.regularize_params.factor, gauss_stddevs=cfg.kernel.regularize_params.gauss_stddevs, - gauss_factor=cfg.kernel.regularize_params.gauss_factor, ) loss += gabor_reg 
running_gabor_reg += gabor_reg @@ -280,6 +277,9 @@ # print(f"Resolution: {config.regularize_gabornet_res}") # print(f"Total regularization term (incl. lambda): {gabor_reg:.8f}") + if cfg.testcase.save or cfg.testcase.load: + testcase_losses.append(loss.item()) + # Backward pass: if phase == "train": loss.backward() @@ -308,6 +308,9 @@ # torchmetrics.Accuracy requires everything to be on CPU top5(pred_sm.to("cpu"), labels.to("cpu")) + if (cfg.testcase.load or cfg.testcase.save) and total >= cfg.testcase.batches: + break + # Log GaborNet frequencies if cfg.kernel.regularize and phase == "train": stats = antialiasing.get_gabornet_summaries( @@ -425,6 +428,21 @@ # Print learned limits _print_learned_limits(model) + # Testcases: load/save losses for comparison + if cfg.testcase.save: + testcase_losses = np.array(testcase_losses) + with open(hydra.utils.to_absolute_path(cfg.testcase.path), 'wb') as f: + np.save(f, testcase_losses, allow_pickle=True) + if cfg.testcase.load: + testcase_losses = np.array(testcase_losses) + with open(hydra.utils.to_absolute_path(cfg.testcase.path), 'rb') as f: + target_losses = np.load(f, allow_pickle=True) + if np.allclose(testcase_losses, target_losses): + print("Testcase passed!") + else: + diff = np.sum(np.abs(testcase_losses - target_losses)) + raise AssertionError(f"Testcase failed: diff = {diff:.8f}") + # Return model return model diff --git a/utils.py b/utils.py index 8701456..10e3654 100644 --- a/utils.py +++ b/utils.py @@ -12,3 +12,6 @@ def load_config_from_json(filepath): dot_list.append(f"{key}={data[key]['value']}") return OmegaConf.from_dotlist(dot_list) + +def omegaconf_to_dict(omegaconf, name): + return {name + '_' + k: v for (k, v) in omegaconf.items()} \ No newline at end of file
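
Below is a minimal usage sketch (not part of the diff) for the `omegaconf_to_dict` helper added to `utils.py`. The sample config values, and the idea that the prefixed keys line up with the flattened `kernel_*` keyword arguments introduced elsewhere in this change, are assumptions for illustration only; the helper is reproduced locally with the `.items()` iteration so the snippet runs on its own.

```python
from omegaconf import OmegaConf

# Local copy of the helper from utils.py (using .items()), so this sketch is
# self-contained and does not depend on the repository being installed.
def omegaconf_to_dict(omegaconf, name):
    return {name + '_' + k: v for (k, v) in omegaconf.items()}

# Hypothetical sub-config; the keys mirror a few of the kernel settings.
kernel_cfg = OmegaConf.create({"no_hidden": 32, "no_layers": 3, "omega_0": 0.0})

# Flatten into prefixed keyword arguments, e.g. for passing to a constructor
# that takes kernel_no_hidden, kernel_no_layers, kernel_omega_0, ...
kernel_kwargs = omegaconf_to_dict(kernel_cfg, "kernel")
print(kernel_kwargs)
# {'kernel_no_hidden': 32, 'kernel_no_layers': 3, 'kernel_omega_0': 0.0}
```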