keyword stringclasses 7 values | repo_name stringlengths 8 98 | file_path stringlengths 4 244 | file_extension stringclasses 29 values | file_size int64 0 84.1M | line_count int64 0 1.6M | content stringlengths 1 84.1M ⌀ | language stringclasses 14 values |
|---|---|---|---|---|---|---|---|
3D | feos-org/feos | docs/recipes/recipes_surface_tension_pure.ipynb | .ipynb | 3,012 | 137 | {
"cells": [
{
"cell_type": "markdown",
"id": "2f323a90-1e4f-4a27-a495-38dbf8dad3e3",
"metadata": {},
"source": [
"# Surface tension of a pure substance "
]
},
{
"cell_type": "code",
"execution_count": 1,
"id": "06f40029-24e4-4f91-b502-6b9265818ed8",
"metadata": {},
"outputs": [],
"source": [
"import si_units as si\n",
"import feos\n",
"\n",
"parameters = feos.Parameters.from_json(\n",
" substances=['methanol'], \n",
" pure_path='../../parameters/pcsaft/gross2002.json'\n",
")\n",
"functional = feos.HelmholtzEnergyFunctional.pcsaft(parameters)"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "38b998ad-4fd0-4a1e-8fa3-991a13fc0860",
"metadata": {},
"outputs": [
{
"data": {
"text/markdown": [
"||temperature|density|\n",
"|-|-|-|\n",
"|phase 1|350.00000 K|62.68366 mol/m³|\n",
"|phase 2|350.00000 K|23.13883 kmol/m³|\n"
],
"text/plain": [
"phase 0: T = 350.00000 K, ρ = 62.68366 mol/m³\n",
"phase 1: T = 350.00000 K, ρ = 23.13883 kmol/m³"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"vle = feos.PhaseEquilibrium.pure(functional, 350 * si.KELVIN)\n",
"vle"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "a48aff0e-cdb8-4553-96ff-57328272c184",
"metadata": {},
"outputs": [
{
"data": {
"text/latex": [
"$20.191\\,\\mathrm{\\frac{mN}{m}}$"
],
"text/plain": [
"20.190692939005643 mN/m"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"surface_tension_pdgt = (feos.PlanarInterface\n",
" .from_pdgt(vle, 1024)\n",
" .solve()\n",
" .surface_tension)\n",
"surface_tension_pdgt"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "1bb266d3-8dbf-4bbf-833d-42bfee960fe8",
"metadata": {},
"outputs": [
{
"data": {
"text/latex": [
"$20.191\\,\\mathrm{\\frac{mN}{m}}$"
],
"text/plain": [
"20.190693016626028 mN/m"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"surface_tension_tanh = (feos.PlanarInterface\n",
" .from_tanh(vle, 1024, 100 * si.ANGSTROM, 500 * si.KELVIN)\n",
" .solve()\n",
" .surface_tension\n",
")\n",
"surface_tension_tanh"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "feos",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.13.5"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
| Unknown |
3D | feos-org/feos | docs/recipes/recipes_automatic_differentiation.ipynb | .ipynb | 8,076 | 279 | {
"cells": [
{
"cell_type": "markdown",
"id": "9767dd5f",
"metadata": {},
"source": [
"# Phase equilibria including derivatives"
]
},
{
"cell_type": "code",
"execution_count": 1,
"id": "fb44d253",
"metadata": {},
"outputs": [],
"source": [
"import feos\n",
"import numpy as np"
]
},
{
"cell_type": "markdown",
"id": "a76652c9",
"metadata": {},
"source": [
"## Vapor pressure"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "ebabc49a",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"CPU times: user 1min 39s, sys: 654 ms, total: 1min 40s\n",
"Wall time: 2.3 s\n"
]
},
{
"data": {
"text/plain": [
"(array([ 99933.70173872, 99933.76369505, 99933.82565141, ...,\n",
" 4841855.06247647, 4841856.28251754, 4841857.50255883],\n",
" shape=(10000000,)),\n",
" array([[-1.04684676e+05, 1.37290974e+05, -2.94409278e+03],\n",
" [-1.04684730e+05, 1.37291045e+05, -2.94409438e+03],\n",
" [-1.04684784e+05, 1.37291116e+05, -2.94409599e+03],\n",
" ...,\n",
" [-3.06545385e+06, 2.31346293e+06, -8.79512373e+04],\n",
" [-3.06545458e+06, 2.31346323e+06, -8.79512573e+04],\n",
" [-3.06545531e+06, 2.31346353e+06, -8.79512774e+04]],\n",
" shape=(10000000, 3)),\n",
" array([ True, True, True, ..., True, True, True], shape=(10000000,)))"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"n = 10_000_000\n",
"fit_params = [\"m\", \"sigma\", \"epsilon_k\"]\n",
"\n",
"# order: m, sigma, epsilon_k, mu\n",
"parameters = np.array([[1.5, 3.4, 230.0, 2.3]] * n)\n",
"temperature = np.expand_dims(np.linspace(250.0, 400.0, n), 1)\n",
"eos = feos.EquationOfStateAD.PcSaftNonAssoc\n",
"%time feos.vapor_pressure_derivatives(eos, fit_params, parameters, temperature)"
]
},
{
"cell_type": "markdown",
"id": "4cc06692",
"metadata": {},
"source": [
"## Liquid density"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "f86f8e3e",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"CPU times: user 1min 24s, sys: 716 ms, total: 1min 25s\n",
"Wall time: 2.1 s\n"
]
},
{
"data": {
"text/plain": [
"(array([22.7191149 , 22.71911436, 22.71911382, ..., 0.03027266,\n",
" 0.03027266, 0.03027266], shape=(10000000,)),\n",
" array([[-1.68941171e+01, -2.24206688e+01, 2.73083514e-02],\n",
" [-1.68941167e+01, -2.24206685e+01, 2.73083541e-02],\n",
" [-1.68941164e+01, -2.24206681e+01, 2.73083568e-02],\n",
" ...,\n",
" [ 9.49488124e-05, 9.60641306e-06, 7.80107221e-07],\n",
" [ 9.49487960e-05, 9.60641033e-06, 7.80107128e-07],\n",
" [ 9.49487795e-05, 9.60640761e-06, 7.80107035e-07]],\n",
" shape=(10000000, 3)),\n",
" array([ True, True, True, ..., True, True, True], shape=(10000000,)))"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"n = 10_000_000\n",
"fit_params = [\"m\", \"sigma\", \"epsilon_k\"]\n",
"\n",
"# order: m, sigma, epsilon_k, mu\n",
"parameters = np.array([[1.5, 3.4, 230.0, 2.3]] * n)\n",
"temperature = np.linspace(250.0, 400.0, n)\n",
"pressure = np.array([1e5] * n)\n",
"input = np.stack((temperature, pressure), axis=1)\n",
"eos = feos.EquationOfStateAD.PcSaftNonAssoc\n",
"%time feos.liquid_density_derivatives(eos, fit_params, parameters, input)"
]
},
{
"cell_type": "markdown",
"id": "0705a58a",
"metadata": {},
"source": [
"## Bubble points"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "bb548624",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"CPU times: user 11min 21s, sys: 122 ms, total: 11min 21s\n",
"Wall time: 11.3 s\n"
]
},
{
"data": {
"text/plain": [
"(array([ 5142.13808145, 5142.20830389, 5142.27852715, ...,\n",
" 3828278.20909898, 3828290.44546341, 3828302.68185363],\n",
" shape=(1000000,)),\n",
" array([[ 40721.23744125],\n",
" [ 40721.73456205],\n",
" [ 40722.23168782],\n",
" ...,\n",
" [10996502.56325178],\n",
" [10996528.1146768 ],\n",
" [10996553.66610128]], shape=(1000000, 1)),\n",
" array([ True, True, True, ..., True, True, True], shape=(1000000,)))"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"n = 1_000_000\n",
"fit_params = [\"k_ij\"]\n",
"parameters = np.array([[\n",
" # Substance 1: m, sigma, epsilon_k, mu\n",
" 1.5, 3.4, 230.0, 2.3, \n",
" # Substance 2: m, sigma, epsilon_k, mu\n",
" 2.3, 3.5, 245.0, 1.4, \n",
" # k_ij\n",
" 0.01 \n",
"]] * n)\n",
"temperature = np.linspace(200.0, 406.0, n)\n",
"molefracs = np.array([0.5] * n)\n",
"pressure = np.array([1e5] * n)\n",
"input = np.stack((temperature, molefracs, pressure), axis=1)\n",
"eos = feos.EquationOfStateAD.PcSaftNonAssoc\n",
"%time feos.bubble_point_pressure_derivatives(eos, fit_params, parameters, input)"
]
},
{
"cell_type": "markdown",
"id": "a88f4a9e",
"metadata": {},
"source": [
"## Bubble points with cross-association"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "ca842395",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"CPU times: user 3min 11s, sys: 15.9 ms, total: 3min 11s\n",
"Wall time: 3.17 s\n"
]
},
{
"data": {
"text/plain": [
"(array([7.34250975e+02, 7.34379943e+02, 7.34508930e+02, ...,\n",
" 2.34916950e+06, 2.34925463e+06, 2.34933975e+06], shape=(100000,)),\n",
" array([[4.64318575e+03],\n",
" [4.64395838e+03],\n",
" [4.64473112e+03],\n",
" ...,\n",
" [7.29257342e+06],\n",
" [7.29279667e+06],\n",
" [7.29301993e+06]], shape=(100000, 1)),\n",
" array([ True, True, True, ..., True, True, True], shape=(100000,)))"
]
},
"execution_count": 5,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"n = 100_000\n",
"fit_params = [\"k_ij\"]\n",
"parameters = np.array([[\n",
" # Substance 1: m, sigma, epsilon_k, mu, kappa_ab, epsilon_k_ab, na, nb\n",
" 1.5, 3.4, 230.0, 2.3, 0.01, 1200.0, 1.0, 2.0, \n",
" # Substance 2: m, sigma, epsilon_k, mu, kappa_ab, epsilon_k_ab, na, nb\n",
" 2.3, 3.5, 245.0, 1.4, 0.005, 500.0, 1.0, 1.0,\n",
" # k_ij\n",
" 0.01,\n",
"]] * n)\n",
"temperature = np.linspace(200.0, 388.0, n)\n",
"molefracs = np.array([0.5] * n)\n",
"pressure = np.array([1e5] * n)\n",
"input = np.stack((temperature, molefracs, pressure), axis=1)\n",
"eos = feos.EquationOfStateAD.PcSaftFull\n",
"%time feos.bubble_point_pressure_derivatives(eos, fit_params, parameters, input)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "feos",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.13.5"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
| Unknown |
3D | feos-org/feos | docs/recipes/recipes_phase_diagram_pure.ipynb | .ipynb | 100,175 | 128 | {
"cells": [
{
"cell_type": "markdown",
"id": "4c4ed7f1-9e71-4d8c-bc51-9b972bf5a8cc",
"metadata": {},
"source": [
"# Phase diagram of a pure substance"
]
},
{
"cell_type": "code",
"execution_count": 1,
"id": "b6b2b5bb-4c0c-49c9-bea2-1d34055a57dd",
"metadata": {},
"outputs": [],
"source": [
"import feos\n",
"import si_units as si\n",
"import matplotlib.pyplot as plt\n",
"import seaborn as sns\n",
"import pandas as pd\n",
"\n",
"sns.set_context('talk')\n",
"sns.set_palette('Dark2')\n",
"sns.set_style('ticks')\n",
"colors = sns.palettes.color_palette('Dark2', 1)"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "a19bbe80-cc85-4911-a169-d9501c962983",
"metadata": {},
"outputs": [],
"source": [
"parameters = feos.Parameters.from_json(\n",
" substances=['methanol'], \n",
" pure_path='../../parameters/pcsaft/gross2002.json'\n",
")\n",
"eos = feos.EquationOfState.pcsaft(parameters)\n",
"phase_diagram = feos.PhaseDiagram.pure(eos, 150.0 * si.KELVIN, 201)"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "588d342b-720f-4f59-9046-e468c31608fa",
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAABRMAAAJgCAYAAAAZLvTIAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQABAABJREFUeJzs3XdUFFcbBvBnl967IGBXQFHsCmLH3jvGFqPGnsSSZoqJSYypX+w9scWGPWLvvaEiCiqKSJEiiCAddne+P3BHkCIoMJTnd47HnTtzd97ZnWVn37lFJgiCACIiIiIiIiIiIqI3kEsdABEREREREREREZUPTCYSERERERERERFRoTCZSERERERERERERIXCZCIREREREREREREVCpOJREREREREREREVChMJhIREREREREREVGhMJlIREREREREREREhcJkIhERERERERERERUKk4lERERERERERERUKEwmEhERERERERERUaEwmUhERERERERERESFwmQiEZWoK1euwNHREY6OjlKHQvTOTpw4gTFjxqBly5ZwcnKCo6Mj5s+fL3VYpSY8PFz8PIeHh0sdTqkaPXo0HB0dsWTJEqlDISKqML788ks4Ojriyy+/LNI6KvuUSiXWrVuHAQMGoEmTJuL1w/Hjx6UOrdRU1msHfnYrB02pAyCqTO7evYvjx4/DyMgIY8eOlTqcd/LixQts2LABAPD+++/D2NhY4oiIStaRI0fw8ccfAwA0NDRgZmYGuVwOQ0NDiSMrHuvXr0diYiK6dOmC+vXrSx0OEVGlNWfOHOzevfut6rq4uGDHjh3FHBFR0f3888/4999/AQBaWlqwtLQEAGhra0sZVrHYvXs3njx5glatWqF169ZSh0MkCSYTiUrR3bt3sXTpUtjZ2VWIZOLSpUsBAAMHDsw3mainp4datWqVZmhEJeLvv/8GAHTv3h2//vor9PT0JI6oeG3cuBFPnjyBnZ0dk4lERBIKDw8XEy/ZpaSkICUlBQDyXA8ArVq1KtHYSpOVlRVq1aoFKysrqUOhIkpKSsL27dsBAJ999hnGjx8PmUwmcVTFZ8+ePbh69SqmT5/OZCJVWkwmElGJcnFxweHDh6UOg+idBQYGAshKnle0RCIREZUdmzZtyrP822+/hZeXF6pWrYrTp0+XblASmD17NmbPni11GPQWHj16hMzMTADAe++9V6ESiUSUhWMmEhERFUJqaioAQF9fX+JIiIioMvL39wcANGjQQOJIiAqWlpYmPjYwMJAwEiIqKWyZSJXewYMHsXv3bgQEBCAhIQF6enowNzdH7dq10a5dOwwZMgQ6Ojri9qmpqThx4gTOnj2L+/fvIzo6GklJSTA1NYWLiws8PT3RoUOHXPvJPgHJkydPck1IMn36dHz00UcAsgbrVTedV5e9bsmSJVi6dClatWqV6w529vqTJ0/Gpk2b4O3tjdDQUCQmJmLjxo1o3bo1VCoVrly5ghMnTsDPzw9RUVGIi4uDgYEB6tWrh969e2PIkCHQ0tLK8/nVPDw8cqzPHtOVK1cwZswYAMD9+/fzPJaYmBj8888/OHv2LJ48eQIAsLOzQ4cOHTBu3Lg8u/KEh4eL+z1x4gR0dXWxcuVKnDx5EjExMTAyMkLr1q0xffp01KlTJ8/9FuT1uG/fvo01a9bgxo0bSEhIgI2NDbp06YIpU6bk2cV79+7dmDNnDuzs7HDy5ElcvnwZGzduhJ+fH549e4b+/fvjl19+EbdPSkrCli1bcOLECQQHByMlJQUWFhZo1qwZxowZg6ZNm+YZZ0JCAtavX4/Tp08jJCQEGRkZMDExgbm5OZo2bYqePXvCzc0tR520tDRs3rwZR48exaNHj5CSkgIjIyOYm5ujUaNG6Ny5M7p3756jjvp8VZ87eSnovM1ev27duli9ejVOnz6NqKgopKWl5To3Tp8+jV27dsHX1xfPnz+Hnp4eHBwcxHPyXcbbuXLlCjZv3o
ybN2/i+fPnMDAwgJOTE/r164cBAwZAQ0ND3Db7eaamPi/U8juvX/fll19iz549GDhwIH755Rfs3r0b27dvx8OHDyGXy+Hs7Ixp06ahZcuWAACFQoGtW7diz549ePz4MWQyGZo1a4YZM2bA2dk53/2oVCp4e3tj//798Pf3x4sXL2BoaIgGDRpg0KBB6N27d44WAuq/JWpz5szBnDlzCnWMsbGxRf7c+fr64tixY7h58yYiIyMRGxsLHR0d1K5dG126dMHIkSPz/eGR/TxydnbGmjVrcOTIEUREREBPTw9NmjTB1KlT0bhx43xfn/T0dGzduhWHDx9GUFAQ0tLSYGlpiZYtW+KDDz5gF28iKpMUCgUePHgAoGSSiZmZmdiyZQv27NmD0NBQpKWlwdjYGK1atcLMmTPfacia//77D1u2bMH9+/chl8tRu3ZtDBkyBMOGDSuw3uvfm2oJCQk4evQozp8/j0ePHiE6OhqpqamwtLREs2bNMHr0aDRp0qTA5xYEAbt374aXlxcCAwNzxTVnzpw89w0U/jr7XeLMvo8pU6bg33//xd69exESEgJdXV00bdoUH3/8MZycnABk/T5Zt24dDh48iPDwcOjo6MDNzQ2zZs1C9erV3/wmFaAo103q69/ssv/myet3S35K6zUIDw/Hhg0bcPHiRUREREClUqFq1apo27Ytxo0bB1tb23yPb+nSpTmuoYCs3yX29va59iMIAnbs2IEdO3YgKCgIgiDAwcEBI0aMQP/+/fOMrbjOoenTpxd532pFef+pcmEykSq11we41tfXh0KhQEhICEJCQnDq1Cl06NAhxxfCoUOHxC8RmUwGQ0NDaGpqIiYmBidOnMCJEycwbtw4fPHFFzn2ZWlpibS0NCQlJUEul8Pc3DzH+pJo7ZSeno7Ro0fj5s2b0NTUhIGBQY4kQkRERI6xG/X19aGrq4v4+Hhcu3YN165dg7e3N/7++2/o6uqK25mYmMDMzAzPnz8HAJiZmeX4IjExMSl0jFevXsW0adPw4sULMQYAePjwIR4+fIidO3di+fLlaNGiRb7P8fDhQ3z11Vd49uyZ2P302bNnOHjwIM6ePYvNmzeLFxpv4/jx45gxYwYyMzNhaGgIQRAQGhqKf/75B0eOHMHGjRvzvGhQ27BhAxYsWABBEGBkZJTrS/fu3buYPHkyoqKiAGRN7qGrq4uoqCgcPHgQhw4dwsyZMzFp0qQc9aKiovDee+8hIiICACCXy2FkZITnz58jNjYWgYGBCA4OzpFMTEpKwsiRI3Hv3j0AWeewkZEREhMT8fz5cwQFBeHatWu5konFJTQ0FLNmzRITSJqaOb+G0tLS8Pnnn+PIkSNimaGhIRITE+Hj4wMfHx/s27cPq1evLtJ5prZgwQKsX78eQM5jv3z5Mi5fvoz//vsPy5YtEydV0dDQEJPZsbGxALLO79cT7EWl/oGkqakJHR0dvHjxApcuXcK1a9ewdOlSuLu7Y8qUKTh//jy0tLSgpaWF5ORknD17FteuXcO///6Lhg0b5nre+Ph4TJ8+HdeuXRPL1OfEhQsXcOHCBRw4cACLFi0SE7L6+vqwtLREXFwcVCoVDA0Nc3ze8/O2nztPT0/xsZ6eHvT09JCQkIBbt27h1q1b2LdvHzZu3AgLC4t89x0TE4NBgwYhJCQEOjo6kMvliI+Px+nTp3HhwgWsXLkSbdu2zVUvOjoaEyZMELusa2lpQVdXFxEREdi3bx/279+Pr776CqNHj37j8RMRlaaHDx8iIyMDQPEnE4ODg/HJJ5+IN44MDQ2hoaGB58+f48iRI7h48SJ27NhR5ISiIAj46quvxGttmUwGY2Nj3LlzB35+frhy5cpb3RzcuHGjmMDR0NAQv7MjIiIQERGBAwcO4Kuvvsp1809NqVTi008/xcGDB/OM6+rVq4X6nn/Tdfa7xglkJZEnTJiAS5cuidcDcXFxOHHiBC5duiReg44bNw4BAQHQ0dGBTCZDfHw8Dh
06hKtXr2Lnzp05EmJFUdTrJl1dXVhaWiIzMxMJCQkAco7v+TbXbiX5Gvz333/4+uuvxc+WtrY25HI5goODERwcjN27d2Px4sXiNYX6+BISEpCZmQl9ff1cv+HySq4plUpMmzYNJ06cgKamJnR1dZGcnAxfX1/4+voiJCREnOQvu+I4h95230DR33+qZASiSuratWuCg4OD4OTkJKxevVp4/vy5uC4uLk44d+6c8MUXXwhRUVE56h07dkz45ZdfBB8fHyElJUUsj46OFpYsWSI4OzsLDg4OwvHjx3Ptc9euXYKDg4PQqVOnAmMbNWqU4ODgICxevDjfbRYvXiw4ODgIo0aNyrd+kyZNhCZNmgi7du0SUlNTxWNTH2tkZKQwe/Zs4cSJEzmOPykpSdi1a5fQtm1bwcHBQfj5559z7SMsLExwcHAQHBwchLCwsHzjvHz5srjd6yIiIoQWLVoIDg4OQq9evQQfHx9x3bVr14Tu3bsLDg4OQqtWrXK9D9n337JlS2H48OGCn5+fIAiCkJmZKVy4cEFwd3cXHBwchBEjRuQbX2Hibt68uTBq1Cjh4cOH4vMfOHBAaNmypeDg4CAMHjxYUCgUOeqr3+tGjRoJ9evXF7788kshIiJCEARBUCgUQkhIiCAIWeeNm5ub4ODgIEyfPl24ffu2kJGRIQiCIMTGxgoLFy4UGjRoIDg4OAjHjh3LsY+vvvpKPJ8uXrwoxqBQKITw8HBhy5Ytwu+//56jzrJly8TX9MiRI0J6erogCIKgVCqFqKgoYc+ePcI333yT6/VQvxaXL1/O9zUr6LxV12/SpInQvXt34eLFi4JSqRQEQRAePXokbvfpp58KDg4OgoeHh/Dff/8JiYmJgiAIQlpamnD8+HHBw8NDcHBwEKZOnZpvHPnZtGmTGMe3334rPH36VBAEQUhOThbWrVsnvs4zZszIs35hXoOCfPHFF4KDg4PQokULwcXFRdi2bZv4uQwKChIGDhwovp8//PCD0KpVK+HgwYNCRkaGoFKphNu3bwtdunQRHBwchOHDh+d6foVCIb4H/fv3F06ePCn+jUpOThb27Nkjnmvz58/PVb9Tp06Cg4ODsGvXrnyPoTg+d5MmTRIOHDggvv6CIAipqanC0aNHxc/8tGnT8qybfd+9evUSLl26JCiVSkGlUgm3bt0S63fq1Ek8v7K/PkOHDhU/0/v27RPP/9DQUGHSpEmCg4OD4OjoKJw+fTrXvgvzd5mIqKTs3LlT/Bv4+jXRuwgKChJatWolODg4CB999JH4nZyZmSns2bNHvK6dPHlykZ97w4YNYsw//PCD8OzZM0EQBOHFixfCkiVLBEdHR/E68IsvvshVX/29+fq6bdu2CYsXLxZu374t/h1XqVRCaGio8NNPPwmOjo5C/fr1BX9//zzjWrVqlRjXggULhLi4OEEQBCExMVFYuXKl4OjoKF7j5RVXYa+z3yVO9T5atGghtGrVSjh06JB4PXDr1i3xesjT01OYNm2a0KlTJ+HcuXOCUqkUlEqlcPHiRcHV1VVwcHAQZs+eXZi3K5d3uW4q6Pq/sEr6NTh//rzg5OQkNGjQQPjtt9+EsLAwQaVSCSqVSggKChI+/vhjwcHBQWjWrJnw5MmTPGN70zWBeruWLVsKzZs3F3bv3i2eK5GRkeK1h5OTkxAcHJyrfnGcQ2+773d5//P77FLFwmQiVVqrV68WHBwchHHjxhXr865du1ZwcHAQ3n///VzrSjuZ6ODgIJw4caLIx6Dm5+cnXiylpaXlWFccycS5c+eKX3LZEwtqkZGRQrNmzQQHBwdh3rx5+e6/R48e4pdjdidOnBC3iYyMLOxh54q7W7dueT7/hQsXxG0OHjyYY536vVYnCfMzZ84cwcHBQZg1a1a+26xbt05wcHAQ+vXrl6O8Z8+egoODg7B///5CH9eECRMEBwcHYeXKlYWuIwjFl0xs1qxZvu+FOsHv5uYmJl5fFxkZKTRp0kRwcHAQAg
ICCh1/amqq+GMpv9d648aNYpy3b9/O9xjeNZno4OAg7Nu3L9f6kJAQcb2Dg4Nw7dq1XNtcvHgx33N6z5494ufhxYsXecZw+/ZtwdHRUXB2dhZiY2NzrCtqMrEkPndRUVFCw4YNBUdHx1wX7oLw6j1wdXXNFb8gCMK9e/fEbbLfnBAEQThw4IC47ty5c7nqZmZmisnGPn365FrPZCIRSenHH38UvyOLS0ZGhtCnTx8xWZCXX375RXBwcBAaNGiQ59/8/KSlpYnfu5999lme2/zxxx/i3+WiJBPfZN68eYKDg4Pw1Vdf5VqXnJwsXlvmtV4QXl1jvymZ+K7X2QXFmX0fb7oecHFxER4/fpxrmx07dojr1TeqC+tdr5uKM5lYEq+BUqkUunXrJjg4OAjbtm3LN4bJkycLDg4Owk8//ZRnbIVNJjo4OAiXLl3KtT49PV1svLF8+fICnysvhT2Hirrvd33/mUysHDgBC1Va6nHu4uLioFQqi+15O3bsCCBrXLDifN63Ua9ePXTu3Pmt6zdq1AgWFhZISUnB3bt3izGyrK4v6lmehw8fDisrq1zb2NjYYPjw4QCAAwcO5Ptc48aNy7NbZvv27cVuKoUd1y4vEyZMyPP527RpI45lqO4qk5eJEyfmWZ6eng5vb28AwIcffphvffVYJvfu3RO72gKvzuGYmJg3HMErb1OnOPXv3x82NjZ5rtu5cycAoG/fvqhatWqe29jY2IhjNp47d67Q+71w4QLi4+MBZI1PmpcRI0aI56H6fSkJtra26Nu3b67y6tWro0aNGgCAFi1a5Nm1v1WrVmKXsNfP6V27dgHImjXRyMgoz303bNgQ9erVQ2ZmJq5cufJOx1ESnztra2s4OTlBEATcvHkz3+2GDRuWZzdoR0dHcciB1/et/ow2bdo0zy7QmpqamDZtGoCsmbvf5W8GEVFxU0++Upzjuu7evRuBgYGws7PDN998k+c2nTp1ApDV1VQ9rnVhnD9/XvzeVf9tfd3EiRNzjEteXNRjl1+/fj3XugsXLiApKQkAMHny5Dzrf/DBB+LwHQV51+vsguJUa968+RuvB7p37y5eP2TXrl07AFlDyISEhBQptrJ03VQSr8G1a9fw+PFjmJmZYejQofnue8CAAQCyzud30axZM7i6uuYq19bWFq9J3ua6ozDn0Nvsuyy9/1R2ccxEqrTc3Nygo6ODgIAAjBw5EoMHD4arqyuqVav2xrqxsbHYsmULLly4gMePHyMxMTFX4jA1NRUJCQm5xkYsTc2aNXvjNhkZGdi1axeOHTuGwMBAxMfHIzMzM9d26vH8ikt4eLj4JfX6BCHZubu7Y+3atYiPj0dYWFie74+Li0uedTU1NWFubo7o6Ghx3Ja3kdcXcPZ1N2/exJ07d/Jcr6urm+9kGXfu3EF6ejoAYPz48YWKJSIiQhx7pmPHjrh58yb+/PNPPHr0CF27dkWzZs0KHLekY8eO8Pb2xr///ou4uDj06tULzZo1K7XztKBz8saNGwCykooFXZQkJiYCgDhWZGGo35+qVavmO+aThoYGXF1dsX///nzfz+LQsGHDHGMqZWdhYYGQkBA0atQo3xjNzMxyndNKpRK+vr4AsgYDX7VqVb77V9cryo/CvLzt506lUuHAgQM4cOAA7t27h7i4OPFzkF1Bf3MKmmClSpUqCA8Pz7Vv9Xta0N8bV1dXaGhoQKlU4s6dO7kmyiIikoJKpRLHOi5oAq6i2rZtG4CsicXyG7sw+zWFIAiFfu7s37t5JXmArDF9nZ2dxe//oggLC8OWLVtw5coVhIaGIjk5GSqVKsc20dHRueqpk7K2trb5XvMbGhrC2dkZPj4+BcZQmOvst41TLb/v2uzXA/ldM2S/6VbU6+CydN1UEq+B+pxLSkoSE455Uf8mKso1Z17edN3yenzZves59Db7LkvvP5VdTCZSpVW9enX89NNP+O6773Dz5k2xFYy5uTlat26NPn36wMPDI9eP/ps3b2
LixInihCFA1gQGenp6kMlkUCqV4sQkqamppXdAeXhTgujZs2cYO3asOBkBAOjo6OSYUEU9KUNxH8uzZ8/Ex9bW1vlul31dXFxcnhd++c38CkCc4EOhULxNmIWOL/vxZGdqagq5PO9G4E+fPhUfZ29xWJDs78P48eNx7949HDp0CF5eXvDy8oJMJkO9evXQtm1bDB06FLVr185Rv2/fvvDz88O///4rJnQAoEaNGnB3d8fgwYPznNijuBQ0qYb69UhKShJbDRQkLS2t0PtVvz8FvZcAxFaT+b2fxaEw52tRz+mEhARx8PDC/mAoyuuXl7f53KWmpmLSpEk5WkVqaWnB1NRUrKMe1Lygvzlvs+/CnAPqv3+xsbEleg4QERVFcHAwUlJSALx58pXp06fn2bLbxsZGbMEOZCUfAgICAAAeHh75Pl/2ngzq78jC7KOo37tFcezYMcyaNUv83gOyEoDqiTfUk3+oX7Ps4uLiALxKouTnTXEDb77Ofpc41d7lmiH7JHdFvQ6uKNdN+b0G6mvOzMzMQl2DS3HNBJTeOfQ210xA6bz/VHYxmUiVWr9+/dC+fXscPnwYV65cwc2bNxEZGYlDhw7h0KFDaNGiBVatWiXelVUoFJg9ezZevHiB+vXrY+bMmWjevHmOu7ahoaHo2rUrgKLdwS0Jec0mlt3PP/+MwMBAmJqa4vPPP0f79u1zdTfu0KEDoqKiJD+W8qqg9yD7XUU/P78id/XR0tLCwoULMXnyZBw9ehTXr1+Hn58fAgMDERgYiA0bNuDTTz/FuHHjctT7+uuvMWrUKBw+fBg+Pj7iTG4hISHYsmULxowZg6+//rpoB1pI+SVWAYite7///nu89957JbL/iip7y+g1a9agffv2EkaTv5UrV+LKlSvQ1dXFzJkz0a1bN1StWjXHTZsRI0bg+vXr/JtDRPRS9qFm3tQyMSEhIc/kyOvXGOqWWaampgX2ylG3OKpZs6Z4vVvYfZSE58+f48svv0RGRgZcXV0xbdo0uLi45Bh249KlSxg7dmyBz5NfD4GiKOgar7jipJKhvm5q3LgxvLy8JI4mbzyHqKxjMpEqPVNTUwwfPlwcmy80NBQ7duzAmjVr4OPjgyVLlmDOnDkAssZBfPLkCTQ0NLBq1ao879YUx1h06ouTvLr+qam7er6tzMxMHDt2DAAwd+5c9O7dO9c22VtZFrfsLdSio6NztaDLvk5Nqi7j0dHR+V5oq+MrqMVdftTdlYGsLqf5vQZv4uTkBCcnJwBZCe9r165h2bJluHbtGn777Te0adNGXK9Wo0YNTJo0CZMmTYJKpYKfnx/WrFmD48ePY+PGjXB1dc3RUkHd9bMkz0krKys8efLknbuS5EX9/rypu756/du8n1JSt+xTKBQl8voVF3VL2GnTpuV78VvYVrpFZWFhgcjIyALPgfT0dHH4hfJ2DhBRxaXummtkZCSOC5ufTZs2Fek58xqzOrsTJ04AeDU2W2H3of4bWlD3y8Ksf92ZM2eQlJQEExMTrFy5Ms+xDQu6FldfS2bvHVIccRV3nFKr6NdN6vO+LF8zSXkOVfT3n4oHJ2Ahek316tUxe/Zs9OnTBwBw8eJFcV1kZCSArAuR/Jp9X7p0Kd/nVrfKelOLG/UkGer95cXPz6/A53iT7OOU5TeY9/Xr1/NNHmVvYfY2LYjs7e1hamoKoODXTP36v+nOeUm6fPlyvuvU3TXfpmtwo0aNxIkqTp069XbBvUZTUxNubm5YtWoVtLW1IQhCjnM4L3K5HE2aNMHixYtha2sLALnqqM/J/C4qkpKSEBQU9E6xqyezOX369Ds9T17U709UVBSCg4Pz3EapVIrvZ35j75RVWlpaYsxvey6pW2mUZItA9fmT39+c8PDwIg8SX1jqc+BNn2d1V5/ydg4QUcWl7o5cv379YmlRl/05CxpW5PLly/D394dMJsOwYcOK9Pzqv7mRkZEIDQ3Nc5ukpCQxqVlY6u+RWrVq5TtJSk
HXleqWnU+ePEF4eHie2yQnJxc5ruKOU2oV/bpJPd5lTEwMbt++XeT6pXnNJMU5VNHffyoeTCZSpZV97Im8qJuQZ79oU8+QGhsbm2frmaioqALv1qq7h2QfbzEv6lZk58+fz3MMjEuXLhU402lhGBoaisemHtQ7O4VCgb/++qvA+mpv0yJNJpOhZ8+eAIDt27fneWctOjoa27dvBwAxuSuFf/75J8+k6uXLl8VuQupjKQp9fX1xVt81a9a88e6ousWUWkHnsLa2ttjCNXvit6A6GhoaYnLz9R8r6nPyyJEjedb9559/3viZehNPT08AWTPpbtmypcBtU1JSirQ/d3d3MXm9dOnSPLfZtm2b2FIhr5a6ZZ369Ttz5gzOnDlT4Lavn0vAq8/0u7YwLYh6H3n9zQGAP//8s8T23atXLwBZ497mNSujQqHA8uXLAQAODg5wcHAosViIiIqiJCZfUSfLIiMjc4ydrRYXFyfO8Dxw4EDUrVu3SM/v7u4OExMTABD/tr5uzZo1RR6LTn0t/vjx4zyvze7evYv9+/cXGJf6uyi/ycrWr1//zmOFv2ucUqvo102tW7cWJwZasGDBG68pX79uKuxvunch5TlU0d9/Kh5MJlKl9cMPP+CTTz7BkSNHcgwam5ycjK1bt2Lv3r0Asma/VWvevDn09fUhCAJmzJgh3qlRKpU4d+4cRo8eXeA+69WrByDrTuzBgwfz3a5nz56Qy+WIj4/HrFmzxDtTaWlp2LNnD6ZPny7+gX9bBgYG4l25X375BZcuXRLH8AsMDMTEiRNx584d6Ovr51nf2NhYbJ25e/fut5rgZPLkyTA2NkZ8fDw++OCDHLP5Xb9+HR988AFevHgBU1NTTJw4scjPX1xiYmIwceJEPHr0CEBW0uHw4cP45JNPAGRd3Hfr1u2tnnvmzJmoUqUKnj9/Dk9PT+zduzdHK4G4uDgcOXIE06ZNw+zZs3PU7dSpE/7880/4+vrmuAgKCQnBp59+itTUVMjlcrRt21ZcN3ToUPz000+4cuVKjkR1dHQ0fvzxR7FVWPbuTMCri4Tz589j8eLFYoxxcXH43//+hxUrVoitF99Wq1atMGjQIABZn8+ff/4ZYWFh4vqMjAz4+vrit99+Q6dOncRB1AtDV1cXH330EQDA29sbc+fOFW8IpKamYuPGjViwYAGArKRTSU5CU1L69euHNm3aQBAETJs2DcuXL8/RTSslJQWXL1/GvHnz0KVLl1z11X+fDh8+/E6znxdEPWPiihUrcPToUfHvRlhYGGbPno1Dhw6JPz6LW/fu3cUZDWfMmIH9+/eLszSGhYXho48+Em/SfPrppyUSAxFRUYWHh4uJjPxadRfVkydPxOc0MjLCp59+iocPHwLIusY5ffo0PD09ERYWhjp16uCrr74q8j50dXUxdepUAMCePXswf/58ceicpKQkLFu2DKtWrSrytYO7u7t4jfzpp5+K33MZGRk4ePAgxo0bV+CEE/r6+vjwww8BAF5eXvjtt9/E1yIpKQmrV6/G0qVL3/m76F3jlFpFv27S1NTEvHnzoKmpievXr2PUqFG4dOmSeF0AZF0bbN26FYMHD851k1t9zXT27Nl37hKfHynPoYr+/lPx4JiJVGmpE0KHDx8GkHVxoampmeMOU/PmzTF58mRx2cjICJ9//jm+//57XLt2DT169IC+vr44lpyZmRkWLFiAKVOm5LnPGjVqwM3NDZcuXcLMmTPxzTffiEnBMWPGiGOI1apVC1OmTMGyZctw6tQpnDp1CkZGRkhNTYVCoUCXLl1Qr149rFix4p1eg6+++gqjR49GdHQ0xo4dC21tbWhpaSE5ORmampqYP38+Fi9enO8MYcOHD8eiRYuwadMmbN++HRYWFpDL5WjcuHGBrRrVbGxssGzZMkydOhUPHjzAe++9JyYv1fs0NjbGsmXLCjWrXkn55ZdfMGPGDPTs2RNGRkZIT08Xk3e2trZYtGhRjtniiqJKlSpYv3
49pk6disePH+OLL76AXC6HsbExMjIycrz2bdq0yVE3NjYWq1evxurVqyGXy2FkZIS0tDTx7qVMJsMXX3yRozVBYmIiNm3ahE2bNkEmk8HIyAgKhSLHfsaOHSsmfdQGDRqE/fv348qVK1i2bBmWL18OY2Nj8fPy2Wef4fTp07h69epbvQ5q8+bNg4aGBnbs2IENGzZgw4YN0NfXh5aWFhITE3NMWlPUrl6jRo1CWFgY1q9fj+3bt8PLywvGxsZITk4Wk1qtW7fGjz/++E7HIBUNDQ0sWbIEn376KU6dOoVFixZh0aJFMDQ0hFwuR2JiotgdJ6/z1dPTE97e3rh58ybc3Nxgbm4ObW1tAMDJkyeLJcYZM2bg4sWLiI2NxUcffQRNTU3o6emJrSFnzZqF8+fPv/N5lBf16zN+/Hg8ePAAn376KebMmQM9PT3xPJbL5ZgzZ06uZDoRkVTU3ZGB4muZqH5OKysrzJgxA19//TV69+4NY2NjpKWlidc4zs7OWLFihdhCqqjGjBmDgIAA7Nu3Dxs3bsS///4LIyMjJCUlQalUonfv3tDW1saePXsK/Zw1a9bE+PHjsWbNGhw9ehRHjx4Vr38yMzNhb2+PGTNmFHhTaMKECQgICMCRI0fw999/Y926dTni6t+/P2QyGfbu3St+DxZVccQptYp+3eTm5oZFixbh888/x61btzB27FhoaWnBwMAgVw+Y12/CDhw4EOvWrUNISAg6duwIc3NzcQKiLVu2vNUs5a+T+hyq6O8/vTsmE6nSmjp1KpydnXHlyhUEBQUhNjYWKSkpsLCwgJOTE3r37o0BAwbkmqntvffeg62tLdauXYs7d+5AqVTC2toaHTp0wIcffpjjjlZeFi9ejGXLluH06dOIjIzEkydPAOTuWvjxxx+jRo0a2LJlCwIDA6FUKuHk5IShQ4fC09Mz3ybnRdGwYUPs2LEDS5cuxeXLl5GUlAQDAwO0b98e48aNg4uLCxYvXpxv/cmTJ8PQ0BD79u3Do0ePxFmf7ezsCh1Dq1atcPDgQaxbtw5nzpzBkydPIJPJUKdOHXTo0AHjxo174+DgJa1Lly7YunUr1qxZg+vXryMtLQ329vbo1q0bJk+e/M53r+vUqYP9+/djz549OHr0KO7evYuEhARoaWmhRo0aqF+/Ptzd3dG9e/cc9f755x9cuXIF169fR2RkpHjHsEaNGmjevDlGjhyZ607h//73P5w/fx4+Pj4IDw9HbGwsFAoF7Ozs0LhxYwwbNgxubm65YtTQ0MDq1auxdu1aHDhwAOHh4ZDJZHB3d8eECRPg5uZWLGMdamtr46effsLgwYPh5eUFHx8fPH36VPxs1qpVCy1btkT37t3fKsE8Z84cdOrUCVu2bMGNGzcQHx8PAwMDODk5oX///nl+5ssTQ0NDrFy5EmfOnMHevXvh6+uL2NhYCIIAa2tr1K1bF61bt86zW37Lli2xatUqrF+/HgEBAXj27FmO5G1xsLOzw65du7BkyRKcPXsWcXFx0NHRQYsWLTBq1Ci0bds2zy7IxcXa2hq7du3C1q1bcejQIQQFBSE1NRVVq1ZFq1at8MEHHxRbyx8iouKgTvzp6em99URtr1PP0NygQQMMGTIEBgYGWLduHR4+fAhNTU00aNAA/fv3x7Bhw976ZimQdYNGPRHc1q1bERgYCIVCIe7X09NTnOSwKD799FPUrVsXmzdvFp+zevXq6Nq1q5goLIimpiYWLVqEnTt3wsvLCw8fPoRCoUDDhg0xdOhQDB06VGwY8C69Lt41zrKgol83denSBceOHcOWLVtw9uxZhISEIDExUfy8NWrUCB07dkT79u1z1KtZsyY2btyIVatWwc/PD/Hx8WKC7W16a+VH6nOoor//9G5kQkmOGkpEVE5duXIFY8aMAQDcv39f4miIiIiIiseHH36Is2fPYvLkyZg5c6bU4ZQ5giCgY8eOiIqKwq+//ooBAwZIHRIRUZnDlolEREREVO4sWb
Lkja30v//+e7z33ns5yjp37iz2CsiPn5+f2GXtdWFhYVi+fDkuXLiAuLg4WFhYwN3dHVOmTEG1atWKdhBEElC3ZmrQoIHEkZRN+/btQ1RUFDQ1NXMNMUNERFmYTCQiIiKicsvCwkKclfN1BQ2T4eDgIM7I+br8xmS9efMmxo0bh5SUFJiYmMDBwQFhYWHYtWsXDh8+jPXr18PFxaXoB0FUSqKjo8VhUSpzMnHWrFno1q0bWrVqBXNzcwBZY1Hv3r1bHOKnf//+qFKlipRhEhGVWUwmEhEREVG51b59e/zyyy9FrvfNN9+gdevWhd4+JSUFH330EVJSUjB48GB899130NHRQXp6Or7//nvs3r0bH330EY4cOQJdXd0ix0NUGvz9/QFkTSpob28vcTTSOXv2LA4cOAAgazxKTU3NHOOXt2jR4q1msSYiqizkUgdARERERFTWeXl5ISYmBjVq1MC8efPEbtA6OjqYN28eqlevjqioKOzYsUPiSInyp+7iXL9+/Xxb4FYG33zzDXr37o1atWpBW1sbaWlpMDc3h7u7O+bPn4/169fn23KZiIjYMpGIKE+tW7fmxCtERCQ6fPgwAGDgwIHQ0tLKsU5bWxuDBg3CwoULcejQIYwePVqKEIneaPr06Zg+fbrUYUhuwIABnFiFiOgdMJlYzs2ePRvBwcGoVasW/vzzT6nDISIiIipV9+7dw+zZsxETEwMDAwM4Ojqid+/eqFevXoH1tm3bhn/++QdpaWmwtLREixYt0Ldv3zxbIymVSty5cwcA0LJlyzyfr0WLFgCA27dvQ6lUQkND4x2PjNd5REREVDYxmVjOBQcHi2OfEBEREVU2d+/exd27d8XlkydPYuXKlRgzZgy++OKLfJN6Bw8ezLHs7e2NRYsW4c8//4S7u3uOdU+ePEFmZiYA5Dtjc/Xq1QEAGRkZiIiIyHe7bdu2wcvLq1DHFhQUhLS0tEJtS0RERFRamEwkIiIionKnSpUq+Pjjj9GuXTvY29vD0NAQwcHB2LJlC7Zt24YNGzZAU1MTn3/+eY56rVq1gqurKxo1agRbW1tkZmbi+vXrWLx4MQICAjBlyhRs3boVzs7OYp34+HjxsampaZ7xmJiYiI8TEhLyTSbGxMTwRjARERGVa0wmEhEREVG54+npmavM0dER8+bNg729Pf744w9s2LABI0aMyDFr7eszP+vp6aFTp05wc3PDiBEj4O/vj99//x3r168Xt8nIyBAfvz5eopq2trb4uKDWhFZWVjkSlQVhy0QiIiIqi5hMJCIiIqIKZdy4cdi4cSOePn2KkydPYsyYMW+so6urixkzZuDDDz/ElStXkJCQILY2zJ4ozMzMFGdyzi57wlFXVzff/QwfPhzDhw8v1HEMGjSIrRiJiIiozJFLHQARERERUXHS0NBA48aNAQAhISGFrtesWTMAgEqlQlhYmFievQtz9i7P2SUkJOS5PREREVFFw2QiEREREVU46u7ICoWiyHWArBmc1ezs7MR1oaGhedZVl2tra8PW1rbI8RIRERGVF0wmEhEREVGF8+DBAwCAjY1NoesEBgaKj7PX09TURMOGDQEAPj4+edZVlzdq1CjfGaSJiIiIKgImE4mIiIioQjl9+rSYTHR3dy90vTVr1gAA6tatC2tr6xzrunfvDgDYs2cPMjMzc6zLyMjA7t27AQA9evR467iJiIiIygMmE4mIiIioXHnw4AHmzp2Le/fu5ShXqVTw9vbG7NmzAQCdOnWCi4uLuP7vv//Gpk2b8Pz58xz1nj9/jrlz5+LIkSMAgI8//jjXPj09PWFlZYWQkBB89913SE9PBwCkp6fju+++Q2hoKKpUqYKhQ4cW67ESERERlTWczZmIiIiIyhWFQoHt27dj+/btMDU1ha2tLTQ0NBAaGipOhNKiRQv89ttvOepFRUVh48aNmD9/Puzs7GBubo60tDQ8evQICoUCcrkcs2bNElshZqevr49FixZhwoQJ2LVrF4
4fPw57e3uEh4cjISEB+vr6WLJkCfT09ErlNSAiIiKSCpOJRERERFSu2NnZYcaMGfD19UVQUBBCQkKQkZEBExMTtG/fHn369EGfPn1yjV3Yu3dvAICfnx8iIiJw7949aGhowN7eHq1atcKIESNQv379fPfbvHlz7Nu3D8uXL8eFCxcQGBgIMzMzDBo0CFOnTkW1atVK9LiJiIiIygImE4mIiIioXDE2NsaUKVOKXK9JkyZo0qTJO+27evXq+OWXX97pOYiIiIjKM46ZSERERERERERERIXCZCIREREREREREREVCpOJREREREREREREVChMJhIREREREREREVGhMJlIREREREREREREhcJkIhERERERERERERUKk4lERERERERERERUKJpSB0AVU6oiA+lKBUx19KUOhYiIiIiISphCpcTz9BQEv4iFtZ4xqugbQU9TW+qwiIioBDCZSMXuQfxT9PVehnSlAp807oyPG3eCXMZGsERERERE5YUgCHiRkYZnaUl4lpaM2LQkPEvN+j/742dpSYhNTcbz9BQIEHI8h66GFix0DWChawCzl/9b6BrAXMcA5urH2f430dbl7wYionKAyUQqdpejHiEpMx0A8MfNY7gZE4ZF7YexlSIRERERkYQUKiWiUxKzEoKpLxOBacl4lpr0Mkn46vGztGRkqpTvtL80ZSaeJMfjSXJ8obbXkMlhpqMPc139l0lGQ5jr6OdKPFroGsBMxwDW+kZMPhIRSYDJRCp2fWq5YLX/eQS/iAUAnAi/h977l2JN51FoYG4rcXRERERERJVDVMoL3IwJxY2nYbgREwq/Z+FIVWRKHVa+lIJKbPlYGPqa2qhvZgNnC1s0MK8KZ3NbOJlZs3s1EVEJYzKRip2Zjj68+0zDjHNeOBZ2FwAQkhiHvt7L8WubQRhSt5nEERIRERERVSxpikzceRaBGzGh4r+I5IR3fl65TPayNaAhLHUNYaFnAMvXHlvoGsLy5WN9TW0oBBXi0pIRl5aCuJetHOPSU/AsLellWXLW4/SUl9slQyGoihxbiiID12NCcT0mNEe8dYyt4GxhC+eXCUZni6qw0DV859eCiIiyMJlIJcJERw9/e4zGUr/T+P3GMQgQkK5UYMY5L9yMCcV3rfpAW4OnHxERERHR2xAEAY9exOJE2D2cCL+Hq9GPC90t2URb92UC0BAWui8TgnqGL5OEBrDIVm6qo1fkrsRaMg1Y6xvDWt+40MeSNT5jVmIxLj351eO0l49fK0tWZOT5XCpBwIOEp3iQ8BR7H/mK5XYGpmhtUwutrWvB1aYWahtbQiaTFem4iIgoC7M5VGLkMjk+btwZjS3tMe3MNsSnpwAANty7jNvPIrCy00jYGphIHCURERERUfmQrlTgSlQwToTfw/GwewhJfPbGOrYGJmhmVV3852xRtcx1A5bJZDDR0YOJjh5qm1gWqk5yZjruPY9GQFwE/OMi4R8XgbtxUUhT5t2N+0lyPHYH3cTuoJsAAEtdQ7SyronWNrXgal0LTmY20JBz/EUiosJgMpFKXAc7Bxzu9xEmndqMW7HhAIAbMaHo+d9iLO84Au5V60gcIRERERFR2ZSpUuLMk0DsCfLF8bC7+bbIA7JmT25saYemLxOHTa2qoWoFvXlvoKWD5lWqo3mV6mKZUqVC8ItYMbno/ywCd+Ii8CwtOVf92LQkHAy5g4MhdwAAxtq6cLOpjU52juhk7wg7Q9PSOhQionKHyUQqFfaGZtjVcxK+vfIftgZeAwA8S0vGe0fWYk7zHpjcsD27GRARERERIavb742YUOwO8sX+YD/EpedOhqnVN7OBRzUneNg7oYlVNWjJNUox0rJFQy5HXdMqqGtaBf1rNwaQ9VpGpbyAz9MQXI4KxpXoYNx7HpWr7ouMNBwJDcCR0AAAQD2TKuhk74COdo5obVMLOhyiiYhIxL+IVGp0NbXwu/tgNLWqhm8v/4d0pQIqQcB8n0O4EROK/7UdCiNtXa
nDJCIiIiKSRGxqErYGXsO2Bz75dmHW1dBCO9u68LB3Qmd7R9iyBV2BZDIZqhqYoG8tF/St5QIAeJ6eAp/ox7gc/RhXooJx+9kTKF+bAEY97uJq//PQ09RC26p10aOGM7pVqw8zXQMpDoWIqMxgMpFK3QiHVmhobouJp/5FeFI8AOBQiD8C459iTedRcDC1ljZAIiIiIqJSIggCrj8NxYZ7l3Dg8W1k5DGJio6GJrpUq4+BtZugg50D9DS1JIi04jDT0UfX6g3QtXoDAFnjL16JfozT4fdx+kkgHr2IzbF9qiITx8Lu4ljYXWjI5HC1qYWeNRqie/UGFbYbORFRQZhMJEm4WNrjUN+PMP3MNpyJeAAACEqIQZ/9y/Bn2yHiXUMiIiIiooooU6XEvke+WON/Hv5xkbnWyyCDm00tDKrTFL1qNoIxe/CUGAMtHXS2d0Rne0cAQEjiM5wOD8SpJ/dxITIIqYpXk7ooBRUuRAbhQmQQvrm8D02tqqFvzaxWj0wsElFlwWQiScZM1wAbu36A//kex6JbJwEAKYoMTDm9BTdjQvFVi57QrMRjvhARERFRxZOqyMDWwGtYdeccniTH51pvrW+MkQ6tMLxeC3ZhlkgNIwu8X98N79d3E2fQPhLqj8Mh/ohOTcyx7c2YMNyMCcOP1w7CzaYWBtRpgl41GsJUR1+i6ImISh6TiSQpDbkcnzXrhiaW9vjknBdeZKQBAFa/vEO7vON7sNA1lDhKIiIiIqJ3k5SZjnUBF7E24Hyeswu72dTG+/Xd0L16g0o9iUpZo6OhifZ29dDerh5+dO2HmzFhOBjij8MhdxCSGCduJ0DAxahHuBj1CF9f2ofO9o4YVKcpularD21O3kJEFQz/qlGZ0LV6AxzoOx0fnvxXnF3tQmQQev23FGs7j0YjSzuJIyQiIiIiKro0RSb+vX8FS/xO5UoiasrkGFy3KSY6t4ejGccNL+vkMjmaV6mB5lVq4JsWPXH3eRT2B/th36NbCE16lVjMVCnFmaHNdPQxqE5TeNZrjgbmthJGT0RUfJhMpDKjlrEl/us9FZ9d3IV9j24BAJ4kx2PAwRX4zX0wBtdpKnGERERERESFo1SpsDPoBv68eQwRyQk51ulpamGEQytMcm7HrszllEwmQwPzqmhgXhWfN+uGGzFh2PvIF/uD/RCbliRu9zw9BX8HXMDfARfQyMIOnvVaYFCdphwDk4jKNSYTqUzR19LG0vbD4WJhh/k+h6ASBKQrFfjk7Hbcjg3HNy17cRxFIiIiIirTLkc9wndX9ueaWEVPUwvjG7hjonM7mOsaSBQdFTeZTIbmVaqjeZXq+K5Vb1yIDMLOhzdwMOQO0pUKcbvbz57g9rMnmO9zEP1rNcYYJ1e4WNpLGDkR0dthMpHKHJlMhkkN26OBeVVMPb0Vz9NTAABrAy7APy4SKzuN4DiKRERERFTmhCc9x0/XDsL78e0c5dpyDYxybI2PGneClZ6RRNFRadCUa6CDnQM62DlgfkYa/nt0C9sf+uBmTJi4TaoiE9se+GDbAx80trTHaMfW6F+7CfQ0tSSMnIio8JhMpDKrnW09HOw7HRNObhLv6l6KeoSe/y3B2s6jeRePiIiIiMoEhUqJtf4X8KfvMaQqMnOsG1SnKT5v1g32hmYSRUdSMdbWxSin1hjl1BqB8dHYHugDr4fXxcYSAHArNhy3YsPxs89hjKnvivedXJlwJqIyTy51AEQFqWZkjr29p2BA7SZiWURyAgYeXImdD29IFxgREREREYCbMWHotX8pfvI5mCOR2MSyGvb1norF7T2ZSCQ4mFrj21a9cW3YHCxq74kWVWrkWB+XnoyFvifQ2usXzD6/U5yUkoioLGLLRCrz9DS1saS9J1ws7PCTz0FxHMUZ57zgFxuOb1v1hhbHUSQiIiKiUpSmyMQfN49htf85qARBLLfQNcC3LXthUJ2mkMvYdoNy0tXUwuA6TTG4TlMExEVi073L2PHwBtKUWY
noDJUS2x/4YPsDH3Swc8BE57Zob1sPMplM4siJiF5hMpHKBZlMhokN28HZvComn94idg345+5FBDyPxMqOI2Gpx3EUiYiIiKjk3YoNx8xzXgiMf5qj/D2HlviqRU+Y6ehLFBmVJw3Mq2JBm4H4vFk3bA68inUBFxGdmiiuP/MkEGeeBMLR1BofOrfFgNpNoMtxFYmoDOCtMipX3G3r4lC/j9DQ3FYsuxwVjF77l+BWbLiEkRERERFRRadUqbDk1in0816eI5FYy9gSO3tOxO/ug5lIpCIz0zXAdJdOuDT0CyxsNwzO5lVzrL8fH41PL+yC+87fsNb/PFIVGRJFSkSUhclEKnfsDc2wp/dkDHxtHMVBB1dix4Pr0gVGRERERBVWZHIC3juyFr/eOAKloBLLJzRwx9H+H8PVpraE0VFFoK2hiSF1m+Fwv4+xvceH6FLNKcf66NREfH/VG247fsPK22eRnJkuUaREVNmxmzOVS3qa2ljc3hONLe3x47WDUAoqpCsVmHl+B/yePcFcjqNIRERERMXkzJNAfHRmO+LSk8Uye0NT/NVuGNyYRKRiJpPJ4F61Dtyr1kFQQgzW+J/HjofXka5UAABi05Lwk89BLL99BhMbtsPY+m4w1NKROGoiqkzYMpHKLZlMhgnObbGl+3iY6xiI5evuXsTww2sRk228ESIiIiKiolIJKiz0PYFRR9flSCT2qdkIR/p9wkQilbg6Jlb4pc1AXBzyOSY6t4WuxqsxE+PSk/HL9cNw3fErFvmewIuMNAkjJaLKhMlEKvfcq9bBwX7T0cjCTiy7Eh2MXv8thR/HUSQiIiKit5CUmY5xJzbij5vHICBrtmYdDU385j4IKzqOgImOnsQRUmVirW+Mua364PLQLzClYQfoa2qL6+LTU/D7zWNw3fELltw6hZRMjqlIRCWLyUSqEOwNzbC712QMrtNULItMScDAgyux95GvdIERERERUbnzJCkeAw6swPGwe2JZDSML7O8zFSMcWkEmk0kYHVVmlnqG+LplT1we+gU+cumUo3vzi4w0/HrjCNru+h3/3ruCTJVSwkiJqCJjMpEqDD1NLSxsNwzzWveFhizr1E5XKjD9zDb87HMISpXqDc9ARERERJWdb0wY+ngvxb3nUWJZt2r1cbDvdDQwt5UwMqJXzHUN8EXz7rg09AvMaOIBo2xJxaepifjy0h503vMXvIP9IAiChJESUUXEZCJVKDKZDOMbuGNzt3Ew1dEXy5ffPoMPTmzgOCJERERElK8Dj29jyKHViElNEsumNuqAtR6j2a2ZyiQzHX182rQrLg79ApMbtoeOxqs5VoNfxGLy6S3o470M5yMeShglEVU0TCZShdTWti4O9J0GR1Nrsexk+H309V6GoIQYCSMjIiKi4rBkyRI4OjoW+G/r1q151s3MzMTatWvRr18/NGnSBC1btsTo0aNx9OjRN+43ICAAM2bMgLu7Oxo1agQPDw/89NNPiIuLK+5DpFK24vYZTDq1GWnKTACApkyO390H46sWPSGX8WcTlW1mOvr4pmUvnBv8GYbXawF5tq74t2LDMfzIWow88neOFrdERG+L34pUYdUwssC+PlPRvXoDsSwoIQZ9vZfhZPh9CSMjIiKi4mJhYYFmzZrl+c/KyirX9unp6Xj//ffx+++/4+HDh6hevTpMTU1x9epVfPTRR/jjjz/y3dfRo0cxbNgwHDp0CIIgoF69eoiLi8OmTZvQr18/hIWFleShUgkRBAELfA5jvs8hscxEWxebu43Dew4tJYyMqOhsDUzwR9shODFgJnpUd86x7kzEA3TbtwhfX9qH52nJ+TwDEdGbab55E6Lyy1BLB2s6j8Jfvifwl+8JAFkDE79/bD2+atEDkxu25wDaRERE5Vj79u3xyy+/FHr733//HdevX4e9vT3WrFmD2rVrAwBOnDiBGTNmYM2aNWjWrBk6d+6co150dDQ+//xzZGZmYurUqZg2bRo0NTWRmJiImT
Nn4ty5c5gxYwZ27tzJa4tyRCWoMPfKfqy/e0ksq2Fkjg1dxqKuaRUJIyN6N/VMq2Ctx2hcfxqKBdcP4XJUMABAJQjYcO8S9j7yxeymXTDayRVacg2JoyWi8oYtE6nCk8vkmN20K1Z1Ggl9TW0AgAAB830O4eOz25GqyJQ4QiIiIioNsbGx2LZtGwBg/vz5YiIRADw8PDBhwgQAwNKlS3PVXbt2LVJTU9GyZUt88skn0NTMuidvZGSEP//8E0ZGRrhz5w5OnTpVCkdCxUGpUuHT87tyJBKdzGywt/cUJhKpwmhepTp29JiIdR5jUNPIQixPyEjF3Cv70X3fIpx98kDCCImoPGIykSqN3jUbYW/vKahmaCaW7XnkiyGHViEiOUHCyIiIiKg0nDx5EpmZmahZsyZcXV1zrR8+fDgAwN/fH6GhoTnWHTlyBAAwbNiwXPVMTEzQo0cPAMChQ4dyraeyJ1OlxPQz2+D18LpY1tjSHjt6ToSVnpGEkREVP5lMhq7VG+DkwJn4pkWvHDM/B8Y/xYijf+OD4xvw+MUzCaMkovKEyUSqVBqYV8WBvtPhZvOqJcKt2HD03r8EPtEhEkZGREREb+PevXuYPXs2xowZgylTpmDhwoV48CDvVja+vr4AgObNm+e53traGvb29jm2BYDIyEhER0cDAFq2zHsMvRYtWgAAbt269TaHQaVIqVLhk7Pbsf+xn1jW2roWtnWfADMdfQkjIypZ2hqamNyoPc4O/hTvObSEDK+GZDgWdhcee//CIt8TSFcqJIySiMoDjplIlY65rgG2dB+PeVe9xW4tMalJGHZ4NX52G4DhHGibiIio3Lh79y7u3r0rLp88eRIrV67EmDFj8MUXX0BD49VYYI8fPwYAVK9ePd/nq169OsLDwxEcHJyrnpaWFmxsbPKsV61aNQBAWFgYMjMzoaWlled227Ztg5eXV6GOLSgoqFDbUeGpBBU+v7gb/wW/SiR2sHPA2s6joPdyOByiis5Kzwi/uw/GGEdXfHd1P65GPwYApCsV+P3mMex+5Iuf3QbAvWodaQMlojKLycQSdubMGaxevRoBAQGQyWSoWbMmPvvsM7i5uUkdWqWmJdfAT679Ud+sKr65vA+ZKiUyVEp8emEX/OMiMbdVbw5ETEREVIZVqVIFH3/8Mdq1awd7e3sYGhoiODgYW7ZswbZt27BhwwZoamri888/F+skJGQNa2JiYpLv86rXvXjxQiyLj48X1+U3uYqpqSkAQKVSISkpCWZmZnluFxMTA39//0IfJxUfQRDw/RVvbH/gI5Z1sK2HfzzGQEeDP4uo8mlkaYddPSfhv2A/zLvqjaepiQCAoIQYeB5eg8F1muLblr1hqWcocaREVNbwW7MEbdu2DT/++CNGjhyJqVOnQqVS4e7du0hLS5M6NHpppGMr1DOtgokn/0VsWhIAYN3diwiMj8bKjiNgpmsgcYRERESUF09Pz1xljo6OmDdvHuzt7fHHH39gw4YNGDFihNh1OT09HQDybTUIANraWa3Tsl+vFaVe9u3zYmVlBWdn53zXZxcUFMTrxmL0x81j+OfuRXG5lXVNrPUYzUQiVWoymQz9azdGJ3tH/H7jKNbfvQQBAgBgV9BNHA+7iznNe2KEY0vIZRwljYiy8JuzhISHh+Pnn3/GZ599hrFjx4rl7dq1ky4oylMr65o42Hc6xp/chNvPngAALkQGoff+ZfjbYwzqm+fdnYmIiIjKpnHjxmHjxo14+vQpTp48iTFjxgAAdHSyJh3IzMzMt25GRgYAQFdXVywrSr3s2+dl+PDh4kQvbzJo0CC2YiwmfwdcwKJbJ8VlFws7rO8yll2biV4y1tbFj679MKRuM8y5uAd+L38XJWSk4ctLe7Dn0U387j4EtU0sJY6UiMoC3looIbt27YJcLsd7770ndShUCLaGptjdaxL6124sloUmxaH/geU4FHJHwsiIiIioqDQ0NNC4cdZ3ekjIqwnWjI2NAbzq7p
wX9Tr1tsCrrs8JCQkQBCHPeuqu0HK5HIaG7BJYlhwNDcD3V7zFZUdTa2zuNg7G2roF1CKqnBpb2mN/n2n4ybV/jlmfr0Q/Rtd9C7HqzlkoVSoJIySisqDSJBNjYmKwd+9e/PTTT/D09ISLiwscHR0xevToQtW/fPkyJk2aBFdXV7i4uKBHjx5YuHAhUlJS8tz++vXrqF27Ng4cOIAuXbqgQYMG6Nq1KzZv3lych0XFSE9TG0vbD8ec5j3Emc1SFBn48OS/WHzrZL4/HoiIiKjsUXdJVihezUpas2ZNADkTjK8LDQ3NsW32x5mZmYiMjMyzXlhYGADA3t6+wO7QVLr8YsMx7cxWsdumrYEJNncfz6FsiAqgIZdjbH03nB40G71rNhLL05UK/HjtIPofWIH7z6MljJCIpFZpkokHDhzAF198gU2bNsHX17fAsWxet2nTJowdOxanT5+Gjo4O6tSpgydPnmDFihUYMmSIeCc6u6dPn+Lx48f47bffMHHiRPz9999o06YNfvjhB2zYsKEYj4yKk0wmwzSXjljf5f0cd+J+u3EU089uQ6oi/+5NREREVHY8ePAAAHLMvtykSRMAwI0bN/KsEx0djfDw8BzbAoCtrS2qVKkCAPDx8cmrqlievR5JKyIpHh8c3yBevxlq6WBDlw9go2/8hppEBADW+sZY1WkkVnUaCatsk7D4xoah53+Lscj3BDJVSgkjJCKpVJpkoqGhIdq0aYNJkyZh6dKlmDp1aqHq3blzBz///DMA4IcffsDp06exZ88eHD9+HM7OzggKCsK3336bq54gCEhOTsYPP/yAYcOGwc3NDfPmzUO7du2wevVqtnIr4zyqOWF/n2moaWQhlu17dAtDDq1CVMqLAmoSERGR1E6fPi0mE93d3cVyDw8PaGlp4fHjx7h8+XKuetu2bQMANGjQADVq1Mixrnv37gAALy+vXPUSEhJw+PBhAECPHj2K5yDonaRkZuD94+sR/XJ2Wg2ZHKs6jeRY2ERvoXfNRjg5cBaG1GkmlmWolPj95jH03b8MgfFspUhU2VSaZOKQIUOwbt06zJo1C127doWFhcWbKwFYvnw5VCoV+vfvD09PT8hkWd1fra2t8b///Q9yuRxHjx7FvXv3ctQzNTUFALRp0yZHedu2bREbG4unT5+++0FRiaprWgX7+05D26p1xbJbseHovX8pbsWGSxgZERFR5fbgwQPMnTs31/WXSqWCt7c3Zs+eDQDo1KkTXFxcxPWWlpbiLNBff/01Hj16JK47efIk1q5dCwCYNm1arn2OHz8eurq6uHbtGhYtWgSlMqs1TmJiImbPno3ExEQ0aNAAnTt3Lt6DpSITBAGfXdyFu8+jxLKf3Qagg52DhFERlW9mOvpY2H4YNnQZi6r6JmL5nbgI9PxvCdb6n4dK4FiKRJUFZ3MuQHJyMs6dOwcAGDZsWK71NWvWhKurKy5evIjDhw/DyclJXFe3bl34+vrm+9xyeaXJ45ZrZjr62NTtA3x/ZT823MtqwRCd8gKDDq7EX22Hol+2CVuIiIiodCgUCmzfvh3bt2+HqakpbG1toaGhgdDQUHEClRYtWuC3337LVfezzz6Dv78/bt68iT59+qBevXpISUkRx0ocN24cunTpkqte1apV8euvv2L27NlYvnw5tm/fDhsbGwQHByMlJQWWlpZYuHCheOOZpPN3wAXse3RLXJ7k3A4jHVtJGBFRxeFRzQknB87Ej9cOYkvgVQBZYyl+f9Ubx8Lu4n9th8LO0FTaIImoxDGZWIC7d+8iIyMD2traOe5qZ9e8eXNcvHgRt27dylHetWtX7Ny5E+fPn8/R3eXcuXOwsbGBlZVVvvvdtm1bnl1o8hIUFFSo7ejtack1MN9tABxMrTH3yn4oBRXSlQpMPbMVgQlPMauJB+QyJoeJiIhKi52dHWbMmAFfX18EBQUhJCQEGRkZMDExQfv27dGnTx/06dMHGhoauerq6upi48aNWL9+Pf
bv34/Hjx9DS0sLrVq1wqhRo8TuzHnp0aMHqlWrhlWrVsHHxweBgYGoUqUKBg0ahKlTpxa65wuVnMtRj/DjtYPichub2pjTgl3PiYqTkbYufnMfhO7VG+DTCzsRk5oEALgQGYSu+xbiR9f+GFS7CW+uEFVgTCYWIDg4GEDWoNv5zcpXvXr1HNuqdejQAa1bt8Z3332H58+fo1q1ajh8+DDOnz+PBQsWFLjfmJgY+Pv7F8MRUHF6v74b6ppYYeKpzUjISAUALPQ9gcDn0VjYbhj0tbQljpCIiKhyMDY2xpQpU966vra2NiZOnIiJEycWua6zszMWL1781vumkhOV8gJTTm+B8mVXy6r6JljecQQ05bmTykT07jyqOeHEgJn48uIeHAy5AwB4kZGGT85ux/HQu/ilzUCY6OhJHCURlQQmEwug7iZjYmKS7zbqdept1WQyGZYvX44///wTS5YswYsXL1CrVi388ccf6Nu3b4H7tbKygrOzc6FiDAoKQlpaWqG2pXfnblsX3n2n4YPjG/AwIQYAcDDkDkIT4/CPxxjYskk/ERERUalTCSrMOOsltpDSlmtgdedRsMw2Ay0RFT9zXQOs6jQSu4Ju4tvL+5CYmQ4A2P/YDzdjQ7Gsw3toXqXGG56FiMobJhMLkJ6e9Ycwv1aJQNad7ezbZmdoaIjvvvsO3333XZH2O3z4cAwfPrxQ2w4aNIitGEtZLWNL7Os9FVPPbMWZJ4EAsgYe7u29FH97jEEzq+oSR0hERERUuay8cw7nIx+Ky/Na90VTq2oSRkRUechkMgyp2wyuNrUw89wOXIrKmtwqPCkegw6uwmfNumJqow4cGoqoAuGnuQA6OjoAgMzMzHy3ycjIyLEtVQ4mOnrY0OV9TGjgLpbFpCZh6KHV2BV0U8LIiIiIiCqXW7Hh+O36EXG5Zw1njHJsLWFERJWTvaEZtnWfgC+adYfGy8ShUlDhl+tHMOLIP4hOeSFxhERUXJhMLEB+XZizK0xXaKqYNOUa+L51X/zmPghaL8fiSVcq8MnZ7Vjgcxiql+P1EBEREVHJSM5Mx7TTW6HINk7ib+6DOfEDkUQ05HJ81LgTdvWcBDsDU7H8fORDdN27CKfC70sXHBEVGyYTC1CzZk0AQERERL6tE0NDQ3NsS5XPCIdW2Np9PMx09MWyZbdP48OT/yIpM3f3dyIiIiIqHvN9DuFx4jMAgAwyLO7gmeOajIik0cK6Bo70/xi9ajQUy+LSkzHm2Hr8cfMYlCo2vCAqz5hMLED9+vWhpaWFjIwM+Pn55bnN9evXAQBNmjQpxciorHG1qY0DfafD0dRaLDsSGoABB1YgPOm5hJERERERVUwXIh5i473L4vJ0l45ws6ktYURElJ2pjj5WdRqJX9oMhI5G1nQNAgQs9D2BUcf+wbO0JIkjJKK3xWRiAQwNDdG2bVsAgJeXV671jx8/xuXLWRcwPXr0KNXYqOypbmSOvb2noEs1J7Hs3vMo9N6/FFejH0sXGBEREVEFk5yZjk8v7BKXncxsMLOJh4QREVFeZDIZRjm2hnef6ahlbCmWn4t4iO77FsMnOkTC6IjobTGZ+AZTp06FTCbDvn37sH37dgiCAAB4+vQpZs2aBZVKhS5dusDJyekNz0SVgZG2Lv7uPAZTGnYQy56lJcPz8Bp4PfCRMDIiIiKiiuNnn8MIe9n7Q0Mmx19th0L7ZcsnIip76pvb4GDf6ehds5FYFpXyAkMOrcJa//Pi72wiKh9kQiX51EZGRmLAgAHickZGBlJSUqCpqQlDQ0OxfMKECfjwww9z1F2/fj1++eUXCIKAqlWrwszMDA8fPkRGRgZq1aqFLVu2wNzcvLQOJYdBgwbB398fzs7O2L17tyQxUN52PryOzy/sRoZKKZZNcm6Hr1r0hIaceXwiIiIqGK/z8nYt+jEGHlwpLn/SuDM+a9ZNwoiIqLAEQcDfARfw07WD4sRJADCoTlP82mYQ9DS1JIyOiAqr0mQ0lE
ol4uPjxX8pKSkAAIVCkaM8LS0tV92xY8di3bp1aN++PVJTU/Hw4UPY2tpi8uTJ2LVrl2SJRCrbhtRtDq8eE2Gp+ypZvcr/HD44sQGJGbnPMyIiIiIqmEKlxJxLe8VlR1NrfNy4s3QBEVGRyGQyTHBui129JsFG31gs3x10E0MOrUJEcoKE0RFRYVWalokVFe9Yl31PkuLxwYkNCIiLFMscTa2xvsv7qGbERDQRERHljdd5ua3xP4d5Vw+Iy3t7TUEL6xoSRkREb+tZWhImndqMy1HBYpmVniHWdBrNzzVRGVdpWiYSScXO0BR7ek1Gj+rOYtn9+Gj09V6O60854DARERFRYUQmJ+CPG8fE5eH1WjDhQFSOWegaYmv3CXjfyU0si0lNwtDDq7E18JqEkRHRmzCZSFQKDLR0sLrzSEx36SiWxaYlYdjhNdgT5CtZXERERETlxY/XDiBZkQEAMNPRx9ctekocERG9Ky25Bua79cevbQZBS64BAMhUKfHZhV345vI+ZGYbf56Iyg4mE4lKiVwmx5fNe+CvtkPFL8p0pQIfnd2GP24e4wxmRERERPnwiQ7Bf8F+4vJXLXrCTNdAwoiIqDiNdGwFrx4f5hhvfv3dSxhx5G/EpSVLGBkR5YXJRKJSNrRec2zrPgFmOvpi2ULfE5h2ZitSFZkSRkZERERU9qgEFb6/6i0uN7a0h2e95hJGREQloaV1TRzsOx0uFnZi2aWoR+izfxkexj+VMDIieh2TiUQSaG1TC/v7TENdEyux7L9gPww9tBpPUxIljIyIiIiobNn3yA++sWHi8tyWvSGX8WcMUUVka2iKXb0mY0DtJmJZaFIc+h9YjktRj6QLjIhy4LcwkURqGltgX++paG9bTyzzjQ1DH++lCIiLkDAyIiIiorIhVZGJBdcPicu9ajREa5taEkZERCVNT1MLS9p74qts46ImZKRhxJG/sSvopoSREZEak4lEEjLR0cPGrmMxxslVLItITsCAAytxLDRAwsiIiIiIpLfp3mVEJCcAALTlGjmSC0RUcclkMkxt1AErO42EjoYmgKyJWT45ux0LfU9wvHkiiTGZSCQxTbkG5rv2xw+t+0IukwEAUhQZGHdiE1bfOccvSiIiIqqUEjPSsMTvlLg82skVNY0tJIyIiEpbn5qN4NXjQ5jrvJpw6Y+bxzD7/E5kKBUSRkZUuTGZSFQGyGQyjGvgjvVdxsJQSwcAIEDAD9cO4POLu/lFSURERJXO2oDzeJ6eAgDQ19TGRy6dJI6IiKTQvEoN/NdnKmobW4plXg+vY8yx9UhIT5UwMqLKi8lEojKks70j9vaegmqGZmLZ1sBrGHX0H/FimoiIiKiie56WjFV3zonLHzq3haWeoYQREZGUssabn4LW1jXFsvORDzHo4EpxKAQiKj1MJhKVMU5mNtjfZxpaVKkhll2MeoR+3svxKCFGwsiIiIiISsfagAtIykwHAJho62GiczuJIyIiqZnpGmBL9wk5Znq+Hx+NgQdWIIi/k4hKFZOJRGWQpZ4htnWfgIHZviiDX8Sir/dyXIgMki4wIiIiohKWmJGG9XcvisuTGraDiY6ehBERUVmho6GJJe098XG2YQ+eJMdj4IGV8IsNlzAyosqFyUSiMkpXUwuL23vis6ZdxbKEjFSMPPI3tgRelTAyIiIiopKz6f4VJGSkAQCMtHTwvpObxBERUVkik8nwefPu+LF1P8iQNYFlXHoyhh5ajQsRDyWOjqhyYDKRqAyTyWT4pIkHVnQcAR0NTQCAQlDh8wu78ePVA1CqVBJHSERERFR80hSZWOP/aqzE9+u7sVUiEeXpgwZtsKSDJzRlWWmNZEUGRh9bh4OP70gcGVHFx2QiUTnQt5YLdvachCp6RmLZKv9zmHByE5JfjidEREREVN55PbyOmNQkAFndGSc0aCtxRERUlg2o3QTrurwPXQ0tAECGSonJpzdj83325CIqSUwmEpUTTa2qwbvPNDQwryqWHQ
u7i4EHV+JJUrx0gREREREVA4VKiRW3z4jL7zm05AzORPRGnewdsb3HBJhoZ7ViVgkCvri4G0v9TksbGFEFxmQiUTlia2iKPb0mo1u1+mJZQFwk+ngvxY2YUAkjIyIiIno3Bx7fQVjScwCApkyOyQ3bSxwREZUXzavUwK5ek2CtbyyW/XL9MH6/cRSCIEgYGVHFxGQiUTljoKWDNZ1HY1K2C+yY1CQMPbQa3o9vSxgZERER0dv7J+CC+Lh/7cawNzSTMBoiKm+czGywt9dk1DSyEMsW3TqJn30OM6FIVMyYTCQqhzTkcnzbshd+dx8sDjicrlRg8qnNWOp3ml+WREREVK74xYbjerZeFuMbuEsYDRGVV9WMzLG712Q4mlqLZSvunMH3V735G4moGDGZSFSOvefQEpu7j4eJtq5Y9sv1w/jswi5kKBUSRkZERERUeOvuXhQfN7eqDhdLewmjIaLyrIq+Ebx6fphjrPm/Ay7g68v7oBJUEkZGVHEwmUhUzrlXrYP/+kxDjWzN+bc98MHoY+sQn54iYWREREREb/YsLQn/BfuJyx80aCNhNERUEVjoGmJ7jw/hYmEnlm28dxlfXNzDhCJRMWAykagCqGNihf19pqJllRpi2YXIIAw4sAIhic8kjIyIiIioYFsDryH9ZY+KKnpG6FWjocQREVFFYKajj63dJ6CZVXWxbGvgNcw6txNKFROKRO+CyUSiCsJc1wBbu0/AgNpNxLKHCTHou385fKJDpAuMiIiIKB9KlQob710Wl0c6toK2hqaEERFRRWKio4ct3cejlXVNsWxn0A3MPL+DCUWid8BkIlEFoquphSXtPTGziYdYFpeeDM8ja7D3ka90gRERERHl4UzEA0QkJwAANGVyjHJsLXFERFTRGGrp4N+u49DGprZYtjvoJj6/uJtdnoneEpOJRBWMTCbD7KZdsai9J7TlGgCyZnqefmYbFvme4CxmREREVGZ4PfARH3tUc4K1vrGE0RBRRaWvpY0NXcfCvWodsWz7Ax98fWkffx8RvQUmE4kqqMF1mmJr9wkw09EXy36/eQwzz+0QxyUiIiIiksrz9BQcDQ0Ql4fXayFhNERU0elpamOdx/tona3L86b7V/Ddlf1MKBIVEQckIarAWtvUwn99pmLMsfUIfhELIGuMkLCk51jbeRTMdA0kjpCIiKh4nDlzBhMnTgQA2NnZ4eTJk7m2cXR0LPA5LC0tceHChXzXBwQEYPXq1bh27RpevHiBKlWqoFOnTpg6dSrMzc3f7QAqob1BvshQKQEAVnqG6GRf8PtDRPSusloofoCRR/7G9ZhQAMA/dy9CW0MTX7foCZlMJnGEROUDWyYSVXC1jC3xX5+pcLWpJZZdiQ5GvwMr8CghVsLIiIiIikdycjK+//77Qm/fsGFDNGvWLNc/FxeXfOscPXoUw4YNw6FDhyAIAurVq4e4uDhs2rQJ/fr1Q1hYWDEcSeWyPVsX50F1mkHz5fAsREQlyVBLB5u6jUNjS3uxbOWds/jj5jEJoyIqX9gykagSMNPRx5Zu4/H5hd3YGXQDABD8Ihb9DizH351Ho3W2RCMREVF589dffyEiIgIeHh44ceLEG7dftGgR7O3t37idWnR0ND7//HNkZmZi6tSpmDZtGjQ1NZGYmIiZM2fi3LlzmDFjBnbu3MlWLYUUEBeBO3ER4rJnveYSRkNElY2xti7+7TYOww+vgX9cJABg0a2T0NXQwkeNO0kcHVHZx5aJRJWEtoYm/mo3FJ836yaWxaenYPiRtdgVdFPCyIiIiN6er68vNm/eDA8PD3Tp0qVE9rF27VqkpqaiZcuW+OSTT6CpmXU/3sjICH/++SeMjIxw584dnDp1qkT2XxHtCvIVHzexrAYHU2vpgiGiSslMRx9bu0+AY7a/P7/eOIKN9y5LGBVR+cBkIlElIpPJ8HHjzljW4T3oaGT9EMpUKfHJ2e344+YxDjxMRETlSmZmJr799lvo6upi7ty5Jb
afI0eOAACGDRuWa52JiQl69OgBADh06FCJxVCRCIIA72A/cXlQnSbSBUNElZq5rgG29ZiA2saWYtnXl/Zh36NbEkZFVPYxmUhUCfWv3Rjbu38Ic51XE7As9D2Bj85uR5oiU8LIiIiICm/VqlUIDAzEJ598Ahsbm0LXW758OSZMmIAPPvgAX375Jfbu3YuMjIw8t42MjER0dDQAoGXLlnlu06JF1izEt27xx2dh3IgJxZPkeACAXCZDn5r5j1VJRFTSrPSMsLX7BFTVNwEACBDwydntOBV+X+LIiMoujplIVEm1sK6B/X2n4v1j6/EwIQYAsPeRL54kPcdaj9Gw0DWUOEIiIqL8BQUFYdWqVXB2dsbo0aOLVHfXrl05lvfs2YPFixdjyZIlcHZ2zrHu8ePHAAAtLa18E5bVqlUDAISFhSEzMxNaWlr57nvbtm3w8vIqVJxBQUGF2q68+S9bq0RX61qoom8kYTRERICdoSm2dB+PQQdX4nl6ChSCCh+e/Bfbuk9AC+saUodHVOYwmUhUidUwssDe3lMw6dRmXIjM+sFy7WkI+nkvx8auH6COiZXEERIREeUmCAK++eYbKBQKzJs3DxoahZsF2MPDA/3794eTkxNsbGyQnJyMS5cu4a+//kJYWBjGjRuHvXv3omrVqmKd+Ph4AFndmfObXMXU1BQAoFKpkJSUBDMzs3xjiImJgb+/f+EOtAJSCSp4P74tLver1VjCaIiIXqlnWgX/dhuHYYdWI1mRgTRlJt4/vg47ek5CA/Oqb34CokqE3ZyJKjlTHf2smczqtRDLQhLj0M97mZhgJCIiKku2bNmCGzduYOTIkWjUqFGh6y1fvhzdu3dHjRo1oKOjA3Nzc/Tu3RteXl6wtbVFfHw8li5dmqNOeno6ABTY2lBbWzvX9vmxsrKCs7Nzof7p6uoW+tjKi6vRjxGd8gIAoCGTo1fNhhJHRET0SmNLe/zjMQba8qybVAkZaRh19B+EJD6TODKisoUtE4kIWnIN/O4+GLWMLbHg+mEAWV+cI4/8jd/cB2FYtkQjERGRlKKjo/G///0P1tbWmDFjRrE8p7m5OSZOnIjvv/8ex48fx08//SS2QtTR0QGQNdlLfrKPt6jePj/Dhw/H8OHDCxXXoEGDKlwrxuytEtva1oW5rkEBWxMRlT5327pY1vE9TDq1GSpBwNPURIw6ug77ek/h3yyil9gykYgAZM30PM2lI1Z2GinO9KwQVJh1fid+vX4EKkElcYRERETAjz/+iKSkJHzzzTcwNCy+8X2bNm0KIKtbs7prM5DVvRkAEhISIAhCnnXV28vl8mKNqaIRBAHHQu+Ky73ZKpGIyqieNRritzaDxOXgF7H44PgGpHKySiIAbJlIRK/pU7MRbA1MMO74RsSmJQEAlvidwuMXz/C/dkOhp5l/Ny8iIqKSFhAQAACYN28e5s2bl2NdWloagKwZmN3d3QEAS5YsQbNmzd74vNm7MSuVSvFxzZo1AWS1TIyMjIStrW2uumFhYQAAe3v7ArtDV3Z3n0eKszjLIEPXavWlDYiIqADDHVriaWoifrtxFABwPSYUH53ZhlWdRkJDznZZVLnxE0BEuTSzqo79fabC0dRaLNv/2A+eh9cgNjVJwsiIiIiyxMbG5vqXlJT1HaVSqcSygronZ/fgwQMAWd2U1ROqAICtrS2qVKkCAPDx8cmzrrq8SZMmb3k0lcPRbK0Sm1pVg5UeZ3EmorLtI5dOGOHQSlw+HOqPeVe9822pTlRZMJlIRHmqZmSOPb2noINtPbHsRkwo+novw4P4pxJGRkREldnJkydx//79PP8tWLAAAGBnZyeWtW7d+o3PqVAosG7dOgCAq6srNDVzdt7p3r07AMDLyytX3YSEBBw+nDXecI8ePd7p2Cq6Y2GvkondqrNVIhGVfTKZDD+79Udne0ex7J+7F7HG/7yEURFJj8lEIsqXsbYu1ncdi1GOr36IhSU9x4ADyznTMxERlSt//P
EH9uzZI7ZeVIuMjMTHH38MX19faGpqYtq0abnqjh8/Hrq6urh27RoWLVokdoNOTEzE7NmzkZiYiAYNGqBz586lcizlUWRyAm7FhovLXdjFmYjKCU25BlZ0HAEXCzux7IdrB7A/2E/CqIikxTETiahAWnINLHAbgFrGFvjp2iEIEMSZnn93H4yh9ZpLHSIREdEbPXr0CGvWrMHXX3+NatWqwcTEBImJiQgODoYgCNDR0cFPP/2Exo0b56pbtWpV/Prrr5g9ezaWL1+O7du3w8bGBsHBwUhJSYGlpSUWLlwozgBNuZ0Iuyc+rm5onmMoFSKiss5ASwcbuo5Ff+8VCE2KAwB8cnY7bA1M0LxKDYmjIyp9bJlIRG8kk8kwqWF7rOo0EroaWQPLKwQVZp7fgT9uHuOYIUREVOa999578PT0hJOTE5KTkxEQEICoqCjUq1cP77//Pry9vdGvX7986/fo0QNeXl5il+fAwECYmZlh1KhR+O+//1CjBn9MFuT0k0DxsUc1JyZeiajcsdIzwqZuH8BURx8AkKFSYvyJTQhLjJM4MqLSJxOYBSjXBg0aBH9/fzg7O2P37t1Sh0OVwM2YMHxwfIM40zMADK7TFL+5D4aOBhs7ExERFZeKcp2nUCnRaMsPSMxMBwBs6DIWHtWcJI6KiOjtXIkKxvAja5GpyhrywtHUGnt7T4GRtq7EkRGVHrZMJKIiaWpVDf/1mYq6JlZi2a6gmxh59G/Ep6dIGBkRERGVRbdiw8VEopZcA242tSWOiIjo7bW2qYXf2gwSl+/HR2P6mW1QqlQSRkVUuphMJKIiq25kjr29p+T4MXA5Khj9D6xASOIzCSMjIiKisubMkwfi4xZVakBfS1vCaIiI3t3Qes0xtVEHcflE+D385HNQwoiISheTiUT0Vkx19LG52zgMrtNULAtKiEE/7+W4/jRUwsiIiIioLDkb8SqZ2MGunoSREBEVny+bd0f36g3E5TX+57H5/lUJIyIqPUwmEtFb09bQxMJ2wzCrSRex7FlaMoYdXo0Dj29LGBkRERGVBS8y0nAzJkxcbm/LZCIRVQxymRxL2g9HQ3NbsezrS3txMTJIwqiISgeTiUT0TmQyGWY17YKF7YZBS64BAEhXKjD51BasvH2WMz0TERFVYlejg6EUssYRM9PRR0ML2zfUICIqP/S1tPFPl/dhrWcEAFAIKkw+tQXhSc8ljoyoZDGZSETFYkjdZtjcbRxMXs5iJkDATz4H8fXlfVC8nOmMiIiIKpdLUcHiY1ebWpDL+PODiCoWWwMT/O0xBjoamgCAuPRkTDixCamKDIkjIyo5/DYnomLTpmod7O09FdUNzcWyjfcuY9yJjUh6OYsjERERVR5XsiUTW1vXkjASIqKS08SqGn5tM1BcvhMXgc8u7GYvLaqwmEwkomJVz7QK/uszFU2tqollJ8PvY/DBlYhMTpAwMiIiIipNyZnpuP3sibjsasNkIhFVXEPqNsf4Bu7i8t5Hvljtf07CiIhKDpOJRFTsLPUM4dXjQ/Sq0VAs84+LRF/vZQiIi5AwMiIiIiot15+GiuMlGmvror5ZVYkjIiIqWd+07IU2NrXF5fk+h3D2yYMCahCVT0wmElGJ0NPUxspOIzCpYXuxLCrlBQYeWImT4fcljIyIiIhKw+WoR+LjllVqQkPOnx5EVLFpyTWwotMI2BuaAgBUgoApp7cgJPGZtIERFTN+oxNRiZHL5Pi2ZS/87DYAcpkMAJCsyMAHxzfg33tXJI6OiIiIStLVp4/Fx63ZxZmIKgkLXUOs7TwauhpaAICEjFRMOrkZaYpMiSMjKj5MJhJRiRvj5Ip1Hu9DX1MbAKAUVPjy0h7Mv3YIqpfdn4iIiKjiUKiUuBUbLi63qFJDwmiIiEpXQws7/NF2sLh8Jy4C31/1ljAiouLFZCIRlQqPak7Y3WsSrPWNxbIVd85g6umtSOVdOiIiogolMP6p+P2uIZOjkYWtxBEREZWuAbWbYG
x9N3H53/tXsPPhDQkjIio+TCYSUalpaGGH/X2mob6ZjVjm/fg2hh9eg2dpSRJGRkRERMXJNzZMfFzfzAZ6L3snEBFVJt+27I0mltXE5TmX9uDe8ygJIyIqHkwmElGpsjUwwe5ek9HRzkEsux4Tin7eyxGUECNhZERERFRcbsa8SiY2sapWwJZERBWXjoYmVnYaARNtPQBAqiITk05tRlJmusSREb0bJhOJqNQZaetifZf3McqxtVgWkhiHft7Lc8z8SEREROWTb7ZkYlMmE4moErM3NMPi9p7iclBCDD47vwuCIEgYFdG7YTKRiCShKdfAArcB+LpFT7EsISMVI478jd1BNyWMjIiIiN5FcmY67sdHi8vZu/gREVVGHtWc8LFLJ3F5/2M/rL97ScKIiN4Nk4lEJBmZTIYpjTpgZccR0NHQBABkqJT4+Ox2LPI9wbt1RERE5dDtZ0+gevkdbqilg7omVhJHREQkvdlNu8K9ah1x+YdrB3AjJlTCiIjeHpOJRCS5PrVc4NXjQ5jrGIhlv988htnndyJDqZAwMiIiIioq35hw8bGLhR005PzJQUSkIZdjaYfhsNYzAgBkqpSYcmoLEtJTJY6MqOj4zU5EZULzKjXwX5+pqG1sKZZ5PbyO0cfW8QuWiIioHLkd90R87GJpL2EkRERli5WeEZZ3HAENWVYq5klyPL64uJs9sqjcYTKRiMqMmsYW2NdnKlpb1xTLLkQGYeDBFQhLjJMuMCIiIiq0e3FR4mNnc1sJIyEiKnta29TCp027isvej29j2wMfCSMiKjomE0vJ+PHj4ejoiL/++kvqUIjKNDMdfWzpPgEDajcRywLjn6LfgeU5ZoYkIiKisidNkYmHCTHicgPzqhJGQ0RUNk1t1AFtbGqLy3Ov/IeH8U8ljIioaJhMLAXe3t64f/++1GEQlRs6GppY0t4TnzTuLJbFpCZhyKHVOBziL2FkREREVJCHCU+hFFQAAG25BmqbWL6hBhFR5aMhl2NRe0+Y6egDAFIVmZh2ZivSOV48lRNMJpawhIQELFiwAF9++aXUoRCVKzKZDJ8164Y/2w6B5ssxRdKUmfjw5L/4O+CCxNERERFRXgLiIsXHDqbW0JJrSBgNEVHZVdXABH+2HSIu+8dFYoHPIQkjIio8JhNL2B9//IF69eqhT58+UodCVC551muBf7uNg7G2LgBAgIDvruzHvKveUL1s+UBERERlQ/ZkIrs4ExEVrFv1Bhhb301cXhtwASfC7kkYEVHhVJpkYkxMDPbu3YuffvoJnp6ecHFxgaOjI0aPHl2o+pcvX8akSZPg6uoKFxcX9OjRAwsXLkRKSkq+dXx8fLB3717MnTu3uA6DqFJqa1sXe3pNgZ2BqVi2xv88Jp3ajFRFpnSBERERUQ53n7+afIXJRCKiN/umRS84mdmIy7PO70B0ygsJIyJ6s0qTTDxw4AC++OILbNq0Cb6+vkhPTy903U2bNmHs2LE4ffo0dHR0UKdOHTx58gQrVqzAkCFDEB8fn6tORkYGvvvuO4wbNw61a9fO/aREVCSOZtb4r89UNLKwE8sOhfhj+OE1eJaWJGFkREREBACCIORomVg/249jIiLKm66mFpZ3fA+6GloAgGdpyZh5bgd7YVGZVmmSiYaGhmjTpg0mTZqEpUuXYurUqYWqd+fOHfz8888AgB9++AGnT5/Gnj17cPz4cTg7OyMoKAjffvttrnpr165FWloapkyZUqzHQVSZWesbY2fPiehs7yiWXY8JRT/vFXiUECthZERERBSTmoTn6a967dRny0QiokJxMLXG961eDY12NuIBNty9LGFERAWrNMnEIUOGYN26dZg1axa6du0KCwuLQtVbvnw5VCoV+vfvD09PT8hkMgCAtbU1/ve//0Eul+Po0aO4d+/VuAYRERFYuXIlPvnkE2RkZODFixd48SKrmbJ6WalUFv9BElUCBlo6+MdjDEY5thbLQhKfof+B5fCJDpEwMiIios
rtYcJT8bGlriHMdQ0kjIaIqHwZ6dgK3as3EJfn+xxCUEKMhBER5a/SJBPfRnJyMs6dOwcAGDZsWK71NWvWhKurKwDg8OHDYnlYWBjS09Px2WefoWXLluI/APjnn3/QsmVLBAYGlsIREFVMmnINLHAbgDnNe4hlz9NT4HlkDbwf35YwMiIiosor+4/eOiaWEkZCRFT+yGQy/OY+CJa6hgCANGUmPjnrBYWKDZGo7NGUOoCy7O7du8jIyIC2tjZcXFzy3KZ58+a4ePEibt26JZbVr18fGzduzLXtmDFj0K9fPwwZMgTVq1cvsbiJKgOZTIZpLh1hb2iGmee8kKFSIl2pwJRTWxDRshc+dG4rtiQmIiKikvcwRzKxioSREBGVTxa6hvi1zUCMP7kJAOAbG4alfqcxo4mHxJER5cRkYgGCg4MBALa2ttDS0spzG3VSUL0tABgbG6N169Z5bm9ra5vvOrVt27bBy8urUDEGBQUVajuiiqp/7caw1jfC+BObkJCRCgECfrh2AKFJcZjXqi805GyATUREVBqyJxPrmlhJGAkRUfnVvYYzPOs1x/YH1wEAC31PoLO9I1ws7SWOjOgVJhMLkJCQAAAwMTHJdxv1OvW2xSEmJgb+/v7F9nxEFZ2rTW3s6z0FY46tR2hSHABg/d1LiEiKx9IO70FfS1viCImIiCq+oGxjJtZhMpGI6K1936ovLkQGITwpHgpBhU/OeuFgv4+gp5l3Iyei0sZkYgHS09MBIN9WiQCgra2dY9uC3L9/v1D7tbKygrOzc6G2DQoKQlpaWqG2JarI6ppWwb4+UzD2+Abcig0HABwNu4uhh1djfZf3YaVnJHGEREREFVeqIgPhSfHicl1TJhOJiN6WkbYu/mo3DMMOrYEAAQ8SnuL3G0cwN9uMz0RSYjKxADo6OgCAzMzMfLfJyMjIsW1xGD58OIYPH16obQcNGsRWjEQvWekZYUePiZh+ZiuOht0FANyKDUc/7+XY1PUD1DXl+E1EREQlIfhFrPhYR0MT9gZmEkZDRFT+udnUxofO7ljtfx4AsMb/ArpUq482VetIHBkRZ3MuUGG6MBemKzQRlR59LW2s6TwaY+u7iWVhSc/R/8AKXI56JGFkREREFdfD+FfjJdY0suCYxURExeDzZt3h8LJBhAABs8/vRHLmm3tFEpU0fssXoGbNmgCAiIiIfFsnhoaG5tiWiKSnIZfjx9b9MLdlb8iQNaNzQkYqRhz5G3sf+UobHBERUQUUkhgnPq5lbClhJEREFYeuphYWthsGTVlW6iYs6Tl+vX5E4qiImEwsUP369aGlpYWMjAz4+fnluc3161kzLDVp0qQUIyOiN5HJZJjYsB1WdBoBHY2sER0yVEpMP7MNy/xOQxAEiSMkIqLidubMGTg6OsLR0RGdO3fOd7vk5GT89ddf6NGjB1xcXODq6opJkybhypUrb9zH5cuXMWnSJLi6usLFxQU9evTAwoULkZKSUpyHUu6EJD4TH9cwMpcwEiKiisXF0h5TG3UQl9fdvYSr0Y+lC4gITCYWyNDQEG3btgUAeHl55Vr/+PFjXL58GQDQo0ePUo2NiAqnT81G2N79Q5jp6ItlC64fxpxLe6FQKSWMjIiIilNycjK+//77N24XFxeHwYMHY+XKlXjy5Anq1KkDHR0dnD59Gu+//z42b96cb91NmzZh7NixOH36NHR0dFCnTh08efIEK1aswJAhQxAfH198B1TOhCU9Fx9XZzKRiKhYfdLEA/VMXnV3/vT8TqQq8p/bgaikMZn4BlOnToVMJsO+ffuwfft2sTXT06dPMWvWLKhUKnTp0gVOTk4SR0pE+WlhXQP7ek9FTSMLsezf+1cw7sRGjjlCRFRB/PXXX4iIiICHh0eB23399dcIDg6Gs7Mzjh8/jj179uD06dP44YcfIAgC5s+fj7t37+aqd+fOHfz8888AgB9++AGnT5/Gnj17cPz4cTg7OyMoKAjffvttiRxbeRCarZ
szk4lERMVLR0MTf7YbArksawinRy9i8ZfvcYmjosqs0iQTIyMj0bp1a/Hfn3/+CQC4ceNGjvI1a9bkqOfi4oIvv/wSADB37lx06tQJAwcOhIeHB/z9/VGrVi38+OOPpX48RFQ0tU0ssa/PFDS3qi6WnQy/j8EHVyEq5YWEkRER0bvy9fXF5s2b4eHhgS5duuS7XUBAAE6ePAm5XI6//voL1tbWALKGxvD09ET//v2hVCqxfPnyXHWXL18OlUqF/v37w9PTE7KXP+isra3xv//9D3K5HEePHsW9e/dK5iDLsEyVEk+S48VldnMmIip+zayq48MGbcXllXfOwjcmTMKIqDKrNMlEpVKJ+Ph48Z96XBuFQpGjPC0tLVfdsWPHYt26dWjfvj1SU1Px8OFD2NraYvLkydi1axfMzXnBRFQeWOgaYluPD9GrRkOx7E5cBPp5L8P959ESRkZERG8rMzMT3377LXR1dTF37twCtz1yJGvQeldXV9SoUSPXek9PTwBZYy9mHwMxOTkZ586dAwAMGzYsV72aNWvC1dUVAHD48OG3O5ByLCI5HqqXvXdkkMHO0EziiIiIKqZPm3UVe1uphKzZnTOUComjospIU+oASou9vT3u37//1vXd3Nzg5uZWjBERkRT0NLWwstMI/HTtIFb7nwcARCQnYODBFVjTaRTcbetKHCERERXFqlWrEBgYiDlz5sDGxqbAbX19fQEALVq0yHO9i4sLtLW1kZ6ejrt376J58+YAgLt37yIjIwPa2tpwcXHJs27z5s1x8eJF3Lp16+0PppzK3sXZRt9YnPiMiIiKl56mNn5vOxhDD60GANyPj8YSv1OY3bSrxJFRZcNveiKqdOQyOea26gN7QzN8d8UbAgS8yEjDqGPr8Lv7YAyp20zqEImIqBCCgoKwatUqODs7Y/To0W/c/vHjxwCA6tWr57leS0sLVatWRUhICIKDg8VkYnBwMADA1tYWWlpaedZVP6d62/xs27Ytz4n98hIUFFSo7aQWwvESiYhKjZtNbbzv5IoN97Img11y6xR61WiE+uYF31AjKk5MJhJRpTWugTvsDEwx7cw2pCkzkalSYsY5L4QnPccnjTuL42EREVHZIwgCvvnmGygUCsybNw8aGhpvrJOQkAAAMDExyXcb9boXL16Np1uUeupt8xMTEwN/f/83xlqePEmKFx9XYxdnIqISN6dFTxwPu4cnyfFQCCp8eXE39vSeDLms0oxkRxJjMpGIKrXuNZyxo+dEfHB8A2LTkgAAf9w8hrCk5/ilzUBoyd/845SIiErfli1bcOPGDYwePRqNGjUqVJ309HQAyLd1IQBoa2sDQI5xtItST71tfqysrODs7FyoeIOCgvIcz7usiUx+lUC1Ncg/4UpERMXDUEsHC9oMxJhj6wAA12NCseX+NYxyai1xZFRZMJlIRJVeU6tq2NdnCsYcW4+ghBgAwPYHPohMTsCqTiNhpK0rcYRERJRddHQ0/ve//8Ha2hozZswodD0dHR2kpqYiMzMz320yMjIAALq6r/726+joAECh6qm3zc/w4cMxfPjwQsU7aNCgctGKMTLlVTKxKpOJRESlorO9I/rWdMH+x34AgJ+vH0K36g1QRd9I4sioMmAbWCIiADWMLLC39xS0tq4plp2NeIBBB1ciIrngLmtERFS6fvzxRyQlJeGbb76BoaFhoesZGxsDKLgrsnqdelugcF2YC9MVuqLK3jKRyUQiotLzfes+MNLKuon1IiMN8656SxwRVRZMJhIRvWSmo4/N3cajf63GYtnd51Ho570MAXEREkZGRETZBQQEAADmzZsHd3f3HP/mz58PAIiMjBTLbty4AQCoWbMmACAkJCTP583MzERERESObbM/joiIyLd1YmhoaK56lYEgCDlbJuozmUhEVFqs9Y0xp0VPcXlf8C2cCr8vYURUWTCZSESUja6mFpZ08MTURh3EsqiUFxh0cBXOPAmUMDIiInpdbGxsrn9JSV
nj36pUKrFMnQBs0qQJAOD69et5Pp+fnx8yMzOho6OD+vXri+X169eHlpYWMjIy4Ofnl2dd9XOq91FZvMhIQ6riVYKVLROJiErXKMdWaGpVTVz++tI+pCoyJIyIKgMmE4mIXiOXyfFVi55Y4DYA8pczOidlpuP9Y+uxLfCaxNEREdHJkydx//79PP8tWLAAAGBnZyeWtW6dNSB99+7dAQBXrlzJs3Xi9u3bAQDt27eHgYGBWG5oaIi2bdsCALy8vHLVe/z4MS5fvgwA6NGjRzEeadmXvVWijoYmzHT0JYyGiKjykcvk+LXNIGi8nMk5NCkOC31PShwVVXRMJhIR5WO0kyvWebwPfc2sGToVggqfXtiF328chSAIEkdHRERF5ezsjE6dOkGpVGLmzJl4+vQpgKyuutu3b8e+ffsgl8sxZcqUXHWnTp0KmUyGffv2Yfv27eL3wNOnTzFr1iyoVCp06dIFTk5OpXpMUss+XqKNvglkL2/CERFR6WlgXhUTnduJy6vunMXduCgJI6KKjslEIqICeFRzws6eE1FF79WsaItuncSMc17IUCokjIyIiN7Gzz//jJo1a8Lf3x8eHh4YOHAgOnXqhLlz50Imk+Grr76Cs7NzrnouLi748ssvAQBz585Fp06dMHDgQHh4eMDf3x+1atXCjz/+WNqHI7mcMzkbF7AlERGVpJlNPFDN0AxAViOIry/vZQMIKjFMJhIRvYGLpT3+6zMVDqZVxLJdQTcx6ug/SEhPlTAyIiIqKnNzc+zatQuTJ0+Gra0tHj58iNTUVLRv3x7r16/H6NGj8607duxYrFu3Du3bt0dqaioePnwIW1tbTJ48Gbt27YK5uXkpHknZ8HrLRCIikoa+ljZ+cu0vLl+Nfozdj3ylC4gqNE2pAyAiKg/sDc2wp9cUfHhyEy5GPQIAXIx6hEEHV2Jj1w9gZ2gqbYBERAQAGDRoEAYNGlTgNoaGhpg5cyZmzpxZ5Od3c3ODm5vb24ZX4eRMJrJlIhGRlDyqOaF79QY4EhoAAJh/7SC6VasPI21diSOjioYtE4mICslERw//dhuHQXWaimX346PR13sZbsc+kTAyIiIiacSmJYmPmUwkIpLed636QEcjq93Y09RE/M/3uMQRUUXEZCIRURFoa2hiUbth+KRxZ7HsaWoiBh9ahRNh9ySMjIiIqPTFpiaLjy10DSWMhIiIAKC6kTmmNeooLv8TcBH3nnMyFipeTCYSERWRTCbDZ8264Xf3wdCQZf0ZTVFkYNyJjdh8/6rE0REREZWe2LRE8bGlnoGEkRARkdqURh1Q3TBrHF+loMI3l/dxMhYqVkwmEhG9pfccWmJj17Ew0NQGkPVF/cXF3fj9xlF+WRMRUYUnCEKOlomWukYSRkNERGp6mlqY17qPuHw5Khj7gm9JGBFVNEwmEhG9gw52DtjdazKs9V79gFp06yRmntuBDKVCwsiIiIhKVooiA2nKTHGZLROJiMqOrtUbwMPeSVz+8dpBJGWmSxgRVSRMJhIRvSNnC1v812caHE2txbKdQTfw/vH1eJGRJmFkREREJScm9dXkKzLIYKajL2E0RET0unmt+4qTsUSnvMAi35MSR0QVBZOJRETFwM7QFLt7TYabTW2x7FzEQww+uBKRyQkSRkZERFQynmWbydlcVx+acg0JoyEiotfVNLbA5IbtxeW/A84jJPGZhBFRRcFkIhFRMTHR0cO/3cZhQO0mYtnd51Ho570cd+M4gxoREVUssdlaJlpyJmciojJpuktHVNU3AQBkqJSYf+2QxBFRRcBkIhFRMdLR0MTi9sMwrVFHsSwyJQGDDq7A+YiH0gVGRERUzGKytUy00OV4iUREZZGepjbmtOghLh8MuYNLUY8kjIgqAiYTiYiKmVwmx5wWPfCz2wDIZTIAQGJmOkYfW4edD29IHB0REVHxeJa9ZaIeWyYSEZVVA2o3RlOrauLyvCveUKpUEkZE5R2TiUREJWSMkyv+7jwaep
paAIBMlRIzznlhya1TEARB4uiIiIjezbO0ZPGxOVsmEhGVWXKZHN+36isu34mLwI6H1yWMiMo7JhOJiEpQ1+oNsKPHxBzdv369cQRfXtwDhUopYWRERETvJiEjVXxsypmciYjKtOZVqucY2/3XG0eQlJkuXUBUrjGZSERUwppYVcO+3lNRy9hSLNsceBXjT2xCMr/AiYionMqRTNTWkzASIiIqjK+a94CuRlavqZjUJCz1OyVxRFReMZlIRFQKahpbYF/vKWhuVV0sOxF+D0MPrUZMaqKEkREREb2dhPRXyUQTJhOJiMo8W0NTTG7UXlxe438eYYlxEkZE5RWTiUREpcRc1wDbenyInjWcxTK/Z0/Q33sFghJiJIyMiIio6HJ2c2YykYioPJjasAOs9Y0BAOlKBX72OSxxRFQeMZlIRFSK9DS1sLLjSIxv4C6WhSbFof+BFbga/Vi6wIiIiIooPnvLRI6ZSERULuhraWNO8x7i8v7HfvwdQkXGZCIRUSnTkMsxr3VffNeqt1gWn56C946shXewn4SRERERFV72lons5kxEVH4MqtMEjS3txeUfrx2AIAgSRkTlDZOJREQS+dC5HVZ2HAEdDU0AWd0MppzeijX+5ySOjIiIqGCpikykKxXisgm7ORMRlRtymRzft+ojLt+MCcOBkDsSRkTlDZOJREQS6lPLBVu7T4Dpy+5hAgTMu3oA313ZD6VKJXF0REREecveKhFgy0QiovKmpXVNdK/eQFz+9foRZKqUEkZE5QmTiUREEmtlXRP7ek9BNUMzsezvgAuYfHozUhWZEkZGRESUt+wzOetoaEJPU0vCaIiI6G182bwH5DIZACD4RSy2Bl6TOCIqL5hMJCIqA+qYWGFf76lwsbATyw6F+OO9I2sRl5YsYWRERES5xaeniI9N2SqRiKhcqmdaBcPrtRSX//I9juTMdAkjovKCyUQiojKiir4RdvSciM72jmKZz9MQ9D+wAiGJzySMjIiIKKcck69wvEQionJrVtMu0NXIal0ek5qEVXc4fju9GZOJRERliIGWDv7xGIORDq3EsuAXsejvvQK+MWESRkZERPRK9m7OHC+RiKj8stE3xkTntuLyyjtnEZOaKGFEVB5oFseT3Lt3D5s3b8b169cRFRWF1NTUfLeVyWQICAgojt0SEVVImnIN/NJmIOwNzfDrjSMAgNi0JAw9vBorOo5Al2r1JY6QiIgqu6Rs3eCMtHUljISIiN7V5EYdsOn+FTxPT0GKIgMLfU9gvtsAqcOiMuydWyb++++/GDJkCHbu3IlHjx4hJSUFgiAU+I+IiAomk8nwUeNOWNhuGDRlWX+qUxWZGHdiIzbduyxxdEREVNklK14lEw00dSSMhIiI3pWxti4+adxZXN58/yoeJcRKGBGVde+UTLx16xbmz58PpVKJESNGYPXq1QAAExMTrFu3Dr///jsGDhwILS0tmJmZ4c8//8SGDRuKJXAiospgSN1m+LfbOBhpZf1QUwkC5lzaiwU+h6ESVBJHR0RElVVyZob42EBLW8JIiIioOIx2ckV1Q3MAgEJQiT2kiPLyTsnEjRs3QhAEjBkzBt9++y3at28PANDS0oKbmxv69u2LBQsWwMvLCzKZDIsWLUKDBg2KJXAiosqirW1d7O41BTb6xmLZstun8fFZL6QrFdIFRkRElVb2bs4GWmyZSERU3uloaOLz5t3E5QOPb3PMdsrXOyUT/8/efYdHUa59HP9uekIgtBAIvYVAIDRpiihNonikCihVQJpiP3Z9j9hQjwX10KWIKKAIqPR+FAGRKkloIdRQApjes/v+kZMxMQkEUmaT/D7XlcudZ+bZuYfHzU7uecr+/fuxWCyMGDHiusc1bdqUV199lTNnzvDFF18U5JQiImVS08rV+eH+x2haqbpRtvLkAYZtmJdtEnwREZHikJBlmLOnkokiIqXCA/UDaVGlprH9wb4NJkYj9qxAycQrV67g4uJCzZp//c/m4O
BAcnJyjmN79uyJk5MTGzduLMgpRUTKLN9yXiy/bwKdazQyynZePEn/NTM5HxdlXmAiIlLmxGUZ5uzhpGHOIiKlgYPFgefb/NU7cXvEcXZdPGliRGKvCpRMdHd3x80t++pt5cqVIy4ujpSUlGzlzs7OuLu7c/78+YKcUkSkTKvg4saXPUcxsGEbo+xo1CUeWD2d4KsRJkYmImVNXFxcgd9j+vTphRCJmCE+VT0TRURKo7tr+tHep56x/cG+DVpIV3IoUDKxWrVqxMfHk5b215xdtWvXBuDQoUPZjr106RKxsbEFOZ2IiAAujk58fOeD2VZcu5QQw4C1s/jv+eMmRiYiZcnEiRNzPDy+GTNmzOCzzz4rxIikOCWkaQEWEZHSyGKx8M8svRN3XzrFfyP0N4ZkV6BkYsOGDUlPT+fYsWNGWYcOHbDZbEyfPt0Y7pySksLbb78NgJ+fX0FOKSIi/PUl/97t/XG0ZPwqj0tNZsTG+Sw7/rvJ0YlIWbBnzx6eeuoprNabX1l+9uzZTJs2rQiikuKiBVhEREqvTtUbcKfvX1Mrva/eifI3BUom3nHHHdhsNrZs2WKUPfzww7i4uLBz5066dOnCkCFD6NKlCxs3bsRisTB06NACBy0iIhmGNmnPvO4jjPmq0mxWnvnlOz4+sElf+CJSpBwdHdm6dSsvvfTSTdWbM2cOH330EZBxLyklU3yWORPLOSmZKCJS2mTtnXjwyjk2nAkxMRqxNwVKJvbq1YvHH38cHx8fo6x27dp8+OGHlCtXjujoaA4cOEBUVBQWi4WxY8fywAMPFDhoERH5S/fa/nx37zi83T2Nsg/3b+K5HctJtaabGJmIlGZTpkwB4IcffjBGoNzIF198wYcffghkJBI1Z2LJFa+eiSIipVob7zr0rN3U2P5g/0astpsfjSClk1NBKleoUIHHH388R3nPnj1p164d27dv5+LFi3h6etK5c2fq1q1bkNOJiEgeAqvWYlXvSYzYOJ8T0ZEALD3+OxcTYpjVdagmxxeRQjdgwABiY2OZOnUqX331FV5eXrneF2aaP38+H3zwAQC3334706dPx8VFc+2VVPFpWZOJakcRkdLon216svFsKABH/rzIj+F/0KdBS5OjEntQoJ6J11OxYkX69OnD+PHjGTp0qBKJIiJFrE75yqzoPZEOWVZf237+GAPXzOJSQox5gYlIqTVq1CgmTJiAzWbjP//5D4sXL871uAULFvDee+8B0LFjR2bMmIGrqx5ylFTpViuJaanGth5YiYiUTs0q+/KPeoHG9of7N5KmkU9CAZKJVquVEydOcODAAcLDwwszJhERuUWVXD1YfM+YbF/6h69F8MBP0zkWdcnEyESktHrqqad4+OGHsdlsvP322/z444/Z9n/55ZdMnToVgPbt2zNz5kwlEku4rCs5A5RzUs9EEZHS6tnWPXCwWAA4GXOF707sMzkisQc3Pcw5NTWVTz75hKVLlxIfH2+Ue3l5MXLkSCZMmIDlf/+jiYhI8XNzcuY/dw/Bd48Xs4J/BuB8fBT9Vs9gbvcRdKrewOQIRaS0ef3114mJieGnn37ipZdeokKFCtx1110sWrSId999F4B27doxa9Ys3NzcCuWca9eu5ddffyU4OJjLly8TFRWFs7Mz9erV46677mLkyJFUqlQpR70mTZpc932rVq3Kjh078twfEhLC7Nmz2bNnDzExMVSrVo2uXbsyadIkKleuXODrKgni/5ZMdFcyUUSk1GpUsRoDGrbm2/8lET89uJUBjdrg7OBocmRipptOJj722GP8/PPPOVYJjYqK4tNPP+X06dPG02cRETGHg8WB19r3pqZnRf5v90/YsBGdksTQ9V/wyZ2DeEBznYhIIXvvvfeIi4tj27ZtPPnkkwwcOJCvvvoKgNtuu43Zs2fj7u5eaOebOXMmR44cwcXFBW9vb5o0acK1a9cICQkhJCSEZcuWMW/ePPz9/XOt37x581znbKxYsWKe59ywYQ
PPPPMMqampVKlShcaNGxMeHs6iRYtYt24d33zzDbVr1y6sS7RbSVmGOFuw4OpYoGnYRUTEzj3Vqjvfhx0g3WblTNw1vg/bz+DGt5kdlpjopr75165dy3//+18A6tatS1BQED4+Ppw/f54ff/yRy5cvs2rVKvr370/79u2LJGAREcm/0c3uoEY5Lx7fvoTk9DRSrOlM2v4NFxKiGRdwp3qSi0ihcXR05NNPP2XMmDHs2bPHmD+xTZs2hZ5IBBg6dCj169enVatWODs7G+VHjx7lueee49ixYzz77LOsXr061/rTpk2jVq1a+T7fpUuXeP7550lNTWXSpEk89thjODk5ERsby9NPP83PP//MU089xXfffVfqf7cmp6cZr92cnEr99YqIlHV1y1dhYKPWLD2+F/hf78SGrXFS78Qy66bmTPzhhx8AuOOOO/jxxx95+umnefjhh/nnP//JmjVraNasGUCOuXJERMQ899ZtzrKgR6nk6mGUvblnDf/67SfSrVYTIxOR0sbFxYUZM2bQrFkzbDYbbdq0Ye7cuXh4eNy48k0aNGgQ7dq1y5ZIhIxhzG+//TYAJ06cICwsrFDON3fuXBITE2nXrh1PPvkkTk4Zz+TLly/Phx9+SPny5Tl8+DBbt24tlPPZs+T0v3omujo6X+dIEREpLSYHdsPRkpFCOh17lRVhB8wNSEx1Uz0TQ0JCsFgsvPzyyzmGhXh6evLPf/6TRx55hJCQkEINUkRECqZttbqs6j2RYRvmcybuGgBfhOzgQnw0n3YZjJuT/hgUkZvTtGnT6+63WCzs37+ftm3bXveYorhvbNDgr7lhExMTC+U9169fD2QkMf/Oy8uLoKAgvv32W9auXUu3bt0K5Zz2KilLz0QNcRYRKRvqVahCvwat+C4sY+7EaQe30K9hK/VOLKNuqmfin3/+iaurKw0bNsx1f/PmzY3jRETEvjTw8mbV/RNpWfWvYX1rTh/m4Q1f8GdygomRiUhJZLPZCuWnKOzdmzEMy8PDg/r16+d6zPTp0xk7diyPPPIIL774IitXriQlJSXXYy9cuMClS5eAjIVkcnPbbRlzRx08eLCg4du9rD0T3dQzUUSkzHiiZTdjZedTsVdZebL0f+dJ7m7qUWJKSgpVq1bNc3/58uWN40RExP54u5dnWdCjTNz2NVvOHQXgt0un6L96JovueYRanjlXPhURyc3jjz9udgjZWK1WIiMj2bFjB//+978BeO655yhXrlyuxy9fvjzb9ooVK/j000/57LPPCAgIyLbv1KlTADg7O1O9evVc3y9z4ZWzZ8+SmpqaY/h1piVLlrBs2bJ8XVNhDdEubNnmTFTPRBGRMqOBV1X6NmjF92H7Afj04Bb6NWiFo8NN9VOTUkDf/iIiZUw5Z1fmdR/Bi7+uYMnx3wE4Hn2ZB36azpc9R9G8Sk2TIxSRksBekokLFizg3XffzVYWGBjI1KlT6dKlS47ju3fvTp8+ffD396d69erEx8ezc+dOPv74Y86ePcvo0aNZuXIlNWrUMOpERUUBGcOZ81psJHMVaKvVSlxcHJUq5f5wJjIykuDg4Fu4UvuRdTVnDXMWESlbnmzZjZUnD2C12TgZc4Ufwg/Rr2Ers8OSYqZvfxGRMsjJwZEP7hiAb7mKfHRgEwCXE2MZsGYWs7sN466afiZHKCKSPz4+PrRp04b09HQiIiK4cuUKoaGhrFq1ilatWlGhQoVsx0+fPj3btqurK71796ZTp04MGDCAiIgIPv/8c2MRF4Dk5GSAPHsbAtnmE888Pjfe3t45ej7mJSwsjKSkpHwdW5w0Z6KISNnV0MubB+q3ZOXJAwBMO7iZB+oHqndiGXPT3/5Xr1697oTbFovluscU1UTbIiJycywWC8+07oFvOS9e+HUF6TYr8WkpjNy4gA/uGMCDjfNeNEFExF7ce++93Hvvvcb2kSNHePPNN/npp58ICwtj+fLlODreeHL4ypUrM27cOP71r3+xadMm3n
rrLaMXoqurKwCpqal51s86zU/m8bkZMmQIQ4YMuWE8AP3797fLXozZhjlrAS8RkTLnyZbdWHXyIDZsnIiO5KdTf9CnQUuzw5JidNOpY3udaFtERG7NEL92zO8xEg+njF41aTYrT//yLZ8e3KLf2SJS4vj7+zNr1iwqVapEaGgoq1evznfd1q1bAxnDmjOHNkPG8GaA6OjoPH8vZh7v4OCAp6fnrQVfQiSla5iziEhZ1rhiNR6oH2hsTzu4GavNamJEUtxu6tvfXubGERGRwtWtVhO+vXccIzcu4EpSHADv79tARHw0b3V8ACeHG/fqERGxF56enrRv357169cTHBzMAw88kK96WYcxp6enG6/r1asHZPRMvHDhAr6+vjnqnj17FoBatWpddzh0aZCcbZhz6b5WERHJ3RMtu/FD+CFs2DgWdZkNZ0IJqpu/aTyk5FMyUUREAGhZtRar7p/IsA3zCY+5AsBXR3dzKSGG/9z1EB7OLjd4BxER+5GWlpHwypoUvJHjx48DGcOUMxdUAfD19aVatWpcvnyZ33//Pdfk5O+/Zyxo1apVq1sPuoRIztIzUas5i4iUTU0q+dCrTjPWncmYjuPzQ9voVadZnguVSemiGTJFRMRQt3wVVvWeSFvvOkbZxrOhDFo3h6v/67EoImLvoqKi+O233wCuO9d3VmlpacyfPx+Ajh074uSUPUnWq1cvAJYtW5ajbnR0NOvWrQMgKCjoluMuKZLS1DNRRETg8cC7jdcHrpzl1wth5gUjxUrJRBERyaayWzmWBI2lV51mRtmBK2fp89MMTsVcNTEyEZEMv/32G9OnT+fcuXM59gUHBzNmzBhiY2Px8fHJltz797//zYoVK4iLy/5w5MKFCzzxxBMcOHAAJycnHnvssRzvO2bMGNzc3NizZw/Tpk0zejzGxsby7LPPEhsbS7NmzejWrVshX639SdaciSIiArTyrs2dvo2M7c8ObTMvGClW+vYXEZEc3J1cmN11GK/v/pGFR3YCcCr2Kn1WT2dBj1G09q5tcoQiUpbFxMQwbdo0pk2bhre3N9WqVcPR0ZELFy4QGRkJgI+PD7NmzaJcuXJGvZMnTzJnzhxeeeUVateujZeXF7GxsYSHh2Oz2XB1deWtt96iZcucK1LWqFGD9957j2effZbp06ezdOlSqlevTnh4OAkJCVStWpVPPvmkTAzvyj5nov6cEBEpyx4P7MrPEScA+OXCCfZHntXfCmWAeiaKiEiuHB0ceKvjA7zU9q9ePVeT4hm0bjabzoaaGJmI2IOdO3ditZqzcmPr1q156aWX6NatG+7u7pw6dYrQ0FCsVisdOnTgpZdeYs2aNTmGOD/00EMMHjwYf39/4uPjCQkJ4eLFizRu3JiRI0fy008/XXexlqCgIJYtW2YMeT527BiVKlVi2LBh/PDDD9StW7dIr9teZF3N2c1Jw5xFRMqy26s3yJY8/PzQVhOjkeKiR4lFaN26daxevZrDhw9z9epVatSowT333MP48ePx9PQ0OzwRkRuyWCw8Fng3Ncp58ewv35FqTScxLZXRm7/knU59Gdakg9khiohJHnnkEby8vLjrrrvo1q0bXbp0wcPDo1jOXaVKFUaNGsWoUaNuqt6dd97JnXfeWaBzBwQE8OmnnxboPUq6lCyL2rg66M8JEZGyzGKxMDmwK6M3fwnA+jMhHIu6hF9FH5Mjk6KknolFaN68eTg4OPD0008zd+5cHnroIb755htGjx5t2pN8EZFb0b9haxb1fITyzq4AWG02Xvx1BR/s24DNZjM5OhExQ5MmTYiOjuaHH37g6aefpmPHjowfP56lS5caQ42ldEqx/jXM2clBf06IiJR1PWr70yRL8vA/mjux1NOjxCI0c+ZMKleubGy3b9+eihUr8sILL7B79246depkYnQiIjens28jlt83geEb53MpIQaAaQe3EBEfxft3DMDZwdHkCEWkOK1atYqIiAg2b97Mpk2b2Lt3L9u3b+
e///0vb7zxBs2bN6dHjx50796dhg0bmh2uFKL0LA/F9btfREQcLA5MCrybJ/+7FICVJw/yXOue1C5f+QY1paTSo8QilDWRmKlFixYAXLp0qbjDEREpsGaVa/BD70n4VaxmlH17Yh+jNi4gLjXZxMhExAy+vr4MHz6chQsX8uuvv/L+++/Ts2dP3NzcOHToEB9//DH3338/vXr14v333+f3339Xb+ZSIDVLMtFJyUQREQH61A+ktmclANJtVmYe/tnkiKQolalkYmRkJCtXruStt95i8ODBBAYG0qRJE4YPH56v+rt27WL8+PF07NiRwMBAgoKC+OSTT0hISMh3DL/99huAntCLSIlV07Mi3983gY7V6xtl2yOOM3DNLKPHooiUPRUqVOCBBx7g008/ZdeuXcyePZsHH3yQqlWrcvr0aebNm8fw4cO54447eOWVV9i8eTPJyXoIURKl2f6aM1HDnEVEBDIeLk1scZexveT4HiITY02MSIrSTX37m7lqX2FYvXo1L7zwAosWLeLAgQM3dQO7aNEiRo0axbZt23B1daVhw4acP3+eGTNmMHDgQKKiom74HpcuXeLTTz/l9ttvN3ooioiURBVdPVh8zxj+US/QKDt8LYI+q6dzIuqyiZGJiD1wcXGhS5cuTJkyhZ9//plly5Yxbtw4GjRowLVr11i+fDmPP/44HTt25LHHHuP7778nOjra7LAln1KtfyUTNcxZREQyDWrUFm/3jMVmk9PTmBu8w+SIpKjcVDLxkUceoVOnTjz//POsW7fupnrk2QNPT09uv/12xo8fz+eff86kSZPyVe/w4cO88847AEyZMoVt27axYsUKNm3aREBAAGFhYbz22mvXfY/4+HgmTpyIo6Mj7777boGvRUTEbK6OTvzn7iGMC+hslJ2Li6LvmpnsuXTKvMBExO4EBgbyzDPPsHr1atavX8/zzz9P69atSU5OZvPmzbzyyissWrTI7DAln9I0zFlERHLh5uTMowF3GttfHtlJbEqSiRFJUbmpZGJJX7Vv4MCBzJ8/n2eeeYaePXtSpUqVfNWbPn06VquVPn36MHjwYCwWCwA+Pj589NFHODg4sGHDBo4cOZJr/aSkJCZMmMC5c+f44osvqF69eqFdk4iImRwsDrze/n7+1f5+LGT8boxKTmDI+rmsOXXY5OhExB7VrVuX0aNH8/XXX/PLL7/w1ltv0bVrV9zd3c0OTfIpa89EJ4uGOYuIyF+GN+lABRc3AGJTk/nm2B6TI5KicFPf/qtWrWLLli288sortG/fHqvVyvbt2/nXv/7FXXfdxaBBg5g9ezZhYWFFFW+xi4+P5+efMyYOHTRoUI799erVo2PHjgCsW7cux/7U1FSeeOIJDh8+zOzZs2nSpEnRBiwiYoKxAZ2ZcfdDuDo6ARnDGsZvXcy8EA1tEJG8Va5cmYEDBzJ9+nTGjBljdjiST2lWzZkoIiK5K+/ixlC/Dsb23JBfsj2EktLB6WYrZK7aN3z4cGJiYti2bRubNm3il19+4dChQ/zxxx98/PHH1KlTh+7du9OtWzfatm1r9OYraUJDQ0lJScHFxYXAwMBcj2nbti2//vorBw8ezFZutVp57rnn2LVrF7NmzaJVq1b5OueSJUtYtmxZvo4tTYlbESnZ7q8fiLd7eUZv/pLolERs2Hh994+cj4/mlduCcFDvFRGRUiHrMGfNmSgiIn83utntRhIxIj6aH8MP0b9ha7PDkkJ008nErDJX7XvggQdISUlh165dbNq0ia1btxqr9s2fP59KlSrRtWtXunXrRufOnXF1dS2s+ItceHg4kJFEdXZ2zvWYOnXqZDs20xtvvMG6deuYMGEC7u7uHDhwwNhXvXr1PIc7R0ZGEhwcXAjRi4gUrw7V67Oy90SGbZjH+fgoAGYd/i8X46P56M4HjZ6LIiJScqXZNGeiiIjkrUY5L/o2aMm3J/YBGX8P9GvQqsR2MpOcCu2vusxV+7p06QLAoUOH2L
RpE5s3byYsLIzly5fz/fff4+bmxu2330737t3p3r07Xl5ehRVCkchcWfB6cWbu+/sqhJnDo2fOnMnMmTOz7Xv88ceZPHlyru/n7e1NQEBAvuILCwsjKUkTmoqI/WhcsRqr7p/EiI3zCbl2AYBV4Qe5nBjL3G7D8XLVvGgiIiVZWrbVnNXrXEREchoX0MVIJgZfu8AvF05wp29jk6OSwlJkXUQCAwONlftOnz7N5s2b2bRpEwcOHGDz5s1s2bKFiIgIHn/88aIKoVAkJycD5NkrETISqVmPzbRly5ZbOueQIUMYMmRIvo7t37+/ejGKiN2p7lGB5feOZ/zWxfw34jgAOy+epP+amSzq+Qi+nhXNDVBERG5ZtgVY1DNRRERy0bRyde6u6ce288cAmHn4ZyUTS5FieZRYklftyxySnZqamucxKSkp2Y4VEZGMyZcX9BjJwIZtjLKjUZd4YPV0Qq9dNDEyEREpiGxzJlqUTBQRkdxNaN7FeL39/DFj1JKUfMU+LqGkrdqX1xDmrPIzFFpEpCxycXTi4zsfZHJgV6PsYkIM/dfMYMcFLSAlUpINHjyYjz76yOwwxASpWs1ZRETy4Y4aDQmoXMPYnn34ZxOjkcKkb/8bqFevHgARERF59k48c+ZMtmNFROQvFouFF9r24t1OfXH436TLsanJDNswj5UnD5gbnIjcsoMHD/L999+bHYaYID3bAiz6c0JERHJnsVgYn6V34sqTB4iIz7ujlpQc+va/gaZNm+Ls7ExKSgqHDh3K9Zi9e/cC0KpVq2KMTESkZBnu35G53Ybj5pgxB22qNZ3Hty9hxh/bsdlsJkcnIiL5pTkTRUQkv/5RPxDfchmjONNsVuaH/GpyRFIYlEy8AU9PTzp37gzAsmXLcuw/deoUu3btAiAoKKhYYxMRKWnuqdOMpUGPUtm1nFH29u9reX33j6RnmYNLRETsV7Y5E5VMFBGR63B2cGRss87G9ldHdxGbkmRiRFIYlEzMh0mTJmGxWFi1ahVLly41etBcvnyZZ555BqvVSo8ePfD39zc5UhER+9e2Wh1W9p5I3fKVjbL5ob8yfutiEtPyXuxKRETMZ7PZNGeiiIjclIf82lHeOWPB2tjUZL45tsfkiKSgytS3/4ULF+jQoYPx8+GHHwKwb9++bOVz5szJVi8wMJAXX3wRgNdff52uXbvSr18/unfvTnBwMPXr1+fNN98s9usRESmpGnhVZVXvSbSsWssoW3cmmIfWz+XPpHgTIxMRkevJOl8iaDVnERG5sfIubgxr0tHYnhe6g7QsD6ak5ClTycT09HSioqKMn4SEBADS0tKylScl5exyO2rUKObPn0+XLl1ITEzkxIkT+Pr6MmHCBJYvX07lypVz1BERkbxVdffk26BxdK/1V6/u3y+fpu+amZyJvWZiZCIikpfUv01JoTkTRUQkPx5p2glHS0YK6lxcFBvOhJockRSEk9kBFKdatWpx9OjRW67fqVMnOnXqVIgRiYiUbR7OLnzRfTgv71zF18d+AyAsOpI+q6fzZY9HaFG1pskRisj1XL16lW7dutGsWbNsP9WqVTM7NCkif++ZqGHOIiKSH76eFeldrzk/hGcsbPtFyC/cV6+5yVHJrSqUZOLevXvZsmULZ8+eBaBq1ao0bdqUu+66SzeTIiJyXU4Ojrx3ez98y3nx7/0bAYhMjGPg2lnM6jaMu2v6mRyhiFxPREQEFy5cYPPmzUZZlSpVaNq0Kc2aNSMgIIBmzZpRq1at67yLlBTW/80dnsnBYjEpEhERKWnGNOtsJBN3XzrFH1fOq/NACVWgZKLVauXFF1/kxx9/NMpsNhuW/91UWCwWgoKCmDx5MvXr1y9YpCIiUmpZLBaeatWdGuW8eH7H96TbrMSnpTBy4wI+uKM/gxrfZnaIIpILDw8PevfuTXBwMMePHyclJQWAK1eu8Msvv/DLL78Yx5YvX95IMGb+NGzY0KzQ5RZZ/9Yz0cGinokiIpI/ba
vVobV3bfZHZnRE+yJkB590GWRyVHIrCpRMnDt3Lj/88AMA5cqVo3Xr1nh7exMVFcWhQ4e4evUqa9asYcuWLUyZMoUHHnigUIIWEZHSaXDj2/DxqMC4LV+RkJZCus3KM798x8WEGCYHdjUeVomIfXB3d2fKlClAxtzUx48fJyQkxPg5cuSIMUd1TEwMu3fv5rfffstWf9++fabELrfGpp6JIiJSAGOa3cHj25cAsCr8IC/fdi/VPMqbHJXcrAIlE1esWIHFYqFDhw588sknVKxYMdv+3bt3M2fOHH755RdeeOEFYmJiGDZsWEFOKSIipdzdNf1Yfu94RmyaT2RiHADv79vAxYQY3uzwAI6an0vELjk6OuLv74+/vz/9+/cHMhJP4eHhhIaGEhwcTEhICKGhoURHRwOQmJhoZshyC6z8LZmIkokiIpJ/veu14M09a7iUEEOqNZ1FR3fxbOueZoclN6lAycRz584B8NZbb+VIJAJ06NCBDh06sHTpUt544w2mTp1K+/bt8fPT/FciIpK3FlVrsqr3JIZtmMfJmCsAfHlkF5cSYvj8rodwd3I2OUIRyQ+LxUKDBg1o0KABvXv3NsrPnz9v9F6UkiXdmj2ZqB7jIiJyM5wdHHmkaSem7l0PwKIju3msxd246f6+RClQ9w53d3c8PDxuOKH24MGDGTlyJGlpacyfP78gpxQRkTKiTvnKrOw9kTbedYyy9WdCeGj9XP5MijcxMhEpqJo1a9KzZ0+efPJJs0ORm/T3nomOSiaKiMhNGurXHlfHjL5tV5Li+CH8oMkRyc0qUDKxbt26JCYm5muIyujRowHYuXNnQU4pIiJlSGW3ciwNGkvP2k2Nst8vn6bfmpmci/vTxMhERMomreYsIiIFVcmtHAMatjG2vwjZkWNOXrFvBUomduvWDavVyvfff3/DY729vfH09OTatWsFOaWIiJQx7k4uzOk2jIf92htlJ6Ij6fPTdIKvRpgYmUjZ1q9fPxo0aGB2GFLMtACLiIgUhjHN7jBeB1+7wK5L4SZGIzerQMnEESNGUKtWLT7++GNCQ0Ove2x8fDzx8fFUrVq1IKcUEZEyyMnBkfdu78dzWSZnvpQYy4C1s/gl4oSJkYmUXe+++y5ffvml2WFIMbParNm2LVqARUREbkGTSj508W1sbM8N/sXEaORmFSiZOGnSJAICAoiLi2PYsGEsWrSIlJSUXI+dO3cuNpuNbt26FeSUIiJSRlksFp5q1Z0P7hiAoyXj6ysuNZnhG+fzfdh+k6MTESkbss6ZaMGiBVhEROSWZe2duOFMKKdjr5oYjdyMAq3mvHv3biyWjJuIhIQE3nnnHT7//HO6du1K8+bNqVSpEteuXeO///0vv/zyC35+fppoW0RECuQhv3ZUcy/PhG2LSUxLJdWazhP/XcqlhBgmNO+iP2xFRIpQ1jkTNcRZREQKomstP+pXqEp4zBVs2FgQupP/a3+/2WFJPhQomTh58mRCQ0MJCQkhIiJj3qro6GhWrlzJqlWrsh1boUIFHnzwQcLDw/H398fFxaUgpxYRkTKse21/lgWNY+TGBVxLzljZ+e3f13IxIYb/a98bB0uBOt6LiEgelEwUEZHC4mBxYEyzO3h1V0b+aMmxPTzXuiflnF1NjkxupEDJxMcee8x4HR0dTUhICMHBwYSGhhIcHMzp06eNSZqjo6N55513AHB0dKR+/fr4+/vTrFkz/P396dSpU0FCERGRMqa1d21W9p7I8I3zOB2bsbjXFyE7uJQQwyd3DsLNydnkCEVESh8lE0VEpDA92KgN7+9bT0xKErGpyXwftp/h/h3NDktuoEDJxKy8vLzo1KlTtqRgQkKC0XMx8ycsLIy0tDSOHz/O8ePH+emnn7BYLISEhBRWKCIiUkY08KrKyt4TGblxAYeungfgp1N/cCUpji+6jcDL1d3kCEVEShclE0VEpDCVc3blwUZt+SJkBwALQncyrEkHTV1k5wotmZgbDw8P2r
ZtS9u2bY2ylJQUjh49avReDA4O5vjx40UZhoiIlGLe7uX59t5xjNu6mO3njwGw62I4/dfMZNE9o/Et52VyhCIipUe2ZKJWchYRkUIw0r+jkUw8GnWJXZfC6VS9gclRyfUUaTIxNy4uLrRo0YIWLVoYZenp6cUdhoiIlCLlnF1Z0GMkz+9Yzrcn9gEZNyJ9fprOonsewb9SdZMjFBEpHWxYjdfqmSgiIoWhgZc3d/k2ZntERkezBaE7lUy0c8WeTMyNo6Oj2SGIiEgJ5+zgyEedH6SGhxefHtoKwIWEaAasmckX3UfQUTckIqXK2rVr+fXXXwkODuby5ctERUXh7OxMvXr1uOuuuxg5ciSVKlXKtW58fDyzZ89m/fr1RERE4OHhQcuWLRk9ejQdOnS47nl37drF/PnzOXjwIAkJCfj6+hIUFMS4cePw8PAoiku1KxrmLCIiRWFk005GMnHd6WAuxEdTQyOM7JaWuxQRkVLDYrHwfNtevNOpr/FHbnRKEg+v/4KfTv1hcnQiUphmzpzJsmXLOH78OC4uLjRp0oSKFSsSEhLCjBkz6N27N0eOHMlR79q1awwYMICZM2dy/vx5GjZsiKurK9u2bWPkyJEsXrw4z3MuWrSIUaNGsW3bNlxdXWnYsCHnz59nxowZDBw4kKioqCK8YvuQNZlosehPCRERKRzda/lTy7MiAOk2K4uP/WZuQHJdugMQEZFSZ4R/R2Z3HYarY0YH/BRrOhO3fs28/83FIiIl39ChQ/nqq6/Yt28fW7ZsYfny5WzdupUffvgBPz8/rl69yrPPPpuj3iuvvEJ4eDgBAQFs2rSJFStWsG3bNqZMmYLNZuPtt98mNDQ0R73Dhw/zzjvvADBlyhS2bdvGihUr2LRpEwEBAYSFhfHaa68V+XWbLV1zJoqISBFwdHBgeJO/VnFefHQ3KelpJkYk16NkooiIlEpBdQNY0mssXi4ZKzrbsPH67h95e89arDbrDWqLiL0bNGgQ7dq1w9nZOVt5kyZNePvttwE4ceIEYWFhxr6QkBC2bNmCg4MDH3/8MT4+PkBGr+bBgwfTp08f0tPTmT59eo7zTZ8+HavVSp8+fRg8eLCxyqSPjw8fffQRDg4ObNiwIdfekKVJ1p6Jjg5KJoqISOF5yK+d0RkgMjGOtaeDTY5I8qJkooiIlFrtfOqxsvdEaparaJTNOLydJ/+7TE86RUqxBg3+miM1MTHReL1+/XoAOnbsSN26dXPUGzx4MADbt28nISHBKI+Pj+fnn38GMpKYf1evXj06dszoTbFu3bpCuAL7ZVPPRBERKSKV3crxQP1AY3vhkZ0mRiPXo2SiiIiUao0rVmPV/ZNoVrmGUbbi5AFGblpAbEqSiZGJSFHZu3cvAB4eHtSvX98oP3DgAAC33XZbrvUCAwNxcXEhOTk521Dn0NBQUlJScHFxITAwMNe6bdu2BeDgwYOFcQl2y0rWOROVTBQRkcI10r+T8fq3S6cIuRZhYjSSF7tYzVlERKQoVfeowHf3jufRLYvYcSFjyOPPEScYuHYWX/Z8BB+PCiZHKCIFZbVaiYyMZMeOHfz73/8G4LnnnqNcuXLGMadOnQKgTp06ub6Hs7MzNWrU4PTp04SHhxsJwvDwcAB8fX1zDKvOlPmemcfmZcmSJSxbtixf15R1iLa9yDpNhFZzFhGRwtbKuzatqtbmwJWzACwI3cX7d/Q3OSr5OyUTRUSkTKjg4saino/w9C/fsupkRs+h4GsX6Lt6Bl/dM5qGXt4mRygit2LBggW8++672coCAwOZOnUqXbp0yVYeHR0NgJeXV57vl7kvJibmluplHpuXyMhIgoNL7hxQWedMVDJRRESKwqimHXnq54xk4vdh+3n5tiAqunqYHJVkpWSiiIiUGS6OTnzWZTDVPbyYdfi/AJyN+5M+q2ewsMdI2lbLOYeaiNg3Hx8f2rRpQ3p6OhEREVy5coXQ0FBWrVpFq1atqFDhr57HycnJAHn2Lg
RwcXEBICnpr2kQbqZe5rF58fb2JiAg4AZXlSEsLCxbHPYg+5yJmjFJREQK3/31Apny2xquJceTlJ7Ktyf28mjAnWaHJVkomSgiImWKg8WB19rdRw2PCrzx22ps2IhKTmDQujlMv+shetXN3x/5ImIf7r33Xu69915j+8iRI7z55pv89NNPhIWFsXz5chwdHQFwdXUlMTGR1NTUPN8vJSUFADc3N6PM1dUVIF/1Mo/Ny5AhQxgyZMgNripD//797a4XY7pNcyaKiEjRcnNy5iG/dvznj20ALAzdxZhmd+Bg0UMse6GWEBGRMmlsQGem3/0QLg4ZSYbk9DQe3foVXx3ZbXJkIlIQ/v7+zJo1i0qVKhEaGsrq1auNfZm9FK83FDlzX9YejfkZwpyfodClQdYFWByVTBQRkSIy3L+DMZ3GqdirbD9/3OSIJCslE0VEpMz6R/1AvrpnNBVcMnogWW02Xty5gg/2bcg2lE9EShZPT0/at28PkK1nX7169QA4ffp0rvVSU1OJiIjIdmzW1xEREXn2Tjxz5kyOeqWR5kwUEZHiUMuzEj1q+RvbC4/sNDEa+TslE0VEpEy7vUZDlt87gepZVnSednALz+34jlRruomRiUhBpKWlAZCe/tfnuFWrVgDs3bs31zqHDh0iNTUVV1dXmjZtapQ3bdoUZ2dnUlJSOHToUK51M98z8xylVdYHLRaUTBQRkaIzquntxuvNZ49yNvaaidFIVkomiohImde0cnV+6D0Jv4rVjLKlx/cyetOXxKdefzEFEbE/UVFR/PbbbwDZkoK9evUCYPfu3bn2Tly6dCkAXbp0oVy5cka5p6cnnTt3BmDZsmU56p06dYpdu3YBEBQUVEhXYZ9sqGeiiIgUj86+DWlQoSqQ8f3z9bE9JkckmZRMFBERAXw9K/L9fRPo4FPPKNt6/iiD1s3hSmKceYGJSA6//fYb06dP59y5czn2BQcHM2bMGGJjY/Hx8cmW3AsICKBr166kp6fz9NNPc/nyZSCjt93SpUtZtWoVDg4OTJw4Mcf7Tpo0CYvFwqpVq1i6dKnRQ+/y5cs888wzWK1WevTogb+/f466IiIicvMcLA4Ma9LB2F5yfA8p6WkmRiSZtJqziIjI/1R09WDxPWN48udlrD71BwAHr5yjz+oZfHXPI9T/35NRETFXTEwM06ZNY9q0aXh7e1OtWjUcHR25cOECkZGRAPj4+DBr1qxsPQwB3nnnHR566CGCg4Pp3r07jRo14s8//+TChQtYLBZefvllAgJyruoeGBjIiy++yNSpU3n99deZMWMGlSpV4sSJE6SkpFC/fn3efPPNYrl+ERGRsuLBRm14b996ktPTiEyMY8OZEO6vH2h2WGWeeiaKiIhk4ebkzPS7HuKRLHO0nI69St/VMzgQedbEyEQkU+vWrXnppZfo1q0b7u7unDp1itDQUKxWKx06dOCll15izZo12YY4Z6pcuTLLly9nwoQJ+Pr6cuLECRITE+nSpQsLFixg+PDheZ531KhRzJ8/ny5dupCYmMiJEyfw9fVlwoQJLF++nMqVKxflZYuIiJQ5ldzKcX+9Fsb2V0d/MzEayaSeiSIiIn/j6ODAlA7/wLecF2//vhaAq0nxPLhuNrO6DqNbrSYmRyhStlWpUoVRo0YxatSoW6rv6enJ008/zdNPP33TdTt16kSnTp1u6bwiIiJy84Y16cDysP0A/HLhBCejI2ng5W1yVGWbeiaKiIjkwmKxMLHFXUzrMhgnS8bXZWJaKo9sWsjS47+bHJ2IiDmyruYsIiJSHG6rVpcmFX2M7cXqnWg6JRNFRESuY0DD1nzZ8xHKObkAkG6z8uwv3zHtwGb9US0iIiIiUsQsFgvD/Tsa20tP7CUpLdXEiETJRBERkRvoUrMxy+8bTzX38kbZB/s38vLOlaRbrSZGJiIiIiJS+vVv2Bp3J2cAopITWH36sMkRlW1KJoqIiORD8yo1Wdl7Ig2yrOi86Ohuxm39is
S0FBMjExEREREp3Sq4uNG3QStj+6sju8wLRpRMFBERya865SuzsvdE2njXMcrWnwlhyLq5/JkUb2JkIiIiIiKl2/AmHYzXey6f5sifF02MpmxTMlFEROQmVHYrx9KgsdxTu6lRtjfyDH3XzORs7DUTIxMRERERKb0Cq9aiZdVaxvZXR3ebGE3ZpmSiiIjITXJ3cmF2t2EM9WtvlIVFR9Jn9QyCr0aYGJmIiIiISOk1tMlf99/LT+wjIVXTDZlByUQREZFb4OTgyNTb+/Fc655G2eXEWAasncXPEcdNjExEpOhoDXsRETFTn/otKe/sCkBsajKrwg+aHFHZpGSiiIjILbJYLDzVqjv/vmMAjpaMr9S41GSGb5jP92H7TY5ORKRoWSwWs0MQEZEyppyzK/0btjG2NdTZHEomioiIFNAQv3bM6z4CdydnANJsVp7471Km/7Edm039eERERERECsuwLAuxHLxyjkNXzpkYTdmkZKKIiEgh6F7bn2VB46jsWs4oe+f3tfzf7h9Jt1pNjExEREREpPRoWrk67arVNba/OvqbidGUTUomioiIFJLW3rVZdf9E6pavbJTNC/2VSdu/ISkt1cTIRERERERKj2H+HY3XK08eICYlycRoyh4lE0VERApR/QpVWdl7Ii2r1jLKVp/6g2Eb5xGdnGhiZCIiIiIipUPvus2p6OoBQEJaCis0X3mxUjJRRESkkHm7l2dZ0KN0rdnEKNt1MZwBa2dxIT7axMhERApG88CKiIg9cHNyZlCjvxZiWXR0t76jipGSiSIiIkWgnLMr83qMYFCjtkbZkT8v0mf1dI5HXTYxMhGRwqG1nEVExExDsyzEcuTPi+y/ctbEaMoWJRNFRESKiLODIx92HsjkwK5GWUR8NH1Xz+D3S6dNjExEREREpGRr6OVNx+r1je2vtRBLsVEyUUREpAhZLBZeaNuLNzs8gOV//XiiUxIZvH4OG86EmBydiIiIiEjJ9bBfe+P1D+GHiEtNNjGaskPJRBERkWLwSLPbmdH1YVwcHAFITk9j7JZFLNYTVBERERGRW3Jf3eZ4ubgDGQuxrDx5wNyAygglE0VERIrJ/fVa8NU9oynv7AqA1WbjhV+/5+MDmzRhtIiIiIjITXJzcmZAw9bGtoY6Fw8lE0VERIrR7TUasvy+Cfh4VDDKPty/iZd2riTdajUxMhGRG9NjDxERsTcPN/lrqPOhq+c5fPW8idGUDUomioiIFLNmlWuwqvdEGnl5G2VfHd3NuK1fkZiWamJkIiL5Z7FoPWcRETGff6XqtPWuY2x/fWyPidGUDUomioiImKCWZyVW3Dch243P+jMhPLx+Ln8mJ5gYmYiIiIhIyZK1d+L3YftJSE0xMZrST8lEERERk1RyK8eSoLH0rN3UKNtz+TQD1swkIi7KvMBEREREREqQf9QLNOYlj0tN5sdTh0yOqHRTMlFERMRE7k4uzOk2jIf9/nqaeizqMg+sns6RPy+aGJmIiIiISMng4exCPy3EUmyUTBQRETGZk4Mj793ej6dbdTfKLibE0H/NTHZdPGliZCIiIiIiJcPDfu2M13sjz+jBfBFSMlFERMQOWCwWnm3dk6md+uHwv0UNYlKSGLphHqtP/WFydCIiGWxaz1lEROxU8yo1CaxS09hW78Sio2SiiIiIHRnm34HZXYfh6ugEQHJ6GhO2fs3C0J0mRyYikp3WchYREXuTdeqg5WH7SUpLNTGa0kvJRBERETsTVDeAb3qNxcvFDcjoCfTKrlW8v3c9Npt6BYmIiIiI5KZPg5Z4OLkAEJ2SyJrTh02OqHRSMlFERMQOtfepx/f3TaSGh5dR9umhrTy3Yzlp1nQTIxMRERERsU/lXdx4oH6gsb1YQ52LhJKJIiIidqpJJR9W3T+JJhV9jLKlx39nzOZFJKSmmBiZiIiIiIh9erjJX0Odd18KJyw60sRoSiclE0VEROyYbzkvlt83ng4+9YyyzeeOMHj9HK4lxZsXmI
iIiIiIHWpdtTb+laob218f22NiNKWTkolF6MKFCzzxxBO0bduWNm3a8PjjjxMREWF2WCIiUsJUdPXgq3vGEFQnwCjbH3mWvqtncDb2momRiUhZo2lbRUTE3lksFoZmWYjl2+N7SU5PMzGi0kfJxCKSmJjIyJEjOXnyJO+99x7vv/8+p0+fZsSIESQkJJgdnoiIlDDuTs7M6jqUEf4djbKTMVfos3oGIdf0oEpEip9F6zmLiIid6tewFa6OTgBcS45nw5kQkyMqXZRMLCLLli3j7Nmz/Oc//6FHjx706NGD6dOnExERwdKlS80OT0RESiBHBwfe7tiH59vcY5RdToxlwJpZ7Ig4YWJkIiIiIiL2o6KrB/fXa2Fsf31MC7EUJiUTi8iWLVto2bIldevWNcpq165NmzZt2Lx5s4mRiYhISWaxWHiiZTc+uGMAjpaMr/HY1GSGbZzPDycPmhydiIiIiIh9eDjLUOefI05wKuaqidGULmUmmRgZGcnKlSt56623GDx4MIGBgTRp0oThw4fnq/6uXbsYP348HTt2JDAwkKCgID755JM8hyyfOHECPz+/HOWNGjXixAn1HhERkYJ5yK8d87qPwM3RGYBUazqTtn/D3OBfTI5MRERERMR87X3q0cjL29heclwLsRSWMpNMXL16NS+88AKLFi3iwIEDJCcn57vuokWLGDVqFNu2bcPV1ZWGDRty/vx5ZsyYwcCBA4mKispRJzo6mgoVKuQo9/LyIiYmpiCXIiIiAkD32v4sDXqUSq4eRtm/fvuJt/esxWqzmhiZiIiIiIi5LBZLtt6Jy47vJdWabmJEpYeT2QEUF09PT26//XZatGhBixYtCAkJYfr06Tesd/jwYd555x0ApkyZwqBBg7BYLFy6dImJEycSHBzMa6+9xmeffVbUlyAiIpJD22p1WNl7IkM3fMG5uCgAZhzezuXEGP7deSDODo7mBihSBGw2G/v372fLli3s3buXkydPEhcXR/ny5WnWrBl9+/blH//4BxZLzgVCmjRpct33rlq1Kjt27Mhzf0hICLNnz2bPnj3ExMRQrVo1unbtyqRJk6hcuXKBr83e2dByziIiUnIMbNSGqXvXkWJN53JiLJvPHiGoboDZYZV4ZSaZOHDgQAYOHGhsX7p0KV/1pk+fjtVqpW/fvgwePNgo9/Hx4aOPPuLee+9lw4YNHDlyBH9/f2N/hQoVcu2BmFePRRERkVvV0Mublb0nMWLjfEKuXQBgedh+riTFM7vrUMo5u5ocoUjh2rVrF6NGjTK2a9euTc2aNTl//jw7duxgx44drF69ms8++wwXF5dc36N58+a57qtYsWKe592wYQPPPPMMqampVKlShcaNGxMeHs6iRYtYt24d33zzDbVr1y7o5ZUYuSVrRURE7Ellt3IE1Q3gh/BDACw++puSiYWgzCQTb0V8fDw///wzAIMGDcqxv169enTs2JFff/2VdevWZUsmNmrUiOPHj+eoExYWRqNGjYouaBERKZOqe1Tgu3vHM3bzl/x68SQA288fY9C6OSzsMYqq7p4mRyhSeGw2G7Vq1WLkyJH07t2bKlWqGPtWrlzJa6+9xrZt25g2bRr//Oc/c32PadOmUatWrXyf89KlSzz//POkpqYyadIkHnvsMZycnIiNjeXpp5/m559/5qmnnuK7775Tkk1ERMSODPVrbyQTt50/xvm4KGp6VjQ3qBKuzMyZeCtCQ0NJSUnBxcWFwMDAXI9p27YtAAcPZl9Bs1u3bhw8eJCzZ88aZefOnWPfvn1069at6IIWEZEyq4KLG4vuGc0/6v31nXXwyjn6rJ6h1eukVAkMDGTdunWMGDEiWyIRoG/fvjz22GMAfPfdd1ithTN/6Ny5c0lMTKRdu3Y8+eSTODllPJMvX748H374IeXLl+fw4cNs3bq1UM4nIiIihaNTjQbULZ9xv2DDxjdaiKXA1DPxOsLDwwHw9fXF2dk512Pq1KmT7dhMgwYNYvHixU
yaNIknn3wSi8XCtGnTqF69erbh0rlZsmQJy5Yty1eMYWFh+TpORETKBldHJ/5z9xC8d3syL/RXAE7HXqXv6hl82XMUgVXz3xNLxF55el6/p22XLl34+OOPiYqK4tq1a1StWrXA51y/fj2Q+2gVLy8vgoKC+Pbbb1m7dq0eHIuIiNgRB4sDD/m1Y+redQAsPfY7T7fsjqOD+tfdKiUTryM6OhrIuEHMS+a+zGMzeXh4sHDhQt59912ef/55bDYbnTp14uWXX6ZcuXLXPW9kZCTBwcEFjF5ERMoqB4sDb3T4B9XLefHO72sBuJIUx4NrZzOn23C61GxscoQiRSspKcl47ebmlusx06dP5/Lly6Snp+Pj40PHjh257777cp1H8cKFC8Z82+3atcv1/W677Ta+/fbbHKNVRERExHyDGrXl3/s2kGazciEhmm3nj9G9tv+NK0qulEy8juTkZIA8eyUCxg1n5rFZ+fr63tIqz97e3gQE5G9C0LCwsGw3zCIiIpCxMMKkFndRzd2T535ZTprNSnxaCiM2zufDOx9kQMPWZocoUmRWr14NgL+/f569GJcvX55te8WKFXz66ad89tlnOe7DTp06BWTcE1avXj3X98tceOXs2bOkpqbmef9Y0kegaC1nEREpiap5lKdnnaasPZ3RcevrY78pmVgASiZeh6trxuqXqampeR6TkpKS7djCMGTIEIYMGZKvY/v3769ejCIikqeBjdpSxc2T8VsXk5CWQprNypP/XUpkQizjm9+phSKk1Dl8+DBLliwBYNy4cTn2d+/enT59+uDv70/16tWJj49n586dfPzxx5w9e5bRo0ezcuVKatSoYdSJiooCMkak5PWZyVwF2mq1EhcXR6VKlXI9rjSNQNFvDxERKUke9mtvJBM3nT3CxYQYqntUMDmqkknJxOvIawhzVvkZCi0iImKmrrWasCzoUUZuWsDVpHgA3vp9DZcSY3it3X04WDRfjJQOV65cYfLkyaSlpdGzZ0969+6d45jp06dn23Z1daV379506tSJAQMGEBERweeff87bb79tHHMzo1WyHp8bjUARERExRxffxtTyrMi5uCjSbVaWHf+dJ1pqnuNboWTiddSrVw+AiIiIPIernDlzJtuxIiIi9qiVd21W9p7IsA3zOB17DYA5wb9wOSGWj+58EFdH3RJIyRYbG8ujjz5KREQEAQEBTJ069abqV65cmXHjxvGvf/2LTZs28dZbbxm9EG9mtErW43OjESgiIiLmcHRwYEjjdvx7/0YAvjm2h8cD79aD9Vugf7HraNq0Kc7OzqSkpHDo0KFcj9m7dy8ArVq1KsbIREREbl79ClVZ2XsiLarUNMpWhR9kxMb5xKao95OUXPHx8YwdO5aQkBAaN27MF198ccMVn3PTunXGXKJRUVHG0GbIPlrFZst91sDM4x0cHG7p3CIiIlL0BjW+DYf/PSw8G/cnP0ecMDmikknJxOvw9PSkc+fOALlOlH3q1Cl27doFQFBQULHGJiIiciu83cvz7b3j6OL714rOOy6EMXDtLC4lxJgYmcitSUxMZPz48Rw4cIB69eoxf/78POcrvJGso1DS09ON15kjUFJTU7lw4UKudc+ePQtArVq1rjscWkRERMzjW86L7rX+Wnjl62N7TIym5FIy8QYmTZqExWJh1apVLF261HgaffnyZZ555hmsVis9evTA31+rAImISMng6ezKgh4j6deglVEWfO0CfVfP4GR0pHmBidyk5ORkJk6cyJ49e6hZsyYLFizA29v7lt/v+PHjQMYw5cwFVQB8fX2pVq0aAL///nuudTPLNVpFRETEvj3s1854vf50MJGJsSZGUzKVmWTihQsX6NChg/Hz4YcfArBv375s5XPmzMlWLzAwkBdffBGA119/na5du9KvXz+6d+9OcHAw9evX58033yz26xERESkIF0cnpnUZxPjmXYyys3F/0nf1TPZFnjExMpH8SU1NZfLkyezcuRMfHx
8WLlyYbQXmm5WWlsb8+fMB6NixI05O2ecR7dWrF5D7aJXo6GjWrVsHlIHRKnkM8xYRESkputZqgs//VnFOs1n59sQ+kyMqecpMMjE9Pd2Y/yYqKoqEhAQg48Yxa3luK+aNGjWK+fPn06VLFxITEzlx4gS+vr5MmDCB5cuXU7ly5eK+HBERkQJzsDjwWrv7eL3dXyveXkuOZ/C6OWw+e8TEyESuLz09nWeffZbt27fj7e3NwoULqV279g3r/fvf/2bFihXExcVlK79w4QJPPPEEBw4cwMnJicceeyxH3TFjxuDm5saePXuYNm2aMQw6NjaWZ599ltjYWJo1a0a3bmVnVcjMBWpERERKEicHR4Y0vs3Y/ubYnjznRJbclZmlG2vVqsXRo0dvuX6nTp3o1KlTIUYkIiJiH8Y1v5NqHuV5+udvSbWmk5iWyujNX/LBHf0ZlOVGS8RerF27lvXr1wPg4uLCyy+/nOexr732Gs2aNQPg5MmTzJkzh1deeYXatWvj5eVFbGws4eHh2Gw2XF1deeutt2jZsmWO96lRowbvvfcezz77LNOnT2fp0qVUr16d8PBwEhISqFq1Kp988okSbCIiIiXAQ37t+PTgVmzYCI+5ws6LJ7m9RkOzwyoxykwyUURERPLWt0ErqriVY+zmRcSnpZBus/LML99xKSGWxwPvVoJE7EpKSorx+vz585w/fz7PY2Nj/5oH6aGHHqJq1aocPnyYy5cvc/78eZydnWncuDGdOnVi2LBh1KlTJ8/3CgoKonbt2syaNYvff/+dY8eOUa1aNfr378+kSZOoUqVK4VygiIiIFKlanpXoUrMx288fA2Dxsd+UTLwJSiaKiIgIAHf6Nmb5feMZvnE+kYkZw0Df27eeiwkxTOnwDxwdyszsKGLn+vfvT//+/W+63p133smdd95ZoHMHBATw6aefFug9RERExHxD/dobycS1pw7zZ4d4KrmVMzmqkkF/FYiIiIiheZWarOw9kfoVqhplC4/sZOK2r0lKSzUxMhERERGRwtOzTlO83T0BSLGmszxsv8kRlRxKJoqIiEg2dctXYWXvCbSsWssoW3P6MMM2ziM6OdHEyETEbJqeXkRESgtnB0cGNfprfvDFR3/TQiz5pGSiiIiI5FDFzZNlQY/StWYTo2zXxXAGrp3FxYQYEyMTERERESkcD/m1M14fj77M75dPmxhNyaFkooiIiOSqnLMr83qM4MFGbYyy0D8v0nf1dMKiI02MTERERESk4OpVqMIdWRZe+frYbyZGU3IomSgiIiJ5cnZw5KPODzKpxV1G2bm4KPqunsH+yLMmRiYiIiIiUnBD/dobr38M/0PT+uSDkokiIiJyXRaLhZdvu5d/tb/fKPszOYFB62az9dxREyMTERERESmYXnUDqOTqAUBSeiorTh4wN6ASQMlEERERyZexAZ35/K4hODs4ApCYlsojmxby3Yl9JkcmIiIiInJrXB2dsk3rs/jobi3EcgNKJoqIiEi+9W3QioU9RlHOyQWANJuVp35exsw//mtyZCJSHPTHlYiIlEYPZxnqHPrnRQ5cOWdiNPZPyUQRERG5KV1qNubbe8dRxa2cUfbW72uY8ttPWG1WEyMTkeJkwWJ2CCIiIoWiUcVqdPCpZ2xrIZbrUzJRREREblpg1Vqs7D2RuuUrG2Wzg3/hyf8uIyU9zcTIRERERERu3kNZeieuOnmQuNRkE6Oxb0omioiIyC2pX6EqK+6bSEDlGkbZipMHeGTTQuJ18yUiIiIiJUjvei3wcnEDICEthZVaiCVPSiaKiIjILavmUZ7v7h3PHTUaGmXbI44zaN0criTGmRiZiIiIiEj+uTs507/hXwuxfHNsj4nR2DclE0VERKRAyru48WXPR/hHvUCj7OCVc/RdPYMzsddMjExEREREJP+yLsRy8Mo5Dl89b2I09kvJRBERESkwV0cn/nP3EB5pertRdir2Kn1XzyD4aoSJkYlIYdJaziIiUpo1rV
ydNt51jO2v1TsxV0omioiISKFwsDgwpcM/eLFtL6PscmIsA9fOYseFMBMjE5GiYNFiziIiUgo97NfOeL0ibD8JqSkmRmOflEwUERGRQmOxWHg8sCsfdh6IoyXjNiM2NZnhG+bxU/ghk6MTEREREbm+B+q3xNPZFci4j/3xlO5h/07JRBERESl0gxvfxhfdh+Pm6AxAijWdidu+YUHoTpMjExERERHJm4ezC/0atDK2vz76m3nB2CklE0VERKRI9KjdlKVBY/FycQfAho1Xd63ig30bsNk085qIiIiI2KesC7HsjTzDkT8vmhiN/VEyUURERIpM22p1WdF7AjU8vIyyaQe38Pyv35NmTTcxMhERERGR3LWoWpPAKjWNbfVOzE7JRBERESlSfhV9WHX/JJpU9DHKvjm2h3FbviIxTRNai5QkNq3nLCIiZUTW3onLw/aTlJZqYjT2RclEERERKXK+5bxYft942lWra5RtOBvKw+u/4M/kBBMjE5FbZUHLOYuISOnVp0FL3J0y5v+OTklkzenDJkdkP5RMFBERkWJR0dWDr3uN5Z7aTY2yPZdPM2DNTCLioswLTERERETkb8q7uNGnfktje/HR3SZGY1+UTBQREZFi4+7kzOxuw3jIr51RdizqMn1Wz+BY1CUTIxMRERERyW5okw7G692XTnH0T92vgpKJIiIiUsycHBx5//b+PNmym1F2ISGafqtn8vul0yZGJiIiIiLyl1ZVa9G8sq+xvejoLhOjsR9KJoqIiEixs1gs/LPNPbzdsY8x71p0SiJD1s9l45kQk6MTEREREcm4Zx3u39HYXn5iH/GpySZGZB+UTBQRERHTjGzaiRldH8bFwRGApPRUxm75iiXH9pgcmYjkxqbFnEVEpIzp26Al5Z1dAYhNTWZV+EGTIzKfkokiIiJiqvvrtWDRPaONm7R0m5Xndizns4NbsSlzIWK3tJaziIiUBeWcXenfsI2xvejI7jJ/j6pkooiIiJjujhoNWX7feKq5lzfK3tu3ntd3/4jVZjUxMhEREREp64b7/7UQyx9Xz3PwyjkTozGfkokiIiJiF5pV9mVl74nUr1DVKJsf+iuPbVtCcnqaiZGJiIiISFnmX6k67X3qGdtlfSEWJRNFRETEbtQpX5mVvSfQsmoto+zHU4cYsXE+sSlJJkYmIiIiImXZ8CZ/LcSy6uQhopITTIzGXEomioiIiF2p4ubJsqBHuaumn1G240IYA9fO4nJCrImRiYiIiEhZdV+95lR2LQdkLBr43Yl9JkdkHiUTRURExO6Uc3ZlfvcR9G/Y2igLvnaBvqtnEB5zxcTIRMo2G2V7wnkRESm7XB2dGNz4NmP7q6NldyEWJRNFRETELrk4OvHJnQ8yPuBOo+xM3DX6rp7BoTI+6bWIPbBYtJ6ziIiULcP82xuvT0RHsvPiSROjMY+SiSIiImK3HCwOvNa+N6+1u88ou5oUz4NrZ/Pf88dNjExEREREypq65atkm4pn0ZHdJkZjHiUTRURExO6Nb96FaV0G42TJuHWJT0th5KYFrDx5wNzARERERKRMGdGkg/F67enDZXJObyUTRUREpEQY0LA183uMxMPJBYBUazqPb1/C3OBfTI5MRERERMqK7rX9qeHhBUCazcrS47+bHFHxUzJRRERESoyutZqwNOhRYyU9gH/99hNv71lbZifAFhEREZHi4+TgyMNN2hnbXx3dTbrVamJExc/J7ABEREREbkZr79qs7D2BoRvmcTbuTwBmHN5OZGIsH3QegLODo8kRSlGz2Wzs37+fLVu2sHfvXk6ePElcXBzly5enWbNm9O3bl3/84x95LhASHx/P7NmzWb9+PREREXh4eNCyZUtGjx5Nhw4dcq2TadeuXcyfP5+DBw+SkJCAr68vQUFBjBs3Dg8Pj6K4XLuilL2IiAg85NeeTw5sId1m5Xx8FNvOH6N7bX+zwyo26pkoIiIiJU4DL29W9p5I00rVjbLvwvYxetOXJK
SmmBiZFIddu3bx0EMPMWfOHPbt20f58uVp0qQJNpuNHTt28M9//pMJEyaQkpLz/4Vr164xYMAAZs6cyfnz52nYsCGurq5s27aNkSNHsnjx4jzPu2jRIkaNGsW2bdtwdXWlYcOGnD9/nhkzZjBw4ECioqKK8KpFRETEXlT3qMA9dZoa218e2WViNMVPyUQREREpkXw8KrD8vgl0rF7fKNt6/iiD18/hWlK8iZFJUbPZbNSqVYtXXnmFX3/9lU2bNvH999+ze/du3nvvPVxcXNi2bRvTpk3LUfeVV14hPDycgIAANm3axIoVK9i2bRtTpkzBZrPx9ttvExoamqPe4cOHeeeddwCYMmUK27ZtY8WKFWzatImAgADCwsJ47bXXivza7Unu/T5FRETKhhH+HY3XW84d5WzsNROjKV5KJoqIiEiJVcHFja96jua+us2Nsv2RZ+m3Zibn/jcEWkqfwMBA1q1bx4gRI6hSpUq2fX379uWxxx4D4LvvvsOaZQ6jkJAQtmzZgoODAx9//DE+Pj4AWCwWBg8eTJ8+fUhPT2f69Ok5zjl9+nSsVit9+vRh8ODBxhBqHx8fPvroIxwcHNiwYQNHjhwpqssWERERO3JHjYbUr1AVABs2vjr6m8kRFR8lE0VERKREc3NyZsbdD2d7OhwWHUmf1TMIvXbRxMikqHh6euLs7Jzn/i5dugAQFRXFtWt/9RJYv349AB07dqRu3bo56g0ePBiA7du3k5CQYJTHx8fz888/AzBo0KAc9erVq0fHjhn//61bt+5mL0dERERKIAeLAyP8/5pr+etjv5GUlmpiRMVHyUQREREp8RwdHHi7Yx+ea93TKLuUEMOAtTPZdfGkiZGJGZKSkozXbm5uxusDBw4AcNttt+VaLzAwEBcXF5KTk7MNdQ4NDSUlJQUXFxcCAwNzrdu2bVsADh48WNDwRUREpIR4sFFb3J0yHnD+mZzAD+Fl4z5AqzmLiIhIqWCxWHiqVXe83cvz0s4VWG02YlKSGLphHp/fNYR7swyFltJt9erVAPj7++Pp6WmUnzp1CoA6derkWs/Z2ZkaNWpw+vRpwsPDjQRheHg4AL6+vnn2iMx8z8xj87JkyRKWLVuWr+sICwvL13EiIiJijoquHvRv0JrFxzKGOM8P3cmDjdoa06GUVkomioiISKkytEl7qriV47Ht35CcnkZyehrjty7mnY59GZZlKIqUTocPH2bJkiUAjBs3Ltu+6OhoALy8vPKsn7kvJibmluplHpuXyMhIgoODr3uMPbPZbGaHICIiYldGNb3dSCb+cfU8+yLP0rZa7g8uSwslE0VERKTUCaobwDe9xvLIpgVEpyRhtdl4cecKLifG8nSr7qX+aXFZdeXKFSZPnkxaWho9e/akd+/e2fYnJycDXHe+RRcXFyD7UOmbqZd5bF68vb0JCAi47jGZwsLCssVhb/Q5EhERgaaVq9Oxen12XcwYnTA/9FclE0VERERKovY+9Vh+3wSGbZjHxYSMXmYfHdjE5cRY3u7YB0cHTR1dmsTGxvLoo48SERFBQEAAU6dOzXGMq6sriYmJpKbmPTl6SkoKkH2uRVdXV4B81cs8Ni9DhgxhyJAh1z0mU//+/Ut0L0YREZGyYlTT241k4upTf/B6u95U8yhvclRFR3fRIiIiUmr5V6rOqt6TaOTlbZR9dXQ347cuLjOr7ZUF8fHxjB07lpCQEBo3bswXX3yRba7ETBUqVACuPxQ5c1/msZC/Icz5GQotIiIipVNQnWbU8Mi4B0i1pvP1/4Y9l1ZKJoqIiEipVtOzIivum0Ab77+Gm6w7E8zQDfOITk40MTIpDImJiYwfP54DBw5Qr1495s+fT6VKlXI9tl69egCcPn061/2pqalERERkOzbr64iIiDx7J545cyZHPRERESkbnBwcGZ5lbu5FR3eTak03MaKipWSiiIiIlHqV3MqxNGgs3Wv5G2W7L4UzcO0sLiXEXKem2LPk5GQmTpzInj
17qFmzJgsWLMDb2zvP41u1agXA3r17c91/6NAhUlNTcXV1pWnTpkZ506ZNcXZ2JiUlhUOHDuVaN/M9M88hIiIiZcvDfu1xcXAE4FJCDOtOl96pSpRMFBERkTLB3cmFud2HM6hRW6Ms9M+L9F09g5PRV0yMTG5FamoqkydPZufOnfj4+LBw4UJq1Khx3Tq9evUCYPfu3bn2Tly6dCkAXbp0oVy5cka5p6cnnTt3BmDZsmU56p06dYpdu3YBEBQUdGsXVGJoNWcREZHcVHX35B/1A43tBaG/mhhN0VIyUURERMoMZwdHPuw8kEkt7jLKzsb9Sb81Mzh05ZyJkcnNSE9P59lnn2X79u14e3uzcOFCateufcN6AQEBdO3alfT0dJ5++mkuX74MgM1mY+nSpaxatQoHBwcmTpyYo+6kSZOwWCysWrWKpUuXYrNlJNUuX77MM888g9VqpUePHvj7++eoW1ppLWcREZHsRjW93Xi9+9IpQq5FmBhN0dFqziIiIlKmWCwWXr7tXrzdPXnjt9UAXE2K58G1s5nbfTh3+jY2OUK5kbVr17J+/XoAXFxcePnll/M89rXXXqNZs2bG9jvvvMNDDz1EcHAw3bt3p1GjRvz5559cuHAh4/+Nl18mICAgx/sEBgby4osvMnXqVF5//XVmzJhBpUqVOHHiBCkpKdSvX58333yz8C9WRERESozW3rVpVbU2B66cBWB+6E4+uGOAyVEVPiUTRUREpEx6NOBOqrh58szP35JmsxKflsKIjQuYducgHmjQ0uzw5DpSUlKM1+fPn+f8+fN5HhsbG5ttu3Llyixfvpw5c+awbt06Tpw4gYeHB126dGHMmDF07Ngxz/caNWoUTZo0Yd68eRw6dIirV6/i6+tLUFAQ48aNyzY0WkRERMqmUU078dTPGcnEFWEHePm2e6nk6mFyVIVLyUQREREps/o3bE0lVw/Gbf2KxLRUUq3pPLZ9CVeS4hjd7A6zw5M89O/fn/79+99yfU9PT55++mmefvrpm67bqVMnOnXqdMvnFhERkdLtH/UDeXPPaq4mxZOUnsqSY3uYmGWKndJAcyaKiIhImda1VhOWBj1qPDG2YeP13T/ywb4Nxrx4IiIiIiL54eroxFC/9sb2gtCdpFnTTYyo8CmZKCIiImVeG+86rLhvAr7lvIyyaQe38MKvK0rdzZ9IQSi/LiIicmPD/TviZMlIuZ2Pj2Ld6WCTIypcSiaKiIiIAI0qVmNl70k0qehjlH197DfGb11MUlqqiZGJ2Cut5ywiIpKbGuW8uL9+oLE9J/gXE6MpfEomioiIiPyPbzkvvrtvPLdVq2uUrT8TwtAN84hOTjQxMhEREREpSR4N6Gy83ht5hn2RZ0yMpnApmSgiIiKSRSVXD77pNYYetf2Nst2Xwhm4dhaXEmJMjExERERESoqWVWvR3qeesT23FPVOVDJRRERE5G/cnVyY2204gxq1NcpC/7xI39UzOBl9xcTIRERERKSkGNvsr96Jq08dJiIuyrxgCpGSiSIiIiK5cHJw5MPOA5nU4i6j7Gzcn/RbM4NDV86ZGJmIiIiIlAS96jSjtmclANJtVuaH7jQ5osKhZKKIiIhIHiwWCy/fdi//1763UXY1KZ4H187m54jjJkYmYg4bWs5ZREQkvxwdHBjd7HZj++tju4lPTTYxosKhZKKIiIjIDTwacCfTugzGyZJx6xSflsKIjQv4MfyQyZGJmMeixZxFRERuaEjjdng6uwIQnZLEtyf2mRxRwSmZWITWrVvH5MmT6dq1K4GBgfTq1YsPP/yQuLg4s0MTERGRmzSgYWvm9xiJu5MzAKnWdCZt+4b5Ib+aHJmIiIiI2KvyLm4MaXybsf1FyA6sNquJERWckolFaN68eTg4OPD0008zd+5cHnroIb755htGjx6N1Vqy/8cREREpi7rWasLSoEep5OoBZAz5fG33D3ywbwM2m4Z/ioiIiEhOo5vdgcP/uvSHx1xh89kjJk
dUME5mB1CazZw5k8qVKxvb7du3p2LFirzwwgvs3r2bTp06mRidiIiI3Io23nVYcd8EHt7wBRHx0QBMO7iFK4lxvNOpL44OelYrIiIiIn+pU74yveo0Y+3pYADmhuygZ51mJkd163S3W4SyJhIztWjRAoBLly4VdzgiIiJSSBpVrMbK3pPwq1jNKFt87DfGb11MUlqqiZGJiIiIiD16NOBO4/WOC2GEXIswMZqCKbHJxMjISFauXMlbb73F4MGDCQwMpEmTJgwfPjxf9Xft2sX48ePp2LEjgYGBBAUF8cknn5CQkFCkcf/2228ANGzYsEjPIyIiIkXLt5wXy++bwG3V6hpl684EM2zjPGJSkkyMTKToaDC/iIjIrWlXrS6BVWoa23ODd5gYTcGU2GTi6tWreeGFF1i0aBEHDhwgOTn/S2svWrSIUaNGsW3bNlxdXWnYsCHnz59nxowZDBw4kKioqCKJ+dKlS3z66afcfvvtRg9FERERKbkquXrwTa8xdK/lb5TtuhjOwLWzuJQQY2JkIkXPgpZzFhERyS+LxcLYgM7G9sqTB0rs/WKJTSZ6enpy++23M378eD7//HMmTZqUr3qHDx/mnXfeAWDKlCls27aNFStWsGnTJgICAggLC+O1117LUe/XX3+lSZMmN/zJq2dkfHw8EydOxNHRkXfffffWL1xERETsiruTC3O7D+fBRm2MspBrF+i3eiYno6+YGJmIiIiI2JP767WgukcFAFKs6cwP/dXkiG5NiV2AZeDAgQwcONDYzu8chNOnT8dqtdK3b18GDx5slPv4+PDRRx9x7733smHDBo4cOYK//1+9DFq3bs2aNWtu+P7u7u45ypKSkpgwYQLnzp1j0aJFVK9ePV+xioiISMng7ODIR50fpKpbeWYc3g7Ambhr9Fszg0U9HyGwai2TIxQRERERs7k4OjG2WWfe+j0jv/TlkV08HtgVT2dXkyO7OSW2Z+KtiI+P5+effwZg0KBBOfbXq1ePjh07ArBu3bps+9zd3WnYsOENf3x9fbPVS01N5YknnuDw4cPMnj2bJk2aFNHViYiIiJksFguvtLuX19v1NsquJsXz4NrZ/Bxx3MTIRERERMReDG3SnvL/Sx7GpCTx9dHfTI7o5pWpZGJoaCgpKSm4uLgQGBiY6zFt27YF4ODBgwU+n9Vq5bnnnmPXrl1Mnz6dVq1aFfg9RURExL6Na34n07oMxsmScZsVn5bCiI0L+DH8kMmRiYiIiIjZyru4MaxJR2N7TvAvpKSnmRjRzSuxw5xvRXh4OAC+vr44OzvnekydOnWyHVsQb7zxBuvWrWPChAm4u7tz4MABY1/16tXzHO68ZMkSli1blq9zhIWFFThOERERKVwDGramkqsH47d+RWJaKqnWdCZt+4arSfGMatrJ7PBEbpnNpvWcRURECmpMwB3MDfmFVGs6FxKi+SH8IAMbtTU7rHwrU8nE6OhoALy8vPI8JnNf5rEFkTmkeubMmcycOTPbvscff5zJkyfnWi8yMpLg4OACn19ERETM061WE5b0epSRmxYQlZyADRuv7lpFZGIsz7XuicWilXClZNP/wSIiIremukcF+jdszdLjvwMw44//MqBhmxJzf1imkonJyckAefZKBHBxccl2bEFs2bLllup5e3sTEBCQr2PDwsJISkq6pfOIiIhI0WpbrQ4r7pvA0A1fEBGf8aBy2sEtXEmM451OfXF0KFMzzoiIiIjI/0xo3sVIJh6NusSWc0fpXtv/BrXsQ5lKJrq6ZkxwmZqamucxKSkp2Y41w5AhQxgyZEi+ju3fv796MYqIiNixxhWrsbL3JIZt+IJjUZcBWHzsN64mxfP5XUNwc8r7IaeIiIiIlE6NK1bjntpN2XA2FIAZh7eXmGRimXocnp8hzPkZCi0iIiJyM3zLebH8vgncVq2uUbbuTDDDNs4jJkUjDERERETKookt7jJe77oYzr7IMyZGk39lKplYr149ACIiIvLsnX
jmzJlsx4qIiIgUhkquHnzTawzda/31xHnXxXAGrp3FpYQYEyMTERERETO086lHW+86xvbMP/5rYjT5V6aSiU2bNsXZ2ZmUlBQOHTqU6zF79+4FoFWrVsUYmYiIiJQF7k4uzO0+nAcbtTHKQq5doN/qmYTHXDExMpH80VrOIiIihStr78S1p4M5GW3/94RlKpno6elJ586dAVi2bFmO/adOnWLXrl0ABAUFFWtsIiIiUjY4OzjyUecHmdj8rxvHM3HX6Lt6Bn9cOW9iZCI3p6SsOCkiImLP7qnTlAYVqgJgw8asw/bfO7FMJRMBJk2ahMViYdWqVSxduhSbLeP56uXLl3nmmWewWq306NEDf/+SMemliIiIlDwWi4VX2t3La+3uM8quJsUzcO0sfok4YWJkIiIiIlKcHCwOTGjRxdj+Lmyf3U+BU2KTiRcuXKBDhw7Gz4cffgjAvn37spXPmTMnW73AwEBefPFFAF5//XW6du1Kv3796N69O8HBwdSvX58333yz2K9HREREyp7xzbvwyZ2DcLJk3JLFp6UwYuN8fgrPfToWERERESl9BjRsg497eQCS09OYffhnkyO6vhKbTExPTycqKsr4SUhIACAtLS1beVJSzhUSR40axfz58+nSpQuJiYmcOHECX19fJkyYwPLly6lcuXJxX46IiIiUUQMbtWFej5G4OzkDkGJNZ+K2b1gYutPkyERERESkOLg6OmXrnfjl0V1cS4o3MaLrczI7gFtVq1Ytjh49esv1O3XqRKdOnQoxIhEREZFb061WE5b0epSRmxYQlZyADRuv7FpFZFIcz7bqobnpREREREq5oX4d+OzgNq4lx5OYlsoXITv4Z5t7zA4rVyW2Z6KIiIhIadK2Wh1W3DcB33JeRtknBzbz0s6VpFutJkYmIiIiIkXNw9mFsQF3GNvzQ38lJiXnaFt7oGSiiIiIiJ1oXLEaK++bSGOvakbZV0d3M2HbYpLSUk2MTCSDDZvZIYiIiJRao5reTgUXNwBiUpLsdtobJRNFRERE7IivZ0W+7z2Btt51jLK1p4MZvnG+3T6dlrLJgobfi4iIFKYKLm6M8v9rSr45wb+QkJpiYkS5UzJRRERExM5UcvVgSdBYutVqYpTtvHiSgWtncTkh1sTIRERERKQojQ3obCzMdy05nsXHdpscUU5KJoqIiIjYIXcnF77oPoKBDdsYZSHXLtBvzQzCY66YGJmIiIiIFJXKbuUY3qSDsT3z8M8kp6eZGFFOSiaKiIiI2ClnB0c+vvNBJjTvYpSdjr1Gv9Uz+ePKeRMjExEREZGiMq55F1wdnQC4lBDDt8f3mhxRdk5mByAiIiIiebNYLLza7j683T15c88aAK4kxfHgutl80W04d/g2MjlCc0RGRrJjxw4OHz7MH3/8QWhoKMnJybRv355FixblWa9bt26cP3/9ROyhQ4dwdXXNdd/Zs2eZPn06O3bs4Nq1a1SpUoU77riDiRMnUrt27QJdk4iIiAhAdY8KDG58G18e2QXAf/7YxmC/23B2cDQ3sP9RMlFERESkBBjfvAtV3Dx57pfvSLNZiUtNZvjG+XzaZTD31w80O7xit3r1at59991bru/n54enp2eu+yyW3BcW2b9/P6NHjyYhIQEvLy/8/Pw4e/Ysy5cvZ926dSxYsIDAwNLdFjYt5iwiIlIsJjbvwtdHfyPNZuVs3J+sOnmAgY3amh0WoGSiiIiISIkxsFEbKruVY9yWr0hKTyXFms7Ebd9wNSmekU073fgNShFPT09uv/12WrRoQYsWLQgJCWH69On5rv/qq6/SoUOHGx/4PwkJCUyePJmEhAQGDBjA//3f/+Hq6kpycjL/+te/+P7775k8eTLr16/Hzc3tVi6pxMkj5yoiIiKFoHb5yvRv2JplJzKGOH92aBv9GrTG0cH8GQvNj0BERERE8q1brSYsDXqUiq4eANiw8cquVfx7/0ZsZajb2MCBA5k/fz7PPPMMPXv2pEqVKk
V6vmXLlhEZGUndunV54403jGHQrq6uvPHGG9SpU4eLFy/y7bffFmkcIiIiUnY8Hng3FjKe3oVFR/LjqUMmR5RByUQRERGREqZttTqsuG8CNTy8jLJPDmzmpZ0rSbdaTYys9Fq3bh0A/fr1w9nZOds+FxcX+vfvD8DatWuLPTYREREpnRp4edOnQUtj+5MDm+3iXk/JRBEREZESqHHFaqzqPZHGXtWMsq+O7mbCtsUkpaWaGFnJsGTJEsaPH8/IkSN59tln+eabb4iLi8v12PT0dA4fPgxAu3btcj3mtttuA+CPP/4gPT29aIIWERGRMufJlt2M3oknoiP56dQfJkekORNFRERESixfz4p8f994Rm5ayL7IMwCsPR3MczuW8/ldQ0yOzr6tWbMm2/ZPP/3EtGnT+PDDD7njjjuy7Tt//jypqRkJ2rxWbK5Tpw4AKSkpRERE5HnckiVLWLZsWb5iDAsLy9dxIiIiUno1rliNBxoEsurkQQA+PbglW29FMyiZKCIiIlKCVXIrx5JeY5mwbTFbzh0FYOv//is5tW/fno4dO9KiRQt8fX1JTU1l7969fPrpp4SEhDBx4kS++eYbAgICjDpRUVHG64oVK+b6vl5efw05j46OzjOZGBkZSXBwcKFcixkquLhlee1uYiQiIiJlx1Mtu/PDyUPYsHE8+jLJ6Wm4OpqX0lMyUURERKSE83B24YvuI3j397WsPxPCowF3mh2S3Zo6dWq2bXd3d7p27UqnTp14+OGHCQ4O5oMPPmDBggXGMSkpKcbrv8+XmMnFxcV4nZSUlOf5vb29syUqrycsLOy672WGrrWa0KO2P2dj/2RC8y5mhyMiIlImNK5Yjf9r35v5ob8yuPFtpiYSQclEERERkVLB2cGR19vfz+vt7zc7lBLJzc2Np556ikcffZTdu3cTHR1t9DbMmihMTU01VnLOKmvC0c3NLcf+TEOGDGHIkPwNQe/fv7/d9WL0dHZlQY9RZochIiJS5owN6MzYgM5mhwFoARYREREREQDatGkDgNVq5ezZs0Z51iHMWYc8ZxUdHZ3r8SIiIiKljZKJIiIiIiJkH8KcdUXmmjVrGvvOnDmTa93MchcXF3x9fYswShERERFzKZkoIiIiIgIcO3bMeF29enXjtZOTE82bNwfg999/z7VuZnmLFi1wdHQswihFREREzKVkooiIiIgIMGfOHAAaNWqEj49Ptn29evUCYMWKFaSmpmbbl5KSwvfffw9AUFBQMUQqIiIiYh4lE0VERESkTPjiiy9YtGgRf/75Z7byP//8k9dff53169cD8MQTT+SoO3jwYLy9vTl9+jT/93//R3JyMgDJycn83//9H2fOnKFatWo8+OCDRX8hIiIiIibSas4iIiIiUuJcuHCBvn37GtuZqynv27ePDh06GOVjx47l0UcfBeDixYt8+eWXvP3229SsWZPKlSuTlJTEyZMnSUtLw8HBgWeeecbohZiVh4cH06ZNY+zYsSxfvpxNmzZRq1Ytzp07R3R0NB4eHnz22We4u7sX7YWLiIiImEzJRBEREREpcdLT03NdWTktLS1beVJSkvG6d+/eABw6dIiIiAiOHDmCo6MjtWrVon379jz88MM0bdo0z3O2bduWVatWMX36dHbs2MGxY8eoVKkS/fv3Z9KkSdSuXbvQrk9ERETEXimZKCIiIiIlTq1atTh69OhN1WnVqhWtWrUq0Hnr1KnD1KlTC/QeIiIiIiWZ5kwUERERERERERGRfFEyUURERERERERERPJFyUQRERERERERERHJFyUTRUREREREREREJF+UTBQREREREREREZF8UTJRRERERERERERE8kXJRBEREREREREREckXJRNFREREREREREQkX5RMFBERERERERERkXxRMlFERERERERERETyRclEERERERERERERyRcnswOQgqlfv362/4qIiIhI6aD7PBEREbFHFpvNZjM7CBEREREREREREbF/GuYsIi
IiIiIiIiIi+aJkooiIiIiIiIiIiOSLkokiIiIiIiIiIiKSL0omioiIiIiIiIiISL4omSgiIiIiIiIiIiL5omSiiIiIiIiIiIiI5IuSiSIiIiIiIiIiIpIvSiaKiIiIiIiIiIhIviiZKCIiIiIiIiIiIvmiZKKIiIiIiIiIiIjki5PZAYg5nn32WcLDw80OQ0REREqg+vXr8+GHH5odhuRB93kiIiJSEDe611MysYwKDw8nODjY7DBEREREpJDpPk9ERESKkpKJZVT9+vXNDiGbsLAwkpKScHNzo2HDhmaHU+apPeyP2sS+qD3sj9qkeNnbfYRkV9Tto8+bfVP72D+1kX1T+9g3tU/xuNG9hMVms9mKKRaRPPXv35/g4GACAgL4/vvvzQ6nzFN72B+1iX1Re9gftYlI8dHnzb6pfeyf2si+qX3sm9rHPmgBFhEREREREREREckXJRNFREREREREREQkX5RMFBERERERERERkXxRMlFERERERERERETyRclEERERERERERERyRclE0VERERERERERCRflEwUERERERERERGRfFEyUURERERERERERPJFyUQRERERERERERHJFyUTRUREREREREREJF+czA5ABGDQoEFERkbi7e1tdiiC2sMeqU3si9rD/qhNRIqPPm/2Te1j/9RG9k3tY9/UPvbBYrPZbGYHISIiIiIiIiIiIvZPw5xFREREREREREQkX5RMFBERERERERERkXxRMlFERERERERERETyRclEERERERERERERyRclE0VERERERERERCRfnMwOQOzXrl27mD9/PgcPHiQhIQFfX1+CgoIYN24cHh4et/Se69ev56uvvuLIkSOkpqZSt25dHnjgAUaMGIGzs3Oe9a5evcqMGTPYunUrly9fpkKFCrRr147x48fTtGnTHMenp6eza9cutm3bxv79+zl16hRJSUlUrFiRFi1aMHjwYO6+++5bugazlOT2AFi6dCn79+8nJCSEK1euEB0djbu7Ow0aNKBnz54MGzYMd3f3W7oOs5T0NsnN4sWLmTJlCgDt27dn0aJFt3QdZijp7fHiiy+yYsWK68YzZ84cunTpckvXUtxKentkstlsrF69mhUrVhAaGkpMTAwVK1akYcOGdOnShTFjxtzStYiUREXxuZYMNpuN/fv3s2XLFvbu3cvJkyeJi4ujfPnyNGvWjL59+/KPf/wDi8WSa/34+Hhmz57N+vXriYiIwMPDg5YtWzJ69Gg6dOhw3XOrXW/d9u3bGTduHAA1a9Zky5YtuR6n9il+27dv59tvv+XAgQNERUXh5eVF7dq16dChA5MnT8bJKXsqJDU1lYULF/LDDz9w5swZnJ2d8ff3Z/jw4dxzzz3XPVdISAizZ89mz549xMTEUK1aNbp27cqkSZOoXLlyUV5mifPnn38yf/58tm7dyrlz50hNTaVy5cq0bt2a4cOHc9ttt+VaT58h+2Ox2Ww2s4MQ+7No0SLefvttbDYb1atXp3Llypw4cYKUlBQaNmzI119/TcWKFW/qPd977z3mzZsHQJ06dXB3d+fEiROkp6fTrl075s2bh4uLS456p0+f5uGHH+bKlSt4eHhQv359Ll68yNWrV3F2dmbatGl07949W51vv/2WV199FQAHBwfq1KlDuXLlOH36NHFxcQAMHjyYN954I8+bMntS0tsD4LbbbiM2NhY3Nzd8fHwoX748ly5dIjIyEoB69eqxYMECatSocfP/QCYoDW3yd5cuXeK+++4zPiMlKZlYGtojM5lYo0aNPD8HL774Ii1btryp6zBDaWgPyLhxffzxx/n1118BqF27NhUrVuTq1atcunSJ8uXLs3v37pv7xxEpoYricy1/2blzJ6NGjTK2a9euTYUKFTh//jxRUVEA3H333Xz22Wc5ftddu3aNhx9+mPDwcFxcXGjUqBHXrl3j4sWLWCwWXnvtNYYOHZrredWuty4+Pp7777+fiIgIIO9kotqneKWlpf
HSSy/xww8/AFCjRg2qVq1KVFQUFy9eJDU1lX379lGuXDmjTnJyMo888gh79+7F0dGRRo0akZiYyJkzZwB49NFHee6553I934YNG3jmmWdITU2lSpUqVK9enfDwcBISEvD29uabb76hdu3aRX/hJcCpU6cYNmwYkZGRODg4ULNmTTw9PTlz5gzx8fFYLBZefPHFbL8LQZ8hu2UT+Zs//vjD5u/vb2vSpIltyZIlNqvVarPZbLaLFy/a+vXrZ/Pz87M9/vjjN/WeGzZssPn5+dmaN29u27Rpk1F+4sQJW7du3Wx+fn62d999N0c9q9Vq69Onj83Pz882ZswYW0xMjM1ms9lSU1Ntn3zyic3Pz8/WqlUr26VLl7LVW7Zsme0f//iHbdmyZUadzHpz5861NWnSxObn52dbvHjxTV2HGUpDe9hsNtv8+fNtBw8etKWnp2cr//33322dO3e2+fn52R599NGbug6zlJY2+bsJEybYmjZtahs/frzNz8/PNmzYsJu6BrOUlvZ44YUXbH5+frZPP/30pmK1N6WlPaxWq+2RRx4x6p4+fTrb/ujo6GyxiJRmRfG5lux27Nhh69atm23hwoW2K1euZNu3YsUKW/PmzW1+fn62999/P0fdCRMm2Pz8/Gz9+vWzXbx40WazZfwOW7Jkic3Pz8/WtGlTW0hISI56ateCefPNN21+fn62iRMn2vz8/Gxdu3bN9Ti1T/F65ZVXbH5+frYBAwbYgoODs+1LSEiwbdq0yZaSkpKtPLMtu3XrZgsLCzPKN23aZHz2Nm/enONcFy9etLVs2dLm5+dn++STT2ypqak2m81mi4mJsY0ZM8bm5+dn69+/v9F2Zd2IESNsfn5+tnvuucd2/PhxozwpKck2depUm5+fn61Zs2a28PDwbPX0GbJPSiZKDplfiM8//3yOfeHh4TZ/f3+bn5+fLTQ0NN/v+cADD9j8/Pxs06ZNy7Hv119/Nf5IvHr1arZ9GzdutPn5+dnatm1ri4qKylF36NChuf4R+eeff173l/arr75q8/Pzsz3wwAP5vgazlIb2uJHVq1fb/Pz8bP7+/rb4+PibqmuG0tgmmW3w1ltv2T799NMSlUwsLe1RWpKJpaU9vvvuO5ufn5/twQcfNP44ECmriuJzLdnFxsbmSHBkNWPGDJufn5+tffv22R7MBgcHG/dQp06dylHvn//8Z55/MKtdb93+/ftt/v7+tokTJ9qWL1+eZzJR7VO8du7cabRFbGxsvupERkbaAgICbH5+fradO3fm2J/58LFfv3459r311ls2Pz8/29ChQ3Psi4qKsrVt2zbPRGRZExsba3To2bhxY479VqvV1rNnT5ufn59t0aJFRrk+Q/ZLC7BINvHx8fz8888ADBo0KMf+evXq0bFjRwDWrVuXr/c8deoUR44cATKGFv9dp06dqFu3LikpKWzevDnbvrVr1wIQFBSEl5dXjrqZMWYel6lixYrXHb6cOedYeHh4vq7BLKWlPW6kYcOGAFitVpKTk2+qbnErjW0SHR3N22+/TfXq1XnqqafyFbO9KI3tUZKVpvZYsGABABMnTswxr5JIWVIUn2vJydPT87pzv2beu0ZFRXHt2jWjfP369QB07NiRunXr5qiX+Xtz+/btJCQkGOVq11uXmprKa6+9hpubG6+//vp1j1X7FK/58+cDMHr0aDw9PfNVZ8uWLaSmpmb7N81qyJAhAAQHBxvDnjNltm9ubeTl5UVQUBBQeu/7bkZKSgq2/82wV6dOnRz7LRaLMRw8LS3NKNdnyH4pmSjZhIaGkpKSgouLC4GBgbke07ZtWwAOHjyYr/c8cOAAkDH3i4+Pz029Z+Z2XhOxZpZfvHiRS5cu5SsegKSkJAC7X/CjrLTH3r17gYy5ZipVqpTvemYojW0ydepUrly5wmuvvZZt/piSoDS2x+7du3niiScYMWIEjz/+ODNmzOD8+fP5it1spaU9zpw5w7Fjx3BwcKBDhw4cPHiQ11
9/nVGjRjFp0iRmz56d7Y95kdKsKD7XcvMy710B3NzcjNeZvyPz+j0XGBiIi4sLycnJhIaGGuVq11s3a9Ysjh07xpNPPkn16tWve6zap/gkJyezY8cOIONB44kTJ3j77bcZPXo0EyZMYNq0abneT2W2Uea/59/5+PhQq1atbMcCXLhwwbh3aNeuXa51M9tdbQSVK1c2Pi/79+/PsT8hIcF4eNyiRQujXJ8h+6VkomST2VPP19c3z6ejmU8S8tur79SpU9nq5fc9U1JSjF/4edWtUaOGEefJkyfzFQ/A6tWrgby/NOxFaW6PtLQ0IiIi+PLLL/nggw9wdnbm5Zdfztc1mKm0tcnOnTv5/vvv6datGz169MhXvPaktLUHwJ49e1i/fj27d+9m48aNfPLJJ/Tq1Ys5c+bkK34zlZb2OHz4MJDRy33x4sUMHjyYpUuXsnPnTjZv3syHH35Ir1692LVrV76uQaQkK4rPtdy8zHtXf3//bD2ubvQ70tnZ2VjUK2v7qF1vTVhYGLNmzSIgIIDhw4ff8Hi1T/E5cuQIqampQEZHhb59+/Lll1+yY8cOtm7dyvTp0wkKCuKnn37KVu9W7zMy6zk7O+eZVM7saXf27FkjtrLs2WefxWKx8P777/Ptt98SGRlJYmIihw4dYuLEiVy5coUHHngg29/o+gzZL43bkWyio6MBch0OlilzX+axhfmeMTExRllcXBxWq/W6dS0WCxUqVODq1avZ6l7Ppk2b2Lp1KxaLhbFjx+arjllKY3u8/fbbfPnll9nKOnfuzOTJk2nVqlW+rsFMpalNkpKSeP311/Hw8LjhMB17VZrao27durz44ot07NiRmjVr4uLiwtGjR5k3bx7r1q3j3//+Nx4eHnmuVmcPSkt7XL582Xi/f//739x9993885//pE6dOoSHh/POO++wa9cuJk+ezI8//njDnikiJVlRfK7l5hw+fJglS5YAMG7cuGz7bvV3pNr15tlsNl599VXS0tJ44403cHR0vGEdtU/xiYyMNF5PmTKFZs2a8eqrr+Lv78+FCxf4+OOPWbt2LS+++CINGjSgWbNmwK23UeYq615eXnlOr5W5SrDVaiUuLs7uR2AVtQceeIDy5cszY8YMXn311Wz7vL29+de//mUMK8+kz5D9Us9EySZzvrrrzdni4uKS7djCfM+sQziyvn/m/vzWzUtYWBgvvvgiACNHjqRNmzY3rGOm0tgetWvXpk2bNjRv3pzKlSsDsG/fPn744QdSUlLydQ1mKk1t8umnn3LmzBmefPJJ44leSVOa2mPixIk88sgjNG3alAoVKuDm5kbLli2ZNm0aDz/8MACffPIJ8fHx+boOM5SW9siccyctLY06derw+eef06hRI1xcXGjSpAkzZ87E29ubmJgYFi5cmK/rECmpiuJzLfl35coVJk+eTFpaGj179qR3797Z9hf0d6TaNf++/vpr9u3bx9ChQ7MNw7wetU/xyXp/5Obmxpw5c4whsHXr1uWjjz6iadOmpKamMnPmTOPY4mijrMeXdadPn+bq1as4ODhQs2ZNmjRpgru7O5GRkarAzTMAABs4SURBVKxYsYLjx49nO16fIfulZKJk4+rqCnDdbtiZCZ/MYwvzPbPOAZP1/a+XZMqtbm4uXLjA2LFjiY2N5a677uK55567cfAmK43tMWLECL755huWL1/Ozp07Wbx4MbVq1WLx4sU88cQT+boGM5WWNgkJCWHhwoU0a9YsX8N07FVpaY8beeaZZ3B2diYmJsauh9aWlvbIWnfo0KE5bkTd3d2NJ+eZk3uLlFZF8bmW/ImNjeXRRx8lIiKCgIAApk6dmuOYgv6OVLvmz6VLl/joo4/w8fG5qcXq1D7FJ+u/Q79+/XL0SHNwcGDUqFEA/PLLL8boheJoo7/HV1a98cYbvPvuu1SqVIk1a9awZcsWfvjhB3bt2sWYMWM4ePAgDz30ULa5LfUZsl9KJko2+enqm58uw1lVqFAh3+
+ZeSxkrGrn4OBw3bo2m83ozpy17t9FRkYyatQoIiIiaN++PZ999tl1n1LYi9LaHlnddtttzJ49G2dnZ7Zu3WosxmKvSkubvPLKK1itVqZMmZKvYTr2qrS0x42UL1+exo0bAxlPdO1VaWmPrK8zV5v/u8zyc+fO3fAaREqyovhcy43Fx8czduxYQkJCaNy4MV988UWuq9Pe6u9ItevNefPNN4mLi+PVV1/N9yrBoPYpTln/HfL67m7QoAGQ8fnKHKZcGG2UuUrx32Wew8HB4ab+vymNjhw5wjfffIOzszPTpk2jfv36xj43Nzeef/55OnXqRFxcHLNmzTL26TNkv5RMlGzq1asHQERERJ5Z/DNnzmQ79kYyf1Fc7w/g3N7TxcUFX1/fbPv/7sKFC0acWX8hZXX16lVGjhzJqVOnaN26NTNnziwxTx9KY3vkpkaNGvj5+QEQHByc73pmKC1tEhISgsViYcKECdxxxx3ZfubNmwdkrLSWWXbhwoV8XUtxKy3tkR+ZD0DS0tJuql5xKi3tkfnHBuQ9PCbzeySzZ4NIaVUUn2u5vsTERMaPH8+BAweoV68e8+fPz3Outcx/87x+R6amphIREZHt2Kyv1a75ExISAmT0rPr7fdPbb78NZHynZJbt27cPUPsUp5v57oa/vr9v1EaQ+7915uvU1NQ875PPnj0LQK1atUpER5aitHfvXmw2G3Xr1qVmzZq5HnPHHXcAfy2EB/oM2TMlEyWbpk2b4uzsTEpKCocOHcr1mMyeY/ldLKNly5ZARu+NS5cu3dR7Zm7//vvvudbLLK9evXquE+BHRUXxyCOPEBYWRkBAAHPmzKFcuXL5itselLb2uJ709PRs/7VXpalN0tPTuXLlSo6fzPniUlNTjTJ7bZfS1B7Xk5aWZqw2bM+LfZSW9mjWrJkxVCbzD4G/y7wBtef2ECkMRfG5lrwlJyczceJE9uzZQ82aNVmwYAHe3t55Hp/5b57XyI5Dhw6RmpqKq6srTZs2NcrVrrcmt/umuLg4ICM5lVmWmbxQ+xQfHx8fI0mV13d3Zrmrq6uxOErmv19mAvjvLl26ZIxCyPpv7evrS7Vq1YAb32eojbipOb+zDg/XZ8h+KZko2Xh6etK5c2cAli1blmP/qVOnjPm6goKC8vWe9evXN3qdLV26NMf+nTt3cvr0aZydnenevXu2fb169QJg3bp1uXZRzowxt1ji4uIYPXo0R48exc/Pjy+++ILy5cvnK2Z7UZra43pOnTrFsWPHALJ9Cdij0tImR48ezfPn8ccfB6B9+/ZGWa1atfJ1LcWttLTHjSxdupTY2FicnJzo2LHjTdUtTqWlPdzd3enatSsAK1euzFHPZrOxYsUKALtuD5HCUBSfa8ldamoqkydPZufOnfj4+LBw4cIbLpCW+Xtu9+7dufbcyfy92aVLl2wP1NWuN2fLli153je9++67ANSsWdMo69ChA6D2KW733nsvAD/++GOuIzm+++47ANq1a4eTkxMA3bt3x9nZOdu/aVaZK6k3a9aMunXrZtuX2b65tVF0dDTr1q0D1EaQfaRJ1jkRs9qxY0e2Y0GfIXumZKLkMGnSJCwWC6tWrWLp0qXGHBCXL1/mmWeewWq10qNHD/z9/bPV69atG926dTN+aWaVmZyYM2cOW7ZsMcpPnjxpLAv/8MMPG6v7ZurRowdNmjQhNjaW5557jtjYWCCjR9W0adPYs2cP7u7ujB49Olu9xMRExo0bR3BwMA0aNGDBggV5Dg+xd6WhPdauXcuXX35JZGRkjlh27drFo48+itVqpVmzZrRv3/5m/4mKXWlok9KkNLTHjh07+OCDDzh16lS28pSUFBYtWmT8oTJkyBDjKbi9Kg3tkXlOJycnfv/9d/7zn/8YvXPT0tL44IMPOHLkCK6ursZk7iKl2a1+riX/0tPTefbZZ9m+fTve3t4sXLiQ2rVr37BeQEAAXbt2JT09naeffprLly8DGQ89li5dyqpVq3
BwcGDixIk56qpdi57ap3iNGTOG8uXLc+7cOaZMmWKs0muz2fjyyy/ZunUrFouFcePGGXWqVq3K4MGDgYw5xTNHgkBGEnnu3LkAPPbYY7mez83NjT179jBt2jTjXiE2NpZnn32W2NhYmjVrRrdu3YrsmkuKO+64gypVqpCamsqTTz5JeHi4sS8pKYn333+fnTt3AtCnTx9jnz5D9stiy2u2UCnTFixYwNSpU7HZbNSoUYNKlSpx4v/bu/egKK/7j+OfleUSBCIIikiNRK2KkYxRscTmQqNi2qbEjDGT0aQYNVyicaJpq5HkDzNWxniPjYLclMgkJpG2MTGdxsvQBBqGkY4BFWuroqgoclFEru7vD2b3JxV1ye5mubxfM8zAPofzfNln0IfPec45J0+qqalJISEhys7Ovu2PtpEjR0qSVq9ereeee+62Pv/4xz9qx44dkqQhQ4bI09NT//73v9Xa2qrx48crIyOjw7UMT506pdmzZ+vKlSvy9PRUSEiILl68qCtXrsjV1VUbNmzQ1KlT231PcnKy1q9fL6lt/QzzY+wd2bx5812nj3QF3f16ZGZmWsKQQYMGyd/fXyaTSeXl5aqurpYkDR8+XNu3b7escdbVdfdrcjfvv/++tmzZovDwcGVlZXXmbXGa7n49vv76a8tNqr+/vwYOHGjpyzztPCoqSmvXrpWbm5uN75bjdffrYZaTk6MVK1aotbVVfn5+Cg4OVllZmWpqauTq6qqkpCT9+te/tvXtArqFH/J7Devt3btXS5culdT2hJv5/4GOvP322woNDbV8XVVVpRdffFGnT5+Wm5ubhg8frurqal24cEEGg0ErVqzQSy+91GFfXFfb7dmzR8uXL9fgwYPbDXiZcX1+XHl5eYqPj1dDQ4O8vb01dOhQXbx4UZcvX5bBYNDvfvc7zZs3r933NDQ0KCYmRkVFRXJxcdGIESNUX19vWdLklVde0R/+8IcOz/fVV19p6dKlamlpUf/+/RUYGGi5f/P391d2dvZtTzT2Vnl5eXrttddUX1+vPn36KCgoSH379lVZWZlu3LghSZo9e7beeeeddt/H71DXRJiIO8rPz1d6erqOHDmi+vp6BQUFafr06Xr11Vc7XHfwXn8ISm1PqGVnZ+vYsWNqbm7WkCFD9Jvf/EYxMTF3XZS2srJSW7du1cGDB3Xp0iX5+PhowoQJiouLa3czZWYOQ6yxf//+LjuF81bd+XqcO3dOX3zxhQoKCnTq1ClVVVWppaVFvr6+GjVqlKZNm6bo6OhuEZLcqjtfk7vpjmGi1L2vx4ULF7R7927961//0pkzZ1RdXa3m5mb5+fnp4Ycf1owZM7rdqHZ3vh63+v7775WamqrCwkLV1taqX79+mjRpkhYsWMBINnqdzv5ew3rmQMoaO3futEyjNaurq9P27dv11Vdf6fz58/L09FRYWJjmzZt3z+UYuK62uVeYKHF9fmynT59WcnKy8vLydOXKFXl5eWncuHGaO3fuHWdBNTU1KTMzU59//rnKysrk6uqq0aNHa86cOZaptndSUlKi5ORkFRYW6urVqxowYIAiIyOVkJCg/v37O+JH7LbOnj2rzMxM5eXl6fz582ptbVW/fv0UFhamWbNm6cknn+zw+/gd6noIEwEAAAAAAABYhTUTAQAAAAAAAFiFMBEAAAAAAACAVQgTAQAAAAAAAFiFMBEAAAAAAACAVQgTAQAAAAAAAFiFMBEAAAAAAACAVQgTAQAAAAAAAFiFMBEAAAAAAACAVQgTAQAAAAAAAFjF6OwCAAAAAAAAbHX+/HmtX79excXFunTpkkwmkwYPHqzp06crJiZGXl5ezi4R6BEIEwHAia5fv6709HQVFxeruLhYlZWVmjFjhpKSkpxdGgAAANCtVFZWqqKiQlOnTlVgYKBcXFxUXFysbdu26cCBA/r444/l6urq7DKBbo8wEQCcqLq6Wlu2bFFAQIAeeughHTp0yNklOR0BKwAAAH6IsLAwZW
Vl3fb60KFD9d577ykvL09PPPGEEyoDehbCRABwogEDBig3N1cDBw5UY2OjwsLCnF2S0xGwAgAAwJ6Cg4MlSbW1tU6uBOgZCBMBwInc3Nw0cOBAZ5fRpRCwAgAAtHnppZdUUFCghQsXatGiRc4up9tobGzU9evX1djYqNLSUm3YsEHu7u6aOHGis0sDegR2cwYAdCkErAAAoCeKjo7WyJEjlZeX5+xSerxPPvlEERERevLJJxUbGyuDwaAPPvhAgwYNcnZpQI/Ak4kAAAAAADhQeXm5jh8/Lh8fH4WHhzu7nC7PZDKpqanJqrYGg0Fubm7tXpsyZYoefPBBXbt2TUVFRSooKFBdXZ0jSgV6JcJEAOgloqOjdfz4cWVkZOjRRx+9Z/v3339fW7ZskSSVlpbesd1nn32mt99+W62trZowYYK2bdsmb29vu9UNAADQ3e3fv1+S9Pjjj8to5M/weykvL9dTTz1lVdsRI0Zo79697V4LDAxUYGCgJCkqKkr79u3T4sWLrb4PBnB3/CsGAL2Ao0bDMzMzlZSUJJPJpMjISG3cuFEeHh6SbB9RBgAA6CnMYaK1AVlv5+vrq9WrV1vV1sfH555tpk2bJnd3d3322WeEiYAdECYCQC/giNHwjRs3auvWrZKkZ555RklJSe36tnVEGQAA4MfW3Nys7Oxs5eTkqKysTA0NDZbB2DfeeEMhISGd7rO2tlaFhYVydXXV448/btd6c3JylJiYqJaWFsXFxemNN96wHDOZTNqzZ492796tEydOqE+fPnrwwQc1c+ZMzZo1S8uXL1dOTo5mzJihpKSkTp331o1h4uPj9eGHH+rPf/6zzpw5Iw8PD40bN06vv/66Ro0aJUm6ceOGMjIy9OWXX+rcuXNyd3dXRESElixZoiFDhtzWf9++ffXcc8/Z9ubcorW1VS0tLbp69ard+gR6M8JEAOgF7DkabjKZ9O6772rXrl2SpDlz5igxMVEGg6FdO3uPKAMAADjSqVOntHjxYsvyLl5eXnJxcVF1dbX+9re/KS8vT5988kmnA8VDhw6ppaVFjz32mLy8vOxWb0pKitatW6c+ffronXfe0ezZsy3HWltb9eabb+rLL7+U1DYLxMfHR8XFxTpy5IgKCgrk6upqcw0tLS2aP3++8vPz5erqKldXV1VVVWn//v3Kz8/Xzp07FRwcrFdeeUVHjx6Vu7u7DAaDampqtG/fPhUUFOjTTz9VUFCQzbVIUmVlpfz9/W97/aOPPlJra6sefvhhu5wH6O0IEwGgh7PnaHhLS4uWLVumzz//XJL02muv6fXXX++wrb1HlAEAABzlv//9r1588UXV1NQoKirK8hRiS0uL9u7dq8TERF27dk1r1qyxzMywlr2nOJtMJq1atUpZWVlyc3PTe++9p+nTp7drk5aWZgkS586dq9jYWPn6+qqurk67du3Shg0b7DKYm52drT59+mjTpk166qmnZDQa9f3332vJkiU6e/asVq1aJX9/f9XW1iotLc0yxfi7777TkiVLdOXKFa1fv15r1661uRZJWrt2rU6ePKnJkycrKChI9fX1Kigo0MGDBxUSEqKXX37ZLucBejvCRABwsg8//FBXr15Va2urpLbNTj744ANJ0sSJEzVx4kSb+rfXaHhjY6MWL16sgwcPymAw6K233uKGDAAAdHvNzc1avHixampq9MILL2jlypWWY0ajUc8++6xKS0uVnp6u3NxcNTQ0WNaIvpempib94x//kMFg0C9+8Quba21qatLvf/977du3T97e3vrTn/6kSZMmtWtTX1+v5ORkSdLMmTO1bNkyyzEvLy/FxsaqqanJstGeLa5evapdu3ZpwoQJltfCwsL07rvvKiYmRkVFRfLw8NBf//pXPfDAA5Y2ERERWrp0qVasWKG///3vam5utsuTklOnTlVNTY1ycnJUVVUlo9GoIUOGKD4+XvPmzbPrk6FAb0aYCABOlp6ervLycsvXR48e1dGjRyVJCx
cutDlMtMdoeF1dneLj41VQUCCj0ahVq1bp2Weftamuu3F0wAoAAGC2Z88enThxQoMHD1ZiYmKHbSIjI5Wenq6WlhaVl5dr2LBhVvWdn5+v+vp6jR07VgMHDrSpzrq6OiUkJOi7775TQECAUlNTLWsS3urbb79VXV2dJCkuLq7DvubOnau0tDTduHHDpprGjx/fLkg0Cw8Pl5ubm5qamhQVFdUuSDR77LHHJEkNDQ06c+aMhg8fblMtUtv9LpvcAI5HmAgATnbgwAGH9W2v0fCXX35ZJSUlcnd318aNG+0ysn43jg5YAQAAzD766CNJbfc7bm5uHba59Yk2k8lkdd/2muJ8+fJlzZkzR8eOHdPQoUOVlpam4ODgDtuWlJRIkoKCgvSTn/ykwzZeXl4aM2aMCgsLbaorLCysw9ddXFzk6+uriooKjR07tsM2/fv3t3xeW1trUx0AflyEiQDQg9lrNNx8Uzpz5kyHB4mSYwNWAAAAs4qKCsuA5d0Cv8uXL1s+DwwMtKpvk8lkuaeZMmWKDVVKH3/8sSTJ3d1dmZmZGjRo0B3bVlVVSZIGDBhw1z5tfVJSalsj+06MRuNd25iPS23rcgPoPvo4uwAAgOPYazR83LhxkqRdu3Zp586dNtcFAADQFRw+fFiS1K9fvzs+xSdJxcXFkqShQ4dave7ekSNHdPnyZQ0ZMkQjRoywqc7IyEh5e3ursbFRy5cvt2p6ssFgsOmcAHAnhIkA0EPZczQ8NTVVjzzyiCRp1apVyszMtLU8AAAApzPPvggICLhrO/MA7RNPPGF13/bcxXnMmDHKyMjQ/fffr/z8fL366quqr6/vsK2fn58k6dKlS3fts6Kiwua6APROhIkA0EPZczTcy8tLqampGj9+vCRp9erVysjIsEeZAAAATmOe4mzesKQj//znP1VSUiKDwaBZs2ZZ3ffXX38tyT5hoiSNHTtWmZmZ6tevnwoKCrRgwQJdv379tnZjxoyRJJWXl+vcuXMd9nX9+nVLkAoAnUWYCABOMnLkyE5/dIY9R8OltvVuUlNTLZufJCUlKS0tzS59AwAAOIM5ULtw4YJOnDhx2/GqqirLDs8zZsywesfh06dP6z//+Y98fX0tszvsITQ0VDt27JCvr68KCws1f/7824LQyZMnW6ZiJycnd9hPZmamzTs5A+i9CBMBwElKS0s7/dEZ9h4NlyRPT0+lpKQoPDxckrRmzRpt377dbv0DAAD8WMrLy1VTUyNJ8vb21ptvvqmTJ09KatsQ5NChQ3rhhRd09uxZDRs2TG+99ZbVfZsHdSMjI+Xi4mLXukeNGqUdO3bIz89Phw8fvi1Q9PT01IIFCyRJu3fv1po1ayw/Z11dnVJSUrRlyxbdf//9dq0LQO9BmAgAPZCjRsOl/w8UJ02aJElau3atUlJS7HoOAAAARzNPcQ4ICNCyZctUWlqqX/3qV5o4caLGjRun2NhYlZWVWdYr9Pb2trpve88Q+V8jR45UVlaW/P39VVRUpLlz5+rq1auW4/Pnz1dUVJQkKS0tTREREQoPD1d4eLjWrVunZ555RpGRkZIkNzc3h9QIoOcy3rsJAMARqqur9bOf/cyqtj//+c87NaXYkaPhknTfffcpJSVFcXFxys/P17p163Tz5k3FxcXZ/VwAAACOYN6hOTQ0VDNnzlTfvn2VkZGhkydPymg0KjQ0VNHR0Zo1a5aMRuv/dK6qqlJRUZE8PDw0efJkR5Wv4cOHa+fOnfrtb3+rI0eOKCYmxrJJi9Fo1KZNm/Tpp59q9+7dOnnypFpaWvTQQw/p+eef1/PPP6/4+HhJko+Pj8NqBNAzESYCgBOtWbOm3debN29WZWWlVq5c2e71kJCQTvXr6NFwSfLw8NC2bduUkJCgb7/9Vhs2bNDNmzeVkJDgsHMCAADYi/nJxNGjR0uSnn76aT399NM293vgwAHdvHlTERERuu+++2zqKysr667Hhw0bpm+++abDYwaDwRIc/i+TyW
T5+a1dB7IzdUlt78O9dHYZHwBdA2EiADiJr6+voqOjLV+bTCatXLlSP/3pT9u93lk/1mi41BYobt26VQkJCfrmm2+0adMm3bx5UwsXLnToeQEAAGxlDtNCQ0Pt2q95UHfKlCl27dee/vKXv+jixYsyGo169NFHnV0OgG6GMBEAuoizZ8+qrq7OMjr+Q9lrNHzRokVatGjRPdu5u7uzqzMAAOhWKioqVFlZKcn+YeIjjzyi0NBQh84QscaSJUs0bdo0hYeHy8/PT5JUWVmpPXv2aPPmzZKk6OhoDRgwwJllAuiGCBMBoIv436k2P1R3GA0HAABwppKSEkltuzgHBwfbtW/zTsrOlpubqy+++EJS23rXRqNR165dsxyfMGFCp3aoBgAzwkQA6CKOHTsmyfYwsauMhgMAAHRVtw7iGgwGJ1fjGImJicrNzdXRo0dVVVWl+vp6+fn5afTo0frlL3+p6Ohoubq6OrtMAN2QwWQymZxdBABAio2NVW5urg4fPmzzYt0AAAAAADhCH2cXAABoc+zYMT3wwAMEiQAAAACALoswEQC6gKqqKlVUVNg8xRkAAAAAAEciTASALuD48eOSbF8vEQAAAAAARyJMBIAuwLz5yqhRo5xcCQAAAAAAd0aYCABdgL12cgYAAAAAwJEIEwGgCzh+/Lj8/f0VEBDg7FIAAAAAALgjg8lkMjm7CAAAAAAAAABdH08mAgAAAAAAALAKYSIAAAAAAAAAqxAmAgAAAAAAALAKYSIAAAAAAAAAqxAmAgAAAAAAALAKYSIAAAAAAAAAqxAmAgAAAAAAALAKYSIAAAAAAAAAqxAmAgAAAAAAALAKYSIAAAAAAAAAqxAmAgAAAAAAALDK/wFlO5kDWK95LwAAAABJRU5ErkJggg==",
"text/plain": [
"<Figure size 1500x600 with 2 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"fig, ax = plt.subplots(1, 2, figsize=(15, 6))\n",
"ax[0].set_title(\n",
" \"saturation pressure of {}\".format(\n",
" parameters.pure_records[0].identifier.name\n",
" )\n",
")\n",
"sns.lineplot(\n",
" y=phase_diagram.vapor.pressure / si.PASCAL, \n",
" x=1.0/phase_diagram.vapor.temperature * si.KELVIN, \n",
" ax=ax[0]\n",
")\n",
"ax[0].set_yscale('log')\n",
"ax[0].set_xlabel(r'$\\frac{1}{T}$ / K$^{-1}$')\n",
"ax[0].set_ylabel(r'$p$ / Pa')\n",
"#ax[0].set_xlim()\n",
"#ax[0].set_ylim()\n",
"\n",
"ax[1].set_title(\n",
" r\"$T$-$\\rho$-diagram of {}\".format(\n",
" parameters.pure_records[0].identifier.name\n",
" )\n",
")\n",
"sns.lineplot(\n",
" y=phase_diagram.vapor.temperature / si.KELVIN, \n",
" x=phase_diagram.vapor.mass_density / si.KILOGRAM * si.METER**3, \n",
" ax=ax[1], \n",
" color=colors[0]\n",
")\n",
"sns.lineplot(\n",
" y=phase_diagram.liquid.temperature / si.KELVIN, \n",
" x=phase_diagram.liquid.mass_density / si.KILOGRAM * si.METER**3, \n",
" ax=ax[1], \n",
" color=colors[0]\n",
")\n",
"ax[1].set_ylabel(r'$T$ / K')\n",
"ax[1].set_xlabel(r'$\\rho$ / kg m$^{-3}$')\n",
"#ax[1].set_ylim()\n",
"#ax[1].set_xlim()\n",
"\n",
"sns.despine(offset=10)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "feos",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.13.5"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
| Unknown |
3D | feos-org/feos | docs/theory/eos/index.md | .md | 349 | 12 | # Equations of state
This section explains the thermodynamic principles and algorithms used for equation of state modeling in $\text{FeO}_\text{s}$.
```{eval-rst}
.. toctree::
:maxdepth: 1
properties
critical_points
```
It is currently still under construction. You can help by [contributing](https://github.com/feos-org/feos/issues/70). | Markdown |
3D | feos-org/feos | docs/theory/eos/properties.md | .md | 10,218 | 118 | # Properties
(Bulk) equilibrium properties can be calculated as derivatives of a thermodynamic potential. In the case of equations of state, this thermodynamic potential is the Helmholtz energy $A$ as a function of its characteristic variables temperature $T$, volume $V$, and amount of substance of each component $n_i$. Examples for common (measurable) properties that are calculated from an equation of state are the pressure
$$p=-\left(\frac{\partial A}{\partial V}\right)_{T,n_i}$$
and the isochoric heat capacity
$$c_V=\frac{1}{n}\left(\frac{\partial U}{\partial T}\right)_{V,n_i}=\frac{T}{n}\left(\frac{\partial S}{\partial T}\right)_{V,n_i}=-\frac{T}{n}\left(\frac{\partial^2 A}{\partial T^2}\right)_{V,n_i}$$
with the total number of particles $n=\sum_i n_i$, the internal energy $U$, and the entropy $S$.
## Residual properties
In most cases, the total value of a property is required because it is the one which can actually be measured. However, for some applications, e.g. phase equilibria or entropy scaling, residual properties are useful. Residual properties are based on the separation of the Helmholtz energy in an ideal gas ($\mathrm{ig}$) contribution and a residual ($\mathrm{res}$) contribution:
$$A=A^\mathrm{ig}+A^\mathrm{res}$$
The ideal gas contribution contains only kinetic and intramolecular energies and can be derived from statistical mechanics as
$$A^\mathrm{ig}(T,V,n_i)=RT\sum_in_i\left(\ln\left(\frac{n_i\Lambda_i^3}{V}\right)-1\right)$$(eqn:a_ig)
with the thermal de Broglie wavelength $\Lambda_i$ that only depends on the temperature. Residual properties depend on the reference state. The two commonly used reference states are at constant volume or at constant pressure, i.e.,
$$A=A^\mathrm{ig,V}(T,V,n_i)+A^\mathrm{res,V}(T,V,n_i)=A^\mathrm{ig,p}(T,p,n_i)+A^\mathrm{res,p}(T,p,n_i)$$(eqn:a_res)
Because the Helmholtz energy is expressed in $T$, $V$ and $n_i$, residual properties at constant volume can be evaluated straightforwardly. If a property $X$ is calculated from the Helmholtz energy via the differential operator $\mathcal{D}$, i.e., $X=\mathcal{D}\left(A\right)$, then the residual contributions is calculated from
$$X^\mathrm{res,V}=\mathcal{D}\left(A\right)-\mathcal{D}\left(A^\mathrm{ig,V}\right)$$
In cases where the operator $\mathcal{D}$ is linear, the expression can be simplified to
$$X^\mathrm{res,V}=\mathcal{D}\left(A-A^\mathrm{ig,V}\right)=\mathcal{D}\left(A^\mathrm{res,V}\right)$$
For residual properties at constant pressure, the reference state has to be evaluated for an ideal gas volume that corresponds to the constant pressure
$$A^\mathrm{ig,p}(T,p,n_i)=A^\mathrm{ig,V}\left(T,V^\mathrm{ig}(T,p,n_i),n_i\right)=A^\mathrm{ig,V}\left(T,\frac{nRT}{p},n_i\right)$$
Then the residual contribution for $X$ can be evaluated as
$$X^\mathrm{res,p}=\mathcal{D}\left(A\right)-\mathcal{D}\left(A^\mathrm{ig,p}\right)$$
For linear operators $\mathcal{D}$ eqs. {eq}`eqn:a_ig` and {eq}`eqn:a_res` can be used to simplify the expression
$$X^\mathrm{res,p}=\mathcal{D}\left(A-A^\mathrm{ig,p}\right)=\mathcal{D}\left(A^\mathrm{res,V}\right)-\mathcal{D}\left(nRT\ln Z\right)$$
with the compressibility factor $Z=\frac{pV}{nRT}$.
For details on how the evaluation of properties from Helmholtz energy models is implemented in $\text{FeO}_\text{s}$ check out the [Rust guide](../../rustguide/core/state.rst).
## List of properties available in $\text{FeO}_\text{s}$
The table below lists all properties that are available in $\text{FeO}_\text{s}$, their definition, and whether they can be evaluated as residual contributions as well.
In general, the evaluation of (total) Helmholtz energies and their derivatives requires a model for the residual Helmholtz energy and a model for the ideal gas contribution, specifically for the temperature dependence of the thermal de Broglie wavelength $\Lambda_i$. However, for many properties like the pressure including its derivatives and fugacity coefficients, the de Broglie wavelength cancels out.
Due to different language paradigms, $\text{FeO}_\text{s}$ handles the ideal gas term slightly different in Rust and Python.
- In **Rust**, if no ideal gas model is provided, users can only evaluate properties for which no ideal gas model is required because the de Broglie wavelength cancels. For those properties that require an ideal gas model but the table below indicates that they can be evaluated as residual, extra functions are provided.
- In **Python**, no additional functions are required, instead the property evaluation will throw an exception if an ideal gas contribution is required but not provided.
| Name | definition | ideal gas model required? | residual? |
|-|:-:|-|-|
| Total molar weight $MW$ | $\sum_ix_iMW_i$ | no | no |
| Mass of each component $m_i$ | $n_iMW_i$ | no | no |
| Total mass $m$ | $\sum_im_i=nMW$ | no | no |
| Mass density $\rho^{(m)}$ | $\frac{m}{V}$ | no | no |
| Mass fractions $w_i$ | $\frac{m_i}{m}$ | no | no |
| Pressure $p$ | $-\left(\frac{\partial A}{\partial V}\right)_{T,n_i}$ | no | yes |
| Compressibility factor $Z$ | $\frac{pV}{nRT}$ | no | yes |
| Partial derivative of pressure w.r.t. volume | $\left(\frac{\partial p}{\partial V}\right)_{T,n_i}$ | no | yes |
| Partial derivative of pressure w.r.t. density | $\left(\frac{\partial p}{\partial \rho}\right)_{T,n_i}$ | no | yes |
| Partial derivative of pressure w.r.t. temperature | $\left(\frac{\partial p}{\partial T}\right)_{V,n_i}$ | no | yes |
| Partial derivative of pressure w.r.t. moles | $\left(\frac{\partial p}{\partial n_i}\right)_{T,V,n_j}$ | no | yes |
| Second partial derivative of pressure w.r.t. volume | $\left(\frac{\partial^2 p}{\partial V^2}\right)_{T,n_i}$ | no | yes |
| Second partial derivative of pressure w.r.t. density | $\left(\frac{\partial^2 p}{\partial \rho^2}\right)_{T,n_i}$ | no | yes |
| Partial molar volume $v_i$ | $\left(\frac{\partial V}{\partial n_i}\right)_{T,p,n_j}$ | no | no |
| Chemical potential $\mu_i$ | $\left(\frac{\partial A}{\partial n_i}\right)_{T,V,n_j}$ | yes | yes |
| Partial derivative of chemical potential w.r.t. temperature | $\left(\frac{\partial\mu_i}{\partial T}\right)_{V,n_i}$ | yes | yes |
| Partial derivative of chemical potential w.r.t. moles | $\left(\frac{\partial\mu_i}{\partial n_j}\right)_{V,n_k}$ | no | yes |
| Logarithmic fugacity coefficient $\ln\varphi_i$ | $\beta\mu_i^\mathrm{res}\left(T,p,\lbrace n_i\rbrace\right)$ | no | no |
| Pure component logarithmic fugacity coefficient $\ln\varphi_i^\mathrm{pure}$ | $\lim_{x_i\to 1}\ln\varphi_i$ | no | no |
| Logarithmic (symmetric) activity coefficient $\ln\gamma_i$ | $\ln\left(\frac{\varphi_i}{\varphi_i^\mathrm{pure}}\right)$ | no | no |
| Henry's law constant $H_{i,s}$ | $\lim_{x_i\to 0}\frac{y_ip}{x_i}=p_s^\mathrm{sat}\frac{\varphi_i^{\infty,\mathrm{L}}}{\varphi_i^{\infty,\mathrm{V}}}$ | no | no |
| Partial derivative of the logarithmic fugacity coefficient w.r.t. temperature | $\left(\frac{\partial\ln\varphi_i}{\partial T}\right)_{p,n_i}$ | no | no |
| Partial derivative of the logarithmic fugacity coefficient w.r.t. pressure | $\left(\frac{\partial\ln\varphi_i}{\partial p}\right)_{T,n_i}=\frac{v_i^\mathrm{res,p}}{RT}$ | no | no |
| Partial derivative of the logarithmic fugacity coefficient w.r.t. moles | $\left(\frac{\partial\ln\varphi_i}{\partial n_j}\right)_{T,p,n_k}$ | no | no |
| Thermodynamic factor $\Gamma_{ij}$ | $\delta_{ij}+x_i\left(\frac{\partial\ln\varphi_i}{\partial x_j}\right)_{T,p,\Sigma}$ | no | no |
| Molar isochoric heat capacity $c_v$ | $\left(\frac{\partial u}{\partial T}\right)_{V,n_i}$ | yes | yes |
| Partial derivative of the molar isochoric heat capacity w.r.t. temperature | $\left(\frac{\partial c_V}{\partial T}\right)_{V,n_i}$ | yes | yes |
| Molar isobaric heat capacity $c_p$ | $\left(\frac{\partial h}{\partial T}\right)_{p,n_i}$ | yes | yes |
| Entropy $S$ | $-\left(\frac{\partial A}{\partial T}\right)_{V,n_i}$ | yes | yes |
| Partial derivative of the entropy w.r.t. temperature | $\left(\frac{\partial S}{\partial T}\right)_{V,n_i}$ | yes | yes |
| Second partial derivative of the entropy w.r.t. temperature | $\left(\frac{\partial^2 S}{\partial T^2}\right)_{V,n_i}$ | yes | yes |
| Molar entropy $s$ | $\frac{S}{n}$ | yes | yes |
| Specific entropy $s^{(m)}$ | $\frac{S}{m}$ | yes | yes |
| Enthalpy $H$ | $A+TS+pV$ | yes | yes |
| Molar enthalpy $h$ | $\frac{H}{n}$ | yes | yes |
| Specific enthalpy $h^{(m)}$ | $\frac{H}{m}$ | yes | yes |
| Helmholtz energy $A$ | | yes | yes |
| Molar Helmholtz energy $a$ | $\frac{A}{n}$ | yes | yes |
| Specific Helmholtz energy $a^{(m)}$ | $\frac{A}{m}$ | yes | yes |
| Internal energy $U$ | $A+TS$ | yes | yes |
| Molar internal energy $u$ | $\frac{U}{n}$ | yes | yes |
| Specific internal energy $u^{(m)}$ | $\frac{U}{m}$ | yes | yes |
| Gibbs energy $G$ | $A+pV$ | yes | yes |
| Molar Gibbs energy $g$ | $\frac{G}{n}$ | yes | yes |
| Specific Gibbs energy $g^{(m)}$ | $\frac{G}{m}$ | yes | yes |
| Partial molar entropy $s_i$ | $\left(\frac{\partial S}{\partial n_i}\right)_{T,p,n_j}$ | yes | no |
| Partial molar enthalpy $h_i$ | $\left(\frac{\partial H}{\partial n_i}\right)_{T,p,n_j}$ | yes | no |
| Joule Thomson coefficient $\mu_\mathrm{JT}$ | $\left(\frac{\partial T}{\partial p}\right)_{H,n_i}$ | yes | no |
| Isentropic compressibility $\kappa_s$ | $-\frac{1}{V}\left(\frac{\partial V}{\partial p}\right)_{S,n_i}$ | yes | no |
| Isothermal compressibility $\kappa_T$ | $-\frac{1}{V}\left(\frac{\partial V}{\partial p}\right)_{T,n_i}$ | no | no |
| Isenthalpic compressibility $\kappa_h$ | $-\frac{1}{V}\left(\frac{\partial V}{\partial p}\right)_{H,n_i}$ | yes | no |
| Thermal expansivity $\alpha_p$ | $-\frac{1}{V}\left(\frac{\partial V}{\partial T}\right)_{p,n_i}$ | yes | no |
| Grüneisen parameter $\phi$ | $V\left(\frac{\partial p}{\partial U}\right)_{V,n_i}=\frac{v}{c_v}\left(\frac{\partial p}{\partial T}\right)_{v,n_i}=\frac{\rho}{T}\left(\frac{\partial T}{\partial \rho}\right)_{s, n_i}$ | yes | no |
| (Static) structure factor $S(0)$ | $RT\left(\frac{\partial\rho}{\partial p}\right)_{T,n_i}$ | no | no |
| Speed of sound $c$ | $\sqrt{\left(\frac{\partial p}{\partial\rho^{(m)}}\right)_{S,n_i}}$ | yes | no |
| Markdown |
3D | feos-org/feos | docs/theory/eos/critical_points.md | .md | 2,006 | 31 | # Stability and critical points
The implementation of critical points in $\text{FeO}_\text{s}$ follows the algorithm by [Michelsen and Mollerup](https://tie-tech.com/new-book-release/). A necessary condition for stability is the positive-definiteness of the quadratic form ([Heidemann and Khalil 1980](https://doi.org/10.1002/aic.690260510))
$$\sum_{ij}\left(\frac{\partial^2 A}{\partial N_i\partial N_j}\right)_{T,V}\Delta N_i\Delta N_j$$
The **spinodal** or limit of stability consists of the points for which the quadratic form is positive semi-definite. Following Michelsen and Mollerup, the matrix $M$ can be defined as
$$M_{ij}=\sqrt{z_iz_j}\left(\frac{\partial^2\beta A}{\partial N_i\partial N_j}\right)$$
with the molar composition $z_i$. Further, the variable $s$ is introduced that acts on the mole numbers $N_i$ via
$$N_i=z_i+su_i\sqrt{z_i}$$
with $u_i$ the elements of the eigenvector of $M$ corresponding to the smallest eigenvalue $\lambda_1$. Then, the limit of stability can be expressed as
$$c_1=\left.\frac{\partial^2\beta A}{\partial s^2}\right|_{s=0}=\sum_{ij}u_iu_jM_{ij}=\lambda_1=0$$
A **critical point** is defined as a stable point on the limit of stability. This leads to the second criticality condition
$$c_2=\left.\frac{\partial^3\beta A}{\partial s^3}\right|_{s=0}=0$$
The derivatives of the Helmholtz energy can be calculated efficiently in a single evaluation using [generalized hyper-dual numbers](https://doi.org/10.3389/fceng.2021.758090). The following methods of `State` are available to determine spinodal or critical points for different specifications:
||specified|unknowns|equations|
|-|-|-|-|
|`spinodal`|$T,N_i$|$\rho$|$c_1(T,\rho,N_i)=0$|
|`critical_point`|$N_i$|$T,\rho$|$c_1(T,\rho,N_i)=0$<br/>$c_2(T,\rho,N_i)=0$|
|`critical_point_binary_t`|$T$|$\rho_1,\rho_2$|$c_1(T,\rho_1,\rho_2)=0$<br/>$c_2(T,\rho_1,\rho_2)=0$|
|`critical_point_binary_p`|$p$|$T,\rho_1,\rho_2$|$c_1(T,\rho_1,\rho_2)=0$<br/>$c_2(T,\rho_1,\rho_2)=0$<br/>$p(T,\rho_1,\rho_2)=p$|
| Markdown |
3D | feos-org/feos | docs/theory/models/index.md | .md | 296 | 12 | # Models
This section describes the thermodynamic models implemented in $\text{FeO}_\text{s}$.
It is currently still under construction. You can help by [contributing](https://github.com/feos-org/feos/issues/70).
```{eval-rst}
.. toctree::
:maxdepth: 1
hard_spheres
association
``` | Markdown |
3D | feos-org/feos | docs/theory/models/hard_spheres.md | .md | 4,842 | 57 | # Hard spheres
$\text{FeO}_\text{s}$ provides an implementation of the Boublík-Mansoori-Carnahan-Starling-Leland (BMCSL) equation of state ([Boublík, 1970](https://doi.org/10.1063/1.1673824), [Mansoori et al., 1971](https://doi.org/10.1063/1.1675048)) for hard-sphere mixtures which is often used as reference contribution in SAFT equations of state. The implementation is generalized to allow the description of non-spherical or fused-sphere reference fluids.
The reduced Helmholtz energy density is calculated according to
$$\frac{\beta A}{V}=\frac{6}{\pi}\left(\frac{3\zeta_1\zeta_2}{1-\zeta_3}+\frac{\zeta_2^3}{\zeta_3\left(1-\zeta_3\right)^2}+\left(\frac{\zeta_2^3}{\zeta_3^2}-\zeta_0\right)\ln\left(1-\zeta_3\right)\right)$$
with the packing fractions
$$\zeta_k=\frac{\pi}{6}\sum_\alpha C_{k,\alpha}\rho_\alpha d_\alpha^k,~~~~~~~~k=0\ldots 3$$
The geometry coefficients $C_{k,\alpha}$ and the segment diameters $d_\alpha$ depend on the context in which the model is used. The following table shows how the expression can be reused in various models. For details on the fused-sphere chain model, check the [repository](https://github.com/feos-org/feos-fused-chains) or the [publication](https://doi.org/10.1103/PhysRevE.105.034110).
||Hard spheres|PC-SAFT|Fused-sphere chains|
|-|:-:|:-:|:-:|
|$d_\alpha$|$\sigma_\alpha$|$\sigma_\alpha\left(1-0.12e^{\frac{-3\varepsilon_\alpha}{k_\mathrm{B}T}}\right)$|$\sigma_\alpha$|
|$C_{0,\alpha}$|$1$|$m_\alpha$|$1$|
|$C_{1,\alpha}$|$1$|$m_\alpha$|$A_\alpha^*$|
|$C_{2,\alpha}$|$1$|$m_\alpha$|$A_\alpha^*$|
|$C_{3,\alpha}$|$1$|$m_\alpha$|$V_\alpha^*$|
## Fundamental measure theory
A model for inhomogeneous mixtures of hard spheres is provided by fundamental measure theory (FMT, [Rosenfeld, 1989](https://doi.org/10.1103/PhysRevLett.63.980)). Different variants have been proposed of which only those that are consistent with the BMCSL equation of state in the homogeneous limit are currently considered in $\text{FeO}_\text{s}$ (excluding, e.g., the original Rosenfeld and White-Bear II variants).
The Helmholtz energy density is calculated according to
$$\beta f=-n_0\ln\left(1-n_3\right)+\frac{n_{12}}{1-n_3}+\frac{1}{36\pi}n_2n_{22}f_3(n_3)$$
The expressions for $n_{12}$ and $n_{22}$ depend on the FMT version.
|version|$n_{12}$|$n_{22}$|references|
|-|:-:|:-:|:-:|
|WhiteBear|$n_1n_2-\vec n_1\cdot\vec n_2$|$n_2^2-3\vec n_2\cdot\vec n_2$|[Roth et al., 2002](https://doi.org/10.1088/0953-8984/14/46/313), [Yu and Wu, 2002](https://doi.org/10.1063/1.1520530)|
|KierlikRosinberg|$n_1n_2$|$n_2^2$|[Kierlik and Rosinberg, 1990](https://doi.org/10.1103/PhysRevA.42.3382)|
|AntiSymWhiteBear|$n_1n_2-\vec n_1\cdot\vec n_2$|$n_2^2\left(1-\frac{\vec n_2\cdot\vec n_2}{n_2^2}\right)^3$|[Rosenfeld et al., 1997](https://doi.org/10.1103/PhysRevE.55.4245), [Kessler et al., 2021](https://doi.org/10.1016/j.micromeso.2021.111263)|
For small $n_3$, the evaluation of $f_3(n_3)$ is numerically ill-conditioned (a 0/0 indeterminate form). Therefore, it is approximated with a Taylor expansion.
$$f_3=\begin{cases}\frac{n_3+\left(1-n_3\right)^2\ln\left(1-n_3\right)}{n_3^2\left(1-n_3\right)^2}&\text{if }n_3>10^{-5}\\
\frac{3}{2}+\frac{8}{3}n_3+\frac{15}{4}n_3^2+\frac{24}{5}n_3^3+\frac{35}{6}n_3^4&\text{else}\end{cases}$$
The weighted densities $n_k(\mathbf{r})$ are calculated by convolving the density profiles $\rho_\alpha(\mathbf{r})$ with weight functions $\omega_k^\alpha(\mathbf{r})$
$$n_k(\mathbf{r})=\sum_\alpha n_k^\alpha(\mathbf{r})=\sum_\alpha\int\rho_\alpha(\mathbf{r}')\omega_k^\alpha(\mathbf{r}-\mathbf{r}')\mathrm{d}\mathbf{r}'$$
which differ between the different FMT versions.
||WhiteBear/AntiSymWhiteBear|KierlikRosinberg|
|-|:-:|:-:|
|$\omega_0^\alpha(\mathbf{r})$|$\frac{C_{0,\alpha}}{\pi\sigma_\alpha^2}\,\delta\!\left(\frac{d_\alpha}{2}-\|\mathbf{r}\|\right)$|$C_{0,\alpha}\left(-\frac{1}{8\pi}\,\delta''\!\left(\frac{d_\alpha}{2}-\|\mathbf{r}\|\right)+\frac{1}{2\pi\|\mathbf{r}\|}\,\delta'\!\left(\frac{d_\alpha}{2}-\|\mathbf{r}\|\right)\right)$|
|$\omega_1^\alpha(\mathbf{r})$|$\frac{C_{1,\alpha}}{2\pi\sigma_\alpha}\,\delta\!\left(\frac{d_\alpha}{2}-\|\mathbf{r}\|\right)$|$\frac{C_{1,\alpha}}{8\pi}\,\delta'\!\left(\frac{d_\alpha}{2}-\|\mathbf{r}\|\right)$|
|$\omega_2^\alpha(\mathbf{r})$|$C_{2,\alpha}\,\delta\!\left(\frac{d_\alpha}{2}-\|\mathbf{r}\|\right)$|$C_{2,\alpha}\,\delta\!\left(\frac{d_\alpha}{2}-\|\mathbf{r}\|\right)$|
|$\omega_3^\alpha(\mathbf{r})$|$C_{3,\alpha}\,\Theta\!\left(\frac{d_\alpha}{2}-\|\mathbf{r}\|\right)$|$C_{3,\alpha}\,\Theta\!\left(\frac{d_\alpha}{2}-\|\mathbf{r}\|\right)$|
|$\vec\omega_1^\alpha(\mathbf{r})$|$C_{3,\alpha}\frac{\mathbf{r}}{2\pi\sigma_\alpha\|\mathbf{r}\|}\,\delta\!\left(\frac{d_\alpha}{2}-\|\mathbf{r}\|\right)$|-|
|$\vec\omega_2^\alpha(\mathbf{r})$|$C_{3,\alpha}\frac{\mathbf{r}}{\|\mathbf{r}\|}\,\delta\!\left(\frac{d_\alpha}{2}-\|\mathbf{r}\|\right)$|-| | Markdown |
3D | feos-org/feos | docs/theory/models/association.md | .md | 8,431 | 109 | # Association
The Helmholtz contribution due to short range attractive interaction ("association") in SAFT models can be conveniently expressed as
$$\frac{A^\mathrm{assoc}}{k_\mathrm{B}TV}=\sum_\alpha\rho_\alpha\left(\ln X_\alpha-\frac{X_\alpha}{2}+\frac{1}{2}\right)$$
Here, $\alpha$ is the index of all distinguishable **association sites** in the system. The site density $\rho_\alpha$ is the density of the segment or molecule on which the association site is located times the multiplicity of the site. The fraction of non-bonded association sites $X_\alpha$ is calculated implicitly from
$$\frac{1}{X_\alpha}=1+\sum_\beta\rho_\beta X_\beta\Delta^{\alpha\beta}$$ (eqn:x_assoc)
where $\Delta^{\alpha\beta}$ is the association strength between sites $\alpha$ and $\beta$. The exact expression for the association strength varies between different SAFT versions. We implement the expressions used in PC-SAFT but a generalization to other models is straightforward.
The number of degrees of freedom in the association strength matrix $\Delta$ is vast and for practical purposes it is useful to reduce the number of non-zero elements in $\Delta$. In $\text{FeO}_\text{s}$ we use 3 kinds of association sites: $A$, $B$ and $C$. Association sites of kind $A$ only associate with $B$ and vice versa, whereas association sites of kind $C$ only associate with other sites of kind $C$. By sorting the sites by their kind, the entire $\Delta$ matrix can be reduced to two smaller matrices: $\Delta^{AB}$ and $\Delta^{CC}$.
```{image} FeOs_Association.png
:alt: Association strength matrix
:class: bg-primary
:width: 300px
:align: center
```
In practice, the $AB$ association can be linked to hydrogen bonding sites. The $CC$ association is less widely used but implemented to ensure that all the association schemes defined in [Huang and Radosz 1990](https://pubs.acs.org/doi/10.1021/ie00107a014) are covered.
## Calculation of the fraction of non-bonded association sites
The algorithm to solve the fraction of non-bonded sites for general association schemes follows [Michelsen 2006](https://pubs.acs.org/doi/full/10.1021/ie060029x). The problem is rewritten as an optimization problem with gradients
$$g_\alpha=\frac{1}{X_\alpha}-1-\sum_\beta\rho_\beta X_\beta\Delta^{\alpha\beta}$$
The analytic Hessian is
$$H_{\alpha\beta}=-\frac{\delta_{\alpha\beta}}{X_\alpha^2}-\rho_\beta\Delta^{\alpha\beta}$$
with the Kronecker delta $\delta_{\alpha\beta}$. However, [Michelsen 2006](https://pubs.acs.org/doi/full/10.1021/ie060029x) shows that it is beneficial to use
$$\hat{H}_{\alpha\beta}=-\frac{\delta_{\alpha\beta}}{X_\alpha}\left(1+\sum_\gamma\rho_\gamma X_\gamma\Delta^{\alpha\gamma}\right)-\rho_\beta\Delta^{\alpha\beta}$$
instead. $X_\alpha$ can then be solved robustly using a Newton algorithm with a check that ensures that the values of $X_\alpha$ remain positive. With the split into $AB$ and $CC$ association, the two kinds of association could be solved separately from each other. This is currently not implemented, because use cases are rare to nonexistent and the benefit is small.
A drastic improvement in performance, however, can be achieved by solving eq. {eq}`eqn:x_assoc` analytically for simple cases. If there is only one $A$ and one $B$ site the corresponding fractions of non-bonded association sites can be calculated from
$$X_A=\frac{2}{\sqrt{\left(1+\left(\rho_A-\rho_B\right)\Delta^{AB}\right)^2+4\rho_B\Delta^{AB}}+1+\left(\rho_B-\rho_A\right)\Delta^{AB}}$$
$$X_B=\frac{2}{\sqrt{\left(1+\left(\rho_A-\rho_B\right)\Delta^{AB}\right)^2+4\rho_B\Delta^{AB}}+1+\left(\rho_A-\rho_B\right)\Delta^{AB}}$$
The specific form of the expressions (with the square root in the denominator) is particularly robust for cases where $\Delta$ and/or $\rho$ are small or even 0.
Analogously, for a single $C$ site, the expression becomes
$$X_C=\frac{2}{\sqrt{1+4\rho_C\Delta^{CC}}+1}$$
## PC-SAFT expression for the association strength
In $\text{FeO}_\text{s}$ every association site $\alpha$ is parametrized with the dimensionless association volume $\kappa^\alpha$ and the association energy $\varepsilon^\alpha$. The association strength between sites $\alpha$ and $\beta$ is then calculated from
$$\Delta^{\alpha\beta}=\left(\frac{1}{1-\zeta_3}+\frac{\frac{3}{2}d_{ij}\zeta_2}{\left(1-\zeta_3\right)^2}+\frac{\frac{1}{2}d_{ij}^2\zeta_2^2}{\left(1-\zeta_3\right)^3}\right)\sqrt{\sigma_i^3\kappa^\alpha\sigma_j^3\kappa^\beta}\left(e^{\frac{\varepsilon^\alpha+\varepsilon^\beta}{2k_\mathrm{B}T}}-1\right)$$
with
$$d_{ij}=\frac{2d_id_j}{d_i+d_j},~~~~d_k=\sigma_k\left(1-0.12e^{\frac{-3\varepsilon_k}{k_\mathrm{B}T}}\right)$$
and
$$\zeta_2=\frac{\pi}{6}\sum_k\rho_km_kd_k^2,~~~~\zeta_3=\frac{\pi}{6}\sum_k\rho_km_kd_k^3$$
The indices $i$, $j$ and $k$ are used to index molecules or segments rather than sites. $i$ and $j$ in particular refer to the molecule or segment that contain site $\alpha$ or $\beta$ respectively.
On their own the parameters $\kappa^\alpha$ and $\varepsilon^\alpha$ have no physical meaning. For a pure system of self-associating molecules it is therefore natural to define $\kappa^A=\kappa^B\equiv\kappa^{A_iB_i}$ and $\varepsilon^A=\varepsilon^B\equiv\varepsilon^{A_iB_i}$ where $\kappa^{A_iB_i}$ and $\varepsilon^{A_iB_i}$ are now parameters describing the molecule rather than individual association sites. However, for systems that are not self-associating, the more generic parametrization is required.
## SAFT-VR Mie expression for the association strength
We provide an implementation of the association strength as published by [Lafitte et al. (2013)](https://doi.org/10.1063/1.4819786).
Every association site $\alpha$ is parametrized with two distances $r_c^\alpha$ and $r_d^\alpha$, and the association energy $\varepsilon^\alpha$. Whereas $r_c^\alpha$ is a parameter that is adjusted for each substance, $r_d^\alpha$ is kept constant as $r_d^\alpha = 0.4 \sigma$. Note that the parameter $r_c^\alpha$ has to be provided as dimensionless quantity in the input, i.e. divided by the segment's $\sigma$ value.
The association strength between sites $\alpha$ and $\beta$ is then calculated from
$$\Delta^{\alpha\beta}=\left(\frac{1}{1-\zeta_3}+\frac{\frac{3}{2}\tilde{d}_{ij}\zeta_2}{\left(1-\zeta_3\right)^2}+\frac{\frac{1}{2}\tilde{d}_{ij}^2\zeta_2^2}{\left(1-\zeta_3\right)^3}\right)K_{ij}^{\alpha\beta}\sigma_{ij}^3\left(e^{\frac{\sqrt{\varepsilon^\alpha\varepsilon^\beta}}{k_\mathrm{B}T}}-1\right)$$
with
$$\tilde{d}_{ij}=\frac{2d_id_j}{d_i+d_j},~~~~\sigma_{ij} = \frac{\sigma_i + \sigma_j}{2}$$
and
$$\zeta_2=\frac{\pi}{6}\sum_k\rho_km_kd_k^2,~~~~\zeta_3=\frac{\pi}{6}\sum_k\rho_km_kd_k^3$$
where
$$d_i = \int_{l}^{\sigma_i} \left[1 - e^{-\beta u_i^\text{Mie}(r)}\right]\mathrm{d}r.$$
The integral is solved using a Gauss-Legendre quadrature of fifth order where the lower limit, $l$, is determined using the method of [Aasen et al.](https://doi.org/10.1063/1.5111364) The dimensionless bonding volume $K^{\alpha\beta}_{ij}$ (see [A43](https://doi.org/10.1063/1.4819786)) utilizes arithmetic combining rules for $r_c^{\alpha\beta}$, $r_d^{\alpha\beta}$ and $d_{ij}$ of unlike sites and segments, respectively.
## Helmholtz energy functional
The Helmholtz energy contribution proposed by [Yu and Wu 2002](https://aip.scitation.org/doi/abs/10.1063/1.1463435) is used to model association in inhomogeneous systems. It uses the same weighted densities that are used in [Fundamental Measure Theory](hard_spheres) (the White-Bear version). The Helmholtz energy density is then calculated from
$$\beta f=\sum_\alpha\frac{n_0^\alpha\xi_i}{m_i}\left(\ln X_\alpha-\frac{X_\alpha}{2}+\frac{1}{2}\right)$$
with the equations for the fraction of non-bonded association sites adapted to
$$\frac{1}{X_\alpha}=1+\sum_\beta\frac{n_0^\beta\xi_j}{m_j}X_\beta\Delta^{\alpha\beta}$$
The association strength, again using the PC-SAFT parametrization, is
$$\Delta^{\alpha\beta}=\left(\frac{1}{1-n_3}+\frac{\frac{1}{4}d_{ij}n_2\xi}{\left(1-n_3\right)^2}+\frac{\frac{1}{72}d_{ij}^2n_2^2\xi}{\left(1-n_3\right)^3}\right)\sqrt{\sigma_i^3\kappa^\alpha\sigma_j^3\kappa^\beta}\left(e^{\frac{\varepsilon^\alpha+\varepsilon^\beta}{2k_\mathrm{B}T}}-1\right)$$
The quantities $\xi$ and $\xi_i$ were introduced to account for the effect of inhomogeneity and are defined as
$$\xi=1-\frac{\vec{n}_2\cdot\vec{n}_2}{n_2^2},~~~~\xi_i=1-\frac{\vec{n}_2^i\cdot\vec{n}_2^i}{{n_2^i}^2}$$
| Markdown |
3D | feos-org/feos | docs/theory/dft/enthalpy_of_adsorption.md | .md | 9,511 | 111 | # Enthalpy of adsorption and the Clausius-Clapeyron relation
## Enthalpy of adsorption
The energy balance in differential form for a simple adsorption process can be written as
$$\mathrm{d}U=h^\mathrm{in}\delta n^\mathrm{in}-h^\mathrm{b}\delta n^\mathrm{out}+\delta Q$$ (eqn:energy_balance)
Here the balance is chosen to only include the fluid in the porous medium. The molar enthalpy $h^\mathrm{b}$ of the (bulk) fluid that leaves the adsorber is at a state that is in equilibrium with the porous medium. In contrast, the incoming stream can be at any condition. Analogously, the component balance is
$$\mathrm{d}N_i=x_i^\mathrm{in}\delta n^\mathrm{in}-x_i\delta n^\mathrm{out}$$ (eqn:mass_balance)
The differential of the internal energy can be replaced with the total differential in its variables temperature $T$ and number of particles $N_i$. The volume of the adsorber is fixed and thus not considered as a variable.
$$\mathrm{d}U=\left(\frac{\partial U}{\partial T}\right)_{N_k}\mathrm{d}T+\sum_i\left(\frac{\partial U}{\partial N_i}\right)_{T,N_j}\mathrm{d}N_i$$ (eqn:U_differential)
Eqs. {eq}`eqn:energy_balance`, {eq}`eqn:mass_balance` and {eq}`eqn:U_differential` can be combined into an expression for the heat of adsorption $\delta Q$
$$\delta Q=\left(\frac{\partial U}{\partial T}\right)_{N_k}\mathrm{d}T-\left(h^\mathrm{in}-\sum_ix_i^\mathrm{in}\left(\frac{\partial U}{\partial N_i}\right)_{T,N_j}\right)\delta n^\mathrm{in}+\left(h^\mathrm{b}-\sum_ix_i\left(\frac{\partial U}{\partial N_i}\right)_{T,N_j}\right)\delta n^\mathrm{out}$$
The heat of adsorption can thus be split into a sensible part that depends on the change in temperature, and a latent part that depends on the change in loading. The expression can be simplified by using the definitions of the isochoric heat capacity $C_v=\left(\frac{\partial U}{\partial T}\right)_{N_k}$ and the **partial molar enthalpy of adsorption**
$$\Delta h_i^\mathrm{ads}=h_i^\mathrm{b}-\left(\frac{\partial U}{\partial N_i}\right)_{T,N_j}$$
yielding
$$\delta Q=C_v\mathrm{d}T-\sum_ix_i^\mathrm{in}\left(h_i^\mathrm{in}-h_i^\mathrm{b}+\Delta h_i^\mathrm{ads}\right)\delta n^\mathrm{in}+\sum_ix_i\Delta h_i^\mathrm{ads}\delta n^\mathrm{out}$$
or
$$\delta Q=C_v\mathrm{d}T-\sum_ix_i^\mathrm{in}\left(h_i^\mathrm{in}-h_i^\mathrm{b}+\Delta h_i^\mathrm{ads}\right)\delta n^\mathrm{in}+\Delta h^\mathrm{ads}\delta n^\mathrm{out}$$
with the **enthalpy of adsorption**
$$\Delta h^\mathrm{ads}=\sum_ix_i\Delta h_i^\mathrm{ads}=h^\mathrm{b}-\sum_ix_i\left(\frac{\partial U}{\partial N_i}\right)_{T,N_j}$$
For **pure components** the balance equations simplify to
$$\delta Q=C_v\mathrm{d}T-\left(h^\mathrm{in}-h^\mathrm{b}\right)\delta n^\mathrm{in}-\Delta h^\mathrm{ads}\mathrm{d}N$$
## Clausius-Clapeyron relation for porous media
The Clausius-Clapeyron relation relates the $p-T$ slope of a pure component phase transition line to the corresponding enthalpy of phase change. For a vapor-liquid phase transition, the exact relation is
$$\frac{\mathrm{d}p^\mathrm{sat}}{\mathrm{d}T}=\frac{s^\mathrm{V}-s^\mathrm{L}}{v^\mathrm{V}-v^\mathrm{L}}=\frac{h^\mathrm{V}-h^\mathrm{L}}{T\left(v^\mathrm{V}-v^\mathrm{L}\right)}$$ (eqn:temp_dep_press)
In this expression, the enthalpy of vaporization $\Delta h^\mathrm{vap}=h^\mathrm{V}-h^\mathrm{L}$ can be identified. The molar volumes $v$ of the two phases can be replaced by the compressibility factor $Z=\frac{pv}{RT}$. Then, eq. {eq}`eqn:temp_dep_press` simplifies to
$$\frac{\mathrm{d}p^\mathrm{sat}}{\mathrm{d}T}=\frac{p}{R T^2}\frac{\Delta h^\mathrm{vap}}{Z^\mathrm{V}-Z^\mathrm{L}}$$
which can be compactly written as
$$\frac{\mathrm{d}\ln p^\mathrm{sat}}{\mathrm{d}\frac{1}{RT}}=-\frac{\Delta h^\mathrm{vap}}{Z^\mathrm{V}-Z^\mathrm{L}}$$ (eqn:Clausius_Clapeyron_exact)
Eq. {eq}`eqn:Clausius_Clapeyron_exact` is still an exact expression. In practice, the volume (and hence the compressibility factor) of the liquid phase can often be neglected compared to the volume of the gas phase. Additionally assuming an ideal gas phase ($Z^\mathrm{V}\approx1$), leads to the expression commonly referred to as Clausius-Clapeyron relation:
$$\frac{\mathrm{d}\ln p^\mathrm{sat}}{\mathrm{d}\frac{1}{RT}}=-\Delta h^\mathrm{vap}$$ (eqn:Clausius_Clapeyron)
A similar relation can be derived for fluids adsorbed in a porous medium that is in equilibrium with a bulk phase. At this point it is important to clarify which variables describe the system:
- The adsorbed fluid and the bulk phase are in equilibrium. Therefore, the temperature $T$ and chemical potentials $\mu_i$ are the same for both phases.
- The density profiles and hence the number of particles $N_i$ in the porous medium are determined by $T$ and $\mu_i$. The volume of the porous medium is not considered as a thermodynamic variable but rather as a (constant) property of the adsorbent.
- All intensive properties of the bulk phase are fully determined by $T$ and $\mu_i$. In practice it can be useful to relate these properties to measurable properties like the pressure $p$ and the composition $x_i$.
To find an expression of the slope of an isostere (constant $N_i$), the pressure, which is only defined for the bulk phase, has to be related to properties of the adsorbed fluid.
$$\frac{\mathrm{d}\ln p}{\mathrm{d}\frac{1}{RT}}=-\frac{RT^2}{p}\frac{\mathrm{d}p}{\mathrm{d}T}$$
First, the pressure can be replaced using the Gibbs-Duhem relation for the bulk phase (index $\mathrm{b}$)
$$\frac{\mathrm{d}\ln p}{\mathrm{d}\frac{1}{RT}}=-\frac{RT^2}{pv^\mathrm{b}}\left(s^\mathrm{b}+\sum_ix_i\left(\frac{\partial\mu_i}{\partial T}\right)_{N_k}\right)$$ (eqn:clausius_clapeyron_intermediate)
Here the directional derivative $\frac{\mathrm{d}\mu_i}{\mathrm{d}T}$ could be replaced with a partial derivative amongst the variables describing the adsorbed fluid. The partial derivative can then be replaced using a Maxwell relation based on the Helmholtz energy $F$ as follows
$$\left(\frac{\partial\mu_i}{\partial T}\right)_{N_k}=\left(\frac{\partial^2 F}{\partial T\partial N_i}\right)=-\left(\frac{\partial S}{\partial N_i}\right)_{T,N_j}$$
Using the Maxwell relation together with the compressibility factor of the bulk phase $Z^\mathrm{b}=\frac{pv^\mathrm{b}}{RT}$ in eq. {eq}`eqn:clausius_clapeyron_intermediate` results in
$$\frac{\mathrm{d}\ln p}{\mathrm{d}\frac{1}{RT}}=-\frac{T}{Z^\mathrm{b}}\left(s^\mathrm{b}-\sum_ix_i\left(\frac{\partial S}{\partial N_i}\right)_{T,N_j}\right)$$
Finally, using $h^\mathrm{b}=Ts^\mathrm{b}+\sum_ix_i\mu_i$ and $\mathrm{d}U=T\mathrm{d}S+\sum_i\mu_i\mathrm{d}N_i$ leads to
$$\frac{\mathrm{d}\ln p}{\mathrm{d}\frac{1}{RT}}=-\frac{1}{Z^\mathrm{b}}\left(h^\mathrm{b}-\sum_ix_i\left(\frac{\partial U}{\partial N_i}\right)_{T,N_j}\right)=-\frac{\Delta h^\mathrm{ads}}{Z^\mathrm{b}}$$ (eqn:deriv_relation_hads)
The relation is exact and valid for an arbitrary number of components in the fluid phase.
## Calculation of the enthalpy of adsorption from classical DFT
In a DFT context, the introduction of entropies and internal energies is just an unnecessary complication. The most useful definition of the (partial molar) enthalpy of adsorption is
$$\Delta h_i^\mathrm{ads}=T\left(s_i^\mathrm{b}+\left(\frac{\partial\mu_i}{\partial T}\right)_{N_k}\right)$$
The derivative at constant number of particles is problematic and has to be replaced. This is done starting from the total differential of the number of particles
$$\mathrm{d}N_i=\sum_j\left(\frac{\partial N_i}{\partial\mu_j}\right)_T\mathrm{d}\mu_j+\left(\frac{\partial N_i}{\partial T}\right)_{\mu_k}\mathrm{d}T$$ (eqn:dn)
Calculating the derivative with respect to $T$ at constant $N_i$ leads to
$$0=\sum_j\left(\frac{\partial N_i}{\partial\mu_j}\right)_T\left(\frac{\partial\mu_j}{\partial T}\right)_{N_k}+\left(\frac{\partial N_i}{\partial T}\right)_{\mu_k}$$ (eqn:dndt_1)
from which the unknown derivative $\left(\frac{\partial\mu_i}{\partial T}\right)_{N_k}$ can be calculated. In practice the expression has the disadvantage that $\left(\frac{\partial N_i}{\partial T}\right)_{\mu_k}$ depends on the (sometimes unknown) thermal de Broglie wavelength which cancels later with $s_i^\mathrm{b}$. This can be remedied by first calculating the derivative of eq. {eq}`eqn:dn` with respect to $T$ at constant (bulk) pressure and composition.
$$\left(\frac{\partial N_i}{\partial T}\right)_{p,x_k}=\sum_j\left(\frac{\partial N_i}{\partial\mu_j}\right)_T\left(\frac{\partial\mu_j}{\partial T}\right)_{p,x_k}+\left(\frac{\partial N_i}{\partial T}\right)_{\mu_k}$$ (eqn:dndt_2)
From classical bulk thermodynamics we know $\left(\frac{\partial\mu_j}{\partial T}\right)_{p,x_k}=-s_j^\mathrm{b}$ and therefore, eq. {eq}`eqn:dndt_2` can be used in eq. {eq}`eqn:dndt_1` to give
$$0=\sum_j\left(\frac{\partial N_i}{\partial\mu_j}\right)_T\left(s_j^\mathrm{b}+\left(\frac{\partial\mu_j}{\partial T}\right)_{N_k}\right)+\left(\frac{\partial N_i}{\partial T}\right)_{p,x_k}$$
After multiplying with $T$, the following elegant expression remains
$$0=\sum_j\left(\frac{\partial N_i}{\partial\mu_j}\right)_T\Delta h_j^\mathrm{ads}+T\left(\frac{\partial N_i}{\partial T}\right)_{p,x_k}$$
which is a symmetric linear system of equations due to $\left(\frac{\partial N_i}{\partial\mu_j}\right)_T=-\left(\frac{\partial^2\Omega}{\partial\mu_i\partial\mu_j}\right)_T$. The derivatives of the particle numbers are obtained by integrating over the respective derivatives of the density profiles which were discussed [previously](derivatives.md).

# Functional derivatives
In the last section the functional derivative
$$\hat F_{\rho_\alpha}^\mathrm{res}(r)=\left(\frac{\delta\hat F^\mathrm{res}}{\delta\rho_\alpha(r)}\right)_{T,\rho_{\alpha'\neq\alpha}}$$
was introduced as part of the Euler-Lagrange equation. The implementation of these functional derivatives can be a major difficulty during the development of a new Helmholtz energy model. In $\text{FeO}_\text{s}$ it is fully automated. The core assumption is that the residual Helmholtz energy functional $\hat F^\mathrm{res}$ can be written as a sum of contributions that each can be written in the following way:
$$F=\int f[\rho(r)]\mathrm{d}r=\int f(\lbrace n_\gamma\rbrace)\mathrm{d}r$$
The Helmholtz energy density $f$ which would in general be a functional of the density itself can be expressed as a *function* of weighted densities $n_\gamma$ which are obtained by convolving the density profiles with weight functions $\omega_\gamma^\alpha$
$$n_\gamma(r)=\sum_\alpha\int\rho_\alpha(r')\omega_\gamma^\alpha(r-r')\mathrm{d}r'\tag{1}$$
In practice the weight functions tend to have simple shapes like step functions (i.e. the weighted density is an average over a sphere) or Dirac distributions (i.e. the weighted density is an average over the surface of a sphere).
For Helmholtz energy functionals that can be written in this form, the calculation of the functional derivative can be automated. In general the functional derivative can be written as
$$F_{\rho_\alpha}(r)=\int\frac{\delta f(r')}{\delta\rho_\alpha(r)}\mathrm{d}r'=\int\sum_\gamma f_{n_\gamma}(r')\frac{\delta n_\gamma(r')}{\delta\rho_\alpha(r)}\mathrm{d}r'$$
with $f_{n_\gamma}$ as abbreviation for the *partial* derivative $\frac{\partial f}{\partial n_\gamma}$. Using the definition of the weighted densities (1), the expression can be rewritten as
$$\begin{align}
F_{\rho_\alpha}(r)&=\int\sum_\gamma f_{n_\gamma}(r')\frac{\delta n_\gamma(r')}{\delta\rho_\alpha(r)}\mathrm{d}r'=\int\sum_\gamma f_{n_\gamma}(r')\sum_{\alpha'}\int\underbrace{\frac{\delta\rho_{\alpha'}(r'')}{\delta\rho_\alpha(r)}}_{\delta(r-r'')\delta_{\alpha\alpha'}}\omega_\gamma^{\alpha'}(r'-r'')\mathrm{d}r''\mathrm{d}r'\\
&=\sum_\gamma\int f_{n_\gamma}(r')\omega_\gamma^\alpha(r'-r)\mathrm{d}r'
\end{align}$$
At this point the parity of the weight functions has to be taken into account. By construction scalar and spherically symmetric weight functions (the standard case) are even functions, i.e., $\omega(-r)=\omega(r)$. In contrast, vector valued weight functions, as they appear in fundamental measure theory, are odd functions, i.e., $\omega(-r)=-\omega(r)$. Therefore, the sum over the weight functions needs to be split into two contributions, as
$$F_{\rho_\alpha}(r)=\sum_\gamma^\mathrm{scal}\int f_{n_\gamma}(r')\omega_\gamma^\alpha(r-r')\mathrm{d}r'-\sum_\gamma^\mathrm{vec}\int f_{n_\gamma}(r')\omega_\gamma^\alpha(r-r')\mathrm{d}r'\tag{2}$$
With this distinction, the calculation of the functional derivative is split into three steps
1. Calculate the weighted densities $n_\gamma$ from eq. (1)
2. Evaluate the partial derivatives $f_{n_\gamma}$
3. Use eq. (2) to obtain the functional derivative $F_{\rho_\alpha}$
A fast method to calculate the convolution integrals required in steps 1 and 3 is shown in the next section.
The implementation of partial derivatives by hand can be cumbersome and error prone. $\text{FeO}_\text{s}$ uses automatic differentiation with dual numbers to facilitate this step. For details about dual numbers and their generalization, we refer to [our publication](https://www.frontiersin.org/articles/10.3389/fceng.2021.758090/full). The essential relation is that the Helmholtz energy density evaluated with a dual number as input for one of the weighted densities evaluates to a dual number with the function value as real part and the partial derivative as dual part.
$$f(n_\gamma+\varepsilon,\lbrace n_{\gamma'\neq\gamma}\rbrace)=f+f_{n_\gamma}\varepsilon$$

# DFT solvers
Different solvers can be used to calculate the density profiles from the Euler-Lagrange equation introduced previously. The solvers differ in their stability, the rate of convergence, and the execution time. Unfortunately, the optimal solver and solver parameters depend on the studied system.
## Picard iteration
The form of the Euler-Lagrange equation
$$\rho_\alpha(r)=\underbrace{\rho_\alpha^\mathrm{b}e^{\frac{\beta}{m_\alpha}\left(\hat F_{\rho_\alpha}^\mathrm{b,res}-\hat F_{\rho_\alpha}^\mathrm{res}(r)-V_\alpha^\mathrm{ext}(r)\right)}\prod_{\alpha'}I_{\alpha\alpha'}(r)}_{\equiv \mathcal{P}_\alpha(r;[\rho(r)])}$$
suggests using a simple fixed point iteration
$$\rho_\alpha^{(k+1)}(r)=\mathcal{P}_\alpha\left(r;\left[\rho^{(k)}(r)\right]\right)$$
Except for some systems – typically at low densities – this iteration is unstable. Instead the new solution is obtained as combination of the old solution and the projected solution $\mathcal{P}$
$$\rho_\alpha^{(k+1)}(r)=(1-\nu)\rho_\alpha^{(k)}(r)+\nu\mathcal{P}_\alpha\left(r;\left[\rho^{(k)}(r)\right]\right)$$
The weighting between the old and projected solution is specified by the damping coefficient $\nu$. The expression can be rewritten as
$$\rho_\alpha^{(k+1)}(r)=\rho_\alpha^{(k)}(r)+\nu\Delta\rho_\alpha^{(k)}(r)$$
with the search direction $\Delta\rho_\alpha(r)$ which is identical to the residual $\mathcal{F}_\alpha\left(r;\left[\rho(r)\right]\right)$
$$\Delta\rho_\alpha(r)=\mathcal{F}_\alpha\left(r;\left[\rho(r)\right]\right)\equiv\mathcal{P}_\alpha\left(r;\left[\rho(r)\right]\right)-\rho_\alpha(r)$$
The Euler-Lagrange equation can be reformulated as the "logarithmic" version
$$\ln\rho_\alpha(r)=\ln\mathcal{P}_\alpha\left(r;\left[\rho(r)\right]\right)$$
Then repeating the same steps as above leads to the "logarithmic" Picard iteration
$$\ln\rho_\alpha^{(k+1)}(r)=\ln\rho_\alpha^{(k)}(r)+\nu\Delta\ln\rho_\alpha^{(k)}(r)$$
or
$$\rho_\alpha^{(k+1)}(r)=\rho_\alpha^{(k)}(r)e^{\nu\Delta\ln\rho_\alpha^{(k)}(r)}$$
with
$$\Delta\ln\rho_\alpha(r)=\mathcal{\hat F}_\alpha\left(r;\left[\rho(r)\right]\right)\equiv\ln\mathcal{P}_\alpha\left(r;\left[\rho(r)\right]\right)-\ln\rho_\alpha(r)$$
## Newton algorithm
A Newton iteration is a more refined approach to calculate the roots of the residual $\mathcal{F}$. From a Taylor expansion of the residual
$$\mathcal{F}_\alpha\left(r;\left[\rho(r)+\Delta\rho(r)\right]\right)=\mathcal{F}_\alpha\left(r;\left[\rho(r)\right]\right)+\int\sum_\beta\frac{\delta\mathcal{F}_\alpha\left(r;\left[\rho(r)\right]\right)}{\delta\rho_\beta(r')}\Delta\rho_\beta(r')\mathrm{d}r'+\ldots$$
the Newton step is derived by setting the updated residual $\mathcal{F}_\alpha[\rho(r)+\Delta\rho(r)]$ to 0 and neglecting higher order terms.
$$\mathcal{F}_\alpha\left(r;\left[\rho(r)\right]\right)=-\int\sum_\beta\frac{\delta\mathcal{F}_\alpha\left(r;\left[\rho(r)\right]\right)}{\delta\rho_\beta(r')}\Delta\rho_\beta(r')\mathrm{d}r'$$ (eqn:newton)
The linear integral equation has to be solved for the step $\Delta\rho(r)$. Explicitly evaluating the functional derivatives of the residuals is not feasible due to their high dimensionality. Instead, a matrix-free linear solver like GMRES can be used. For GMRES only the action of the linear system on the variable is required (an evaluation of the right-hand side in the equation above for a given $\Delta\rho$). This action can be approximated numerically via
$$\int\sum_\beta\frac{\delta\mathcal{F}_\alpha\left(r;\left[\rho(r)\right]\right)}{\delta\rho_\beta(r')}\Delta\rho_\beta(r')\mathrm{d}r'\approx\frac{\mathcal{F}_\alpha\left(r;\left[\rho(r)+s\Delta\rho(r)\right]\right)-\mathcal{F}_\alpha\left(r;\left[\rho(r)\right]\right)}{s}$$
However this approach requires the choice of an appropriate step size $s$ (something that we want to avoid in $\text{FeO}_\text{s}$) and also an evaluation of the full residual in every step of the linear solver. The solver can be sped up by doing parts of the functional derivative analytically beforehand. Using the definition of the residual in the rhs of eq. {eq}`eqn:newton` leads to
$$\begin{align*}
q_\alpha(r)&\equiv-\int\sum_\beta\frac{\delta\mathcal{F}_\alpha\left(r;\left[\rho(r)\right]\right)}{\delta\rho_\beta(r')}\Delta\rho_\beta(r')\mathrm{d}r'\\
&=\int\sum_\beta\frac{\delta}{\delta\rho_\beta(r')}\left(\rho_\alpha(r)-\rho_\alpha^\mathrm{b}e^{\frac{\beta}{m_\alpha}\left(\hat F_{\rho_\alpha}^\mathrm{b,res}-\hat F_{\rho_\alpha}^\mathrm{res}(r)-V_\alpha^\mathrm{ext}(r)\right)}\prod_{\alpha'}I_{\alpha\alpha'}(r)\right)\Delta\rho_\beta(r')\mathrm{d}r'
\end{align*}$$
The functional derivative can be simplified using $\hat F_{\rho_\alpha\rho_\beta}^\mathrm{res}(r,r')=\frac{\delta \hat F_{\rho_\alpha}^\mathrm{res}(r)}{\delta\rho_\beta(r')}=\frac{\delta^2\hat F^\mathrm{res}}{\delta\rho_\alpha(r)\delta\rho_\beta(r')}$
$$\begin{align*}
q_\alpha(r)&=\int\sum_\beta\left(\delta_{\alpha\beta}\delta(r-r')+\left(\frac{\beta}{m_\alpha}\hat F_{\rho_\alpha\rho_\beta}^\mathrm{res}(r,r')-\sum_{\alpha'}\frac{1}{I_{\alpha\alpha'}(r)}\frac{\delta I_{\alpha\alpha'}(r)}{\delta\rho_\beta(r')}\right)\right.\\
&~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\left.\times\rho_\alpha^\mathrm{b}e^{\frac{\beta}{m_\alpha}\left(\hat F_{\rho_\alpha}^\mathrm{b,res}-\hat F_{\rho_\alpha}^\mathrm{res}(r)-V_\alpha^\mathrm{ext}(r)\right)}\prod_{\alpha'}I_{\alpha\alpha'}(r)\right)\Delta\rho_\beta(r')\mathrm{d}r'\\
&=\Delta\rho_\alpha(r)+\left(\frac{\beta}{m_\alpha}\underbrace{\int\sum_\beta\hat F_{\rho_\alpha\rho_\beta}^\mathrm{res}(r,r')\Delta\rho_\beta(r')\mathrm{d}r'}_{\Delta\hat F_{\rho_\alpha}^\mathrm{res}(r)}-\sum_{\alpha'}\frac{1}{I_{\alpha\alpha'}(r)}\underbrace{\int\sum_\beta\frac{\delta I_{\alpha\alpha'}(r)}{\delta\rho_\beta(r')}\Delta\rho_\beta(r')\mathrm{d}r'}_{\Delta I_{\alpha\alpha'}(r)}\right)\\
&~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\times\rho_\alpha^\mathrm{b}e^{\frac{\beta}{m_\alpha}\left(\hat F_{\rho_\alpha}^\mathrm{b,res}-\hat F_{\rho_\alpha}^\mathrm{res}(r)-V_\alpha^\mathrm{ext}(r)\right)}\prod_{\alpha'}I_{\alpha\alpha'}(r)
\end{align*}$$
and finally
$$q_\alpha(r)=\Delta\rho_\alpha(r)+\left(\frac{\beta}{m_\alpha}\Delta\hat F_{\rho_\alpha}^\mathrm{res}(r)-\sum_{\alpha'}\frac{\Delta I_{\alpha\alpha'}(r)}{I_{\alpha\alpha'}(r)}\right)\mathcal{P}_\alpha\left(r;\left[\rho(r)\right]\right)$$ (eqn:newton_rhs)
Neglecting the second term in eq. {eq}`eqn:newton_rhs` leads to $\Delta\rho_\alpha(r)=\mathcal{F}_\alpha\left(r;\left[\rho(r)\right]\right)$ which is the search direction of the Picard iteration. This observation implies that the Picard iteration is an approximation of the Newton solver that neglects the residual contribution to the Jacobian. Only using the ideal gas contribution to the Jacobian is a reasonable approximation for low densities and therefore, the Picard iteration converges quickly (with a large damping coefficient $\nu$) for low densities.
The second functional derivative of the residual Helmholtz energy can be rewritten in terms of the weight functions.
$$\hat F_{\rho_\alpha\rho_\beta}^\mathrm{res}(r,r')=\int\frac{\delta^2\hat f^\mathrm{res}(r'')}{\delta\rho_\alpha(r)\delta\rho_\beta(r')}\mathrm{d}r''=\int\sum_{\alpha\beta}\hat f^\mathrm{res}_{\alpha\beta}(r'')\frac{\delta n_\alpha(r'')}{\delta\rho_\alpha(r)}\frac{\delta n_\beta(r'')}{\delta\rho_\beta(r')}\mathrm{d}r''$$
Here $\hat f^\mathrm{res}_{\alpha\beta}=\frac{\partial^2\hat f^\mathrm{res}}{\partial n_\alpha\partial n_\beta}$ is the second partial derivative of the reduced Helmholtz energy density with respect to the weighted densities $n_\alpha$ and $n_\beta$. The definition of the weighted densities $n_\alpha(r)=\sum_i\int\rho_i(r')\omega_\alpha^i(r-r')\mathrm{d}r'$ is used to simplify the expression further.
$$\hat F_{\rho_\alpha\rho_\beta}^\mathrm{res}(r,r')=\int\sum_{\alpha\beta}\hat f^\mathrm{res}_{\alpha\beta}(r'')\omega_\alpha^i(r''-r)\omega_\beta^j(r''-r')\mathrm{d}r''$$
With that, $\Delta\hat F_{\rho_\alpha}^\mathrm{res}(r)$ can be rewritten as
$$\begin{align*}
\Delta\hat F_{\rho_\alpha}^\mathrm{res}(r)&=\int\sum_{\alpha\beta}\hat f^\mathrm{res}_{\alpha\beta}(r'')\omega_\alpha^i(r''-r)\underbrace{\sum_j\int\omega_\beta^j(r''-r')\Delta\rho_j(r')\mathrm{d}r'}_{\Delta n_\beta(r'')}\mathrm{d}r''\\
&=\int\sum_\alpha\sum_\beta \hat f^\mathrm{res}_{\alpha\beta}(r'')\Delta n_\beta(r'')\omega_\alpha^i(r''-r)\mathrm{d}r''
\end{align*}$$ (eqn:newton_F)
To simplify the expression for $\Delta I_{\alpha\alpha'}(r)$, the recursive definition of the bond integrals is used.
$$\begin{align*}
\Delta I_{\alpha\alpha'}(r)&=\iint\sum_\beta\frac{\delta}{\delta\rho_\beta(r'')}\left(e^{\frac{\beta}{m_{\alpha'}}\left(\hat F_{\rho_{\alpha'}}^\mathrm{b,res}-\hat F_{\rho_{\alpha'}}^\mathrm{res}(r')-V_{\alpha'}^\mathrm{ext}(r')\right)}\left(\prod_{\alpha''\neq\alpha}I_{\alpha'\alpha''}(r')\right)\omega_\mathrm{chain}^{\alpha\alpha'}(r-r')\right)\\
&~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\times\Delta\rho_\beta(r'')\mathrm{d}r'\mathrm{d}r''\\
&=\iint\sum_\beta\left(-\frac{\beta}{m_{\alpha'}}\hat F_{\rho_{\alpha'}\rho_\beta}^\mathrm{res}(r',r'')+\sum_{\alpha''\neq\alpha}\frac{1}{I_{\alpha'\alpha''}(r')}\frac{\delta I_{\alpha'\alpha''}(r')}{\delta\rho_\beta(r'')}\right)e^{\frac{\beta}{m_{\alpha'}}\left(\hat F_{\rho_{\alpha'}}^\mathrm{b,res}-\hat F_{\rho_{\alpha'}}^\mathrm{res}(r')-V_{\alpha'}^\mathrm{ext}(r')\right)}\\
&~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\times\left(\prod_{\alpha''\neq\alpha}I_{\alpha'\alpha''}(r')\right)\omega_\mathrm{chain}^{\alpha\alpha'}(r-r')\Delta\rho_\beta(r'')\mathrm{d}r'\mathrm{d}r''
\end{align*}$$
Here, the definition of $\Delta\hat F_{\rho_\alpha}^\mathrm{res}(r)$ and $\Delta I_{\alpha\alpha'}(r)$ can be inserted leading to a recursive calculation of $\Delta I_{\alpha\alpha'}(r)$ similar to the original bond integrals.
$$\begin{align*}
\Delta I_{\alpha\alpha'}(r)&=\int\left(-\frac{\beta}{m_{\alpha'}}\Delta\hat F_{\rho_{\alpha'}}^\mathrm{res}(r')+\sum_{\alpha''\neq\alpha}\frac{\Delta I_{\alpha'\alpha''}(r')}{I_{\alpha'\alpha''}(r')}\right)\\
&~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\times e^{\frac{\beta}{m_{\alpha'}}\left(\hat F_{\rho_{\alpha'}}^\mathrm{b,res}-\hat F_{\rho_{\alpha'}}^\mathrm{res}(r')-V_{\alpha'}^\mathrm{ext}(r')\right)}\left(\prod_{\alpha''\neq\alpha}I_{\alpha'\alpha''}(r')\right)\omega_\mathrm{chain}^{\alpha\alpha'}(r-r')\mathrm{d}r'
\end{align*}$$ (eqn:newton_I)
In every iteration of GMRES, $q(r)$ needs to be evaluated from eqs. {eq}`eqn:newton_rhs`, {eq}`eqn:newton_F` and {eq}`eqn:newton_I`. The operations required for that are analogous to the calculation of weighted densities and the functional derivative in the Euler-Lagrange equation itself. Details of GMRES, including the pseudocode that the implementation in $\text{FeO}_\text{s}$ is based on, are given on [Wikipedia](https://de.wikipedia.org/wiki/GMRES-Verfahren) (German).
The Newton solver converges exceptionally fast compared to a simple Picard iteration. The faster convergence comes at the cost of requiring multiple steps for solving the linear subsystem. With the algorithm outlined here, the evaluation of the second partial derivatives of the Helmholtz energy density is only required once for every Newton step. The GMRES algorithm only uses the very efficient convolution integrals and no additional evaluation of the model.
## Anderson mixing
# Predictive density gradient theory
Predictive density gradient theory (pDGT) is an efficient approach for the prediction of surface tensions, which is derived from non-local DFT, see [Rehner et al. (2018)](https://journals.aps.org/pre/abstract/10.1103/PhysRevE.98.063312). A gradient expansion is applied to the weighted densities of the Helmholtz energy functional to second order as well as to the Helmholtz energy density to first order.
Weighted densities (in non-local DFT) are determined from
$$ n_\alpha(\mathbf{r})=\sum_in_\alpha^i(\mathbf{r})=\sum_i\int\rho_i(\mathbf{r}- \mathbf{r}')\omega_\alpha^i(\mathbf{r}')\mathrm{d}\mathbf{r}'.$$
These convolutions are time-consuming calculations. Therefore, these equations are simplified by using a Taylor expansion around $\mathbf{r}$ for the density of each component $\rho_i$ as
$$\rho_i(\mathbf{r}-\mathbf{r}')=\rho_i(\mathbf{r})-\nabla\rho_i(\mathbf{r})\cdot \mathbf{r}'+\frac{1}{2}\nabla\nabla\rho_i(\mathbf{r}):\mathbf{r}'\mathbf{r}'+\ldots$$
In the convolution integrals, the integration over angles can now be performed analytically for the spherically symmetric weight functions $\omega_\alpha^i(\mathbf{r})=\omega_\alpha^i(r)$
which provides
$$ n_\alpha^i(\mathbf{r})=\rho_i(\mathbf{r})\underbrace{4\pi\int_0^\infty \omega_\alpha^i(r)r^2\mathrm{d} r}_{\omega_\alpha^{i0}}
+\nabla^2\rho_i(\mathbf{r})\underbrace{\frac{2}{3}\pi\int_0^\infty\omega_\alpha^i(r)r^4\mathrm{d} r}_{\omega_\alpha^{i2}}+\ldots$$
with the weight constants $\omega_\alpha^{i0}$ and $\omega_\alpha^{i2}$.
The resulting weighted densities can be split into a local part $n_\alpha^0(\mathbf{r})$ and an excess part $\Delta n_\alpha(\mathbf{r})$ as
$$n_\alpha(\mathbf{r})=\underbrace{\sum_i\rho_i(\mathbf{r}) \omega_\alpha^{i0}}_{n_\alpha^0} +\underbrace{\sum_i\nabla^2\rho_i(\mathbf{r})\omega_\alpha^{i2}+\ldots}_{\Delta n_\alpha}.$$
The second simplification is the expansion of the reduced residual
Helmholtz energy density $\Phi(\{ n_\alpha\})$ around the local density approximation truncated after the second term
$$ \Phi(\lbrace n_\alpha\rbrace)
=\Phi(\lbrace n_\alpha^0\rbrace)
+\sum_i\sum_\alpha\frac{\partial\Phi}{\partial n_\alpha}\omega_\alpha^{i2}\nabla^2\rho_i + \ldots $$
The Helmholtz energy functional (which was introduced in the section about the [Euler-Lagrange equation](euler_lagrange_equation.md)) then reads
$$ F[\mathbf{\rho}(\mathbf{r})]=\int\left(f(\mathbf{\rho})+\sum_{ij}\frac{c_{ij}(\mathbf{\rho})}{2}\nabla\rho_i\cdot\nabla\rho_j\right)\mathrm{d}\mathbf{r}$$
with the density dependent influence parameter
$$ \beta c_{ij}(\mathbf{\rho})=-\sum_{\alpha\beta}\frac{\partial^2\Phi}{\partial n_\alpha\partial n_\beta}\left(\omega_\alpha^{i2}\omega_\beta^{j0}+ \omega_\alpha^{i0}\omega_\beta^{j2}\right).$$
and the local Helmholtz energy density $f(\mathbf{\rho})$.
For pure components, as derived in the original publication, the surface tension can be calculated from the surface excess grand potential per area according to
$$ \gamma=\frac{F-\mu N+pV}{A}=\int_{\rho^\mathrm{V}}^{\rho^\mathrm{L}} \sqrt{2c \left(f(\rho)-\rho\mu+p\right) } d\rho $$
Thus, no iterative solver is necessary to calculate the surface tension of pure components, which is a major advantage of pDGT. Finally, the density profile can be calculated from
$$ z(\rho)=\int_{\rho^\mathrm{V}}^{\rho} \sqrt{\frac{c/2}{ f(\rho)-\rho\mu+p} } d\rho $$
# Derivatives of density profiles
For converged density profiles equilibrium properties can be calculated as partial derivatives of thermodynamic potentials analogous to classical (bulk) thermodynamics. The difference is that the derivatives have to be along a path of valid density profiles (solutions of the [Euler-Lagrange equation](euler_lagrange_equation.md)).
The density profiles are calculated implicitly from the Euler-Lagrange equation, which can be written simplified as
$$\Omega_{\rho_i}(T,\lbrace\mu_k\rbrace,[\lbrace\rho_k(\mathbf{r})\rbrace])=F_{\rho_i}(T,[\lbrace\rho_k(\mathbf{r})\rbrace])-\mu_i+V_i^\mathrm{ext}(\mathbf{r})=0$$ (eqn:euler_lagrange)
Incorporating bond integrals can be done similar to the section on the [Newton solver](solver.md) but will not be discussed in this section. The derivatives of the density profiles can then be calculated from the total differential of eq. {eq}`eqn:euler_lagrange`, leading to
$$\mathrm{d}\Omega_{\rho_i}(\mathbf{r})=\left(\frac{\partial\Omega_{\rho_i}(\mathbf{r})}{\partial T}\right)_{\mu_k,\rho_k}\mathrm{d}T+\sum_j\left(\frac{\partial\Omega_{\rho_i}(\mathbf{r})}{\partial\mu_j}\right)_{T,\mu_k,\rho_k}\mathrm{d}\mu_j+\int\sum_j\left(\frac{\delta\Omega_{\rho_i}(\mathbf{r})}{\delta\rho_j(\mathbf{r}')}\right)_{T,\mu_k,\rho_k}\delta\rho_j(\mathbf{r}')\mathrm{d}\mathbf{r}'=0$$
Using eq. {eq}`eqn:euler_lagrange` and the shortened notation for derivatives of functionals in their natural variables, e.g., $F_T=\left(\frac{\partial F}{\partial T}\right)_{\rho_k}$, the expression can be simplified to
$$F_{T\rho_i}(\mathbf{r})\mathrm{d}T-\mathrm{d}\mu_i+\int\sum_j F_{\rho_i\rho_j}(\mathbf{r},\mathbf{r}')\delta\rho_j(\mathbf{r}')\mathrm{d}\mathbf{r}'=0$$ (eqn:gibbs_duhem)
Similar to the Gibbs-Duhem relation for bulk phases, eq. {eq}`eqn:gibbs_duhem` shows how temperature, chemical potentials and the density profiles in an inhomogeneous system cannot be varied independently. The derivatives of the density profiles with respect to the intensive variables can be directly identified as
$$\int\sum_j F_{\rho_i\rho_j}(\mathbf{r},\mathbf{r}')\left(\frac{\partial\rho_j(\mathbf{r}')}{\partial T}\right)_{\mu_k}\mathrm{d}\mathbf{r}'=-F_{T\rho_i}(\mathbf{r})$$
and
$$\int\sum_j F_{\rho_i\rho_j}(\mathbf{r},\mathbf{r}')\left(\frac{\partial\rho_j(\mathbf{r}')}{\partial\mu_k}\right)_{T}\mathrm{d}\mathbf{r}'=\delta_{ik}$$ (eqn:drho_dmu)
Both of these expressions are implicit (linear) equations for the derivatives. They can be solved rapidly analogously to the implicit expression appearing in the [Newton solver](solver.md). In practice, it is useful to explicitly cancel out the (often unknown) thermal de Broglie wavelength $\Lambda_i$ from the expression where it has no influence. This is done by splitting the intrinsic Helmholtz energy into an ideal gas and a residual part.
$$F=k_\mathrm{B}T\int\sum_im_i\rho_i(\mathbf{r})\left(\ln\left(\rho_i(\mathbf{r})\Lambda_i^3\right)-1\right)\mathrm{d}\mathbf{r}+\mathcal{\hat F}^\mathrm{res}$$
Then $F_{\rho_i\rho_j}(\mathbf{r},\mathbf{r}')=m_i\frac{k_\mathrm{B}T}{\rho_i(\mathbf{r})}\delta_{ij}\delta(\mathbf{r}-\mathbf{r}')+\mathcal{\hat F}_{\rho_i\rho_j}^\mathrm{res}(\mathbf{r},\mathbf{r}')$ and eq. {eq}`eqn:drho_dmu` can be rewritten as
$$m_i\frac{k_\mathrm{B}T}{\rho_i(\mathbf{r})}\left(\frac{\partial\rho_i(\mathbf{r})}{\partial\mu_k}\right)_T+\int\sum_j\mathcal{\hat F}_{\rho_i\rho_j}^\mathrm{res}(\mathbf{r},\mathbf{r}')\left(\frac{\partial\rho_j(\mathbf{r}')}{\partial\mu_k}\right)_{T}\mathrm{d}\mathbf{r}'=\delta_{ik}$$
In practice, the division by the density should be avoided for numerical reasons and the energetic properties are reduced with the factor $\beta=\frac{1}{k_\mathrm{B}T}$. The final expression is
$$m_i\left(\frac{\partial\rho_i(\mathbf{r})}{\partial\beta\mu_k}\right)_T+\rho_i(\mathbf{r})\int\sum_j\beta\mathcal{\hat F}_{\rho_i\rho_j}^\mathrm{res}(\mathbf{r},\mathbf{r}')\left(\frac{\partial\rho_j(\mathbf{r}')}{\partial\beta\mu_k}\right)_{T}\mathrm{d}\mathbf{r}'=\rho_i(\mathbf{r})\delta_{ik}$$
For the temperature derivative, it is more convenient to express eq. {eq}`eqn:gibbs_duhem` in terms of the pressure of a bulk phase that is in equilibrium with the inhomogeneous system. In the following, only paths along **constant bulk composition** are considered. With this constraint, the total differential of the chemical potential simplifies to
$$\mathrm{d}\mu_i=-s_i\mathrm{d}T+v_i\mathrm{d}p$$
which can be used in eq. {eq}`eqn:gibbs_duhem` to give
$$\left(F_{T\rho_i}(\mathbf{r})+s_i\right)\mathrm{d}T+\int\sum_j F_{\rho_i\rho_j}(\mathbf{r},\mathbf{r}')\delta\rho_j(\mathbf{r}')\mathrm{d}\mathbf{r}'=v_i\mathrm{d}p$$
Even though $s_i$ is readily available in $\text{FeO}_\text{s}$ it is useful at this point to rewrite the partial molar entropy as
$$s_i=v_i\left(\frac{\partial p}{\partial T}\right)_{V,N_k}-F_{T\rho_i}^\mathrm{b}$$
Then, the intrinsic Helmholtz energy can be split into an ideal gas and a residual part again, and the de Broglie wavelength cancels.
$$\begin{align*}
&\left(m_ik_\mathrm{B}\ln\left(\frac{\rho_i(\mathbf{r})}{\rho_i^\mathrm{b}}\right)+F_{T\rho_i}^\mathrm{res}(\mathbf{r})-F_{T\rho_i}^\mathrm{b,res}+v_i\left(\frac{\partial p}{\partial T}\right)_{V,N_k}\right)\mathrm{d}T\\
&~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~+m_i\frac{k_\mathrm{B}T}{\rho_i(\mathbf{r})}\delta\rho_i(\mathbf{r})+\int\sum_j\mathcal{\hat F}_{\rho_i\rho_j}^\mathrm{res}(\mathbf{r},\mathbf{r}')\delta\rho_j(\mathbf{r}')\mathrm{d}\mathbf{r}'=v_i\mathrm{d}p
\end{align*}$$
Finally the expressions for the derivatives with respect to pressure
$$m_i\left(\frac{\partial\rho_i(\mathbf{r})}{\partial\beta p}\right)_{T,x_k}+\rho_i(\mathbf{r})\int\sum_j\beta\mathcal{\hat F}_{\rho_i\rho_j}^\mathrm{res}(\mathbf{r},\mathbf{r}')\left(\frac{\partial\rho_j(\mathbf{r}')}{\partial\beta p}\right)_{T,x_k}\mathrm{d}\mathbf{r}'=v_i\rho_i(\mathbf{r})$$
and temperature
$$\begin{align*}
&m_i\left(\frac{\partial\rho_i(\mathbf{r})}{\partial T}\right)_{p,x_k}+\rho_i(\mathbf{r})\int\sum_j\beta\mathcal{\hat F}_{\rho_i\rho_j}^\mathrm{res}(\mathbf{r},\mathbf{r}')\left(\frac{\partial\rho_j(\mathbf{r}')}{\partial T}\right)_{p,x_k}\mathrm{d}\mathbf{r}'\\
&~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~=-\frac{\rho_i(\mathbf{r})}{k_\mathrm{B}T}\left(m_ik_\mathrm{B}\ln\left(\frac{\rho_i(\mathbf{r})}{\rho_i^\mathrm{b}}\right)+F_{T\rho_i}^\mathrm{res}(\mathbf{r})-F_{T\rho_i}^\mathrm{b,res}+v_i\left(\frac{\partial p}{\partial T}\right)_{V,N_k}\right)
\end{align*}$$
follow. All derivatives $x_i$ shown here can be calculated from the same linear equation
$$m_ix_i+\rho_i(\mathbf{r})\int\sum_j\beta\mathcal{\hat F}_{\rho_i\rho_j}^\mathrm{res}(\mathbf{r},\mathbf{r}')x_j\mathrm{d}\mathbf{r}'=y_i$$
by just replacing the right hand side $y_i$.
|derivative|right hand side|
|-|-|
|$\left(\frac{\partial\rho_i(\mathbf{r})}{\partial\beta\mu_k}\right)_T$|$\rho_i(\mathbf{r})\delta_{ik}$|
|$\left(\frac{\partial\rho_i(\mathbf{r})}{\partial\beta p}\right)_{T,x_k}$|$\rho_i(\mathbf{r})v_i$|
|$\left(\frac{\partial\rho_i(\mathbf{r})}{\partial T}\right)_{p,x_k}$|$-\frac{\rho_i(\mathbf{r})}{k_\mathrm{B}T}\left(m_ik_\mathrm{B}\ln\left(\frac{\rho_i(\mathbf{r})}{\rho_i^\mathrm{b}}\right)+F_{T\rho_i}^\mathrm{res}(\mathbf{r})-F_{T\rho_i}^\mathrm{b,res}+v_i\left(\frac{\partial p}{\partial T}\right)_{V,N_k}\right)$|

# Ideal gas properties
Classical DFT can be used to rapidly determine the ideal gas limit of fluids in porous media. In an ideal gas, there are no interactions between the fluid molecules and therefore the residual Helmholtz energy $F^\mathrm{res}$ and its derivatives vanish. Note that this is only the case for spherical or heterosegmented molecules ($m_\alpha=1$), as the chain contribution in the homosegmented model contains intramolecular interactions. The ideal gas density profile can then be obtained directly from the [Euler-Lagrange equation](euler_lagrange_equation.md):
$$\rho_\alpha^\mathrm{ig}(\mathbf{r})=\rho_\alpha^\mathrm{b}e^{-\beta V_\alpha^\mathrm{ext}(\mathbf{r})}\prod_{\alpha'}I^\mathrm{ig}_{\alpha\alpha'}(\mathbf{r})$$ (eqn:rho_ideal_gas)
$$I^\mathrm{ig}_{\alpha\alpha'}(\mathbf{r})=\int e^{-\beta V_{\alpha'}^\mathrm{ext}(\mathbf{r'})}\left(\prod_{\alpha''\neq\alpha}I^\mathrm{ig}_{\alpha'\alpha''}(\mathbf{r'})\right)\omega_\mathrm{chain}^{\alpha\alpha'}(\mathbf{r}-\mathbf{r'})\mathrm{d}\mathbf{r'}$$
Either from the derivatives presented [previously](derivatives.md), or from directly calculating derivatives of eq. {eq}`eqn:euler_lagrange_mu`, the **Henry coefficient** $H_\alpha$ can be calculated, as
$$H_\alpha(T)=\left(\frac{\partial N_\alpha^\mathrm{ig}}{\partial p_\alpha}\right)_{T,x_k}=\int\left(\frac{\partial\rho_\alpha^\mathrm{ig}(\mathbf{r})}{\partial p_\alpha}\right)_{T,x_k}\mathrm{d}\mathbf{r}=\beta\int e^{-\beta V_\alpha^\mathrm{ext}(\mathbf{r})}\prod_{\alpha'}I^\mathrm{ig}_{\alpha\alpha'}(\mathbf{r})\mathrm{d}\mathbf{r}$$
By construction the Henry coefficients for all segments $\alpha$ of a molecule $i$ are identical. Therefore, the number of adsorbed molecules in an ideal gas state (the Henry regime) can be calculated from
$$N_i^\mathrm{ig}=k_\mathrm{B}T\rho_i^\mathrm{b}H_i(T)$$
The expression can be used in the general equation for the **enthalpy of adsorption** (see [here](enthalpy_of_adsorption.md))
$$0=\sum_j\left(\frac{\partial N_i^\mathrm{ig}}{\partial\mu_j}\right)_T\Delta h_j^\mathrm{ads,ig}+T\left(\frac{\partial N_i^\mathrm{ig}}{\partial T}\right)_{p,x_k}$$
to simplify to
$$0=\rho_i^\mathrm{b}H_i(T)\Delta h_i^\mathrm{ads,ig}+k_\mathrm{B}T^2\rho_i^\mathrm{b}H_i'(T)$$
and finally
$$\Delta h_i^\mathrm{ads,ig}=-k_\mathrm{B}T^2\frac{H_i'(T)}{H_i(T)}$$
For a spherical molecule without bond integrals, the derivative can be evaluated straightforwardly to yield
$$\Delta h_i^\mathrm{ads,ig}=\frac{\int\left(k_\mathrm{B}T-V_i^\mathrm{ext}(\mathbf{r})\right)e^{-\beta V_i^\mathrm{ext}(\mathbf{r})}\mathrm{d}\mathbf{r}}{\int e^{-\beta V_i^\mathrm{ext}(\mathbf{r})}\mathrm{d}\mathbf{r}}$$
Analytical derivatives of the bond integrals can be determined, however, in $\text{FeO}_\text{s}$ automatic differentiation with dual numbers is used to obtain correct derivatives with barely any implementation overhead.

# Classical density functional theory
This section explains the implementation of the core expressions from classical density functional theory in $\text{FeO}_\text{s}$.
```{eval-rst}
.. toctree::
:maxdepth: 1
euler_lagrange_equation
functional_derivatives
solver
derivatives
enthalpy_of_adsorption
ideal_gas
pdgt
```
It is currently still under construction. You can help by [contributing](https://github.com/feos-org/feos/issues/70).

# Euler-Lagrange equation
The fundamental expression in classical density functional theory is the relation between the grand potential functional $\Omega$ and the intrinsic Helmholtz energy functional $F$.
$$\Omega(T,\mu,[\rho(r)])=F(T,[\rho(r)])-\sum_i\int\rho_i(r)\left(\mu_i-V_i^\mathrm{ext}(r)\right)\mathrm{d}r$$
What makes this expression so appealing is that the intrinsic Helmholtz energy functional only depends on the temperature $T$ and the density profiles $\rho_i(r)$ of the system and not on the external potentials $V_i^\mathrm{ext}(r)$.
For a given temperature $T$, chemical potentials $\mu_i$ and external potentials $V_i^\mathrm{ext}(r)$ the grand potential reaches a minimum at equilibrium. Mathematically this condition can be written as
$$\left.\frac{\delta\Omega}{\delta\rho_i(r)}\right|_{T,\mu}=F_{\rho_i}(r)-\mu_i+V_i^{\mathrm{ext}}(r)=0$$ (eqn:euler_lagrange_mu)
where $F_{\rho_i}(r)=\left.\frac{\delta F}{\delta\rho_i(r)}\right|_T$ is short for the functional derivative of the intrinsic Helmholtz energy. In this context, eq. {eq}`eqn:euler_lagrange_mu` is commonly referred to as the Euler-Lagrange equation, an implicit nonlinear integral equation which needs to be solved for the equilibrium density profiles of the system.
For a homogeneous (bulk) system, $V_i^\mathrm{ext}=0$ and we get
$$F_{\rho_i}^\mathrm{b}-\mu_i=0$$ (eqn:euler_lagrange_bulk)
The functional derivative of the Helmholtz energy of a bulk system $F_{\rho_i}^\mathrm{b}$ is a function of the temperature $T$ and bulk densities $\rho^\mathrm{b}$. At this point, it can be advantageous to relate the grand potential of an inhomogeneous system to the densities of a bulk system that is in equilibrium with the inhomogeneous system. This approach has several advantages:
- The thermal de Broglie wavelength $\Lambda$ cancels out.
- If the chemical potential of the system is not known, all variables are the same quantity (densities).
- The bulk system is described by a Helmholtz energy which is explicit in the density, so there are no internal iterations required.
Using eq. {eq}`eqn:euler_lagrange_bulk` in eq. {eq}`eqn:euler_lagrange_mu` leads to the Euler-Lagrange equation
$$\left.\frac{\delta\Omega}{\delta\rho_i(r)}\right|_{T,\rho^\mathrm{b}}=F_{\rho_i}(r)-F_{\rho_i}^\mathrm{b}+V_i^{\mathrm{ext}}(r)=0$$ (eqn:euler_lagrange_rho)
## Spherical molecules
In the simplest case, the molecules under consideration can be described as spherical. Then the Helmholtz energy can be split into an ideal and a residual part:
$$\beta F=\sum_i\int\rho_i(r)\left(\ln\left(\rho_i(r)\Lambda_i^3\right)-1\right)\mathrm{d}r+\beta F^\mathrm{res}$$
with the thermal de Broglie wavelength $\Lambda_i$. The functional derivatives for an inhomogeneous and a bulk system follow as
$$\beta F_{\rho_i}(r)=\ln\left(\rho_i(r)\Lambda_i^3\right)+\beta F_{\rho_i}^\mathrm{res}$$
$$\beta F_{\rho_i}^\mathrm{b}=\ln\left(\rho_i^\mathrm{b}\Lambda_i^3\right)+\beta F_{\rho_i}^\mathrm{b,res}$$
Using these expressions in eq. {eq}`eqn:euler_lagrange_rho` results in
$$\left.\frac{\delta\beta\Omega}{\delta\rho_i(r)}\right|_{T,\rho^\mathrm{b}}=\ln\left(\frac{\rho_i(r)}{\rho_i^\mathrm{b}}\right)+\beta\left(F_{\rho_i}^\mathrm{res}(r)-F_{\rho_i}^\mathrm{b,res}+V_i^{\mathrm{ext}}(r)\right)=0$$
The Euler-Lagrange equation can be recast as
$$\rho_i(r)=\rho_i^\mathrm{b}e^{\beta\left(F_{\rho_i}^\mathrm{b,res}-F_{\rho_i}^\mathrm{res}(r)-V_i^\mathrm{ext}(r)\right)}$$
which is convenient because it leads directly to a recurrence relation known as Picard iteration.
## Homosegmented chains
For chain molecules that do not resolve individual segments (essentially the PC-SAFT Helmholtz energy functional) a chain contribution is introduced as
$$\beta F^\mathrm{chain}=-\sum_i\int\rho_i(r)\left(m_i-1\right)\ln\left(\frac{y_{ii}\lambda_i(r)}{\rho_i(r)}\right)\mathrm{d}r$$
Where $m_i$ is the number of segments (i.e., the PC-SAFT chain length parameter), $y_{ii}$ is the cavity correlation function at contact in the reference fluid, and $\lambda_i$ is a weighted density.
The presence of $\rho_i(r)$ in the logarithm poses numerical problems. Therefore, it is convenient to rearrange the expression as
$$\begin{align}
\beta F^\mathrm{chain}=&\sum_i\int\rho_i(r)\left(m_i-1\right)\left(\ln\left(\rho_i(r)\Lambda_i^3\right)-1\right)\mathrm{d}r\\
&\underbrace{-\sum_i\int\rho_i(r)\left(m_i-1\right)\left(\ln\left(y_{ii}\lambda_i(r)\Lambda_i^3\right)-1\right)\mathrm{d}r}_{\beta\hat{F}^\mathrm{chain}}
\end{align}$$
Then the total Helmholtz energy
$$\beta F=\sum_i\int\rho_i(r)\left(\ln\left(\rho_i(r)\Lambda_i^3\right)-1\right)\mathrm{d}r+\beta F^\mathrm{chain}+\beta F^\mathrm{res}$$
can be rearranged to
$$\beta F=\sum_i\int\rho_i(r)m_i\left(\ln\left(\rho_i(r)\Lambda_i^3\right)-1\right)\mathrm{d}r+\underbrace{\beta\hat{F}^\mathrm{chain}+\beta F^\mathrm{res}}_{\beta\hat{F}^\mathrm{res}}$$
The functional derivatives are then similar to the spherical case
$$\beta F_{\rho_i}(r)=m_i\ln\left(\rho_i(r)\Lambda_i^3\right)+\beta\hat{F}_{\rho_i}^\mathrm{res}(r)$$
$$\beta F_{\rho_i}^\mathrm{b}=m_i\ln\left(\rho_i^\mathrm{b}\Lambda_i^3\right)+\beta\hat{F}_{\rho_i}^\mathrm{b,res}$$
and lead to a slightly modified Euler-Lagrange equation
$$\rho_i(r)=\rho_i^\mathrm{b}e^{\frac{\beta}{m_i}\left(\hat F_{\rho_i}^\mathrm{b,res}-\hat F_{\rho_i}^\mathrm{res}(r)-V_i^\mathrm{ext}(r)\right)}$$
## Heterosegmented chains
The expressions are more complex for models in which density profiles of individual segments are considered. A derivation is given in the appendix of [Rehner et al. (2022)](https://journals.aps.org/pre/abstract/10.1103/PhysRevE.105.034110). The resulting Euler-Lagrange equation is given as
$$\rho_\alpha(r)=\Lambda_i^{-3}e^{\beta\left(\mu_i-\hat F_{\rho_\alpha}(r)-V_\alpha^\mathrm{ext}(r)\right)}\prod_{\alpha'}I_{\alpha\alpha'}(r)$$
with the bond integrals $I_{\alpha\alpha'}(r)$ that are calculated recursively from
$$I_{\alpha\alpha'}(r)=\int e^{-\beta\left(\hat{F}_{\rho_{\alpha'}}(r')+V_{\alpha'}^\mathrm{ext}(r')\right)}\left(\prod_{\alpha''\neq\alpha}I_{\alpha'\alpha''}(r')\right)\omega_\mathrm{chain}^{\alpha\alpha'}(r-r')\mathrm{d}r'$$
The index $\alpha$ is used for every segment on component $i$, $\alpha'$ refers to all segments bonded to segment $\alpha$ and $\alpha''$ to all segments bonded to $\alpha'$.
For bulk systems the expressions simplify to
$$\rho_\alpha^\mathrm{b}=\Lambda_i^{-3}e^{\beta\left(\mu_i-\sum_\gamma\hat F_{\rho_\gamma}^\mathrm{b,res}\right)}$$
which shows that, by construction, the density of every segment in a molecule is identical in a bulk system. The index $\gamma$ refers to all segments on molecule $i$. The expressions can be combined in a similar way as for the molecular DFT:
$$\rho_\alpha(r)=\rho_\alpha^\mathrm{b}e^{\beta\left(\sum_\gamma\hat F_{\rho_\gamma}^\mathrm{b,res}-\hat F_{\rho_\alpha}^\mathrm{res}(r)-V_\alpha^\mathrm{ext}(r)\right)}\prod_{\alpha'}I_{\alpha\alpha'}(r)$$
At this point it can be numerically useful to redistribute the bulk contributions back into the bond integrals
$$\rho_\alpha(r)=\rho_\alpha^\mathrm{b}e^{\beta\left(\hat F_{\rho_\alpha}^\mathrm{b,res}-\hat F_{\rho_\alpha}^\mathrm{res}(r)-V_\alpha^\mathrm{ext}(r)\right)}\prod_{\alpha'}I_{\alpha\alpha'}(r)$$
$$I_{\alpha\alpha'}(r)=\int e^{\beta\left(\hat F_{\rho_{\alpha'}}^\mathrm{b,res}-\hat F_{\rho_{\alpha'}}^\mathrm{res}(r')-V_{\alpha'}^\mathrm{ext}(r')\right)}\left(\prod_{\alpha''\neq\alpha}I_{\alpha'\alpha''}(r')\right)\omega_\mathrm{chain}^{\alpha\alpha'}(r-r')\mathrm{d}r'$$
## Combined expression
To avoid having multiple implementations of the central part of the DFT code, the different descriptions of molecules can be combined in a single version of the Euler-Lagrange equation:
$$\rho_\alpha(r)=\rho_\alpha^\mathrm{b}e^{\frac{\beta}{m_\alpha}\left(\hat F_{\rho_\alpha}^\mathrm{b,res}-\hat F_{\rho_\alpha}^\mathrm{res}(r)-V_\alpha^\mathrm{ext}(r)\right)}\prod_{\alpha'}I_{\alpha\alpha'}(r)$$
$$I_{\alpha\alpha'}(r)=\int e^{\frac{\beta}{m_{\alpha'}}\left(\hat F_{\rho_{\alpha'}}^\mathrm{b,res}-\hat F_{\rho_{\alpha'}}^\mathrm{res}(r')-V_{\alpha'}^\mathrm{ext}(r')\right)}\left(\prod_{\alpha''\neq\alpha}I_{\alpha'\alpha''}(r')\right)\omega_\mathrm{chain}^{\alpha\alpha'}(r-r')\mathrm{d}r'$$
If molecules consist of single (possibly non-spherical) segments, the Euler-Lagrange equation simplifies to that of the homosegmented chains shown above. For heterosegmented chains, the correct expression is obtained by setting $m_\alpha=1$.
| Markdown |
3D | bu-cisl/3D-Fourier-ptychography-on-LED-array-microscope | MultiSlice_SuperRes.m | .m | 8,795 | 286 | % By Lei Tian, lei_tian@berkeley.edu
% last modified 5/27/2014
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
clear all; clc;
addpath(['..\3D_code']);
% % Define Fourier operators
F = @(x) fftshift(fft2(ifftshift(x)));
Ft = @(x) fftshift(ifft2(ifftshift(x)));
% F = @(x) fftshift(fft2(x));
% Ft = @(x) ifft2(ifftshift(x));
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% inverse problem use alternating projection
% 2/28/2014
% experiments, 4/1/2014
% account for geometry WITHOUT condenser, 3/22/2014
% By Lei Tian, lei_tian@alum.mit.edu
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% 1 LED expt
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
numlit = 1;
filedir = ['G:\Project_Backup\LED_Array_Microscopy\Expt\NoCondenser\TE300\2014-5-20\CardiacTissue\1LED\'];
imglist = dir([filedir,'ILED*.tif']);
% out_dir = ['.\Res',num2str(numlit),'LED-Result'];
% mkdir(out_dir);
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% define the current processing patch starting coordinates
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% nstart = [784,1375];
% nstart = [981,1181];
% nstart = [1444,701];
Np = 140;
ns1 = 1:Np-Np/10:2160; ns1 = ns1(1:end-1);
ns2 = 11:Np-Np/10:2560; ns2 = ns2(1:end-1);
[ns2,ns1] = meshgrid(ns2,ns1);
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% parameters for multi-slice model
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% distance between slices, unit: um
dz = 10;
% distance from the Nth slice to the focal plane
z0 = 0;
Nslice = 2;
%%
for l = 100%1:length(ns1(:))
nstart = [ns1(l),ns2(l)];
fn = [filedir,'Iled_0147.tif'];
I = imread(fn);
figure(30); imagesc(I(nstart(1):nstart(1)+Np-1,nstart(2):nstart(2)+Np-1));
axis image; colormap gray; axis off; drawnow;
% setup output folder for each patch
out_dir = ['.\Res-patch-',num2str(nstart(1)),'-',num2str(nstart(2)),'-',...
num2str(numlit),'LED-Result'];
mkdir(out_dir);
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% read in general system parameters
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
SystemSetup4x_Multislice();
%% load in data
% LED intensity normalization from the calibration data
% load('..\Intensity-LED-calibrate\ILEDMean40x');
Nimg = Nled;
Imea = zeros(Np,Np,Nimg);
Ibk = zeros(Nimg,1);
for m = 1:Nimg
fn = [filedir,imglist(m).name];
I = imread(fn);
Imea(:,:,m) = double(I(nstart(1):nstart(1)+Np-1,nstart(2):nstart(2)+Np-1));
bk1 = mean2(double(I(2101:end,1:400)));
bk2 = mean2(double(I(2101:end,1501:1800)));
% bk3 = mean2(double(I(650:700,1100:1500)));
Ibk(m) = min([bk1,bk2]);
% Inorm(:,:,m) = Imea(:,:,m)/ILEDMean40x(m);
if Ibk(m)>300
Ibk(m) = Ibk(m-1);
end
end
%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% pre-processing the data to DENOISING is IMPORTANT
% Denoise I. remove high freq noise beyond support of OTF
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
Ilpf = zeros(Np,Np,Nimg);
for m = 1:Nimg
% filter out the high freq noise
Ilpf(:,:,m) = Ft(F(Imea(:,:,m)).*Ps_otf);
end
%% corresponding LED locations
% find the on-LED indices
ledidx = 1:Nled;
ledidx = reshape(ledidx,numlit,Nimg);
lit = Litidx(ledidx);
lit = reshape(lit,numlit,Nimg);
[litv,lith] = ind2sub([32,32],lit);
% find the index to reorder the measurements so that the image contains the
% central LEDs will be used first during the updates
dis_lit = sqrt((litv-lit_cenv-1).^2+(lith-lit_cenh-1).^2);
[dis_lit2,idx_led] = sort(min(dis_lit,[],1));
Nsh_lit = zeros(numlit,Nimg);
Nsv_lit = zeros(numlit,Nimg);
for m = 1:Nimg
% should make sure it always covers all the leds
% index of LEDs are lit for each pattern
%lit = condenseridx(ceil(rand(numlit,1)*Nled));
% corresponding index of spatial freq for the LEDs are lit
lit0 = lit(:,m);
Nsh_lit(:,m) = idx_u(lit0);
Nsv_lit(:,m) = idx_v(lit0);
end
% reorder the LED indices and intensity measurements according the previous
% dis_lit
Ns = [];
Ns(:,:,1) = Nsv_lit;
Ns(:,:,2) = Nsh_lit;
Imea_reorder = Imea(:,:,idx_led);
Ilpf_reorder = Ilpf(:,:,idx_led);
Ibk_reorder = Ibk(idx_led);
%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% pre-processing the data to DENOISING is IMPORTANT
% Denoise II. background subtraction
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Ithresh_reorder = Ilpf_reorder;
Ithresh_reorder = Ilpf_reorder;
for m = 1:Nimg
Itmp = Ithresh_reorder(:,:,m);
% Itmp(Itmp<mean2(Itmp(1:10,:))) = 0;
% Itmp = Itmp-mean2(Itmp(1:6,:));
% Itmp(Itmp<0) = 0;
% Ithresh_reorder(:,:,m) = Itmp;
Itmp = Itmp-Ibk_reorder(m);
Ithresh_reorder(:,:,m) = Itmp;
% Ithresh_reorder(:,:,m) = Itmp-min(Itmp(:));
Ithresh_reorder(:,:,m) = Ft(F(Ithresh_reorder(:,:,m)).*Ps_otf);
Ithresh_reorder(Ithresh_reorder<0) = 0;
end
% Imea_norm_reorder = Imea_norm(:,:,idx_led);
Ns_reorder = Ns(:,idx_led,:);
%% this part check if the calculation of brightfield and darkfield matches the experiments
illumination_na_reorder = illumination_na_used(idx_led);
for m = 1:Nimg
Imean(m) = mean2(Ithresh_reorder(:,:,m));
end
snr = Imean(:)./Ibk_reorder(:);
%% reconstruction algorithm
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% use only a sub-set number of the measurements to reconstruct
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Nused_vec = 10:10:293;
% for qq = 1:length(Nused_vec)
% Nused = Nused_vec(qq);
% Nused = 293;
Nused = 293;
% NBF = 37;
% Ntrans = 46;
% if Nused<=NBF
% I = Imea_reorder(:,:,1:Nused);
% Ns2 = Ns_reorder(:,1:Nused,:);
% else
% I = Imea_reorder(:,:,[1:NBF,Ntrans:Ntrans+Nused-NBF-1]);
% Ns2 = Ns_reorder(:,[1:NBF,Ntrans:Ntrans+Nused-NBF-1],:);
% end
% I = Ithresh_reorder(:,:,1:Nused);
idx_used = [1:9,find(Imean(10:Nused)<Imean(1)/5)+9];
idx_err = find(Imean(10:Nused)>Imean(1)/5)+9;
disp(['problematic frames are ',num2str(idx_err),' and are discarded']);
% idx_used = [1:11,13:16,18:Nused];
I = Ithresh_reorder(:,:,idx_used);
Ns2 = Ns_reorder(:,idx_used,:);
% reconstruction algorithm
opts.tol = 1;
opts.maxIter = 300;
opts.minIter = 150;
opts.monotone = 1;
% 'full', display every subroutin,
% 'iter', display only results from outer loop
% 0, no display
opts.display = 'iter';
% opts.saveIterResult = 0;
% opts.out_dir = ['.\tmp2'];
% mkdir(opts.out_dir);
% upsample the intensity
% I0interp = real(Ft(padarray(F(I(:,:,1)),[(N_obj-Np)/2,(N_obj-Np)/2])));
% opts.O0 = F(sqrt(I0interp));
% this method does not work for global minimization method
opts.Ps = w_NA;
opts.iters = 1;
% index of led used in the experiment
% opts.ledidx = ledidx(:,idx_led);
opts.OP_alpha = 1e-2;
opts.OP_beta = 1e-2;
opts.BP_alpha = 1e-2;
opts.BP_beta = 1e-2;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% step 0: initialization
% idea: initialize with lightfield refocused intensities at different
% slices with only bright field data
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
O0 = F(sqrt(I(:,:,1))).*w_NA;
O0 = padarray(O0,[(N_obj-Np)/2,(N_obj-Np)/2]);
% estimated object field at focal plane
o0 = Ft(O0);
% define propagation operator, f: input field, h: propagation transfer
% function
Prop = @(f,h) Ft(F(f).*h);
% propagate to the last slice
o0 = Prop(o0,conj(H0));
% one reasonable (or trial solution) is the last slice = o0 and the rest
% slices = 1:
o_slice0 = ones(N_obj,N_obj,Nslice);
% o_slice0(:,:,Nslice) = o0;
for l = 1:Nslice
o_slice0(:,:,l) = o0;
end
opts.O0 = o_slice0;
opts.P0 = w_NA;
%%
[O,P,err] = AlterMin_MultiSlice(I,[N_obj,N_obj],Nslice, round(Ns2),H, H0, opts);
f3 = figure(88);
for m = 1:Nslice
subplot(2,2,2*m-1); imagesc(abs(O(:,:,m))); axis image; colormap gray; colorbar;
title('ampl(o)');
subplot(2,2,2*m); imagesc(angle(O(:,:,m))); axis image; colormap gray; colorbar;
title('phase(o)');
% subplot(223); imagesc(abs(P)); axis image; colormap gray; colorbar;
% title('ampl(P)');
% subplot(224); imagesc(angle(P)); axis image; colormap gray; colorbar;
% title('phase(P)');
end
% f4 = figure(79); plot(c(1:Nused));
% title('adaptive intensity correction factor');
fn = ['RandLit-',num2str(numlit),'-',num2str(Nused)];
save([out_dir,'\',fn],'O','P','err_pc','c','idx_err','snr');
saveas(f3,[out_dir,'\R-',fn,'.png']);
% saveas(f2,[out_dir,'\err-',fn,'.png']);
fprintf([fn,' saved\n']);
end
| MATLAB |
3D | bu-cisl/3D-Fourier-ptychography-on-LED-array-microscope | 3D_code/Fwd_Prop_MultiSlice_Intensity.m | .m | 1,375 | 48 | function I_est = Fwd_Prop_MultiSlice_Intensity( i0, o_slice, k2, dz, P, H0)
%FWD_PROP_MULTISLICE computes the field using multislice approach, with
%propagator H
% Inputs:
% H: fwd propagator between slices
% H0: fwd propagator from Nth slice to focal plane of objective
% o_slice0: current estimate of multi-slice object
% i0: KNOWN illumination
% by Lei Tian (lei_tian@alum.mit.edu)
% last modified on 5/28/2014
% % Define Fourier operators
F = @(x) fftshift(fft2(ifftshift(x)));
Ft = @(x) fftshift(ifft2(ifftshift(x)));
% define propagation operator, f: input field, h: propagation transfer
% function
Prop = @(f,h) Ft(F(f).*h);
% N: lateral dimension, Nslice: # of total z-slices
[N,~,Nslice] = size(o_slice);
Np = size(P,1);
cen0 = round((N+1)/2);
downsamp = @(x) x(cen0-Np/2:cen0+Np/2-1,cen0-Np/2:cen0+Np/2-1);
% initialize incident field at each slice
% phi = zeros(N,N,Nslice);
% phi = i0; % incident field of 1st slice is illumination
% initialize output field at each slice
% psi = zeros(N,N,Nslice);
psi = i0.*o_slice(:,:,1);
for m = 2:Nslice
H = exp(1i*k2*dz(m-1));
% propagate from neighboring slices
phi = Prop(psi,H);
% output field = incidence * object
psi = phi.*o_slice(:,:,m);
end
% estimated intensity w/o correction term
I_est = abs(Ft(downsamp(F(psi)).*P.*H0)).^2;
end
| MATLAB |
3D | bu-cisl/3D-Fourier-ptychography-on-LED-array-microscope | 3D_code/Back_Prop_MultiSlice_v2.m | .m | 1,955 | 54 | function [ o_slice ] = Back_Prop_MultiSlice_v2( O, k2, dz, o_slice0, phi0, psi0, ...
i0, alpha, beta, iters)
%FWD_PROP_MULTISLICE computes the field using multislice approach, with
%propagator H
% Inputs:
% O: total object field (from multi-slice propagation) at the pupil plane
% H: fwd propagator between slices
% H0: fwd propagator from Nth slice to focal plane of objective
% o_slice0: current estimate of multi-slice object
% phi0: current estimate of incident field at each slice
% psi0: current estimate of output field at each slice
% i0: KNOWN illumination
% by Lei Tian (lei_tian@alum.mit.edu)
% last modified on 5/28/2014
% % Define Fourier operators
F = @(x) fftshift(fft2(ifftshift(x)));
Ft = @(x) fftshift(ifft2(ifftshift(x)));
% define propagation operator, f: input field, h: propagation transfer
% function
Prop = @(f,h) Ft(F(f).*h);
%% backpropagate means take conj of transfer function
% H = conj(H);
% H0 = conj(H0);
% N: lateral dimension, Nslice: # of total z-slices
[N,~,Nslice] = size(o_slice0);
o_slice = zeros(N,N,Nslice);
%% backprop starts from here
% 1) the last slice is special from Pupil plane to Obj plan
% psi = Ft(O.*H0);
% [o_slice(:,:,Nslice), phi] = Proj_OslicePhi(O0,phi0(:,:,Nslice),psi,psi0(:,:,Nslice),alpha,beta,iters)
psi = Ft(O);
% 2) from Nslice-1 to 2nd slice
for m = Nslice:-1:2
% update o_slice at current slice
[o_slice(:,:,m), phi] = Proj_OslicePhi(o_slice0(:,:,m),phi0(:,:,m),...
psi,psi0(:,:,m),alpha,beta,iters);
H = exp(-1i*k2*dz(m-1));
% propagate next plane
psi = Prop(phi, H);
end
% 3) the 1st slice also is different
o_slice(:,:,1) = psi./i0;
% dpsi = psi-psi0;
%o_slice(:,:,1) = o_slice(:,:,1) ...
% +abs(i0).*conj(i0)./(abs(i0).^2+alpha).*(psi-psi0(:,:,1))/max(abs(i0(:)));
% o_slice(:,:,1) = o_slice(:,:,1)+(psi-psi0(:,:,1))./i0/(1+alpha);
end
| MATLAB |
3D | bu-cisl/3D-Fourier-ptychography-on-LED-array-microscope | 3D_code/SystemSetup4x_Multislice.m | .m | 7,713 | 224 | % function [ varargout ] = SystemSetup( varargin )
%SYSTEMSETUP initializes general system parameters for LED array microscope
% Last modified on 4/22/2014
% Lei Tian (lei_tian@berkeley.edu)
% addpath(['..\..\Source_coding']);
% % Define Fourier operators
F = @(x) fftshift(fft2(ifftshift(x)));
Ft = @(x) fftshift(ifft2(ifftshift(x)));
% F = @(x) fftshift(fft2(x));
% Ft = @(x) ifft2(ifftshift(x));
% Define RMSE function operator
RMSE = @(x,x0) sqrt(sum(abs(x(:)-x0(:)).^2)/sum(x0(:).^2));
%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% wavelength of illumination, assume monochromatic
% R: 624.4nm +- 50nm
% G: 518.0nm +- 50nm
% B: 476.4nm +- 50nm
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
lambda = 0.6292;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% numerical aperture of the objective
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
NA = 0.1;
% maximum spatial frequency set by NA
um_m = NA/lambda;
% system resolution based on the NA
dx0 = 1/um_m/2;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% magnification of the system,
% need to calibrate with calibration slides
% on 2x objective, front port with an extra 2x
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
mag = 8.0883;
dpix_c = 6.5; %6.5um pixel size on the sensor plane
% effective image pixel size on the object plane
dpix_m = dpix_c/mag;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% # of pixels at the output image patch
% each patch will assign a single k-vector, the image patch size cannot be
% too large to keep the single-k assumption holds
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Np = 100;
% FoV in the object space
FoV = Np*dpix_m;
% sampling size at Fourier plane set by the image size (FoV)
% sampling size at Fourier plane is always = 1/FoV
if mod(Np,2) == 1
du = 1/dpix_m/(Np-1);
else
du = 1/FoV;
end
% low-pass filter diameter set by the NA = bandwidth of a single measurement
% in index
% N_NA = round(2*um_m/du_m);
% generate cutoff window by NA
m = 1:Np;
[mm,nn] = meshgrid(m-round((Np+1)/2));
ridx = sqrt(mm.^2+nn.^2);
um_idx = um_m/du;
% assume a circular pupil function, lpf due to finite NA
w_NA = double(ridx<um_idx);
% h = fspecial('gaussian',10,5);
% w_NA = imfilter(w_NA,h);
% support of OTF is 2x of ATF(NA)
Ps_otf = double(ridx<2*um_idx);
phC = ones(Np);
% phC(ridx<0.8*um_idx&ridx>0.7*um_idx) = 0.5;
% aberration modeled by a phase function
aberration = ones(Np);
% aberration = exp(pi/2*1i*(exp(-(mm-20).^2/50^2-(nn+40).^2/150^2))-...
% pi/8*1i*(exp(-(mm+40).^2/100^2-(nn-80).^2/80^2))+...
% pi/3*1i*(exp(-(mm).^2/60^2-(nn-10).^2/30^2)));
%aberration = ones(N_m);
pupil = w_NA.*phC.*aberration;
clear m mm nn
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% set up image coordinates
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% original image size: 2160x2560
% can calibrate the center of the illumination with respect to the image by
% looking at the data from the dark/bright field image transitions
ncent = [1080,1280];
% nstart = ncent-Np/2;
% start pixel of the image patch
% nstart = [981,1181];
% center, start & end of the image patch
img_center = (nstart-ncent+Np/2)*dpix_m;
img_start = nstart*dpix_m;
img_end = (nstart+Np)*dpix_m;
%% LED array geometries and derived quantities
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% spacing between neighboring LEDs
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
ds_led = 4e3; %4mm
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% distance from the LED to the object
% experimentally determined by placing a grating object
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
z_led = 68e3;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% diameter of # of LEDs used in the experiment
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
dia_led = 19;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% set up LED coordinates
% h: horizontal, v: vertical
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
lit_cenv = 13;
lit_cenh = 13;
vled = [0:31]-lit_cenv;
hled = [0:31]-lit_cenh;
[hhled,vvled] = meshgrid(hled,vled);
rrled = sqrt(hhled.^2+vvled.^2);
LitCoord = rrled<dia_led/2;
% total number of LEDs used in the experiment
Nled = sum(LitCoord(:));
% index of LEDs used in the experiment
Litidx = find(LitCoord);
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% need to flip x&y(spatial frequency u&v) based on expt geometry
% also with minus (-) on both v & h based on scanning order
% also need to know the relative orientation between led array and the
% image, otherwise get garbage near the edge of the FoV
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% corresponding angles for each LEDs
dd = sqrt((-hhled*ds_led-img_center(1)).^2+(-vvled*ds_led-img_center(2)).^2+z_led.^2);
sin_thetav = (-hhled*ds_led-img_center(1))./dd;
sin_thetah = (-vvled*ds_led-img_center(2))./dd;
Tanv = -(-hhled*ds_led)/z_led;
Tanh = -(-vvled*ds_led)/z_led;
Tanh_lit = Tanh(LitCoord);
Tanv_lit = Tanv(LitCoord);
illumination_na = sqrt(sin_thetav.^2+sin_thetah.^2);
% corresponding spatial freq for each LEDs
%
vled = sin_thetav/lambda;
uled = sin_thetah/lambda;
% spatial freq index for each plane wave relative to the center
idx_u = round(uled/du);
idx_v = round(vled/du);
illumination_na_used = illumination_na(LitCoord);
% number of brightfield image
NBF = sum(illumination_na_used<NA);
idx_BF = find(illumination_na_used<NA);
% maximum spatial frequency achievable based on the maximum illumination
% angle from the LED array and NA of the objective
um_p = max(illumination_na_used)/lambda+um_m;
% resolution achieved after freq post-processing
dx0_p = 1/um_p/2;
NA_s = um_p*lambda;
disp(['synthetic NA is ',num2str(NA_s)]);
% assume the max spatial freq of the original object
% um_obj>um_p
% assume the # of pixels of the original object
% upsample by 2 for intensity
N_obj = round(um_p/du)*2*2;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% need to enforce N_obj/Np = integer to ensure no FT artifacts
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
N_obj = ceil(N_obj/Np)*Np;
% max spatial freq of the original object
um_obj = du*N_obj/2;
% sampling size of the object (=pixel size of the test image)
dx_obj = 1/um_obj/2;
% spatial coordiates
[x,y] = meshgrid([-N_obj/2:N_obj/2-1]*dx_obj);
[xn,yn] = meshgrid(-1/2:1/N_obj:1/2-1/N_obj);
%% spatial frequency coordinates
[u,v] = meshgrid(-um_obj:du:um_obj-du);
%% compute depth sectioning capability
DOF0 = lambda/NA^2/2;
DOFs = lambda/NA_s^2/2;
%% resolution
res0 = lambda/NA/2;
res_s = lambda/NA_s/2;
%% define propagation transfer function
% Fresnel
H = exp(1i*2*pi/lambda*dz)*exp(-1i*pi*lambda*dz*(u.^2+v.^2));
% angular spectrum
% H = exp(1i*2*pi*sqrt(1/lambda^2-u.^2-v.^2)*dz);
% mask_evanescent = double(sqrt(u.^2+v.^2)<1/lambda);
% H = H.*mask_evanescent;
H0 = exp(1i*2*pi/lambda*z0)*exp(-1i*pi*lambda*z0*(u.^2+v.^2));
% H0 = exp(1i*2*pi*sqrt(1/lambda^2-u.^2-v.^2)*z0).*mask_evanescent;
| MATLAB |
3D | bu-cisl/3D-Fourier-ptychography-on-LED-array-microscope | 3D_code/SystemSetup4x.m | .m | 6,735 | 190 | % function [ varargout ] = SystemSetup( varargin )
%SYSTEMSETUP initializes general system parameters for LED array microscope
% Last modified on 4/22/2014
% Lei Tian (lei_tian@berkeley.edu)
% addpath(['..\..\Source_coding']);
% % Define Fourier operators
F = @(x) fftshift(fft2(ifftshift(x)));
Ft = @(x) fftshift(ifft2(ifftshift(x)));
% F = @(x) fftshift(fft2(x));
% Ft = @(x) ifft2(ifftshift(x));
% Define RMSE function operator
RMSE = @(x,x0) sqrt(sum(abs(x(:)-x0(:)).^2)/sum(x0(:).^2));
%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% wavelength of illumination, assume monochromatic
% R: 624.4nm +- 50nm
% G: 518.0nm +- 50nm
% B: 476.4nm +- 50nm
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
lambda = 0.6292;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% numerical aperture of the objective
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
NA = 0.1;
% maximum spatial frequency set by NA
um_m = NA/lambda;
% system resolution based on the NA
dx0 = 1/um_m/2;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% magnification of the system,
% need to calibrate with calibration slides
% on 2x objective, front port with an extra 2x
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
mag = 8.0883;
dpix_c = 6.5; %6.5um pixel size on the sensor plane
% effective image pixel size on the object plane
dpix_m = dpix_c/mag;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% # of pixels at the output image patch
% each patch will assign a single k-vector, the image patch size cannot be
% too large to keep the single-k assumption holds
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Np = 100;
% FoV in the object space
FoV = Np*dpix_m;
% sampling size at Fourier plane set by the image size (FoV)
% sampling size at Fourier plane is always = 1/FoV
if mod(Np,2) == 1
du = 1/dpix_m/(Np-1);
else
du = 1/FoV;
end
% low-pass filter diameter set by the NA = bandwidth of a single measurement
% in index
% N_NA = round(2*um_m/du_m);
% generate cutoff window by NA
m = 1:Np;
[mm,nn] = meshgrid(m-round((Np+1)/2));
ridx = sqrt(mm.^2+nn.^2);
um_idx = um_m/du;
% assume a circular pupil function, lpf due to finite NA
w_NA = double(ridx<um_idx);
% h = fspecial('gaussian',10,5);
% w_NA = imfilter(w_NA,h);
% support of OTF is 2x of ATF(NA)
Ps_otf = double(ridx<2*um_idx);
phC = ones(Np);
% phC(ridx<0.8*um_idx&ridx>0.7*um_idx) = 0.5;
% aberration modeled by a phase function
aberration = ones(Np);
% aberration = exp(pi/2*1i*(exp(-(mm-20).^2/50^2-(nn+40).^2/150^2))-...
% pi/8*1i*(exp(-(mm+40).^2/100^2-(nn-80).^2/80^2))+...
% pi/3*1i*(exp(-(mm).^2/60^2-(nn-10).^2/30^2)));
%aberration = ones(N_m);
pupil = w_NA.*phC.*aberration;
clear m mm nn
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% set up image coordinates
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% original image size: 2160x2560
% can calibrate the center of the illumination with respect to the image by
% looking at the data from the dark/bright field image transitions
ncent = [1080,1280];
% start pixel of the image patch
% nstart = [981,1181];
% center, start & end of the image patch
img_center = (nstart-ncent+Np/2)*dpix_m;
img_start = nstart*dpix_m;
img_end = (nstart+Np)*dpix_m;
%% LED array geometries and derived quantities
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% spacing between neighboring LEDs
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
ds_led = 4e3; %4mm
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% distance from the LED to the object
% experimentally determined by placing a grating object
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
z_led = 70e3;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% diameter of # of LEDs used in the experiment
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
dia_led = 19;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% set up LED coordinates
% h: horizontal, v: vertical
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
lit_cenv = 13;
lit_cenh = 13;
vled = [0:31]-lit_cenv;
hled = [0:31]-lit_cenh;
[hhled,vvled] = meshgrid(hled,vled);
rrled = sqrt(hhled.^2+vvled.^2);
LitCoord = rrled<dia_led/2;
% total number of LEDs used in the experiment
Nled = sum(LitCoord(:));
% index of LEDs used in the experiment
Litidx = find(LitCoord);
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% need to flip x&y(spatial frequency u&v) based on expt geometry
% also with minus (-) on both v & h based on scanning order
% also need to know the relative orientation between led array and the
% image, otherwise get garbge near the edge of the FoV
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% corresponding angles for each LEDs
dd = sqrt((-hhled*ds_led-img_center(1)).^2+(-vvled*ds_led-img_center(2)).^2+z_led.^2);
sin_thetav = (-hhled*ds_led-img_center(1))./dd;
sin_thetah = (-vvled*ds_led-img_center(2))./dd;
illumination_na = sqrt(sin_thetav.^2+sin_thetah.^2);
% corresponding spatial freq for each LEDs
%
vled = sin_thetav/lambda;
uled = sin_thetah/lambda;
% spatial freq index for each plane wave relative to the center
idx_u = round(uled/du);
idx_v = round(vled/du);
illumination_na_used = illumination_na(LitCoord);
% number of brightfield image
NBF = sum(illumination_na_used<NA);
% maxium spatial frequency achievable based on the maximum illumination
% angle from the LED array and NA of the objective
um_p = max(illumination_na_used)/lambda+um_m;
% resolution achieved after freq post-processing
dx0_p = 1/um_p/2;
disp(['synthetic NA is ',num2str(um_p*lambda)]);
% assume the max spatial freq of the original object
% um_obj>um_p
% assume the # of pixels of the original object
N_obj = round(um_p/du)*2*3;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% need to enforce N_obj/Np = integer to ensure no FT artifacts
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
N_obj = ceil(N_obj/Np)*Np;
% max spatial freq of the original object
um_obj = du*N_obj/2;
% sampling size of the object (=pixel size of the test image)
dx_obj = 1/um_obj/2;
% end
| MATLAB |
3D | bu-cisl/3D-Fourier-ptychography-on-LED-array-microscope | 3D_code/MultiSlice_SuperRes_10x_v9.m | .m | 24,179 | 566 | % By Lei Tian, lei_tian@berkeley.edu
% last modified 5/27/2014
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% NOTE(review): 'clear all' wipes the whole workspace -- this script is
% intended to be run standalone from a fresh session.
clear all; clc; close all;
%addpath(['C:\Users\Lei\Dropbox\Berkeley\LEDArray\MatlabCodes\Coded_Illumination\Source_coding']);
% % Define Fourier operators (centered 2D FFT pair)
F = @(x) fftshift(fft2(ifftshift(x)));
Ft = @(x) fftshift(ifft2(ifftshift(x)));
% F = @(x) fftshift(fft2(x));
% Ft = @(x) ifft2(ifftshift(x));
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% inverse problem use alternating projection
% 2/28/2014
% experiments, 4/1/2014
% account for geometry WITHOUT condenser, 3/22/2014
% By Lei Tian, lei_tian@alum.mit.edu
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% 1 LED expt
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% number of LEDs lit per acquired image (single-LED sequential scan)
numlit = 1;
% NOTE(review): hardcoded absolute Windows path -- edit to your data location
filedir = ['G:\Project_Backup\LED_Array_Microscopy\Expt\NoCondenser\NewArray\3D-2\Spiral2\10x_3\'];
imglist = dir([filedir,'ILED*.tif']);
% out_dir = ['.\Res',num2str(numlit),'LED-Result'];
% mkdir(out_dir);
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% define the current processing patch starting coordinates
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% processed patch size (pixels)
Np = 1024;
% original image size
n1 = 2160;
n2 = 2560;
% ns1 = 1:Np-Np/10:2160; ns1 = ns1(1:end-1);
% ns2 = 11:Np-Np/10:2560; ns2 = ns2(1:end-1);
% [ns2,ns1] = meshgrid(ns2,ns1);
nstart = [1,1];
%%
close all;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% parameters for multi-slice model
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% vector to define distance between slices, unit: um
dz = [20*ones(1,5)];
% dz = [dz0(ll),40*ones(1,1)];
% distance from the Nth slice to the focal plane
z0 = 10;
% number of z-slices
Nslice = length(dz)+1;
% z vector: slice positions relative to the focal plane (negative = before)
% z = (-z0-[Nslice-1:-1:0]*dz);
z = fliplr([-z0,-z0-(cumsum(fliplr(dz)))]);
% m0: index of the slice closest to the focal plane
m0 = find(abs(z)==min(abs(z)));
m0 = m0(1);
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% preprocess the data by shift and add
% (light-field-style refocusing: each brightfield image is shifted by
% z*tan(theta) for its LED angle and then averaged, giving a refocused
% intensity stack Ibf used later as the multi-slice initial guess)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% defines dpix_m, idx_BF, Tanh_lit, Tanv_lit, etc. (external setup script)
SystemSetupV2Array10x_Multislice_v4();
% I0 = zeros(2160,2560,Nslice);
if Np<n1
    % patch smaller than full frame: refocus on the full frame
    Ibf = zeros(n1,n2,Nslice);
    for m = 1:length(z)
        for n = 1:NBF % BF only
            nn = idx_BF(n);
            fn = [filedir,imglist(nn).name];
            tp = double(imread(fn));
            % lateral shift of this LED's image when refocused to depth z(m)
            % (Tanh_lit/Tanv_lit presumably tan of illumination angles --
            % defined in the setup script, not visible here)
            shift_x = z(m) * Tanh_lit(nn);
            shift_y = z(m) * Tanv_lit(nn);
            shift_xp = round(shift_x/dpix_m);
            shift_yp = round(shift_y/dpix_m);
            % shift
            tp = circshift(tp,[shift_yp,shift_xp]);
            Ibf(:,:,m) = Ibf(:,:,m)+tp;
        end
    end
    Ibf = (Ibf/NBF);
    % Ibf = (Ibf/NBF).^(1/Nslice);
    % display range from the slice nearest the focal plane
    a = min(min(Ibf(:,:,m0)));
    b = max(max(Ibf(:,:,m0)));
    % for m = 1:Nslice
    % figure; imagesc(Ibf(:,:,m),[a,b]); axis image; colormap gray;
    % title(['z=',num2str(z(m))])
    % end
else
    % patch larger than full frame: pad each image to Np x Np with its
    % background level before the shift-and-add
    Ibf = zeros(Np,Np,Nslice);
    for m = 1:length(z)
        for n = 1:NBF % BF only
            nn = idx_BF(n);
            fn = [filedir,imglist(nn).name];
            tp = double(imread(fn));
            % estimate background from two empty regions, take the darker
            bk1 = mean2(tp(100:200,400:600));
            bk2 = mean2(tp(1800:1900,500:600));
            % bk3 = mean2(double(I(650:700,1100:1500)));
            bk = min([bk1,bk2]);
            % Inorm(:,:,m) = Imea(:,:,m)/ILEDMean40x(m);
            tp = padarray(tp,[(Np-n1)/2,(Np-n2)/2],bk);
            shift_x = z(m) * Tanh_lit(nn);
            shift_y = z(m) * Tanv_lit(nn);
            shift_xp = round(shift_x/dpix_m);
            shift_yp = round(shift_y/dpix_m);
            % shift
            tp = circshift(tp,[shift_yp,shift_xp]);
            Ibf(:,:,m) = Ibf(:,:,m)+tp;
        end
    end
    Ibf = (Ibf/NBF);
    % Ibf = (Ibf/NBF).^(1/Nslice);
    a = min(min(Ibf(:,:,m0)));
    b = max(max(Ibf(:,:,m0)));
    for m = 1:Nslice
        figure; imagesc(Ibf(:,:,m),[a,b]); axis image; colormap gray;
        title(['z=',num2str(z(m))])
    end
end
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% clear up some memory space
clear tp u u0 v v0 yn y xn x phC pupil ridx aberration dd hhled vvled
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%
% for l = 1%1:length(ns1(:))
% top-left corner of the patch actually reconstructed below
nstart = [700,500];
% nstart = [1600,800]; % check z0
% nstart = [800,1320]; % check z0
% fn = [filedir,'Iled_0147.tif'];
% I = imread(fn);
if Np<n1
    % preview the selected patch of the refocused stack (slice 2)
    figure(30); imagesc(Ibf(nstart(1):nstart(1)+Np-1,nstart(2):nstart(2)+Np-1,2));
    axis off; axis image; colormap gray; drawnow;
end
% setup output folder for each patch
out_dir = ['.\Res-patch-',num2str(nstart(1)),'-',num2str(nstart(2)),'-',...
    num2str(numlit),'LED-Result'];
mkdir(out_dir);
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% read in general system parameters
% (re-run because nstart changed; the setup script derives patch-dependent
% quantities such as img_center from nstart)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
SystemSetupV2Array10x_Multislice_v4();
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% clear up some memory space
clear tp u u0 v v0 yn y xn x phC pupil ridx aberration dd hhled vvled
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% load in data
% LED intensity normalization from the calibration data
% load('..\Intensity-LED-calibrate\ILEDMean40x');
% Load the raw single-LED intensity images, crop (or pad) to the patch and
% record a per-image background level Ibk estimated from empty regions.
Nimg = Nled;
Imea = zeros(Np,Np,Nimg);
Ibk = zeros(Nimg,1);
for m = 1:Nimg
    fn = [filedir,imglist(m).name];
    I = double(imread(fn));
    % background from two presumed-empty corners; keep the darker estimate
    bk1 = mean2(I(200:400,200:400));
    bk2 = mean2(I(1800:2000,2000:2200));
    % bk3 = mean2(double(I(650:700,1100:1500)));
    Ibk(m) = min([bk1,bk2]);
    % Inorm(:,:,m) = Imea(:,:,m)/ILEDMean40x(m);
    if Np<n1
        Imea(:,:,m) = I(nstart(1):nstart(1)+Np-1,nstart(2):nstart(2)+Np-1);
    else
        Imea(:,:,m) = padarray(I,[(Np-n1)/2,(Np-n2)/2],Ibk(m));
    end
    % outlier background (e.g. bright-field leakage): reuse previous frame's
    % NOTE(review): errors if m==1 exceeds the threshold -- assumes the
    % first frame always has a sane background
    if Ibk(m)>600
        Ibk(m) = Ibk(m-1);
    end
    % Ibk(m) = 100;
end
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% clear up some memory space
clear I
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%
% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% % pre-processing the data to DENOISING is IMPORTANT
% % Denoise I. remove high freq noise beyond support of OTF
% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Ilpf = zeros(Np,Np,Nimg);
% for m = 1:Nimg
% % filter out the high freq noise
% Ilpf(:,:,m) = Ft(F(Imea(:,:,m)).*Ps_otf);
% m
% end
%
% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% % clear up some memory space
% clear Imea I
% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% corresponding LED locations
% find the on-LED indices
ledidx = 1:Nled;
ledidx = reshape(ledidx,numlit,Nimg);
% lit: linear 32x32-array index of the LED(s) on for each image
lit = Litidx(ledidx);
lit = reshape(lit,numlit,Nimg);
[litv,lith] = ind2sub([32,32],lit);
% find the index to reorder the measurements so that the image contains the
% central LEDs will be used first during the updates
% (litv/lith are 1-based subscripts, hence the -1 when comparing with the
% 0-based array center lit_cenv/lit_cenh)
dis_lit = sqrt((litv-lit_cenv-1).^2+(lith-lit_cenh-1).^2);
[dis_lit2,idx_led] = sort(min(dis_lit,[],1));
% per-image spatial-frequency shift indices (pixels in Fourier space)
Nsh_lit = zeros(numlit,Nimg);
Nsv_lit = zeros(numlit,Nimg);
for m = 1:Nimg
    % should make sure it always covers all the leds
    % index of LEDs are lit for each pattern
    %lit = condenseridx(ceil(rand(numlit,1)*Nled));
    % corresponding index of spatial freq for the LEDs are lit
    lit0 = lit(:,m);
    Nsh_lit(:,m) = idx_u(lit0);
    Nsv_lit(:,m) = idx_v(lit0);
end
% reorder the LED indices and intensity measurements according the previous
% dis_lit
Ns = [];
Ns(:,:,1) = Nsv_lit;
Ns(:,:,2) = Nsh_lit;
% Imea_reorder = Imea(:,:,idx_led);
% Ilpf_reorder = Ilpf(:,:,idx_led);
% Ilpf_reorder = Imea(:,:,idx_led);
% Ithresh_reorder = Imea(:,:,idx_led);
% reorder in place to save memory (Imea is now center-LED-first)
Imea = Imea(:,:,idx_led);
Ibk_reorder = Ibk(idx_led);
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% clear up some memory space
clear Itmp Ibk tp
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% pre-processing the data: DENOISING is IMPORTANT
% Denoise II. background subtraction
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Ithresh_reorder = Ilpf_reorder;
for m = 1:Nimg
    % Itmp = Ithresh_reorder(:,:,m);
    Itmp = Imea(:,:,m);
    % Itmp(Itmp<mean2(Itmp(1:10,:))) = 0;
    % Itmp = Itmp-mean2(Itmp(1:6,:));
    % Itmp(Itmp<0) = 0;
    % Ithresh_reorder(:,:,m) = Itmp;
    % subtract the per-image background estimated during loading
    Itmp = Itmp-Ibk_reorder(m);
    % Ithresh_reorder(:,:,m) = Itmp;
    Imea(:,:,m) = Itmp;
    % Ithresh_reorder(:,:,m) = Itmp-min(Itmp(:));
    % Ithresh_reorder(:,:,m) = Ft(F(Ithresh_reorder(:,:,m)).*Ps_otf);
end
% clip negative residuals left by the subtraction
Imea(Imea<0) = 0;
% for m = NBF+1:Nimg
% % Itmp = Ithresh_reorder(:,:,m);
% Itmp = Imea(:,:,m);
% % Itmp(Itmp<mean2(Itmp(1:10,:))) = 0;
% % Itmp = Itmp-mean2(Itmp(1:6,:));
% % Itmp(Itmp<0) = 0;
% % Ithresh_reorder(:,:,m) = Itmp;
% min(Itmp(:))
% Itmp = Itmp-min(Itmp(:));
% min(Itmp(:))
% % Ithresh_reorder(:,:,m) = Itmp;
% Imea(:,:,m) = Itmp;
% % Ithresh_reorder(:,:,m) = Itmp-min(Itmp(:));
% % Ithresh_reorder(:,:,m) = Ft(F(Ithresh_reorder(:,:,m)).*Ps_otf);
% end
%
% % Ithresh_reorder(Ithresh_reorder<0) = 0;
% Imea(Imea<0) = 0;
% Imea_norm_reorder = Imea_norm(:,:,idx_led);
% reorder the remaining per-LED quantities to match the reordered Imea
Ns_reorder = Ns(:,idx_led,:);
Tanh_reorder = Tanh(idx_led);
Tanv_reorder = Tanv(idx_led);
%% this part checks if the calculation of brightfield and darkfield matches the experiments
illumination_na_reorder = illumination_na_used(idx_led);
% mean intensity per frame (NOTE(review): Imean grows without preallocation)
for m = 1:Nimg
    Imean(m) = mean2(Imea(:,:,m));
end
% crude per-frame signal-to-background ratio, for diagnostics
snr = Imean(:)./Ibk_reorder(:);
%% reconstruction algorithm
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% use only a sub-set number of the measurements to reconstruct
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% d: diameter (in LED-grid units) of the central LED disc to use.
% Nused: precomputed number of LEDs falling inside a disc of diameter d
% (lookup table, since LEDs lie on an integer grid).
d = 9;
switch d
    case 3
        Nused = 9;
    case 5
        Nused = 21;
    case 7
        Nused = 37;
    case 9
        Nused = 69;
    case 11
        Nused = 97;
    case 13
        Nused = 137;
    case 15
        Nused = 177;
    case 17
        Nused = 225;
    case 19
        Nused = 293;
end
% Sanity-check the first Nused frames (already sorted center-LED-first):
% brightfield frames (<=NBF) should be bright (> 2/3 of frame 1's mean),
% darkfield frames should be dim (< 1/4 of frame 1's mean). Frames that
% violate this are flagged and discarded.
if Nused>NBF
    idx_used = [find(Imean(1:NBF)>Imean(1)*2/3),find(Imean(NBF+1:Nused)<Imean(1)/4)+NBF];
    idx_err = [find(Imean(1:NBF)<Imean(1)*2/3),find(Imean(NBF+1:Nused)>Imean(1)/4)+NBF];
else
    idx_used = find(Imean(1:Nused)>Imean(1)*2/3);
    idx_err = [find(Imean(1:Nused)<Imean(1)*2/3)];
end
disp(['problematic frames are ',num2str(idx_err),' and are discarded']);
% keep only the vetted frames and their per-LED metadata
% I = Ithresh_reorder(:,:,idx_used);
I = Imea(:,:,idx_used);
Ns2 = Ns_reorder(:,idx_used,:);
Tanh2 = Tanh_reorder(idx_used);
% BUG FIX: was 'Tanv2 = Tanh_reorder(idx_used);' -- a copy/paste error that
% assigned the horizontal tangent angles to the vertical variable as well.
% Tanv_reorder is computed above and was otherwise never used.
Tanv2 = Tanv_reorder(idx_used);
% reconstruction algorithm options (consumed by AlterMin_MultiSlice_v2)
% tol: stop when the error change between iterations falls below this
opts.tol = 1;
opts.monotone = 1;
% 'full', display every subroutine,
% 'iter', display only results from outer loop
% 0, no display
opts.display = 'iter';
% opts.saveIterResult = 0;
% opts.out_dir = ['.\tmp2'];
% mkdir(opts.out_dir);
% upsample the intensity
% I0interp = real(Ft(padarray(F(I(:,:,1)),[(N_obj-Np)/2,(N_obj-Np)/2])));
% opts.O0 = F(sqrt(I0interp));
% this method does not work for global minimization method
% Ps: pupil support constraint (NA mask from the setup script)
opts.Ps = w_NA;
opts.iters = 1;
% index of led used in the experiment
% opts.ledidx = ledidx(:,idx_led);
% step-size / regularization constants for the object (OP) and pupil (BP)
% update rules
opts.OP_alpha = 1e-2;
opts.OP_beta = 1e-2;
opts.BP_alpha = 1e-8;
opts.BP_beta = 1e-2;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% step 0: initialization
% idea: initialize with lightfield refocused intensities at different
% slices with only bright field data
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% one reasonable (or trial solution) is the last slice = o0 and the rest
% slices = 1:
o_slice0 = zeros(N_obj,N_obj,Nslice);
o_slice = Ibf(nstart(1):nstart(1)+Np-1,nstart(2):nstart(2)+Np-1,:);
for n = 1:Nslice
    % upsample each refocused slice to the N_obj grid via Fourier padding
    O0 = F(sqrt(o_slice(:,:,n)));
    O0 = padarray(O0,[(N_obj-Np)/2,(N_obj-Np)/2]);
    % estimated object field at focal plane
    o_slice0(:,:,n) = abs(Ft(O0));
end
% o_slice0(:,:,Nslice) = o0;
% for l = 1:Nslice
% distribute the total amplitude evenly across the Nslice slices
o_slice0 = o_slice0.^(1/Nslice);
% end
% O0 = F(sqrt(I(:,:,1))).*w_NA;
% O0 = padarray(O0,[(N_obj-Np)/2,(N_obj-Np)/2]);
% % estimated object field at focal plane
% o0 = Ft(O0);
% opts.O0 = o0;
% % define propagation operator, f: input field, h: propagation transfer
% % function
% Prop = @(f,h) Ft(F(f).*h);
% % propagate to the last slice
% o0 = Prop(o0,conj(H0));
% initial guesses: multi-slice object stack and pupil
opts.O0 = o_slice0;
opts.P0 = w_NA;
opts.maxIter = 1;
opts.minIter = 1;
% title(['slice',num2str(m0)],'fontsize',10); axis off;
% Display the amplitude of the initial guess for each slice (up to 9 panels)
if Nslice>1
    f4 = figure(87);
    % choose subplot grid size from the number of slices
    switch Nslice
        case 2
            p = 2; q = 1;
        case {3,4}
            p = 2; q = 2;
        case {5,6}
            p = 3; q = 2;
        otherwise
            p = 3; q = 3;
    end
    for m = 1:min(Nslice,9)
        subplot(p,q,m); imagesc(abs(opts.O0(:,:,m))); axis image; colormap gray;
        h= colorbar; set(h,'fontsize',8); axis off;
        title(['z=',num2str(z(m)),'um'],'fontsize',10);axis off;
    end
end
% title('phase(P)'); axis off;
drawnow;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% clear up some memory space
clear pupil phC o_slice0 o_slice aberration Ps_otf O0
clear Itmp Imea_reorder Ilpf_reorder Ilpf Ibk_reorder
clear Ibk tp
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% start optimization
% O: reconstructed multi-slice object stack, P: recovered pupil,
% err: per-iteration residual (k2/H0 come from the setup script)
[O,P,err] = AlterMin_MultiSlice_v2(I,[N_obj,N_obj], round(Ns2),k2, dz, H0, opts);
f3 = figure(88);
% title(['slice',num2str(m0)],'fontsize',10); axis off;
if Nslice>1
    % multi-slice result: show amplitude of each slice (up to 9 panels)
    switch Nslice
        case 2
            p = 2; q = 1;
        case {3,4}
            p = 2; q = 2;
        case {5,6}
            p = 3; q = 2;
        otherwise
            p = 3; q = 3;
    end
    for m = 1:min(Nslice,9)
        subplot(p,q,m); imagesc(abs(O(:,:,m))); axis image; colormap gray;
        h= colorbar; set(h,'fontsize',8); axis off;
        title(['z=',num2str(z(m)),'um'],'fontsize',10);axis off;
    end
else
    % single-slice result: amplitude, phase, and pupil amplitude/phase
    subplot(221); imagesc(abs(O(:,:,Nslice))); axis image; colormap gray;
    h= colorbar; set(h,'fontsize',8); axis off;
    subplot(222); imagesc(angle(O(:,:,Nslice))); axis image; colormap gray; colorbar;
    h= colorbar; set(h,'fontsize',8); axis off;
    % title('phase(o)','fontsize',10); axis off;
    subplot(223); imagesc(abs(P)); axis image; colormap gray; colorbar;
    h= colorbar; set(h,'fontsize',8); axis off;
    % title('ampl(P)'); axis off;
    subplot(224); imagesc(angle(P).*abs(P)); axis image; colorbar;
    h= colorbar; set(h,'fontsize',8); axis off;
end
% title('phase(P)'); axis off;
drawnow;
% f4 = figure(79); plot(c(1:Nused));
% title('adaptive intensity correction factor');
% file stem encodes patch size, #frames, #slices and z-geometry
fn = ['3D-',num2str(Np),'-',num2str(Nused),'-ns-',num2str(Nslice),'-dz-',num2str(dz),'-z0-',num2str(z0)];
saveas(f3,[out_dir,'\R-',fn,'.png']);
if Nslice>1
    saveas(f4,[out_dir,'\LF-',fn,'.png']);
end
save([out_dir,'\',fn],'O','P','err');
% saveas(f2,[out_dir,'\err-',fn,'.png']);
fprintf([fn,' saved\n']);
%% equalizing background
% Equalize each slice's background level to a reference patch (slice 3 for
% amplitude, slice 1 for phase) and export cropped images via export_fig.
bk0 = mean2(abs(O(770:860,560:690,3)));
% display range for amplitude images
mb = 3;
ma = 0;
% border crop (pixels) applied to every exported slice
s0 = 50;
for m = 1:Nslice
    oo = abs(O(s0+1:end-s0,s0+1:end-s0,m));
    % background of this slice in the (crop-shifted) reference patch
    bkt = mean2(abs(oo(720:810,510:640)));
    oo = oo-(bkt-bk0);
    % custom grayscale colormap (generated by MATLAB's figure export)
    f1 = figure('Colormap',...
[0 0 0;0.0011834615143016 0.0011834615143016 0.0011834615143016;0.0023669230286032 0.0023669230286032 0.0023669230286032;0.00355038465932012 0.00355038465932012 0.00355038465932012;0.00473384605720639 0.00473384605720639 0.00473384605720639;0.00591730745509267 0.00591730745509267 0.00591730745509267;0.00710076931864023 0.00710076931864023 0.00710076931864023;0.00828423071652651 0.00828423071652651 0.00828423071652651;0.00946769211441278 0.00946769211441278 0.00946769211441278;0.0106511535122991 0.0106511535122991 0.0106511535122991;0.0118346149101853 0.0118346149101853 0.0118346149101853;0.0130180763080716 0.0130180763080716 0.0130180763080716;0.0142015386372805 0.0142015386372805 0.0142015386372805;0.0153850000351667 0.0153850000351667 0.0153850000351667;0.016568461433053 0.016568461433053 0.016568461433053;0.0177519228309393 0.0177519228309393 0.0177519228309393;0.0189353842288256 0.0189353842288256 0.0189353842288256;0.0476649329066277 0.0476649329066277 0.0476649329066277;0.0763944834470749 0.0763944834470749 0.0763944834470749;0.105124033987522 0.105124033987522 0.105124033987522;0.133853584527969 0.133853584527969 0.133853584527969;0.162583127617836 0.162583127617836 0.162583127617836;0.191312685608864 0.191312685608864 0.191312685608864;0.22004222869873 0.22004222869873 0.22004222869873;0.248771786689758 0.248771786689758 0.248771786689758;0.277501344680786 0.277501344680786 0.277501344680786;0.306230872869492 0.306230872869492 0.306230872869492;0.334960430860519 0.334960430860519 0.334960430860519;0.363689988851547 0.363689988851547 0.363689988851547;0.392419517040253 0.392419517040253 0.392419517040253;0.421149075031281 0.421149075031281 0.421149075031281;0.449878633022308 0.449878633022308 0.449878633022308;0.478608191013336 0.478608191013336 0.478608191013336;0.507337749004364 0.507337749004364 0.507337749004364;0.536067306995392 0.536067306995392 0.536067306995392;0.564796805381775 0.564796805381775 0.564796805381775;0.593526363372803 0.593526363372803 
0.593526363372803;0.622255921363831 0.622255921363831 0.622255921363831;0.650985479354858 0.650985479354858 0.650985479354858;0.679715037345886 0.679715037345886 0.679715037345886;0.708444595336914 0.708444595336914 0.708444595336914;0.737174153327942 0.737174153327942 0.737174153327942;0.765903651714325 0.765903651714325 0.765903651714325;0.794633209705353 0.794633209705353 0.794633209705353;0.823362767696381 0.823362767696381 0.823362767696381;0.852092325687408 0.852092325687408 0.852092325687408;0.874098122119904 0.874098122119904 0.874098122119904;0.896103858947754 0.896103858947754 0.896103858947754;0.918109655380249 0.918109655380249 0.918109655380249;0.940115451812744 0.940115451812744 0.940115451812744;0.962121188640594 0.962121188640594 0.962121188640594;0.98412698507309 0.98412698507309 0.98412698507309;0.985449731349945 0.985449731349945 0.985449731349945;0.986772477626801 0.986772477626801 0.986772477626801;0.988095223903656 0.988095223903656 0.988095223903656;0.989417970180511 0.989417970180511 0.989417970180511;0.990740716457367 0.990740716457367 0.990740716457367;0.992063522338867 0.992063522338867 0.992063522338867;0.993386268615723 0.993386268615723 0.993386268615723;0.994709014892578 0.994709014892578 0.994709014892578;0.996031761169434 0.996031761169434 0.996031761169434;0.997354507446289 0.997354507446289 0.997354507446289;0.998677253723145 0.998677253723145 0.998677253723145;1 1 1]);
    imagesc(oo,[ma,mb]); axis image; %colormap gray
    axis off;
    fn = ['ampl_z=',num2str(z(m)),'um.tif'];
    % export_fig is a third-party figure-export utility (File Exchange)
    export_fig(f1, fn, '-m2');
    % title(['z=',num2str(z(m)),'um'],'fontsize',10);axis off;
end
%%
% same equalization for the phase images, referenced to slice 1
bk0 = mean2(angle(O(770:860,560:690,1)));
% cm = ColormapRGW( -.6, 6 ,1);
for m = 1:Nslice
    oo = angle(O(s0+1:end-s0,s0+1:end-s0,m));
    bkt = mean2(oo(720:810,510:640));
    oo = oo-(bkt-bk0);
    % green-white-red diverging colormap for phase display
    f1 = figure('Colormap',...
[0 0.498039215803146 0;0.00230654748156667 0.499197006225586 0.00230654748156667;0.00461309496313334 0.500354826450348 0.00461309496313334;0.0069196424447 0.501512587070465 0.0069196424447;0.00922618992626667 0.502670407295227 0.00922618992626667;0.0115327378734946 0.503828227519989 0.0115327378734946;0.0138392848894 0.504985988140106 0.0138392848894;0.016145832836628 0.506143808364868 0.016145832836628;0.0184523798525333 0.507301568984985 0.0184523798525333;0.0207589287310839 0.508459389209747 0.0207589287310839;0.0230654757469893 0.509617209434509 0.0230654757469893;0.0253720227628946 0.510774970054626 0.0253720227628946;0.0276785697788 0.511932790279388 0.0276785697788;0.0299851186573505 0.513090550899506 0.0299851186573505;0.0322916656732559 0.514248371124268 0.0322916656732559;0.0908203125 0.543627440929413 0.0908203125;0.149348959326744 0.573006510734558 0.149348959326744;0.207877606153488 0.602385640144348 0.207877606153488;0.266406238079071 0.631764709949493 0.266406238079071;0.324934899806976 0.661143779754639 0.324934899806976;0.383463531732559 0.690522909164429 0.383463531732559;0.441992193460464 0.719901978969574 0.441992193460464;0.500520825386047 0.749281048774719 0.500520825386047;0.559049487113953 0.778660118579865 0.559049487113953;0.617578148841858 0.80803918838501 0.617578148841858;0.676106750965118 0.8374183177948 0.676106750965118;0.734635412693024 0.866797387599945 0.734635412693024;0.793164074420929 0.89617645740509 0.793164074420929;0.851692736148834 0.92555558681488 0.851692736148834;0.910221338272095 0.954934656620026 0.910221338272095;0.96875 0.984313726425171 0.96875;0.984375 0.992156863212585 0.984375;1 1 1;1 0.983870983123779 0.983870983123779;1 0.967741906642914 0.967741906642914;1 0.912778854370117 0.912778854370117;1 0.857815861701965 0.857815861701965;1 0.802852809429169 0.802852809429169;1 0.747889816761017 0.747889816761017;1 0.69292676448822 0.69292676448822;1 0.637963712215424 0.637963712215424;1 0.583000719547272 
0.583000719547272;1 0.528037667274475 0.528037667274475;1 0.473074644804001 0.473074644804001;1 0.418111622333527 0.418111622333527;1 0.36314857006073 0.36314857006073;1 0.308185547590256 0.308185547590256;1 0.253222525119781 0.253222525119781;1 0.198259502649307 0.198259502649307;1 0.143296465277672 0.143296465277672;1 0.088333435356617 0.088333435356617;1 0.0333704091608524 0.0333704091608524;1 0.0305895414203405 0.0305895414203405;1 0.0278086736798286 0.0278086736798286;1 0.0250278078019619 0.0250278078019619;1 0.02224694006145 0.02224694006145;1 0.0194660723209381 0.0194660723209381;1 0.0166852045804262 0.0166852045804262;1 0.0139043368399143 0.0139043368399143;1 0.011123470030725 0.011123470030725;1 0.00834260229021311 0.00834260229021311;1 0.0055617350153625 0.0055617350153625;1 0.00278086750768125 0.00278086750768125;1 0 0]);
    imagesc(oo,[-.6,.6]); axis image; axis off
    fn = ['ph_z=',num2str(z(m)),'um.tif'];
    export_fig(f1, fn, '-m2');
    % title(['z=',num2str(z(m)),'um'],'fontsize',10);axis off;
end
% %% light field refocusing
% bk0 = mean2(abs(opts.O0(770:860,560:690,1)).^2);
% ma = min(min(abs(opts.O0(s0+1:end-s0,s0+1:end-s0,1)).^2));
% mb = max(max(abs(opts.O0(s0+1:end-s0,s0+1:end-s0,1)).^2));
% for m = 1:Nslice
% oo = abs(opts.O0(s0+1:end-s0,s0+1:end-s0,m)).^2;
% bkt = mean2(oo(720:810,510:640));
% oo = oo-(bkt-bk0);
% f1 = figure; imagesc(oo,[ma,mb]); axis image; colormap gray; axis off;
% fn = ['lf_z=',num2str(z(m)),'um.tif'];
% export_fig(f1, fn, '-m2');
%
% % title(['z=',num2str(z(m)),'um'],'fontsize',10);axis off;
% end
%
% for m = 1:Nslice
% f1 = figure; imagesc(Ibf(:,:,m)); axis image; colormap gray; axis off;
% fn = ['lf-bf_z=',num2str(z(m)),'um'];
% export_fig(f1, fn, '-m1');
%
% % title(['z=',num2str(z(m)),'um'],'fontsize',10);axis off;
% end
% %% physical actual focus
% fdir = ['G:\Project_Backup\LED_Array_Microscopy\Expt\NoCondenser\NewArray\3D-2\Spiral2\10x_m3\'];
% ilist = dir([fdir,'*.tif']);
% Nc = Np-2*s0;
% nstartc = nstart+s0;
% for n = 1:length(ilist) % BF only
% fn = [fdir,ilist(n).name];
% Iz = double(imread(fn));
% Iz = Iz(nstartc(1):nstartc(1)+Nc-1,nstartc(2):nstartc(2)+Nc-1);
% if n ==1
% bk0 = mean2(Iz(720:810,510:640));
% ma = 0;
% mb = max(Iz(:));
% end
% bkt = mean2(Iz(720:810,510:640));
% Iz = Iz-(bkt-bk0);
% f1 = figure; imagesc(Iz,[ma,mb]); axis image; colormap gray; axis off;
% fn = ['mf_z=',ilist(n).name];
% export_fig(f1, fn, '-m2');
% end
| MATLAB |
3D | bu-cisl/3D-Fourier-ptychography-on-LED-array-microscope | 3D_code/AlterMin_MultiSlice_v2.m | .m | 15,993 | 450 | function [o_slice, P, err] = AlterMin_MultiSlice_v2( I, No, Ns, k2, dz, H0, opts)
%AlterMinGlobal_Adaptive Implement alternative minimization sequentially on a stack of
%measurement I (n1 x n2 x nz). It consists of 2 loop. The main loop update
%the reconstruction results r. the inner loop applies projectors/minimizers
%P1 and P2 on each image I and steps through the entire dataset
% Outputs:
% r: reconsturcted high-res image
% err: errors at each iteration
%
% Inputs:
% Measurements data
% I: intensity measurements by different LEDs
% du: sampling pixel size in spatial freq domain
% um: Max spatial freq of I set by NA
% Reconstruction parameters
% No = [Ny_obj,Nx_obj]: size of the reconstructed image
% k2 = pi*lambda*(u^2+v^2), used in calculating Fresnel propagation
% dz: % vector to define distance between slices, unit: um
% z0: % distance from the Nth slice to the focal plane
%
% Illumination coding parameters
% Ns = [Nsy,Nsx]: centers of corresponding lpf regions for
% the illumination pattern
% Iteration parameters: opts
% tol: maximum change of error allowed in two consecutive iterations
% maxIter: maximum iterations allowed
% minIter: minimum iterations
% monotone (1, default): if monotone, error has to monotonically dropping
% when iters>minIter
% display: display results (0: no (default) 1: yes)
% saveIterResult: save results at each step as images (0: no (default) 1: yes)
% R0: initial guess of R
%
%% reconstruction algorithm: partial coherence effect in both spatial and
% Fourier domain
% spatial updating method:
% ref [1] C. Rydberg, J. Bengtsson, Opt. Express, 2007
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% the idea of the inverse algorithm is that each point on LED is a coherent
% mode. The total intensity is the sum of the intensities due to all the
% modes.
% although we do not know the actual intensity for each modes, as show in
% [1], an update rule for amplitude can be found by properly scale the
% estimated amplitude for each modes. Again, the phase part is left
% unchanged.
%
% within each LED, different sub-images' spectrum are replaced sequentially
% last modified 08/19/2013
%
% within each LED, weighted average of the spectrum overlapping patches of
% all the sub-images
% last modified 08/19/2013
%
% last modified 3/1/2014
%
% implementing the adaptive routine described in
% Z. Bian, S. Dong & G. Zheng, OE, 2013
% Last modified on 03/25/2014
% Last modified 4/24/2014
% by Lei Tian, lei_tian@alum.mit.edu
% Last modified on 9/7/2014
% by Lei Tian, lei_tian@alum.mit.edu
%% derived constants
% size of measurement
[Nmy,Nmx,Nimg] = size(I);
Np = [Nmy,Nmx];
% r = # of LEDs lit up in each pattern, # of coherent modes
[r,~,~] = size(Ns);
cen0 = round((No+1)/2);
% % Define Fourier operators
F = @(x) fftshift(fft2(ifftshift(x)));
Ft = @(x) fftshift(ifft2(ifftshift(x)));
% F = @(x) fftshift(fft2(x));
% Ft = @(x) ifft2(ifftshift(x));
% col = @(x) x(:);
% row = @(x) x(:).';
%% options for the method
if nargin<4
% default values
opts.tol = 1;
opts.maxIter = 50;
opts.minIter = 3;
opts.monotone = 1;
opts.display = 0;
opts.saveIterResult = 0;
opts.out_dir = [];
opts.O0 = Ft(sqrt(I(:,:,1)))/r;
opts.O0 = padarray(opts.O0,(No-Np)/2);
opts.P0 = ones(Np);
opts.OP_alpha = 1;
opts.OP_beta = 1;
opts.BP_alpha = 1;
opts.BP_beta = 1;
% index of led used in the experiment
% opts.ledidx = 1:Nled;
% opts.scale_tau = 1e-6;
% opts.min_mode = 'seq';
% opts.fourier_mode = 'projection';
else
if ~isfield(opts,'tol')
opts.tol = 1;
end
if ~isfield(opts,'maxIter')
opts.maxIter = 50;
end
if ~isfield(opts,'minIter')
opts.minIter = 3;
end
if ~isfield(opts,'monotone')
opts.monotone = 1;
end
if ~isfield(opts,'display')
opts.display = 0;
end
if ~isfield(opts,'saveIterResult')
opts.saveIterResult = 0;
end
if ~isfield(opts,'out_dir')
opts.out_dir = ['IterResults'];
if opts.saveIterResult
mkdir(opts.out_dir);
end
end
if ~isfield(opts,'O0')
opts.O0 = Ft(sqrt(I(:,:,1)))/r;
opts.O0 = padarray(opts.O0,(No-Np)/2);
end
if ~isfield(opts,'P0')
opts.P0 = ones(Np);
end
if ~isfield(opts,'OP_alpha')
opts.OP_alpha = 1;
end
if ~isfield(opts,'OP_beta')
opts.OP_beta = 1;
end
if ~isfield(opts,'BP_alpha')
opts.BP_alpha = 1;
end
if ~isfield(opts,'BP_beta')
opts.BP_beta = 1;
end
if ~isfield(opts,'Ps')
opts.Ps = 1;
end
if ~isfield(opts,'iters')
opts.iters = 1;
end
% if ~isfield(opts,'scalecorrect')
% opts.scalecorrect = 0;
% end
% if ~isfield(opts,'scale')
% opts.scale = ones(Nled,1);
% end
% if ~isfield(opts,'ledidx')
% opts.ledidx = 1:Nled;
% end
% if ~isfield(opts,'scale_tau')
% opts.scale_tau = 1e-6;
% end
% if ~isfield(opts,'min_mode')
% opts.min_mode = 'seq';
% end
% if ~isfield(opts,'fourier_mode')
% opts.fourier_mode = 'projection';
% end
end
T0 = clock;
fprintf('| iter | rmse |\n');
for j=1:20, fprintf('-'); end
fprintf('\n');
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% step 0: initializatin
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
P = opts.P0; opts.P0 = 0;
o_slice = opts.O0; opts.O0 = 0;
err1 = inf;
err2 = 50;
iter = 0;
Ps = opts.Ps;
iters = opts.iters;
% Num of slices
m0 = length(dz)+1;
if opts.display
f1 = figure(88);
% title(['slice',num2str(m0)],'fontsize',10); axis off;
if m0>1
switch m0
case 2
p = 2; q = 1;
case {3,4}
p = 2; q = 2;
case {5,6}
p = 3; q = 2;
otherwise
p = 3; q = 3;
end
for m = 1:min(m0,9)
subplot(p,q,m); imagesc(abs(o_slice(:,:,m))); axis image; colormap gray;
h= colorbar; set(h,'fontsize',8); axis off;
% title(['slice',num2str(m0-m)],'fontsize',10);axis off;
end
else
subplot(221); imagesc(abs(o_slice(:,:,m0))); axis image; colormap gray;
h= colorbar; set(h,'fontsize',8); axis off;
subplot(222); imagesc(angle(o_slice(:,:,m0))); axis image; colormap gray; colorbar;
h= colorbar; set(h,'fontsize',8); axis off;
% title('phase(o)','fontsize',10); axis off;
subplot(223); imagesc(abs(P)); axis image; colormap gray; colorbar;
h= colorbar; set(h,'fontsize',8); axis off;
% title('ampl(P)'); axis off;
subplot(224); imagesc(angle(P).*abs(P)); axis image; colorbar;
h= colorbar; set(h,'fontsize',8); axis off;
end
% title('phase(P)'); axis off;
drawnow;
end
if opts.saveIterResult
saveas(f1,[opts.out_dir,'\R_',num2str(iter),'.png']);
% saveas(f2,[opts.out_dir,'\Ph_',num2str(iter),'.png']);
end
%% main algorithm starts here
% stopping criteria: when relative change in error falls below some value,
% can change this value to speed up the process by using a larger value but
% will trading off the reconstruction accuracy
% error is defined by the difference b/w the measurement and the estimated
% images in each iteration
% fprintf('| %2d | %.2e |\n',iter,err1);
downsamp = @(x) x(cen0(1)-Np(1)/2:cen0(1)+Np(1)/2-1,...
cen0(2)-Np(2)/2:cen0(2)+Np(2)/2-1);
% project onto the intensity measurement space
Proj_FT = @(f,I) F(sqrt(I).*exp(1i*angle(f)));
if mod(No(1),2) == 1
x = [-1/2:1/(No(2)-1):1/2];
y = [-1/2:1/(No(1)-1):1/2];
else
x = [-1/2:1/No(2):1/2-1/No(2)];
y = [-1/2:1/No(2):1/2-1/No(2)];
end
[x,y] = meshgrid(x,y);
% H0 = exp(1i*k02*z0);
while abs(err1-err2)>opts.tol&&iter<opts.maxIter
err1 = err2;
% err2 = 0;
err = zeros(1,Nimg);
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% incremetal updating method, process image by image
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
for m = 1:Nimg
% compute illumination
i0 = exp(1i*2*pi*(x*Ns(:,m,2)+y*Ns(:,m,1)));
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% step 1.1: Fwd propagation:
% compute incident (phi) and output (psi) field for each slice
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
[phi_fwd, psi_fwd] = Fwd_Prop_MultiSlice_v2( i0, o_slice, k2, dz);
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% step 1.2: compute object field & estimating intensity
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
O = F(psi_fwd(:,:,m0));
G_fwd = downsamp(O).*P;
%G_fwd2 = G_fwd.*H0;
% compute field in real/measurement space
%g = Ft(G_fwd2);
g = Ft(G_fwd.*H0);
% estimated intensity w/o correction term
I_est = abs(g).^2;
% measured intensity
I_mea = I(:,:,m);
%G_new2 = Proj_FT(g, I_mea);
%G_new = G_new2./H0;
G_new = Proj_FT(g, I_mea)./H0;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% stopping criteria: residual between estimated & measured intensities
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
err(m) = sum((I_mea(:)-I_est(:)).^2);
% err2 = err2+sum((I_mea(:)-I_est(:)).^2);
% if opts.scalecorrect
% if iter>1
% %% Step 1a) Update scaling factor due to LED non-uniformity
% scale(m) = scaling_update(scale(m), I_mea, I_est, ...
% opts.scale_tau, opts.scale_mode, opts.scale_alpha);
% end
% end
% I_est = scale(m)*I_est;
% Psi = Proj_Fourier(G, I_mea, scale(m));
% Psi = Psi.*Ps;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% step 2: update O & P from product constraint G = O*P
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
[O, P] = Proj_ObjPupil(O, P, G_new, G_fwd, Ps,...
opts.OP_alpha, opts.OP_beta, iters);
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% step 3: back-propgate field based on multi-slice approach & update
% estimationo of o_slice
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
o_slice = Back_Prop_MultiSlice_v2(O, k2, dz, o_slice, phi_fwd, psi_fwd, ...
i0, opts.BP_alpha, opts.BP_beta, iters);
end
err2 = sqrt(sum(err));
fprintf('| %2d | %.2e |\n',iter,err2);
iter = iter+1;
if strcmp(opts.display,'full')
f1 = figure(88);
% title(['slice',num2str(m0)],'fontsize',10); axis off;
if m0>1
switch m0
case 2
p = 2; q = 1;
case {3,4}
p = 2; q = 2;
case {5,6}
p = 3; q = 2;
otherwise
p = 3; q = 3;
end
for m = 1:min(m0,9)
subplot(p,q,m); imagesc(abs(o_slice(:,:,m))); axis image; colormap gray;
h= colorbar; set(h,'fontsize',8); axis off;
% title(['slice',num2str(m0-m)],'fontsize',10);axis off;
end
else
subplot(221); imagesc(abs(o_slice(:,:,m0))); axis image; colormap gray;
h= colorbar; set(h,'fontsize',8); axis off;
subplot(222); imagesc(angle(o_slice(:,:,m0))); axis image; colormap gray; colorbar;
h= colorbar; set(h,'fontsize',8); axis off;
% title('phase(o)','fontsize',10); axis off;
subplot(223); imagesc(abs(P)); axis image; colormap gray; colorbar;
h= colorbar; set(h,'fontsize',8); axis off;
% title('ampl(P)'); axis off;
subplot(224); imagesc(angle(P).*abs(P)); axis image; colorbar;
h= colorbar; set(h,'fontsize',8); axis off;
end
% title('phase(P)'); axis off;
drawnow;
end
%end
% %% compute error
% % record the error and can check the convergence later.
% err = [err,err2];
if strcmp(opts.display,'iter')
f1 = figure(88);
% title(['slice',num2str(m0)],'fontsize',10); axis off;
if m0>1
switch m0
case 2
p = 2; q = 1;
case {3,4}
p = 2; q = 2;
case {5,6}
p = 3; q = 2;
otherwise
p = 3; q = 3;
end
for m = 1:min(m0,9)
subplot(p,q,m); imagesc(abs(o_slice(:,:,m))); axis image; colormap gray;
h= colorbar; set(h,'fontsize',8); axis off;
% title(['slice',num2str(m0-m)],'fontsize',10);axis off;
end
else
subplot(221); imagesc(abs(o_slice(:,:,m0))); axis image; colormap gray;
h= colorbar; set(h,'fontsize',8); axis off;
subplot(222); imagesc(angle(o_slice(:,:,m0))); axis image; colormap gray; colorbar;
h= colorbar; set(h,'fontsize',8); axis off;
% title('phase(o)','fontsize',10); axis off;
subplot(223); imagesc(abs(P)); axis image; colormap gray; colorbar;
h= colorbar; set(h,'fontsize',8); axis off;
% title('ampl(P)'); axis off;
subplot(224); imagesc(angle(P).*abs(P)); axis image; colorbar;
h= colorbar; set(h,'fontsize',8); axis off;
end
% title('phase(P)'); axis off;
drawnow;
end
if opts.saveIterResult
saveas(f1,[opts.out_dir,'\R_',num2str(iter),'.png']);
end
if opts.monotone&&iter>opts.minIter
if err2>err1
break;
end
end
end
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% prepare to end the process, calculate the error
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
err = zeros(1,Nimg);
for m = 1:Nimg
% compute illumination
i0 = exp(1i*2*pi*(x*Ns(:,m,2)+y*Ns(:,m,1)));
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% step 1.1: Fwd propagation:
% compute incident (phi) and output (psi) field for each slice
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
I_est = Fwd_Prop_MultiSlice_Intensity(i0, o_slice, k2, dz, P, H0);
I_mea = I(:,:,m);
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% stopping criteria: residual between estimated & measured intensities
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
err(m) = sum((I_mea(:)-I_est(:)).^2);
end
fprintf('| %2d | %.2e |\n',iter,sqrt(sum(err)));
fprintf('elapsed time: %.0f seconds\n',etime(clock,T0));
end
| MATLAB |
function [ phi, psi ] = Fwd_Prop_MultiSlice_v2( i0, o_slice, k2, dz)
%FWD_PROP_MULTISLICE_V2 forward-propagates a field through a multi-slice
%object (beam-propagation / multi-slice approximation).
% Inputs:
%   i0:      KNOWN illumination field incident on the first slice
%   o_slice: current estimate of the multi-slice object, N x N x Nslice
%   k2:      quadratic phase term (pi*lambda*(u^2+v^2)) used to build the
%            angular-spectrum transfer function between slices
%   dz:      slice spacings; dz(m-1) is the distance from slice m-1 to m
% Outputs:
%   phi:     incident field arriving at each slice
%   psi:     exit field of each slice (incidence .* object)
% by Lei Tian (lei_tian@alum.mit.edu)
% last modified on 5/28/2014

% Define centered Fourier operators
F = @(x) fftshift(fft2(ifftshift(x)));
Ft = @(x) fftshift(ifft2(ifftshift(x)));
% define propagation operator, f: input field, h: propagation transfer
% function
Prop = @(f,h) Ft(F(f).*h);
% N: lateral dimension, Nslice: # of total z-slices
[N,~,Nslice] = size(o_slice);
% initialize incident field at each slice
phi = zeros(N,N,Nslice);
phi(:,:,1) = i0; % incident field of 1st slice is illumination
% initialize output field at each slice
psi = zeros(N,N,Nslice);
psi(:,:,1) = phi(:,:,1).*o_slice(:,:,1);
for m = 2:Nslice
    % angular-spectrum transfer function over the gap dz(m-1)
    H = exp(1i*k2*dz(m-1));
    % propagate the exit field of the previous slice to this slice
    phi(:,:,m) = Prop(psi(:,:,m-1),H);
    % output field = incidence * object
    psi(:,:,m) = phi(:,:,m).*o_slice(:,:,m);
end
% final propagation to the objective focal plane is done by the caller:
% psi(:,:,Nslice) = Prop(psi(:,:,Nslice),H0);
end
| MATLAB |
function [ O,phi ] = Proj_OslicePhi(O0,phi0,psi,psi0,alpha,beta,iters)
%PROJ_OSLICEPHI jointly refines an object slice O and its incident field
%phi from the product constraint psi = O.*phi, via regularized
%gradient-descent updates (ePIE-style).
% Inputs:
%   O0:    current object-slice estimate
%   phi0:  current incident-field estimate
%   psi:   updated field estimate (after the measurement projection)
%   psi0:  previous field estimate (= O0.*phi0)
%   alpha: regularization / step-size parameter for the O update
%   beta:  regularization / step-size parameter for the phi update
%   iters: number of alternating update sweeps to run
%
% last modified by Lei Tian, lei_tian@alum.mit.edu, 5/27/2014

O = O0;
phi = phi0;
% field correction to be distributed between O and phi
dpsi = psi - psi0;
count = 0;
while count < iters
    % update O with phi held fixed (regularized, normalized by max |phi|)
    O = O + abs(phi).*conj(phi)./(abs(phi).^2 + alpha).*dpsi/max(abs(phi(:)));
    % update phi using the freshly updated O
    phi = phi + abs(O).*conj(O)./(abs(O).^2 + beta).*dpsi/max(abs(O(:)));
    count = count + 1;
end
end
| MATLAB |
function [ O,P ] = Proj_ObjPupil(O0,P0,G,G0,Ps,alpha,beta,iters)
%PROJ_OBJPUPIL updates the object spectrum O and pupil P from the product
%constraint G = O*P, via regularized gradient descent (ePIE-style).
% Inputs:
%   O0:    object (spectrum) estimate, n1 x n2
%   P0:    pupil function estimate, m1 x m2 (Np <= No)
%   G:     updated field estimate (after the intensity projection)
%   G0:    previous field estimate
%   Ps:    support constraint for P, e.g. objective with known NA
%   alpha: regularization / step-size parameter for the O update
%   beta:  regularization / step-size parameter for the P update
%   iters: # of gradient iterations to run
%
% last modified by Lei Tian, lei_tian@alum.mit.edu, 5/27/2014

% size of O
No = size(O0); No = No(:);
% size of P, Np<=No
Np = size(P0); Np = Np(:);
cen0 = round((No+1)/2);

% The two resampling operators are loop-invariant, so define them once
% outside the iteration (they were previously rebuilt every pass).
% operator to put P at proper location at the O plane
% (requires padarray from the Image Processing Toolbox)
upsamp = @(x) padarray(x,(No-Np)/2);
% operator to crop region of O from proper location at the O plane
if mod(Np(1),2) == 1
    % BUG FIX: cen0 is a 2x1 vector; this branch previously indexed it as
    % cen0(1,m)/cen0(2,m) with an undefined variable m (copy-paste from a
    % multi-patch variant), which errored whenever Np was odd.
    downsamp = @(x) x(cen0(1)-(Np(1)-1)/2:cen0(1)+(Np(1)-1)/2,...
        cen0(2)-(Np(2)-1)/2:cen0(2)+(Np(2)-1)/2);
else
    downsamp = @(x) x(cen0(1)-Np(1)/2:cen0(1)+Np(1)/2-1,...
        cen0(2)-Np(2)/2:cen0(2)+Np(2)/2-1);
end

% init guess
O = O0;
P = P0;
it = 0;
dG = G-G0;
while (it<iters)
    % gradient step on O (pupil held fixed), normalized by max |P|
    O = O+upsamp(abs(P).*conj(P)./(abs(P).^2+alpha).*dG)/max(abs(P(:)));
    % gradient step on P (object held fixed), confined to the support Ps
    P = P+downsamp(abs(O).*conj(O)./(abs(O).^2+beta)).*dG/max(abs(O(:))).*Ps;
    it = it+1;
end
end
| MATLAB |
% function [ varargout ] = SystemSetup( varargin )
%SYSTEMSETUP initialize general system parameters for LED array microscope
% NOTE(review): this is a SCRIPT, not a function. It reads Np, nstart and
% z0 from the caller's workspace (they must be defined before running) and
% leaves all derived quantities (du, pupil, H0, k2, ...) in that workspace.
%   Last modified on 4/22/2014
%   Lei Tian (lei_tian@berkeley.edu)
% addpath(['..\..\Source_coding']);
% % Define Fourier operators (centered FFT conventions)
F = @(x) fftshift(fft2(ifftshift(x)));
Ft = @(x) fftshift(ifft2(ifftshift(x)));
% F = @(x) fftshift(fft2(x));
% Ft = @(x) ifft2(ifftshift(x));
% Define RMSE function operator
RMSE = @(x,x0) sqrt(sum(abs(x(:)-x0(:)).^2)/sum(x0(:).^2));
%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% wavelength of illumination, assume monochromatic
% R: 643nm +- 50nm (units below are microns)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
lambda = 0.643;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% numerical aperture of the objective
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
NA = 0.25;
% maximum spatial frequency set by NA
um_m = NA/lambda;
% system resolution based on the NA
dx0 = 1/um_m/2;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% magnification of the system,
% need to calibrate with calibration slides
% on 2x objective, front port with an extra 2x
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
mag = 8/4*10;
dpix_c = 6.5; %6.5um pixel size on the sensor plane
% effective image pixel size on the object plane
dpix_m = dpix_c/mag;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% # of pixels at the output image patch (Np, supplied by the caller)
% each patch will assign a single k-vector; the image patch size cannot be
% too large, to keep the single-k assumption valid
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Np = 100;
% FoV in the object space
FoV = Np*dpix_m;
% sampling size at Fourier plane set by the image size (FoV)
% sampling size at Fourier plane is always = 1/FoV
if mod(Np,2) == 1
    du = 1/dpix_m/(Np-1);
else
    du = 1/FoV;
end
% low-pass filter diameter set by the NA = bandwidth of a single measurement
% in index
% N_NA = round(2*um_m/du_m);
% generate cutoff window by NA
m = 1:Np;
[mm,nn] = meshgrid(m-round((Np+1)/2));
ridx = sqrt(mm.^2+nn.^2);
um_idx = um_m/du;
% assume a circular pupil function, lpf due to finite NA
w_NA = double(ridx<um_idx);
% h = fspecial('gaussian',10,5);
% w_NA = imfilter(w_NA,h);
% support of OTF is 2x of ATF(NA)
Ps_otf = double(ridx<2*um_idx);
phC = ones(Np);
% phC(ridx<0.8*um_idx&ridx>0.7*um_idx) = 0.5;
% aberration modeled by a phase function (identity = aberration-free)
aberration = ones(Np);
% aberration = exp(pi/2*1i*(exp(-(mm-20).^2/50^2-(nn+40).^2/150^2))-...
%     pi/8*1i*(exp(-(mm+40).^2/100^2-(nn-80).^2/80^2))+...
%     pi/3*1i*(exp(-(mm).^2/60^2-(nn-10).^2/30^2)));
%aberration = ones(N_m);
pupil = w_NA.*phC.*aberration;
clear m mm nn
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% set up image coordinates
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% original image size: 2160x2560
% can calibrate the center of the illumination with respect to the image by
% looking at the data from the dark/bright field image transitions
ncent = [1080,1280];
% nstart = ncent-Np/2;
% start pixel of the image patch (nstart is supplied by the caller)
% nstart = [981,1181];
% center, start & end of the image patch
img_center = (nstart-ncent+Np/2)*dpix_m;
img_start = nstart*dpix_m;
img_end = (nstart+Np)*dpix_m;
%img_center = [0,0];
%% LED array geometries and derived quantities
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% spacing between neighboring LEDs
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
ds_led = 4e3; %4mm
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% distance from the LED to the object
% experimentally determined by placing a grating object
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
z_led = 74e3; %mm
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% diameter of # of LEDs used in the experiment
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
dia_led = 19;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% set up LED coordinates
% h: horizontal, v: vertical
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
lit_cenv = 13;
lit_cenh = 13;
vled = [0:31]-lit_cenv;
hled = [0:31]-lit_cenh;
[hhled,vvled] = meshgrid(hled,vled);
rrled = sqrt(hhled.^2+vvled.^2);
LitCoord = rrled<dia_led/2;
% total number of LEDs used in the experiment
Nled = sum(LitCoord(:));
% index of LEDs used in the experiment
Litidx = find(LitCoord);
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% need to flip x&y(spatial frequency u&v) based on expt geometry
% also with minus (-) on both v & h based on scanning order
% also need to know the relative orientation between led array and the
% image, otherwise get garbage near the edge of the FoV
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% corresponding angles for each LEDs
dd = sqrt((-hhled*ds_led-img_center(1)).^2+(-vvled*ds_led-img_center(2)).^2+z_led.^2);
sin_thetav = (-hhled*ds_led-img_center(1))./dd;
sin_thetah = (-vvled*ds_led-img_center(2))./dd;
Tanv = -(-hhled*ds_led)/z_led;
Tanh = -(-vvled*ds_led)/z_led;
Tanh_lit = Tanh(LitCoord);
Tanv_lit = Tanv(LitCoord);
illumination_na = sqrt(sin_thetav.^2+sin_thetah.^2);
% corresponding spatial freq for each LEDs
%
vled = sin_thetav/lambda;
uled = sin_thetah/lambda;
% spatial freq index for each plane wave relative to the center
idx_u = round(uled/du);
idx_v = round(vled/du);
illumination_na_used = illumination_na(LitCoord);
% number of brightfield image
NBF = sum(illumination_na_used<NA);
idx_BF = find(illumination_na_used<NA);
% darkfield images indices
NDF = sum(illumination_na_used>NA);
idx_DF = find(illumination_na_used>NA);
% maximum spatial frequency achievable based on the maximum illumination
% angle from the LED array and NA of the objective
um_p = max(illumination_na_used)/lambda+um_m;
% resolution achieved after freq post-processing
dx0_p = 1/um_p/2;
NA_s = um_p*lambda;
disp(['synthetic NA is ',num2str(NA_s)]);
% assume the max spatial freq of the original object
% um_obj>um_p
% assume the # of pixels of the original object
% upsample by 2 for intensity
N_obj = round(um_p/du)*2;%*2;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% need to enforce N_obj/Np = integer to ensure no FT artifacts
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
N_obj = ceil(N_obj/Np)*Np;
% max spatial freq of the original object
um_obj = du*N_obj/2;
% sampling size of the object (=pixel size of the test image)
dx_obj = 1/um_obj/2;
% spatial coordinates
[x,y] = meshgrid([-N_obj/2:N_obj/2-1]*dx_obj);
[xn,yn] = meshgrid(-1/2:1/N_obj:1/2-1/N_obj);
%% spatial frequency coordinates
[u,v] = meshgrid(-um_obj:du:um_obj-du);
[u0,v0] = meshgrid(-du*Np/2:du:du*Np/2-du);
%% compute depth sectioning capability
DOF0 = lambda/NA^2/2;
DOFs = lambda/NA_s^2/2;
%% resolution
res0 = lambda/NA/2;
res_s = lambda/NA_s/2;
%% define propagation transfer function
% evanescent-wave cutoff mask
eva = double(u0.^2+v0.^2<1/lambda^2);
% angular-spectrum propagator from the last slice to the focal plane
% (z0 is supplied by the caller's workspace)
H0 = exp(-1i*2*pi*sqrt(1/lambda^2-u0.^2-v0.^2)*z0).*eva;%.*mask_evanescent;
% H0 = exp(1i*2*pi*sqrt(1/lambda^2-u.^2-v.^2)*z0).*mask_evanescent;
% quadratic phase term used to build inter-slice propagators
k2 = pi*lambda*(u.^2+v.^2);
% k02 = pi*lambda*(u0.^2+v0.^2);
| MATLAB |
3D | fsahli/EikonalNet | models_tf.py | .py | 17,528 | 470 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Wed Aug 1 16:15:14 2018
@author: Paris
"""
import tensorflow as tf
import numpy as np
import time
from pyDOE import lhs
tf.random.set_random_seed(1234)
np.random.seed(1234)
class Eikonal2DnetCV2:
    """Physics-informed neural network (PINN) for the 2D eikonal equation.

    One network predicts activation times T(x, y); a second predicts the
    conduction velocity CV(x, y), bounded above by C via a scaled sigmoid.
    Both are trained jointly so that T fits the measurements (x_e, y_e, T_e)
    while the pair satisfies the eikonal residual |grad T| = 1/CV at the
    collocation points (x, y).

    Written for TensorFlow 1.x (tf.Session, placeholders, tf.contrib).
    """

    # Initialize the class
    def __init__(self, x, y, x_e, y_e, T_e, layers, CVlayers, C = 1.0, alpha = 1e-5, alphaL2 = 1e-6, jobs = 4):
        """Build the computational graph.

        x, y:      collocation points where the PDE residual is enforced
        x_e, y_e:  measurement locations
        T_e:       measured activation times at (x_e, y_e)
        layers:    layer widths of the T network
        CVlayers:  layer widths of the CV network
        C:         upper bound on CV (CV = C * sigmoid(net))
        alpha:     weight of the CV smoothness penalty in the loss
        alphaL2:   L2 weight-decay coefficient on the T-network weights
        jobs:      number of CPU threads for the tf.Session
        """
        X = np.concatenate([x, y], 1)
        # X_e = np.concatenate([x_e, t_e], 1)
        # bounds used to normalize network inputs to [-1, 1]
        self.lb = X.min(0)
        self.ub = X.max(0)
        self.X = X
        # self.X_e = X_e
        self.x = x
        self.y = y
        self.T_e = T_e
        self.x_e = x_e
        self.y_e = y_e
        self.layers = layers
        self.CVlayers = CVlayers
        # Initialize NN
        self.weights, self.biases = self.initialize_NN(layers)
        self.CVweights, self.CVbiases = self.initialize_NN(CVlayers)
        self.C = tf.constant(C)
        self.alpha = tf.constant(alpha)
        self.alphaL2 = alphaL2
        # tf placeholders and graph
        self.sess = tf.Session(config=tf.ConfigProto(allow_soft_placement=True,
                                                     log_device_placement=True,
                                                     intra_op_parallelism_threads=jobs,
                                                     inter_op_parallelism_threads=jobs,
                                                     device_count={'CPU': jobs}))
        self.x_tf = tf.placeholder(tf.float32, shape=[None, self.x.shape[1]])
        self.y_tf = tf.placeholder(tf.float32, shape=[None, self.y.shape[1]])
        self.T_e_tf = tf.placeholder(tf.float32, shape=[None, self.T_e.shape[1]])
        self.x_e_tf = tf.placeholder(tf.float32, shape=[None, self.x_e.shape[1]])
        self.y_e_tf = tf.placeholder(tf.float32, shape=[None, self.y_e.shape[1]])
        self.T_pred, self.CV_pred, self.f_T_pred, self.f_CV_pred = self.net_eikonal(self.x_tf, self.y_tf)
        self.T_e_pred, self.CV_e_pred, self.f_T_e_pred, self.f_CV_e_pred = self.net_eikonal(self.x_e_tf, self.y_e_tf)
        # loss = data misfit + PDE residual (at data and collocation points)
        #        + weighted CV-smoothness penalty + L2 regularization
        self.loss = tf.reduce_mean(tf.square(self.T_e_tf - self.T_e_pred)) + \
                    tf.reduce_mean(tf.square(self.f_T_e_pred)) + \
                    tf.reduce_mean(tf.square(self.f_T_pred)) + \
                    self.alpha*tf.reduce_mean(tf.square(self.f_CV_e_pred)) + \
                    self.alpha*tf.reduce_mean(tf.square(self.f_CV_pred)) + \
                    sum([self.alphaL2*tf.nn.l2_loss(w) for w in self.weights])
        self.optimizer_Adam = tf.train.AdamOptimizer()
        self.train_op_Adam = self.optimizer_Adam.minimize(self.loss)
        # Define optimizer (use L-BFGS for better accuracy)
        self.optimizer = tf.contrib.opt.ScipyOptimizerInterface(self.loss,
                                method = 'L-BFGS-B',
                                options = {'maxiter': 10000,
                                           'maxfun': 50000,
                                           'maxcor': 50,
                                           'maxls': 50,
                                           'ftol' : 1.0 * np.finfo(float).eps})
        # Initialize Tensorflow variables
        init = tf.global_variables_initializer()
        self.sess.run(init)

    # Initialize network weights and biases using Xavier initialization
    def initialize_NN(self, layers):
        """Create weight/bias tf.Variables for an MLP with the given widths."""
        # Xavier initialization
        def xavier_init(size):
            in_dim = size[0]
            out_dim = size[1]
            xavier_stddev = 1. / np.sqrt((in_dim + out_dim) / 2.)
            return tf.Variable(tf.random_normal([in_dim, out_dim], dtype=tf.float32) * xavier_stddev, dtype=tf.float32)
        weights = []
        biases = []
        num_layers = len(layers)
        for l in range(0,num_layers-1):
            W = xavier_init(size=[layers[l], layers[l+1]])
            b = tf.Variable(tf.zeros([1,layers[l+1]], dtype=tf.float32), dtype=tf.float32)
            weights.append(W)
            biases.append(b)
        return weights, biases

    def neural_net(self, X, weights, biases):
        """Forward pass: inputs normalized to [-1, 1], tanh hidden layers,
        linear output layer."""
        num_layers = len(weights) + 1
        H = 2.0*(X - self.lb)/(self.ub - self.lb) - 1.0
        for l in range(0,num_layers-2):
            W = weights[l]
            b = biases[l]
            H = tf.tanh(tf.add(tf.matmul(H, W), b))
        W = weights[-1]
        b = biases[-1]
        Y = tf.add(tf.matmul(H, W), b)
        return Y

    def net_eikonal(self, x, y):
        """Build T, CV and the PDE residuals at the given input tensors.

        Returns (T, CV, f_T, f_CV) where f_T = |grad T| - 1/CV is the
        eikonal residual and f_CV = |grad CV| penalizes CV roughness.
        """
        C = self.C
        T = self.neural_net(tf.concat([x,y], 1), self.weights, self.biases)
        CV = self.neural_net(tf.concat([x,y], 1), self.CVweights, self.CVbiases)
        # bound CV to (0, C)
        CV = C*tf.sigmoid(CV)
        T_x = tf.gradients(T, x)[0]
        T_y = tf.gradients(T, y)[0]
        CV_x = tf.gradients(CV, x)[0]
        CV_y = tf.gradients(CV, y)[0]
        f_T = tf.sqrt(T_x**2 + T_y**2) - 1.0/CV
        f_CV = tf.sqrt(CV_x**2 + CV_y**2)
        return T, CV, f_T, f_CV

    def callback(self, loss):
        """Per-iteration L-BFGS callback: log and print the loss.

        NOTE(review): assumes self.lossit exists; it is only created by the
        train_Adam* methods, so calling train() first would raise — confirm
        intended call order.
        """
        self.lossit.append(loss)
        print('Loss: %.5e' % (loss))

    def train(self):
        """Run the L-BFGS optimizer on the full data/collocation sets."""
        tf_dict = {self.x_tf: self.x, self.y_tf: self.y,
                   self.x_e_tf: self.x_e, self.y_e_tf: self.y_e, self.T_e_tf: self.T_e}
        # Call SciPy's L-BFGS optimizer
        self.optimizer.minimize(self.sess,
                                feed_dict = tf_dict,
                                fetches = [self.loss],
                                loss_callback = self.callback)

    def train_Adam(self, nIter):
        """Run nIter Adam steps on the full sets, then refine with L-BFGS."""
        self.lossit = []
        tf_dict = {self.x_tf: self.x, self.y_tf: self.y,
                   self.x_e_tf: self.x_e, self.y_e_tf: self.y_e, self.T_e_tf: self.T_e}
        start_time = time.time()
        for it in range(nIter):
            self.sess.run(self.train_op_Adam, tf_dict)
            loss_value = self.sess.run(self.loss, tf_dict)
            self.lossit.append(loss_value)
            # Print
            if it % 10 == 0:
                elapsed = time.time() - start_time
                # NOTE(review): prints exp(C) although C is not stored in
                # log-space here — confirm which convention is intended
                C_value = np.exp(self.sess.run(self.C))
                print('It: %d, Loss: %.3e, C: %.3f, Time: %.2f' %
                      (it, loss_value, C_value, elapsed))
                start_time = time.time()
        self.optimizer.minimize(self.sess,
                                feed_dict = tf_dict,
                                fetches = [self.loss],
                                loss_callback = self.callback)

    def train_Adam_minibatch(self, nIter, size = 50):
        """Adam training with fresh LHS-sampled collocation minibatches of
        the given size; measurement points are used in full every step."""
        self.lossit = []
        start_time = time.time()
        for it in range(nIter):
            X = lhs(2, size)
            tf_dict = {self.x_tf: X[:,:1], self.y_tf: X[:,1:],
                       self.x_e_tf: self.x_e, self.y_e_tf: self.y_e, self.T_e_tf: self.T_e}
            self.sess.run(self.train_op_Adam, tf_dict)
            loss_value = self.sess.run(self.loss, tf_dict)
            self.lossit.append(loss_value)
            # Print
            if it % 10 == 0:
                elapsed = time.time() - start_time
                # NOTE(review): prints exp(C); see train_Adam
                C_value = np.exp(self.sess.run(self.C))
                print('It: %d, Loss: %.3e, C: %.3f, Time: %.2f' %
                      (it, loss_value, C_value, elapsed))
                start_time = time.time()

    def predict(self, x_star, y_star):
        """Evaluate the trained networks: returns (T, CV) at (x_star, y_star)."""
        tf_dict = {self.x_tf: x_star, self.y_tf: y_star,
                   self.x_e_tf: self.x_e, self.y_e_tf: self.y_e}
        T_star = self.sess.run(self.T_pred, tf_dict)
        CV_star = self.sess.run(self.CV_pred, tf_dict)
        return T_star, CV_star

    def get_adaptive_points(self, N = 1000, M = 10):
        """Sample N LHS candidate points and return the M with the largest
        PDE residual (active-learning candidates) plus their residuals."""
        X = lhs(2, N)
        tf_dict = {self.x_tf: X[:,:1], self.y_tf: X[:,1:],
                   self.x_e_tf: self.x_e, self.y_e_tf: self.y_e}
        f_T_star = self.sess.run(self.f_T_pred, tf_dict)
        ind = f_T_star[:,0].argsort()[-M:]
        return X[ind], f_T_star[ind]
class Eikonal3DnetCV2:
    """Physics-informed neural network (PINN) for the eikonal equation on a
    3D point cloud (e.g. a cardiac surface).

    One network predicts (normalized) activation times T(X); a second
    predicts conduction velocity CV(X), bounded above by C via a scaled
    sigmoid.  The loss combines data misfit at X_e, the eikonal residual
    CV*|Tmax*grad T| = 1 at the collocation points X, a penalty keeping
    grad T tangential to the surface (via the point normals), a CV
    smoothness penalty, and L2 weight decay.

    Written for TensorFlow 1.x (tf.Session, placeholders, tf.contrib).
    """

    # Initialize the class
    def __init__(self, X, normals, X_e, T_e, layers, CVlayers, Tmax, C = 1.0, alpha = 1e-5, alphaL2 = 1e-6, jobs = 4):
        """Build the computational graph.

        X:        collocation points (surface point cloud)
        normals:  outward normals at X (same shape as X)
        X_e:      measurement locations
        T_e:      measured (normalized) activation times at X_e
        layers:   layer widths of the T network
        CVlayers: layer widths of the CV network
        Tmax:     time-normalization constant (T is stored scaled by 1/Tmax)
        C:        upper bound on CV (CV = C * sigmoid(net))
        alpha:    weight of the CV smoothness penalty
        alphaL2:  L2 weight-decay coefficient on the T-network weights
        jobs:     number of CPU threads for the tf.Session
        """
        # X_e = np.concatenate([x_e, t_e], 1)
        self.Tmax = Tmax
        # bounds used to normalize network inputs to [-1, 1]
        self.lb = X.min(0)
        self.ub = X.max(0)
        self.X = X
        self.normals = normals
        # self.X_e = X_e
        self.T_e = T_e
        self.X_e = X_e
        self.layers = layers
        self.CVlayers = CVlayers
        # Fix: make train() safe to call before any Adam run — callback()
        # appends to this list, which previously only train_Adam* created.
        self.lossit = []
        # Initialize NN
        self.weights, self.biases = self.initialize_NN(layers)
        self.CVweights, self.CVbiases = self.initialize_NN(CVlayers)
        self.C = tf.constant(C)
        self.alpha = tf.constant(alpha)
        self.alphaL2 = alphaL2
        # tf placeholders and graph
        self.sess = tf.Session(config=tf.ConfigProto(allow_soft_placement=True,
                                                     log_device_placement=True,
                                                     intra_op_parallelism_threads=jobs,
                                                     inter_op_parallelism_threads=jobs,
                                                     device_count={'CPU': jobs}))
        self.X_tf = tf.placeholder(tf.float32, shape=[None, self.X.shape[1]])
        self.normals_tf = tf.placeholder(tf.float32, shape=[None, self.X.shape[1]])
        self.T_e_tf = tf.placeholder(tf.float32, shape=[None, self.T_e.shape[1]])
        self.X_e_tf = tf.placeholder(tf.float32, shape=[None, self.X_e.shape[1]])
        self.T_pred, self.CV_pred, self.f_T_pred, self.f_CV_pred, self.f_N_pred = self.net_eikonal(self.X_tf, self.normals_tf)
        self.T_e_pred, self.CV_e_pred = self.net_data(self.X_e_tf)
        self.pde_loss = tf.reduce_mean(tf.square(self.f_T_pred))
        self.normal_loss = 1e-2*tf.reduce_mean(tf.square(self.f_N_pred))
        # loss = data misfit + PDE residual + normal penalty
        #        + weighted CV-smoothness penalty + L2 regularization
        self.loss = tf.reduce_mean(tf.square(self.T_e_tf - self.T_e_pred)) + \
                    self.pde_loss + \
                    self.normal_loss + \
                    self.alpha*tf.reduce_mean(tf.square(self.f_CV_pred)) + \
                    sum([self.alphaL2*tf.nn.l2_loss(w) for w in self.weights])
        self.optimizer_Adam = tf.train.AdamOptimizer()
        self.train_op_Adam = self.optimizer_Adam.minimize(self.loss)
        # Define optimizer (use L-BFGS for better accuracy)
        self.optimizer = tf.contrib.opt.ScipyOptimizerInterface(self.loss,
                                method = 'L-BFGS-B',
                                options = {'maxiter': 10000,
                                           'maxfun': 50000,
                                           'maxcor': 50,
                                           'maxls': 50,
                                           'ftol' : 1.0 * np.finfo(float).eps})
        # Initialize Tensorflow variables
        init = tf.global_variables_initializer()
        self.sess.run(init)

    # Initialize network weights and biases using Xavier initialization
    def initialize_NN(self, layers):
        """Create weight/bias tf.Variables for an MLP with the given widths."""
        # Xavier initialization
        def xavier_init(size):
            in_dim = size[0]
            out_dim = size[1]
            xavier_stddev = 1. / np.sqrt((in_dim + out_dim) / 2.)
            return tf.Variable(tf.random_normal([in_dim, out_dim], dtype=tf.float32) * xavier_stddev, dtype=tf.float32)
        weights = []
        biases = []
        num_layers = len(layers)
        for l in range(0,num_layers-1):
            W = xavier_init(size=[layers[l], layers[l+1]])
            b = tf.Variable(tf.zeros([1,layers[l+1]], dtype=tf.float32), dtype=tf.float32)
            weights.append(W)
            biases.append(b)
        return weights, biases

    def neural_net(self, X, weights, biases):
        """Forward pass: inputs normalized to [-1, 1], tanh hidden layers,
        linear output layer."""
        num_layers = len(weights) + 1
        H = 2.0*(X - self.lb)/(self.ub - self.lb) - 1.0
        for l in range(0,num_layers-2):
            W = weights[l]
            b = biases[l]
            H = tf.tanh(tf.add(tf.matmul(H, W), b))
        W = weights[-1]
        b = biases[-1]
        Y = tf.add(tf.matmul(H, W), b)
        return Y

    def net_eikonal(self, X, normals):
        """Build T, CV and the residuals at the given input tensors.

        Returns (T, CV, f_T, f_CV, f_N) where
          f_T  = CV*|Tmax*grad T| - 1   (eikonal residual),
          f_CV = |grad CV|              (CV roughness),
          f_N  = C*Tmax*(grad T . n)    (penalizes normal components).
        """
        C = self.C
        T = self.neural_net(X, self.weights, self.biases)
        CV = self.neural_net(X, self.CVweights, self.CVbiases)
        # bound CV to (0, C)
        CV = C*tf.sigmoid(CV)
        T_x = tf.gradients(T, X)[0]
        CV_x = tf.gradients(CV, X)[0]
        f_T = CV*tf.norm(self.Tmax*(T_x), axis = -1, keepdims = True) - 1.0
        f_CV = tf.norm(CV_x, axis = -1)
        f_N = self.C*self.Tmax*tf.reduce_sum(T_x*normals, axis = -1)
        #f_N = 0.0
        return T, CV, f_T, f_CV, f_N

    def net_data(self, X):
        """Evaluate only T and CV (no gradients) at the given tensor."""
        C = self.C
        T = self.neural_net(X, self.weights, self.biases)
        CV = self.neural_net(X, self.CVweights, self.CVbiases)
        CV = C*tf.sigmoid(CV)
        return T, CV

    def callback(self, loss):
        """Per-iteration L-BFGS callback: log and print the loss."""
        self.lossit.append(loss)
        print('Loss: %.5e' % (loss))

    def train(self):
        """Run the L-BFGS optimizer on the full data/collocation sets."""
        tf_dict = {self.X_tf: self.X,
                   self.normals_tf: self.normals,
                   self.X_e_tf: self.X_e,
                   self.T_e_tf: self.T_e}
        # Call SciPy's L-BFGS optimizer
        self.optimizer.minimize(self.sess,
                                feed_dict = tf_dict,
                                fetches = [self.loss],
                                loss_callback = self.callback)

    def train_Adam(self, nIter):
        """Run nIter Adam steps on the full sets, then refine with L-BFGS."""
        self.lossit = []
        tf_dict = {self.X_tf: self.X,
                   self.normals_tf: self.normals,
                   self.X_e_tf: self.X_e,
                   self.T_e_tf: self.T_e}
        start_time = time.time()
        for it in range(nIter):
            self.sess.run(self.train_op_Adam, tf_dict)
            loss_value = self.sess.run(self.loss, tf_dict)
            self.lossit.append(loss_value)
            # Print
            if it % 10 == 0:
                elapsed = time.time() - start_time
                # NOTE(review): prints exp(C) although C is not stored in
                # log-space here — confirm which convention is intended
                C_value = np.exp(self.sess.run(self.C))
                print('It: %d, Loss: %.3e, C: %.3f, Time: %.2f' %
                      (it, loss_value, C_value, elapsed))
                start_time = time.time()
        self.optimizer.minimize(self.sess,
                                feed_dict = tf_dict,
                                fetches = [self.loss],
                                loss_callback = self.callback)

    def train_Adam_minibatch(self, nEpoch, size = 50):
        """Adam training over nEpoch passes through the collocation cloud,
        split into shuffled minibatches of the given size; measurement
        points are used in full every step."""
        self.lossit = []
        start_time = time.time()
        idx_global = np.arange(self.X.shape[0])
        np.random.shuffle(idx_global)
        splits = np.array_split(idx_global, idx_global.shape[0]//size)
        for ep in range(nEpoch):
            for it, idx in enumerate(splits):
                tf_dict = {self.X_tf: self.X[idx],
                           self.normals_tf: self.normals[idx],
                           self.X_e_tf: self.X_e,
                           self.T_e_tf: self.T_e}
                self.sess.run(self.train_op_Adam, tf_dict)
                loss_value = self.sess.run(self.loss, tf_dict)
                self.lossit.append(loss_value)
                # Print
                if it % 10 == 0:
                    elapsed = time.time() - start_time
                    pde_loss = self.sess.run(self.pde_loss, tf_dict)
                    normal_loss = self.sess.run(self.normal_loss, tf_dict)
                    print('Epoch: %d, It: %d, Loss: %.3e, pde loss: %.3e, normal_loss: %.3e, Time: %.2f' %
                          (ep, it + ep*idx_global.shape[0]//size, loss_value, pde_loss, normal_loss, elapsed))
                    start_time = time.time()

    def predict(self, X_star):
        """Evaluate the trained networks: returns (T, CV) at X_star."""
        tf_dict = {self.X_e_tf: X_star}
        T_star = self.sess.run(self.T_e_pred, tf_dict)
        CV_star = self.sess.run(self.CV_e_pred, tf_dict)
        return T_star, CV_star

    def get_adaptive_points(self, N = 1000, M = 10):
        """Return up to M stored collocation points with the largest eikonal
        residual magnitude (active-learning candidates) and their residuals.

        BUG FIX: the previous version was copied from the 2D class and fed
        self.x_tf / self.y_tf / self.x_e_tf / self.y_e_tf placeholders that
        do not exist on this class, raising AttributeError on every call.
        Candidates are now drawn from the stored surface cloud self.X (with
        their true normals), since LHS samples of the unit square are
        meaningless for a 3D surface geometry.
        """
        n_cand = min(N, self.X.shape[0])
        idx = np.random.choice(self.X.shape[0], size=n_cand, replace=False)
        tf_dict = {self.X_tf: self.X[idx], self.normals_tf: self.normals[idx]}
        f_T_star = self.sess.run(self.f_T_pred, tf_dict)
        # rank by |residual|: f_T can be negative, and a large negative
        # residual is just as informative as a large positive one
        ind = np.abs(f_T_star[:, 0]).argsort()[-M:]
        return self.X[idx][ind], f_T_star[ind]
| Python |
3D | fsahli/EikonalNet | active_learning_2Dexample.py | .py | 5,065 | 169 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Sep 12 14:17:28 2019
@author: fsc
"""
import numpy as np
import matplotlib.pyplot as plt
from pyDOE import lhs
from models_para_tf import Eikonal2DnetCV2RPF
import entropy_estimators as ee
np.random.seed(1234)
def plot_ensemble(T_star, CV_star, X_train, Y_train, filename = None):
    """Plot a 3x2 summary of an ensemble prediction on the unit square.

    Panels: mean activation time, its error, mean conduction velocity, its
    error, per-point std of T, and per-point entropy of T (the acquisition
    criterion).  Training points are overlaid on every panel.

    T_star, CV_star: (n_points, n_networks) ensemble predictions
    X_train, Y_train: coordinates of the current training samples
    filename: if given, the figure is also saved there

    NOTE(review): relies on the module-level globals X_m, Y_m, T and CV
    defined in the __main__ block — only usable when run as that script.
    NOTE(review): scaleCV is computed but never used (panel 3 autoscales).
    """
    plt.set_cmap('jet_r')
    # fixed contour levels for the activation-time panel
    scale = np.linspace(0,0.75, 16)
    scaleCV = np.linspace(0.9,1.5, 16)
    plt.close('all')
    fig = plt.figure(1)
    fig.set_size_inches((10,15))
    plt.subplot(321)
    plt.contourf(X_m, Y_m, T_star.mean(1).reshape(X_m.shape), scale)
    plt.colorbar()
    plt.scatter(X_train, Y_train)
    plt.xlim([0,1])
    plt.ylim([0,1])
    plt.title('predicted activation times [ms]')
    plt.subplot(322)
    plt.contourf(X_m, Y_m, (T_star.mean(1) - T[:,0]).reshape(X_m.shape))
    plt.colorbar()
    plt.scatter(X_train, Y_train)
    plt.xlim([0,1])
    plt.ylim([0,1])
    plt.title('activation time error [ms]')
    plt.subplot(323)
    plt.contourf(X_m, Y_m, CV_star.mean(1).reshape(X_m.shape))
    plt.colorbar()
    plt.scatter(X_train, Y_train)
    plt.xlim([0,1])
    plt.ylim([0,1])
    plt.title('predicted conduction velocity [mm/ms]')
    plt.subplot(324)
    plt.contourf(X_m, Y_m, (CV[:,0] - CV_star.mean(1)).reshape(X_m.shape))
    plt.colorbar()
    plt.scatter(X_train, Y_train)
    plt.xlim([0,1])
    plt.ylim([0,1])
    plt.title('conduction velocity error [mm/ms]')
    # switch to a non-reversed colormap for the uncertainty panels
    plt.set_cmap('jet')
    plt.subplot(325)
    plt.contourf(X_m, Y_m, T_star.std(1).reshape(X_m.shape))
    plt.colorbar()
    plt.scatter(X_train, Y_train)
    plt.xlim([0,1])
    plt.ylim([0,1])
    plt.title('activation times std [ms]')
    # per-point entropy across the ensemble dimension
    ent = np.apply_along_axis(lambda x: ee.entropy(x[:,None]), 1, T_star)
    plt.subplot(326)
    plt.contourf(X_m, Y_m, ent.reshape(X_m.shape))
    plt.colorbar()
    plt.scatter(X_train, Y_train)
    plt.xlim([0,1])
    plt.ylim([0,1])
    plt.title('activation times entropy [-]')
    plt.tight_layout()
    if filename is not None:
        plt.savefig(filename)
if __name__ == "__main__":
    # Active-learning demo on a synthetic 2D eikonal problem: two wavefront
    # sources with different speeds; samples are acquired at the points of
    # maximum predictive entropy of a network ensemble.

    def exact(X, Y):
        """Analytic activation time: first arrival from a unit-speed source
        at (0,0) and a 1/0.7-speed source at (1,1)."""
        return np.minimum(np.sqrt(X**2 + Y**2), 0.7*np.sqrt((X - 1)**2 + (Y - 1)**2))

    def CVexact(X, Y):
        """Analytic conduction velocity: 1 in the region first reached from
        (0,0), 1/0.7 elsewhere."""
        mask = np.less_equal(np.sqrt(X**2 + Y**2), 0.7*np.sqrt((X - 1)**2 + (Y - 1)**2))
        return mask*1.0 + ~mask*1.0/0.7

    # create the data: N_grid x N_grid evaluation grid on [0,1]^2
    N_grid = 50
    x = y = np.linspace(0,1,N_grid)[:,None]
    # start training with a low number of samples
    N_train = 10
    X_m, Y_m = np.meshgrid(x,y)
    X = X_m.flatten()[:,None]
    Y = Y_m.flatten()[:,None]
    T = exact(X,Y)
    CV = CVexact(X,Y)
    # initial training set: Latin-hypercube samples of the unit square
    X_train_all = lhs(2, N_train)
    X_train = X_train_all[:,:1]
    Y_train = X_train_all[:,1:]
    T_train = exact(X_train, Y_train)
    # network parameters
    X_pde = X
    Y_pde = Y
    layers = [2,20,20,20,20,20,1]
    CVlayers = [2,5,5,5,5,1]
    Batch = 30 # this is the number of networks to train in parallel
    CVmax = 1.5
    model = Eikonal2DnetCV2RPF(X_pde, Y_pde, X_train, Y_train, T_train,
                               layers, CVlayers, Batch, C = CVmax, alpha = 1e-7, alphaL2 = 1e-9)
    # start training the model with the initial dataset
    model.train_Adam_minibatch(20000 + 5000*(N_train - 10), size = 50)
    T_star, CV_star = model.predict(X,Y)
    plot_ensemble(T_star, CV_star, X_train, Y_train, filename = 'results/AL_NNpara_0.pdf')
    N_AL = 40 # number of samples to acquire with active learning
    # store how the error is evolving
    errorsT = [np.sqrt(np.average((T_star.mean(1) - T[:,0])**2))]
    errorsCV = [np.average(np.abs(CV_star.mean(1) - CV[:,0]))]
    # NOTE(review): T_stars / CV_stars are initialized but never appended
    # to below — confirm whether per-round snapshots were intended
    T_stars = [T_star.mean(1)]
    CV_stars = [CV_star.mean(1)]
    print('RMSE:',errorsT[-1])
    print('RMSE CV:',errorsCV[-1])
    # list of available candidates for sample during active learning
    available = list(range(X.shape[0]))
    # start active learning
    for i in range(N_AL):
        # compute the entropy for the available candidates
        ent = np.apply_along_axis(lambda x: ee.entropy(x[:,None]), 1, T_star[available])
        idx_next = ent.argmax() # find the point of maximum entropy
        # add it to the dataset
        x_next, y_next = X[available][idx_next], Y[available][idx_next]
        T_next = exact(x_next, y_next)
        model.add_point(x_next, y_next, T_next)
        available.remove(available[idx_next])
        # and continue training
        model.train_Adam_minibatch(5000, size = 96)
        # predict and evaluate the error
        T_star, CV_star = model.predict(X,Y)
        plot_ensemble(T_star, CV_star, model.x_e, model.y_e, filename = 'results/AL_NNpara_%i.pdf' % (i+1))
        errorsT.append(np.sqrt(np.average((T_star.mean(1) - T[:,0])**2)))
        errorsCV.append(np.average(np.abs(CV_star.mean(1) - CV[:,0])))
        print(i,'RMSE:',errorsT[-1])
        print(i,'RMSE CV:',errorsCV[-1])
| Python |
3D | fsahli/EikonalNet | 2Dexample.ipynb | .ipynb | 668,457 | 14,421 | {
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# EikonalNet: 2D example\n",
"\n",
"We first import the packages. Note when we import `models_tf`, we will import `tensorflow`. This code is written in tensorflow 1.0"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"WARNING: Logging before flag parsing goes to stderr.\n",
"W0225 14:28:18.280732 4686839232 module_wrapper.py:139] From /Users/fsc/Google Drive/Stanford/Research/NeuralNetworks/EikonalNet/models_tf.py:15: The name tf.random.set_random_seed is deprecated. Please use tf.compat.v1.random.set_random_seed instead.\n",
"\n"
]
}
],
"source": [
"import numpy as np\n",
"import matplotlib.pyplot as plt\n",
"from pyDOE import lhs\n",
"from models_tf import Eikonal2DnetCV2"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"We define a benchmark problem, which exactly satisfies the Eikonal equation and has a discontinuity in conduction velocity"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"def exact(X, Y):\n",
" return np.minimum(np.sqrt(X**2 + Y**2), 0.7*np.sqrt((X - 1)**2 + (Y - 1)**2))\n",
"\n",
"def CVexact(X, Y):\n",
" mask = np.less_equal(np.sqrt(X**2 + Y**2), 0.7*np.sqrt((X - 1)**2 + (Y - 1)**2))\n",
" return mask*1.0 + ~mask*1.0/0.7"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Now we generate a grid to show the exact solution and also some training points and plot them"
]
},
{
"cell_type": "code",
"execution_count": 16,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAA0YAAAFgCAYAAACfTmkaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+17YcXAAAgAElEQVR4nOzdd3zU9f3A8df7dvYgCWHvIYJMFRRBXChu0daBolaprVhr1WqtrdafddU6Sq0VrdRtVZw4URFURAVkiIa9R0jIJOtyd5/fH3fBJGSRXHLr/Xw87kHy+a73JeH7uff3s8QYg1JKKaWUUkrFMkuoA1BKKaWUUkqpUNPESCmllFJKKRXzNDFSSimllFJKxTxNjJRSSimllFIxTxMjpZRSSimlVMzTxEgppZRSSikV8zQxUh1KRHqJyK/a63wi8oyInBLqczVy/utFpEvg63NE5PFgnVsppdRPgl3XHOK1R4vI5jYcnyIif6j1/T0ickVwomtVPHeIyKw2HH8gfhE5W0TGBS86pYJLdB0j1ZFEZCLwT2PMsHA7X7Bja+D8m4HzjTHL2uP8Siml/Nr7ft7MtUcDrxlj+rTy+F7A98aYpOBG1joicgeQYYy5LgjnmoP/vf297ZEpFXzaYqQOEJETRWSJiKwTkW9FZKiIuERkuYj8JrDPJBHZJCKdRCRBRJ4TkS0isk1EFohI58B+x4jI14FzrRSRC0TkROBZYKCI/CgiSfWu3+bzBY6ZKiL/JyJP1jr3ZBFZFPj6KhFZKyLrRWSjiJzX1LkCx5wmIktFJCfw72mB8uki8qaIzA3EliMi4xv42X4DdAPmishlgePeCWxbICJ/FpHPA+/7gcATuuUislNEflHrPL8VkRWBn9FcEUkLlN8pIhsCMSwUkd7B+JtQSqlgC4O6xikiDwfqgBwReUVEUgLbDvleLyIZgXNsEpEfgUtqXWuOiNxY6/sDrS8icpiIfBqIY42IzBSRwcAnQEIg9sG1zyEiR4nIF4HrrxKR6YHyiSLylYg8GajfNtXUX/Xee1N140gR+Sxw7jUicnwDxw8UkQ9r7fP7WtvOCvwO1gZ+r5Nq/wxE5J/AucAtIvJPEXlLRG6odfw0EXm7+b8gpdqRMUZf+gLoDSwB0gLfjwPWAzagH5ALnAhsBo4M7PMr4HH8CbYACwNlqcBeYEJgv77APiADmAisbiSGNp8PWABMrbWPI1D+H+BKIB3YgP/pF4GyrwNfN3aufoFzjQyUjwIKAuXTgTJgWGDbdcD8Rt7fZmB04OvpwDu1rvNW4GfdAzDAdYFtZwGbAl//DJgD2ALf3wS8APQCqoCEQPn1wE2h/pvSl770pa/6rzCpa/4MzAOcge8fAx5q7b0+cB9+LBCbE3gT2BzYNge4sda17wBmBd7vWmBaoLwTsBUYHrinl9Y6Zg5wY+D95gKnBsr7ALuA8YH36wVODmw7E1jfwHtvrG5MBpYDPQLlgwLxpNeLOQe4OrBPJrAafyLYD8gDhgS2jQV2B4458DOo9/Uk/K1HNbF9DEwO9d+ovmL7ZUMpv9OAgcBiEakpSwb6GWPWBp7izcf/gftbAGPM44GnW9OBIcBQIAE4FthojFkU2G8T/ps+tc59kGCezxizSURWAZNF5APgFOC3xphS8XdzOCFwrbMD12jKqcBHxpjvAudeLiIfA5PxV5RfGWNWB/ZdDcxo5nwNecEY4wG2i0gZ8G6gfB1Q87RzKv6fxerA+7YCPvyVzxfAIhF5D/gM+LQVMSilVHsLeV2DP2m43RhTFTju2sAx19K6e/2pwDhjjAGqRORh4L/N/BwGAUnGmOcD19qHPyGq6UrXkGPxPyj7IHDMZhF5Hv8DtHfxJ2Pza8WXXf8EjdWN+JOrAcBHtX52dvzJYe2YU4wxTwbOlSf+sbJn40/aPjbG/BDYtgSoGVPb4JsxxiwQEa+IHI0/wesBfNTIe1eqQ2hXOl
XDCrxvjDms5oX/CdTGwPYj8T8pmiIiFgARuRWYjf+J2tP4n8DVnKu69slFZJiIJDYVQLDPFzjHRcDJwCeBpGgwsBJ/hfEJcHcz5wD/U8D6TK3yinrlreGtdw5vra9rWIG/1Pr9DAPONMa4jTEnAhfif8p6B/BqK+NQSqn2FPK6pv5xIpIWqBtae683tPzeH1crBk/tDYFuap2aODZYddFBdWMgnlX1fi/jgS9beP2GfhdjRMTeRBwAjwJX4U96nwgkl0qFjCZGqsZHwGkicjiA+GeNmQd4RWQKcAZwVGDfPwb+nQL81xjzFlCE/yZqAb4ChgWeAiEig/C3YFjx30StjcQQ7PPNxV/hXsFPT++OAbYaYx4HluF/Eljz/6Cxc30AnCIiwwPXH4G/Qvmwkes2pqlYW+Jd4Fcikhr4/gbgD4EPAiuAPcaYp4E/ASPacB2llGov4VDXzAdmiogtkHz9DZhG6+/184CbRcQiIlb83exq5AE177UTcE6gPCdQVjOOtTP+h3UZTcT+JdBXArOlBlqWLgHeaSa++hqqGxcDg2uNC+qHvydC7ThygBIJjHsNvJ9rgLfx/9yniEj/wLYJwOvUfehHA+/tReAE/A/25hzi+1Aq6LQrnQLAGLNORK4CXhIRB/4nc+fjbwp/CjjbGLNf/FNufhcYrPko8GBg8OR2/Dfb3wT2/znwuIgk4H+KdZkxplhE1gKJIrIO/3ib0lphtPl89d5TuYjMw99VYGGg+F3gShHZhL/CegVIF5FL8VfYB53LGLNBRC4D/isi8UA5cGmg/NhD+DEvAN4MPP1sjWfwd7X4RkR8wI/AlcaYQhF5A//vxY1/vNG1rbyGUkq1mzCpa/4PeBj/PdTgH/P0V2NMRSvv9TfgHwO1ASjFn1DUeAz4XyCe7QS6ORtj3CJyHvCoiNyLv/Xoj4HuhA5gU6CemlLrZ1coImcDD4t/AgcP8CdjzCLxz8LXIg3VjcaYAhE5N/BzTsHfdfDSwO+i5jiPiJwFPCb+SRcM/oT1OfAvSQG8FUgOi4Gpxhhfva50XwD3iUhPY8xMY0yl+CdcSDLGFLb0PSjVXnS6bqWUUkop1eFExAZ8C1xujFkZ6niU0q50SimllFKqQwW67W0EPtCkSIULbTFSSimllFJKxbwWtxiJf+GtlxooHyP+hdq+FJHXRMQZ3BCVUkqFgojMCtzfl4rI6fW2TRT/wpoLxb/4ZlsmFmlNbFonKaVUDGrs/l9r+3kiMqdemU38i0g3Nh0+0ILESETSRWQZ/llbGjIL/2KUx+Kfh/6q5s6plFIqvInIZKCvMWYs/nVSHgvM4FXjAfyTf0zEP13vqR0Ul9ZJSikVg1pw/ycwc+9vG9h0I/4105rU7Kx0xpgCYHRgxpNrGtilb80ibPinuvxZA0HOILAQmiPOOjqjT3NLDISnEo8r1CEcMrc7MiceFHeTi/OFLas71BEcOmtlZHanlcrI+2GXePLyjTGZwTrfcce7TGGBr9XHr1ldvQaorFU02xgzO/D1KPzTGmOM2SUie4GewJbA9gIgI5AspQP7Wh3IIQh2nRQXL6P79ovM+6RSSrXVmtXVQa2Xxh/vMkWtrJeaqZNacv8HuAe4l1r3fhEZAPQFPm8uhmDUBrXffRlwULeFwJuaDTD0CIe56KXxQbhs+/JW+1j0n40snbudypJq+o3tRNeLTyOhV1Nrr4WfLTuC9rfeoZzbHKEOoVWStkZmkpGysSrUIRwyR86OUIdwyD7Y86+twTxfYYGPue9mtPr4wT13VxpjxjSyuWYq5Rr17++z8K/fsifwCpfB04dcJ73ybvjfJ40xvPxcOS/MKSN3j5cjRtiZeWMyI8dE5r1SKRUeDu+5K6j1UlGBj9beUw/vuaupOqlZgQk99gI/1CoT/InSr/D3dGhSMGalq32OOPxrqES8t+5azdblBVz62Bhu/HASvUans/KmV6jKK23+YKWUinzVQO1m8gP398CaMY8DA40xA/
AvDtna9bmCLSrrpCdm7ef1l8u5+8FUPv26M2dNjee6qwr4YXXktZwqpVSwiUgccBP+JKi2q4F5xpi8lpwnGInRZhEZFfj6FGBpEM4ZUkW7K/jx01wuemQ02QOTiU91cOxlfcmaNJidb68IdXhKKdURlgMnAohIFyAL/wKV4F/Y0eBfxBH8T+g6dPKFJkRdnVRR4ePZp/bz6Ow0Rox2kJRs4ezz47nmN4k8/URZqMNTSqlwcATQDX8X6peBUwOLQk8CLheRBfjHwr4sImmNnaRVXelEZDqQbYy5H5gJzBKRaiAf+ENrzhlO8jbtp+thKTjj6/54Uod3Z88H34coKqVUbe7B3ZvsTmeMj3z3dso8RSTY0shw9KDeCuyqCcaYD0TkDBH5ArAD1wHTRCTbGHO/iPwFWCQi5UAJcFmoYo32Oil3t4/kFAtdu9etk44c5+TVF8tDFJVS6lAYY1j+jZvvV1fTtZuViSe6cDi0Tmqrevf/EYGyXsCdxpiH6+07J1Be2Nj5WpwYGWMWAgsDXz9Tq3wpcOyhvIlw16lnAnvWlVBd5cXu/OkhaMmPe4jr3miSqZQKE25fBUsL5gGQ5shmV8U6Nsg3jE47A4cl8iZRCRVjzMwmts0B5jS2vb3FUp2U2dlCcaGPvFwvmZ1/qpNWLnfTWyeOUCrsVVYarp9RwK4dXsaNd7JgfiV/v6eEp17oRPee+n/4UDV2/6+1fStwRQPlB5XVF4yudFEnvUc8fY7sxGt/WEHxngo81T6Wv7WDPR+spuuZI0IdnlKqGWtLvyLNkc24TudzWPJxjOt0Psm2TNaXLgl1aEodsoQECxdcksBNMwvZvNGDz2f47ONK/vlQKZdfHZmzvCoVS+Y8sR+HQ3jjo0xuuyuF//4vgwsujucvfyhu/mDVoTQxasTUvw4nJTuOWecu4q4xH/Ddm9sZevd5xHVNDXVoSqkmGGPYU7mBvgmjD3SdExH6JY5md+XGEEenVOtc//skxh3nZPoF+Yzot5vHHirlr39P1VnplIoAH7xTwdXXJmKz/dR17tIrE1n5nZuiotYvuaCCT9vvGmF3WZny+yGcetNhGK/Barcwf092qMNSSrWAMQaL1J0LQLBg0ApIRSarVbjmN0n88rpEPB6w23VsglKRwucDW71P3BYrWCzg80bmMh/RSluMmmGxCFZ75P6Yendv0eyESkUNESHL1YctZXWX1dlSvpLOzj4hikqp4BARTYqUijAnnOLiuf+UYcxPSdAbr5TTf6Cd9E7hMqGnAm0xUkpFsMZmphuUNI5vCt6ixLOXVHsXCt27KfcWc1T6OSGIUimlVCy76teJXHXJPi6duo/jJjlZ92M1y75x8+QLnUIdmqpHEyOlVNSJsyYxPuPn7KncyH5PIV3i+pPt6o9V9JanlFKqYyUlW3j+9QwWzK/k+5XVHDXOyZ33pZKUHLk9kqKVfkpQKsqU7FzL3h++pLq8hISsnnQeejzOpPRQh9XhrGKnW9zgUIehlFIxbfVKN888Wca2zR76D7JxxS8TGTDIHuqwOpzdLpwyJY5TpsSFOhTVBE1VVViq6ukOdQgRKX/d12z9/FXSeg+jx7hzsdqcrJ03i6rSgiaPK+7n7KAIlVJKxYrFiyq59ooCRo5xcPtfU+g3wMYVP9/H6pVax6vwpC1GSgVRaS8haWtoZpjxeT3sXPo+A06dQXx6VwASs3qBCLmrF9DzmKkhiau9NTbOSCmlVGg9cn8pd96Xwgmn+FtJjhjhICXVwj//XsoTz+r4GhV+tMVIqSjhLi3AanMcSIpqpPYexv7cLaEJSimlVExyuw1rf6zm+JNcdcpPOi2OFcu0xUiFJ02MlIoSNlcCnqoyvO7KOuVVxXnY45NDFFXHcA/uHuoQlFJK1WK3Q2KShe3bvHXKt27ykJmlU1Sr8KSJkVJRwuZKIKXH4Wz76o0DyVFlUS47l31A5mHHhjg6pZRSsUREuPCyeP7vtiIKC/zJUe4eL/feWc
xFl8WHODqlGqZjjJSKIj2Pncq2L19l9f/uxh6XhKeqnC4jTyG155Bmjy3u5yRlY1UHRKmUUioW/Or6JP52dwlTJuwls7OVvFwvF1+RwMWXJ4Q6NKUapImRUmHEU1VO+b6d2OOSiEvLPuTjrXYnfY6fRnXFfjwVpTiTM7DYYmNaVJ2EQSmlgquiwsfqFdUkJgqHDbUjIod0vM0m/OHOFK69IYk9u710624lIVE7K6nwFZLE6Pzk5bxWMioUl1YqbO1Z+Ql7Vi0gLr0LVaX7cCZl0PeES7HHJR3yuexxidjjEtshyvCmyZFSSgXHW6+Vc/9dxfTpZ6OwwIfTKTz873R69z30j47JKRaSUzQhUuFPW4yUCrLWTNldtGU1+eu/Zch5N+FISMX4fOxc9h5bFr7EgFNntFOkSiml1MF+/L6ah+4t4dlXM+g/yI4xhv89X87MXxTw9ieZWCyH1nKkVKTQ9P0QnJydE+oQYkosLfKav+5ruo48BUdCKgBisdB11KmU79uBe39hh8URDQu96gx1SinVNq+/Us7FlyfQf5C/K7aI8PNp8TicsPzb2KmbVezRxCgG9O6eF+oQVDM8VeXY41PqlFmsNmyuRDxVFSGKSh2qal8V+VXbKa7eizGhWehXKaXaqrjIR1Z23Sm1RYTO2VaKCn0hikodqspKw5Ivq/huqRuvV+ukltDESKkwkNRlAAUbl9cpK8/fgaeqHFdqVoiiilyhaDXaWraKhXnPs3H/MlYWzeerfa9S7inp8DiUUqqtjj7GwXtvVtR5wLN3j5cVS92MOtIRwshUS330XgUnjc1l1oMl3HVbEadP3MuaVdra1xwdY6RUGOg89Dhy3pnF5oUvkdZ7GFUl+eR+v5AeR5+Fxar/TVujIydi2Fe1k81lKzim0wXE25IxxrClfCUrij5kXKfzD3kmJ6WUCqUzzonn9f+Vc+0VBZzzs3gK9/mYM3s/V/4qkfROujhruNu6xcNdtxXzxLPpHH6EP5H9cF4FM39RwIdfdMbh1DqpMdpipFQ7KO11iFOauhIZfNb1uFKzyMtZQkVRLv1OuoL0fh0/e2M0jDOq0VEtRzsrfqRPwkjibcmAv8tJ7/jhVJsqSj37OiQGpZQKFqdLeOrFToyf5OLNV8tZ9q2bP/01hauvPfRZUlXHe3tuOWefH3cgKQKYfEYcffrZWLSgMoSRhT99FK1UmLA54+ky/EQYHupI1KGqNlW4rHUXLBQRnJZ4qn26aK5SKvLExVm4eHoCF0/XxVgjTUmxoVuPg1v2sjpbKS7SMWJN0RYjpVRU64hWo06O7uyqWFunP36Zp4gyTyEpdh0jppRSquOMG+8fI+bx/FQnFRX5+PyzKo4+Jnp6hbQHTYxUWIulKbvDSTR1p4P2T466xw2hwruf74o+YE/lBraUreTbgrcYkDQWm8XertdWSimlapt4oouMTAu/uGgf894s55Xny5h2bj7nXxRP957aWawp+tNRSsWE9pyMwWaxc1T6Oeyo+JFdFeuwW1wMTz2FNEeXdrmeUkop1RirVXj0yXTmvVnB/PcqcbqEm29PZsIJ0fXQsz1oYhQjenfPY8uOzFCHEVNKewlJWyN33YDifk5SNkbX+Jj2To56JxxB74Qj2uX8SimlVEvZ7cK5F8Rz7gXxoQ4looSsK935ycub3ykMnZydE+oQlGpQdcV+CjevpHhHDsbnDXU4YSsUaxwppVSs2V/q4+MPKvj0o0oqKnTAv4oMOsZIqSiQ+/0i1rx2H/s2LGX3dx+x+pV7KM/vmDV8IpF7cHdNkJRSqp18OK+Ck4/J5ZUXynn+6f2cPG4vXyzUaaJV+NOudCrsVfV049wWmSttd0R3uv25m8n9/jOGnHsjjsQ0AAo3r2TjJ/9l6AV/QCytX4wvGrvT1daRi8AqpVQs2LXDw19uK2LO/zIYPMQ/+czyb6uY+YsC3lvUmdRUfSavwpf+dSoV4fZtWEbWkOMOJEUAaX2GY3MlUrpnUw
gjiwzacqSUUsHz3tsVnHpm3IGkCGDUkU7GjXfy8fsVIYxMqeZpYqRUhPNVV2FzHjy40uaMx1fd9taeaJu6uyGaHDVMRGaJyBIRWSoip9fb9p6ILAi8fhSRv4cqTqVU+KioMKQ00CqUkmqhvCxyJyRS4UNEponISw2UnykiXwVeDwXKrCIyR0QWB16Tmjq3JkYxpHf3vCa3+6rcFL+3kD13/4s99z5B6SdfYbw6iD/cJXcfTP66b+pMuFBVkk9Z3jYSs/uFMLLIouOO6hKRyUBfY8xY4CzgMRE5UGcYY6YYYyYBZwDrgXtCE6mKVl6v4ZXny7j8Z/lccm4eTz5WSnm5DuIPd+MnunjvzQrKyn76XRUV+vj4g0qOm+QKYWQq0olIuogsA/7WyC73A6cYY8YBI0VkBHAKkGiMOQa4DHigqWvoGCMFgPF62fvwf7HEOUk57xTweCl+9zMq120h81cXhTq8iNbe44zS+46gYONy1r77GJ0GHImnYj95OYvpduTp2JxxQblGtI81qq0mOdKxR4wC5gMYY3aJyF6gJ7Cl3n5/Bf5tjNnXseGpaHf7TUXs2Oblql8n4ooTXnymjBnTCpjzv07Y7RLq8FQjRoy2M+44Jxedlc/PLonH44GXnyvjvJ/H06effuxUrWeMKQBGi8hE4Jra20TEBlxvjCkVkXggBSgDCoEUEbECWUB+U9fQv9BWODk7h/l7Boc6jKCqWJmDqXKT+furEIv/obDrsH7svPVBqrbswNk7tE/SI3kChvYmFiv9T76Swi2rKNmxFqvdRb+TryQho0eoQ4tokZAgFXrjea1kVBvO8G6GiCytVTDbGDM78LUDKKi1rQyo069SRPoCo4wxv21DEEodJOeHar7+sor3FnXG5fInQUeOdXDp1Hw+/bCSyWcE56GPCj4R4Y57U/h8QRUff1CJzQZ/uT+Vo8ZpHR4LCr3xvFY8upVH72qqTmqSMcYDzBeRk4D/AKvwP8jbBtiBHCAbuKSp84Q0MTo/eXkbK3UVLFUbthE/6vADSRGA2G3EDR9M1fqtIU+MVNPEYiW970jS+45st2vEUqtRbZGQILVBvjFmTCPbqoHa/V7igPp/ANOBF9ojMBXbVi5zM/5414GkCMBiEU6cHMd3y9yaGIU5EWHCCS4mnKBd59QhaapOapKIOIAEY8zHItIbeBK4GsgEFhtj/igiPYDFIvKJMaasofPoGCMFgDUliercg1sXPbn5WFOTQxBRdCntpd0+Il3NGKQYGoe0HDgRQES64O+CsL3ePlOA9zs4LhUDMjtb2LrZc1D51s0eMrP0o4tS6iDdgI9ExGqMMUBRoNwO1AyyLwR8QKPjG/TuEmMam4AhYdwIKlbmUL7se4wxGJ+P0s++pnp3HvEjDuvgKFW4ioUZ6lqidpIUrYmSMeYDYJuIfAG8CVwHTBORW2rt1p2DkyWl2mz88S727PLy0rNleL0GYwwL5lfyyYeVnDX14Fk4lVKxSUSmi8gtxpjNwFxgSaDe6gw8DTwMTBaRz4EFwO3GmPLGzqdjjBQA1uREsq6/jH1Pz6XgxXng8WJNT6HzTb9A7OHxZ6LjjFS4qp8cRUu3O2PMzGa2d+moWFRscTiEJ57rxG2/K+TxR0txOgWnU3j0iTQys1q/aLVSKvIZYxYCCwNfP1Or/D7gvnq7VwKntfTc4fGJV4UFZ/9edPnrDXj25IPVgj2rU6hDiirtPTtdR+mosUZebzX7CtZSXV1OWmpf4uMz2v2awdJoK9Kejo1DqUjWu6+NF9/MZMc2D9XV0LuvFRHtlqxCw+MxfPV5Fbt3ehk63M6QYfqgNhppYtRK0TgzHfgHTNq7ZIY6DBXm2js5Ki3dxarVzxIfn4nTlcKmzfPJ7jyC/v2m6AcjpWJM9576UUWF1q6dHn55aQGJScKAQXZmP7afYcPtPDArTaeOjzJ6t1ERJdK700VLq1F7MsbHmh9epl+/08juPBwAj6eSZd/NJi9/DVmZQ0
McoVJKqVhyxy3FnHleHDNmJgHgdhuuvbKA558u44pfJoY4OhVMIZ984fzk5aEOIeY0NgGDUoeivSZiKC3dBSJ0zjriQJnN5qJnj/Hk5q5sl2sqpZRSDSks8LJ6hZvLr/4pAXI4hGuuS+TdtypCGJlqDyFPjJRSkas9kiOf8WK12A/qMme12PH5Dp6+VymllGov1W6wWgVrvT5WrjjBXaU9QKKNJkZKdTBd06hpyUndqHKXUly89UCZz+dlx66vycjQqeOVUkp1nMzOFrp1t/L+Oz+1DhljePGZMiadrAvYRhsdY9QG0ToBQ7iL9HFG0SbYEzFYLDYGDzyXVaufo3Pn4TidKezduxq7I4Eu2aOCdh2llFKqOSLCn+9J4ddXFLDkiyoGDLaz6JNKiop8zHk5JdThqSDTFqMYpeOMQivaWo2C3aUuI2MwY8Zci92eQFVVCb17T2L4sMuwWPRZjlJKqY41dLiDNz7KpP9AO7u2eznngnheejOT5BT9GB1tIv5TRllBFR8+lMP3H+0GA0NOymbyjYNJytDmTaU6UrBbjuJcafTpfULQzqdUR6io8PGPv5Xy9twKysp8HHe8i9/9IZk+/SK+ulUqpnXKsHL5DJ2BLtqFRarb2pnpvB4fc67+BmeCjd+9N4kbP5xEUqaTp69YgsftDXKUKpxU9XSHOoQ2i7ZWI6UU3HRtIXm5Xl6Zl8EX32Uz+mgHV/w8n335WicppVS4C4vEqLXWLdqL3WVhyq1DSMxwkpDuZPLvDiMpy8UPH+eGOjylYk57TeGtVCRYl1NNzppq7n0kjW49bCQmWbj86kQmnODi9ZfLQx2eUkqpZkR0YrR30356jUo/aFrfXqPS2buptENiODk7p0Ou0x50nJFqD5ocqVi1ab2HYSMc2O1166RRRzrYuF6nmldKqXAX0YlRVt9Etn1XiDF155Hf+l0BmX21H2ik8lW5cW/fg3d/WZP7aXe68KXJkYpFffrbWL3CjcdTt05a/q2bvgN0jFGkcrsN69dWk5er3SGVinYRnRgNnJCFu9zD+w/8yP59VZQVuvnw4RxKcis5/KTsUIenDpExhuL3FrLzd/eR//iL7Lz5b+x7ei7GXR3q0FQraHKkYs2gw+wMGiqHGmgAACAASURBVGLn1t8WsWuHh7L9Pp55aj8LP6nkvJ/Hhzo81Qpvzy3n5HG53HBNIWedtJffXF1AUZEv1GEppdpJ2CRGrZmAwWqzcMVTR1NRWs1Dpy7gwZM/oSS3kl88PRabw9oOUar2VPbVCsq+XE6XO2fS9Z7f0f3BW/DuL6Pwf++FOrR2Fa2tRqDJkYo9Dz6WRmaWhfOn5HHM8D0sXeJmzv86kZGpdVKkWfZ1FY/cX8ITz3Vi3oIsFnybTecuVm77bWGoQ1NKtZMWJUYiMktElojIUhE5vd62iSLytYgsFJHnRKRD7/4J6U6m3j2cP30zmT9/eyoX3DeCpMyOnapbxxkFR+knX5H28ynYMtMBsCTE0Wn6uexf/B2+RlqNoqE7XbTT5EgFWzjXSfHxFm75cwqLV3VhxYYuzHoqnb797R0ZggqSl58vZ8Z1SQwe4v/9uVzCzX9MZvXKanZu1zFjSkWjZhMjEZkM9DXGjAXOAh4TkdrHPQBcaYyZCAhwartE2gwROWgShvZUUVLNkpe28P7ffmDluzvxufUm2VbeohJs2Rl1yizJiYjVgqmoDFFUHSOaW41AkyMVPJFSJwEdWidVVhreeLWcv91dzKsvllFWpt292ipvr5deveuODXM4hS7drOTn6c9XqWjUkhajUcB8AGPMLmAv0LPW9gIgI1AxpQP7gh1kuMndUMo/zl7I1uWFJKQ7WTp3O99d9yLVJRWhDi2iOQf0onzZmjplVes2Y0mIw5KU0Ohx0dJqFO3JkVJBonVSPXm5XqaeupcP51WQ3snC5wuqOOfkPHZs0wd2bTFytIOPP6xbr+/Y5mH7Vg/9B+lkGkpFo5b8z3bgr2hqlAG1H/
/OAuYBewKvlfVPICIzgBkAXbtFfj/rd+7+nknXDOCon/cC4Lgr+/KvW7az7cWv6XfN8aENrhV6d89jy47MUIdBypknkHv/k5gqN3HDBuLevpviNz8m7ZKzEEvYDIdTrVTTapSysSrEkagIF9Q6qUsU1EmPPFDCCafEceNtyQfKnvxnKQ/8Xwn/eDI9hJFFtkuuSOCis/O5945iTj0zjp07PDz2UCnXXJ9EQoLWSUpFo5b8z64Gag/aiQOqAEQkAXgcGGiMGQAsBm6tfwJjzGxjzBhjzJi09MYv2ZoJGDpaRUk1u34oZvR5PQ6UiQjnz0gj/4sNIYws8jm6dSb7tmvwFpVQ8PzbVH6/noxfXUzCkcNCHVqHiYVWI+1Wp9ooqHVSehN1UqT49KNKpl1Zt1X94isSWPhpJV6vaeQo1ZyMTCsvvpGBzQ733lnMu29WcPPtKUy/SpcDUSpataTFaDnwa+BREekCZAHbA9tM4FUc+H4vkBbsIMOJWAADXo/BWms8rafKh9gjv4INNXt2Bp0uP++Qj6vq6ca5zdEOEan2UNzPqS1HqrW0TqrHbhfcVXUToGo3WC3QgcOcolJmZys3354S6jCUUh2k2U/yxpgPgG0i8gXwJnAdME1EbjHGlAN/ARaJyCJgEvBQewYcaq5EO32O6sQX/910oMzr8bFw9nqyjh8cwsjaJpxmp4t1sdBqBNpypFpH66SDnXZmHE/MKsXn8ydHxhj+/Y9SJp8eh8USG/cTpZQKhhaNHjTGzGxi2xxgTtAiakRFcTUr39tJ0a4Kuh2ewmEnZmMLUQvNmbcP5ZkZX7Phyzy6DE5m45J80rrF0+PCI0MSj/LTVqPIoy1HqjXCok6q8PHBvEo2rqumb387p57pIj4+NHXSdTclcc30Aqaemsfoox2sWl6Nz8Ds53R8kVJKHYqw6/vV0DijPetKeDQwC1xcsp0lL23lyWmLqSxteG2b9pbaJY6Zb0xgwlX9yOiTwDl3HcFl/z6SU3ttDEk8KvrESqsRaMuRijy5e7ycNzmP+e9WkJJq4ZMPKzjvlDx27QzNLHCJSRaem9uJW+9MoXcfG7/5fRKvzMsgvVPkTyyhlFIdKSLmm3z7ru856bqBjJnqn5F1wlX9eP32VSx6aiOn3BCa7mtWm4XBx3cOybXbS7jMTqf8SnsJSVtjY+C0thypSPLQvSWcemYc19/80yxw/3q4lIfuKeHBx0LTSiMiHH2Mk6OP0QcNSinVWmHXYlRfWUEVezeWMvLs7gfKRIRx03rz46d7QhiZCkfRsqZRLCru59TWIxURPvmwkkvrzQI37coEPvmoEmNi42GGUkpFo7BPjMQqGB8YX93Kxuv2YbGFffhKtUksdamrocmRCnd2G1TX68ntdhus1tj7/6qUUtEkLDOL2uOM4lMc9BieyuLnthwo83p8LHxqI8NO7RKC6Jp2cnZOqENok2iYnS7aWo1iNTnSBEmFq1PPjOPfj5YeaB0yxvDErP2cdmYcovNjK6VUxIqIMUZn3zGM/874hrWL9pI9MIkNi/Pp1DOeYy/vG+rQlFLtSMceqXD021uSuWb6Ps6fkseoI52sWOpGLDD7uU6hDk0ppVQbRERilNYtnt+8OYG1i/ZStKuCI07rSs+RafpkTjUq2qbujqWJGOqraTnSBEmFi5RUCy+8kcGSL6rYuN7DxBOTOOY4p64ZpJRSES4iEiMAq93CkBOzQx1Gi5ycncP8PZG92KvOThd+Yjk5Am09UuHFYhGOmeDimAmhjkQppVSwhOUYI2h4PSOlDkW0jTVSOvaoo4nILBFZIiJLReT0ets6icgHIvK1iCwWkT6hilMppVTsEJFpIvJSA+VnishXgddDgbIEEXlZRBYFysc1de6wTYxUaEXDJAytYXw+KrZvoXzTenzu8EusYnEihoZogtT+RGQy0NcYMxY4C3hMRGrXGY8CdxljjgZuAgaGIEylopoxhh/XVLN4USUlxb5Qh6NUSIlIuo
gsA/7WyC73A6cYY8YBI0VkBPALYJ0xZgIwA3/d1aiI6UoXaSK9O120OJSxRlV7drHrf88gFgsWVxzufXlkTTmX5CNGtXOUhybWu9TVpuOP2tUoYD6AMWaXiOwFegJbAtuHAlNF5J5A2W9CEKNSUWv3Li83XFNA4T4fXbpZWftDNb/8TRKXz0gMdWhKhYQxpgAYLSITgWtqbxMRG3C9MaZUROKBFKAM+AHYENitBLA2dQ1NjJQCjNfLzhf+Q8ZJp5F0xGhEhKo9u9jxzL9xZnfFmRVe49s0OaorVhOkEo+rjQ9g3s0QkaW1CmYbY2YHvnYABbW2lQFOgEClMxx/i9GNInI78Cfg5jYEo5Sq5eaZhRx/kosZMxOxWITdu7xc8bN8+g+yMX6iK9ThKdWgEo+LT3Nb3YGgqTqpScYYDzBfRE4C/gOsArYYY9YDiEgP4Dngz02dJ6y70uk4o9CKlu50LRlrVL5xHbbkFJKHjzkw26Ezuyspo8dSsuLb9g5RBYl2sTtk+caYMbVetSugaqD2p684oCbzrALcwBuB7+fib2FSSgXBpg3V7Nrh4eprEw/Mdtilq5Wrrk3k9ZfLQxydUu2mqTqpSSLiEJE0Y8zHQG8gF7g6sG068CzwO2PMu02dJ6wTo0gX6Yu9xhJvZTm25JSDym3JKXgrwrMS0vFGjdMEKSiWAycCiEgXIAvYDmCM8QLfADWDWMcDq0MQo1JRqaTYkJFpxWqte5/vnG2luEjHGinVgG7ARyJiNf7Vt4sAROQc/HXZScaYpU2dADQxUs2IlVajuF79KN+0Dk/Z/gNlxuejZOUyEvqG75hyTY6apglS6xljPgC2icgXwJvAdcA0EbklsMtVwCMi8hlwGvB/IQlUqSg0+HA7u3Z62bC2uk75269XMHa83tOUqiEi00XkFmPMZvy9F5YE6q3OwNPAFcAw4GMRWSAiTzd1vpgYY1RV7mHT1/sQgb5HZ+CIa3LclYpB9pRU0sZOYPuT/yDt2OOxuOIoXroEi91B4pAjQh1ek3S8UfNqJ0exNg6pLYwxM5vYthY4qgPDiRput+HrxVVUVRqOGuckOUWfUaq6XC7hxj8mc/W0fUyfkUjXblbef7uCLZs8/PmvB/duUCqWGGMWAgsDXz9Tq/w+4L56u599KOcO+7txW8cZ/fDxHh48+VOWvLCFxc9t5sGTP2Hd53uDFF3ztDtd+Giu1ajTpMlknX4eFVs3Ufr9dyQfMZKu065CrOGfSGvLUcvVtCJpS5IKhRXL3JxyTC6zZ+3ntZfKmXxsLnNfLgt1WCoMnXtBPI88kc6m9R7enlvB8NEOnpubQVJy2H90UypiRXWLUcneSt64YxWXzz6abof7n7BsW1HIc9d+yw3vHk98asumcY51vbvnsWVHZqjD6BAJAwaTMCAyp1nXlqNDpy1JqtAb32HXqqo0/GZGAXf/LZUJJ/jntdi62cOlU/MZNsLBwMH2DotFRYbhoxwMH6WfVZTqKFH92GH1+7sYclL2gaQIoOeINAaMz+T7D3eHMDIVKi2ZoU7FptotSdqaFDtKPC5eKx7dIdf6YmEl/QfYDiRFAL362Jh6UTzvvF7RITEopZRqXEQkRq3tTldV5mmwVSg+xUFVmaetYbVYNHSni5ZJGCC6kyPtUhc89RMlTZaik9tt49PcgR2SHJXtN6SmHVztpqZZKCvTmcaUUirUIiIxaq2Bx2Wx+v1ddZKgipJq1szfzcDjskIYmYpFPncVpWtWUrJyGd5as98FmyZH7aehZEmTpshmKxV2f9mtQ5Kjscc6+erzKvbu8R4oc7sNb88t57jjdcFO1bHcbsNnH1fy1mvl7NrZcQ+LlQpnUT3GqNvQFAYel8kTF3/JkRf0xBj4+uWtHDGlK50HJIU6vIgTTWONqnq6cW7ruH7bZRvWsvu153F17Y7F4WDvu6+TccoZpI4Z1/zBraDjjTpei5Kjz9o9DHWIrFWGpK2G3XTj02P9ZeenLGuXa2
VlW7l6ZhIXnZPPRZfFk5hk4fX/ldOjp42JJ2pyrTpOzg/VXHvFPrr1sJGVbeX+u4q58LIErrsx6cAi50rFoqhOjESEM28fyvov8lgzfw8InPnHw+k3LqPDYzk5O4f5eyJzUL9qG19VJbtfe56uF15BfO++ALgL8tn+5D+I69kHZ1Z2u1xXkyOlmieVblI2VQFOdtONp3tlwuj2S46uvCaRkWMcvPNGOVWVHq76dSInTnZhseiHUdUxfD7D735VwA1/SOaMc/yTjxQWeJl2Xj4jRjnqjIFTKtaEfWL0WsmoNh0vIgw8Lku7zqmDdFSr0f61PxDXvdeBpAjAkZ5B8sijKF21HOdJU9rt2pocKdU8R84OUugOOAEHT3MsNNOrri2J08gxDkaO0ZnGVGisXlGN3S6cfnbcgbK0dCvTr07knTcqNDFSMS2sE6O2JkXhJhpajaKpOx10THJkqt1YXAdXNBanC295+69f0hHJkc/jxlNVgT0uCbFE9dBFFaUaTI4aUTMZTXu1KinVnioqDIlJclCXucQkC1WV0fEgzV1lKCz0kZZuweHQ1ljVcmGZGEVbQqRiW3z/weR99A7VJUXYk1MB/0QMJSuX0vmMqR0SQ3slRz6vh53fzGPfhm8Rqx2LxUbXMafRqX/HTH+sVDDVT44as4VMPg18rcmRijQjRjvYutnLj99Xc9hQ/9pZXq/h1RfL6rQiRSJjDE/M2s9z/ynDZgOfz9999fIZCTp2SrVIhydGhd54TXwinLYaHRp7SirpE05m2xOPkDpmHGJ3UPLdN8T17E1cn/7tdt362iM52vH127j3F3L41FuwxydTlreNjZ88g92VSHL3QUG9llIdoW5y1OhemhypiOVyCX+6O4UZl+7jnAvi6Zxt4d23KkhMsnDW1I5b8Lg9PPNUGQvmV/Ly2xn06GVj80YPv/t1AQkJws+mJYQ6PBUBwrLFKJpFQ3e6aNTeyVH6sccT37sfpauX4yvbT+Zp5xDfb2CHP8EKZnLkdVdSsHEZQy/4AzZXIgAJmT3pNmYKuWsWaWKkItZPyVFj/C1KmhypSDX5jDgGHmbn7bnlbNrg4fIZ/klAbLbIbVUxxvDsk/t5/JlO9Ojl/3jbp5+NP/81lT//vkgTI9UimhipVom2VqOO4OrWA1e3HqEOI2jJUXXlfqyO+ANJUY249C64Vxa2+fxKhZIjZ0ej2zJzIGVwd3ZMcmlypCJWn342rv99cqjDCBqfD/bm+hgwqO5H24GH2di5XddpUi2jiZFqNxWr1lL8zqe4t+/G3jmD5NMmkjB2eKjDalRHr20USsFIjhwJqfg8biqKcolL7XygvGR7DvEZoU8AlWpPjpwddOfg5KgpmjiF1qoVbv7591JWLHOT1dnKRZfFc/HlOvYkWlitwqAhNr5cWMVxk36a8OiLBVUMHR4bdbtqO02MQiBautM11WpU8f069j39GunTzsY1pD9Vm7ZT8OybmOpqEo8b08GRtpwmRy1nsdroMvJkNs5/mu5HnYkrtTPF29aQ+/1CBk75VRAjVSo81SRHxX2dbJmgyVE4W5dTza8vL+CGW5N4YFYaWzd5uOeOYgoLfMy8MXpaTWLddTcmc/tNRdx4WzLDRztY9rWbR+4v4YFZaaEOTUUITYxUuyh++1PSLjmL+DFDAYgbOoCMGT8n/4mXSRg/Wp/QhYnSXv7fQ2sTpKwh47HHJZH7/ULcZcUkZPZk4GnXEJfWPovWKhVuak/WsGVC092Ltctd6Px39n6uvCaRqRf6x5mkjnLwjyfTOeekvVzxy0QSEnWZgWhw/Eku7v9HGnOe2M9jD5XSb6CNR55IY9SRTU2motRPNDEKkWhvNaremYtrcN86ZY5+PfAWlWDc1YgzfFtlYqnVqEZbWo/S+gwnrU/4dpFUqr3VTo5qHjY0RMcjhc76HA8XTa87+L5ztpWMLCu7dnoZMEgTo2gx9lgnY4/VREi1jiZGql3YOmdQtXE78SN+Sv6qt+
/GkhiPOOwhjKxlNDlSSh2KmuQoZVPD24v76kx2odSrr41V31UzrNZYk4J9XvL2eumcbQ1hZEqpcKKJUQhFc6tRypSJFDz3Jpa4n+Ec2IfqbbvJf+oVkqdM1G50YUyTI6Var6mZ7GovHKvJUce77KoEZl5ZQJeuFiae6GLHNi933VbEORfEk5yirUVKKT9NjFRQ1E+O4scMxVR72DfnDTx5+7CmJJF82gSSTjomhFEemlhsNQJNjpRqD3UXjtXkqKMdMcLBvY+k8ugDpdxwTSFJyRYuvDSea65PCnVoSqkwoomRapGCpVvY9dYKqvbtJ/mwLvS4YAyu7JQmj0kYN4L4scPB4wWbNSJbimI5OYLWT8qglDqYJkfBs2qFm2efKmPrJg8DBtu4fEYiAwc33U372Akujp3gwu022O1EZJ2klGpf2n4cYidn54Q6hGbt+eB71j30ERnHDWDAdSdijXfw3fUvUbG7uM5+vbvnHXSsiCB2W0RXQFU93aEOIWSaGkiulDp0jpwdpGyqImmrwbnVwZYdmXyaO5DXikeHOrSIsXhRJTOvLGDkGAd/vjeF/gPtXHnhPlataNm92uGQiK6TlFLtR1uMVJN81V42P/0Fw+6bSmJff1e55MO6ICLseOVbBlx/UogjVO1Nu9YpFVyOnB1k5gBT+qEtR4fukftLufO+FE44JQ6AYcMdpKZZ+OffS5n9XKcQR6eUimTaYhQGwrnVqDK3GIvTdiApqpFx3ACK1+w6aP+GWo2iQSy3GoE/OdLWI6WCK/O9jdpydIiqqw05P1Qz8URXnfITJ7tYsSy279NKqbbTxEg1yZ4cR3VJJZ6yqjrlFdsLcHRKaOSo6BTryRFo1zqlgi3zvY10X1CpyVEL2WyQnGJhxzZvnfItmzxkZulHGqVU2+hdJEyEa6uRPTmOjHH9WD/rEzzl/sSgfNs+Ns/5kq5njWjwmGhtNQJNjkBbj5QKNkfODk2OWkhEuPDSeO66rYiCff7kKHePl3vvLObi6bH1sE4pFXw6xqgJ7govBdvLSMp0kZAWezOT1Rhw/Ymse/Rjvr54Nva0eDyllfS+7BgyxvVr9JiG1jaKFrE6U119OvZIqeBx5OygO90p7utkywT/mKNPcwfW2cfn8VKxoxBboovTDt8Vs+ORrrk+iQf/WsLpE/eS2dlKXq6Xi69I4OLLNTFSSrWNJkaN+PLZTSycvYGEdCel+ZUcNimbs/40FLur/VbIDtcFX61xDg67dQruonKqC8uJ65aKxaF/Okqn9VYqmGpP571lQt0HS2VLVlL48jzE5cRXWs66YdmUPTCU6b2/D02wIWSzCbfekcKvf5tE7h4vXbtZSUjUDjBKqbbTT7cN+P7D3Xz76jZ++eKxdOqZQOX+at68YzXv3v8D59wxLNThhYwjNR5HanyL99dWo9ihrUdKBUft5KjmwUPFjq0Uvvwu3S66Cle3Hviqq8n99h1m/zaHhGdGx2zLUXKKheQUTYiUUsGjd5QGLHlpC5NvGEynnv5meVeinbP+NJTvP9hFVZmnXa8drmON1MF0vFFdOvZIqeCovdZR0lZD+WeL6TJkEpme7iRtNaTsspF99NmUbipk3rIMHY+klFJBoolRA0rzqujUq25f5fhUB/Y4KxUl1SGKKjJF80QMoMlRQzRBih4iMktElojIUhE5vd6280UkR0QWBF6jQhVnNKpJjlI2VWHyi0gvTznwfcqmKpJ3WLAnZ7Atx66TNSilYoqITBORlxooPzNQZy0RkcdFxBIov1JElovIChH5Y1Pn1q50Deg5Mo0fPt5DVr+kA2XbVxVisVpIznI1cWRwhOtYo9aK5i51oN3qGqPjjyKbiEwG+hpjxopIV2CxiLxvjPEFdhkJzDDGLApdlNHNkbMDgPTqVAo2fEvXvMQD25yVSVTv2UMivdiyw6oLxCqlop6IpAPzga7AZ/W2CfAwcLQxZp+IzAVOFZENwPnAGMAAt4uIpVZdVocmRg2YeH
V/nrrsKzzVPgZNyCJ3fSmf/ms9k28cjMWqT8IVeEv2U7FqLVgsxA8fTFVPNDlqhI4/ilij8FdAGGN2icheoCewJbB9JDBARO4FvgJuNca0b1/jGNUzfihf7ZvLDyWL6OIaQKV3Pxu+/ZZePSeQutdFaZywhUxNjmLY/lIfn31SSXU1jJ/oJDOr/SaKUipUjDEFwGgRmQhcU2+zE7jTGLMv8H0JYAVOBzYDbwMpwN8bS4pAu9I1KKNXAlc/N47yAjfz/rqGdYv2cv69wxk+pVuHxRBtY42iqUtd6cJv2HXrg1SsWkv5t6vZ+fsHKF++RrvVNUG714WtjEA3uZrXjFrbHEBFre/L8Fc8NeYD1wMT8D+9+0W7RxujHJY4xqafh1Xs5JR+yc6KtQxMGsvAin4HxiLpGkixa9GnlZxybC7vv13Bl59VctYJe3np2bJQh6VUazRVJzXJGFNpjHkeQESuAXoA7wNd8D/o+xkwFXhQRDIaO4+2GDWiU88Ezvpz7M5A1x6ioUtd9Z58il77kOw7ZmLv7P9/VbVlB3v/9h+cA3try1EztHtdcLndtrb+n8o3xoxpZFs1ULvvcBxQBQe6LDxpjNkf+P4F4FzgibYEoxrntMYzKGncQeWOnB1k5gBT+gEObTmKMaUlPm79bSGP/7cTw0f5656d2z1cdE4+Y452MGCQPcQRqljTxnqpqTqpWSKSBfwH+A6YYozxiEgF8J4xphwoF5FlwGHA5w2dQ1uMwli0tRpFg7KvV5IwbsSBpAjA2bs7rsMHUL7sB0AnZGgJbUGKCMuBEwFEpAuQBWwPbLMAK0UkNfD9ccCaDo9QHZD53kZtOYpBn31cyeijnAeSIoBuPWyce0E8771d0cSRSkWXwEQLrwJ3GGP+bIyp+TC2CDhW/OKAocC6xs6jLUaqQ0V6q5HxeBDnwS1C4nRA9U/DK3RChpbRFqTwZYz5QETOEJEvADtwHTBNRLKNMfeLyE3ARyJSDmwA7ghlvMqfHKUM7s6OSS5tOYoRbrchLv7gh0xx8UJpSaPDKJSKGiIyHcgGPgKOAP7u79QA+JOkT0TkBGAZ/i7h9xljchs7nyZGYS7aZqiDyE6O4kccRt6/XiTltIlY4v29jDxFJVQs/4HUs06os68mRy1Xu/VIk6TwYYyZ2cS2N4A3OjAc1QKOnB10R5OjWDH+eBcP3l3C7l1eunT1T7hQVubjrdfKueuB1GaOVioyGWMWAgsDXz9Ta1NaI/v/EWhymu4aLUqMRGQWcGRg/zuMMe/W2tYJeCEQjBe4xBizuSXnVSrSOPv1JH704ey+4x8kHjcG4/Gwf9FSkk+fiC0zPdThRQVtRVLN0TqpaY0lR03RxCkydc628usbkrjwzDzOuzCeuDjhzVfLGTfeyZij9cGcUoeq2cSoBWtZPArcZYxZLCLHAAPxT4sXsyqKq1n2xnZ2rikmtWscY6b2oFPPhOYPbIS2GoWXtIvOIH70UMqXr0GsVjJ/Ox1n74ZnLNRWo9bTViTVEK2TWqYmOSru62TT2FR2zaukeOV27ClxZJ82lMR+WQcdo8lRZLr0F4kcOc7J+29XUFjg4877UjlyrINa3YmUUi3Ukhaj5tayGApMFZF7AmW/CX6YkaM0r5LZl35Fj+GpDJqQSe76Up64ZDEXPTyKPmM6hTq8sBKpyZGI4BrUB9egPi3aX5OjttNWJFWL1kkt5MjZQaI3ix/ffhpf1zgSjjqCqn1FfHfzG6RffAYJY0cc2Fe73EW2wUPsDB6iM9Ap1VYtSYwcQEGt7w+sZSEi8cBw/E/nbhSR24E/ATfXPkFgHvIZAM6spGZbPyJ5NraFT25gyImdOe3mIQfKegxP49171nDt3ONa/QQnGluNYknNTHWaILWNtiIpglwnuSyJHRFzyOxZ8REJzkR6jL6S/T0s0ANSOh/BzueeIDVjJBa7/8O0jkdSSqmWJUaNrmUR+NfNTwNw5wL/rH8CY8xsYDZA0qDsZj/NtC
YBCJdkav0XeVz8j7pTsA+e1Jk371jF/vwqkjJdjRwZmyK11ai1tPUoeDRJillBrZNS7FlRZeBMegAAIABJREFU/ceTX7WdXtZhpG52I+JfnzeJruS7UrF+t53EzjUt37oGklJKtSQxWg78Gni0/loWxhiviHwDjAMWA+OB1e0Ua5PCpRXKmWijrKDuOjbVFV681Qa7y9qmc0drq5EmR6qtNEmKKRFRJ4ULm9hx+yp+WggWMMbgKywmY0keSXYf7sHd8Te6aXKklIptzSZGza1lAVwFPBdYy6IAuLpdI26lxhKKYCdMI8/uzif/XEu3x4/CmWDD5zN88s91DBifiSup7f1/NTmKDpoctZ/6C8dqohRdoqVO6ijd4gaTU7qYTGcvnNZ4jDFsLV+Fw+Ii0eafSdORs4MUNDlSSqkWTdfdzFoWa4GjghZRB2soyWhLsnT0hb3Zu2E/f5/8KT1HppO7vpTkzi4ufkRXIFd1aXLUMTRRij7RXCcFW5arNyWefD7Pf4k0RzYV3lLAMCp1Sp0xr5ocKaWULvDaoPrJ0qEkSharcPYdwzjuF/3Y/WMxqV3j6TokOajTZmqrUfTQ5Kjj1U+UQJMlFd36J46hR9wQiqr34LDEkWrPbrBOOtDdbko/NDlSSsUiTYxaoDWJUnr3eNK7x7dXSFFLkyMVCg0lS6AJk4oeTms8na19W7Rv5nsbNTlSSsUkTYxaoS0tSsESra1GoMlRrPKUluDel4+jUwa2pORQhwM0njDV0MRJRStNjlSsKy7ysWFtNdldrXTroR+XY4X+poMgVIlSNCdHsShWkyPj9bJ33lz+n737jpOqPPs//rmnz+zO9ga7LL1KBwVBRUDFrokmlmDaY4wxmphq8pg8apIn5Unxl2hMolFTjBqjscSCYlBREJSm0mFhF5albW9Tdmbu3x+7bLYX2JlzZuZ6v168sntm5swVWfac71x3adj+AY6cfIKVR/GeNoO8S65CWU9tJcdo6y84CRHPcl8uIX1SEeWLXRKORNLQWnP/rxr426NNjB1v40BpmFlzHfzk3gxSUi1GlyeiTIJRFHQMK2bZXyneJGPXCJJzI9iqt1bSUlfD6K99H6vLRdjv5/Df/0zVWyvJWXKh0eUJkdQcO8spQsKRSB7PPe3jzdf9/GtVHrl5VgJ+zQ+/V8uPvl/HT+7NNLo8EWUSfaNs5ZFJ7X+iIZGD16ii40aXYJgTASkZ1G14l7yLrsTqat2z0+pykXvxldRtXGdwZUIIaAtHb/jJWW2ntDyXVUcn8HSdrLQqEtPTjzdx+7fTyM1rHbHgdCm+/f10/v2qn8aGiMHViWiTjlEMSSdp8JK1cwTJM7Qu3NyELaPzp3D29EzCzU0GVdRd2O+nYetmwg31uItH4x4zfkhXmhTC7Dou5116jnSOROKqqY4wrLDzMG5vmsLlUjQ2alK9BhXWQcCvWfmKjwOlYSZOsbFoqQubTa5JQ0E6RgYZyk5Soocs6RwlNs+osTR8tLnTsYatm/GMHGtQRZ35K8op/c1PaC7Z3Tof6pXnOPTYQ0RCIaNLEyKmHDvLSd8XIGe1ncNrCqVzJBLS6fOdvPyCr9Ox994NkupV5OUbf9tcUR7iivOO8cI/fYTCmkd+38h1l1dSVyvdrKEgHSMTOBGOTiXgyEIMiSvR5x3lnH8Jh/76EC21NbiLR+M7sJ+699dSuPwLRpeG1pojzz5B7oWXkza99QYwe/EyDv3tj9S9t4bMBYsMrlCI2OrYOTpMIasWth6XzpFIFDfdmsoNV1XSWK85Z6mTPTtDPPpgI/f8NAOLxfiuzI/vqudj13j44m2trSv9Dc3d36njgXsb+O496QZXF/+Mj76iXbTnI8WzZO4anZCo3SNXYTEjbryNiK+ZmndWEfE1M+LG23AVjjC6NFqqK4n4fHinzW4/pqxWMhecS8P2Dw2sTAjjnOgcecs0h9cU8sjGhdI5EgmjcISNJ1/Ixe1R/OkPjZTsaeF3j2
ax+HyX0aURDGjWrPZzw3+ltB9TSvH5m1N57WVfH68UAyUdI5Maii5Soknm+UYnJOq8I0dOHnmXfNzoMnqhQWvoOKdIa4z/3FAI43TsHIGDR1gIc6RzJBJDXoGVr33HHPvp9UTr7t/LtNehYUgwCjUFKH10Dcfe2EmkJUzOgnGMvvEsnDkmmNFmMgNdsCFZukwSjhJ/aJ2Z2LNysHpSaPhwE2kz5wKt+y7VrH2T1NNmGFucGDIRHaakcSPlvh2EdIBsxwgmeOeRassyujRTk3AkRGw5nIqzz3Xx54eauOX2tqF0unWe0fkXuw2uLjHEPhhp+OjOZ3EXZjD7t5/C4rRx6NnNbPn6U8z9w6exuu0xLyleJEv46Y+Eo1aJ2j0yE6UU+R+7jkOPPUTDjg9xZOXStGsb9uxcMk5fYHR5YohsrXuDFu3n9KzLcFo8HPLt4r3q51mQ/Qlc1lSjyzO1nsLRqqIJPDDhCaNLEyIhfefuNG68voqN6wNMneFg/doAAA/+NdvgyhJDzOcYhX1BQo1+Jn5jGa6CdByZKYz+/FmkjMzi2JsybEwMjMw5apWo847MxDWskNFf+S6pk6ZicbnJu+xqhl//eZRNRiIngghhjgcOMDNjGam2LOwWF6NSZjDMNY4DzVuNLi8udJxz5CxzUFqeyy27rzO6LCES0vBCG8+9lscnrk/Bk6K4+Ste/vZsDmnpsmzAUIj5lT0SDJE1cwyqy8oe6dNH0FRaFetyhIh7MrQu+ixOJ+mzzjC6DBEFER0m3Z6LVXUerZDpGE6Fb7dBVcWfrp2jUnK5hetYkr9bhtYJMcQcTsWFl8nQuWiIeTCy2G3U76hAa91pg8T6bRVkzh0Z63JEHJMhdZ3J0DohBs+irNSHKonoMBb1n00da4NHSbFlGFhZ/OkpHMlGsEKIeBLzvpvV48Bis7L3/lUEa5sJNQc58Ph6GnYfIW/J5FiXI+KcDKnrLFAclOF1QgyCBStZjuF8UPs6vnADER3mYPN2Kvy7GOE+zejy4k5Pw+pkI1ghRLyI/SB5BdN+/HH2PfgW6z/1EDocIXv+GGb84pPYPPJptxg86Rx1J90jIQZuWvpSdjesZ03l3wnrFrIchczJvBSPzbzL9ZqZdI6EEPHKkNnD9nQ3E791IRO+uQw03eYbCQGtS1BWPL+FQ89tJlDZgHfiMEZ9dgEZ04q6PVfCUXcy90iIgbEqG5PTFjLJuwDQKCWTmE9VooajV17w8cffNVBaEmbsBBtfvC2VpctkrocQicLQZZWUUsguiaI3Bx5fT+U7e5h0x0V4RmZT9W4J2+95gak/+jhpkwq6PV/CUc8kIAkxMK3zXuWiNFQSLRy9+Fwz9/28gbt+ks6MOQ42rg9yz3drASQcCZEg5GMxYUrhQAvlT2/ktLsuJ23yMGweB/lLJzPyhjM5+NT7vb5O5hz1TuYeCSFiLZHmHP3hN438768yWHCOi5QUC+cscXH3TzP4w28ajS5NCDFEZCMOYUrBqiZsKQ5cBemdjmdMH0HF81v6fK10jnon3SMhRKwlQucoEtHs2xtizhmdf3fOmedg7+4Wg6oSQgw16RgJU3JkpRBqChI43tDpeP32Ctwjsvp9vXSO+iar1wkhYineO0cWi6J4lJUPNnUOQR9uamHUWPmMWYhEIcFImJLVZWf4ZTPY/qMXaSqrQocjVL5bwv4/rWHEJ+biP1bPgcfXU/L7N6l8twQdjnQ7h4Sj/kk4En1RSt2nlFqnlNqglLqkl+d8XCn1aKxrE/En3sPRjbek8r1v1vLBpiBaazauD3D3d2q58RYvNdVh/vLHRn56Tx0vPtdMMKCNLleIhKWUWq6UeqKH45e1XbPWKaV+p9pW0hnItewE+ZhDmNaozy7k4N/f48Nv/4NgdROp4/OZ9K0LCTUH2XrzX8k7dyLOvDTK/rKWw//6gNPuuQKL3dr5HDKsrl8yvE70RCm1DBijtZ6vlBoOrFVKvaK1jnR4TgZwO1BiVJ
0ivvQ1rK43Zhlud9W1KQDc8dUayg+EGTXGxpe/7mX0GBtXnHechYucTJhk55knm/nTg0088kQ2aeny+bMQQ0UplQWsBIYDb3Z5TAH3AvO01lVKqWeAC5VSYfq5lnUkwUiYlrIoiq+bR/F189DhCMpqIRIKs/5TD3HaPVe0L9s94hNz+fCOpzny6jaGXzq923kkHA2MBCTRxWxaL0BorSuUUseAYqC0w3N+DPwE+GTMqxNxq6dw9Egfv6NXFU3ggQndPhw2xFXXpnDVtSmEwxqrtXUFw2svP843/juNK672APDZm1K48xu1PPy7Rr72HdkLS4ihorWuBuYopRYBN3d52AncrbWuavu+HrACM+j/WtYu5sEoGLQN+CZVhkKJE5S19VO3ht1HcWR4Ou1lpKwWhl8+kyMrtvYYjEDC0WBIQIofKqhO9e8pRym1ocP3D2qtH2z72gFUd3isidYLT+t7K7UYOAZsP5UCRHLqGo76Ukout3AdS/J3m6Z7dCIUVR4PU7Y/xKUf+89y3Uopln8uhTu+WivBSCQdFVQ4y076utTXNalPWms/8BiAUupmYATwCq0f8vV6LevK1B0jCVCiK4vNSjgYQmvdtudIq0gghOoyjK4rCUeDIwEpKVRqref28lgL4OrwvRsIACil3MA3gY8D3TcVE2IAHDvLyd0JXDy2v2eadiU7m00RiUA4BNYOlyC/X2OXX51CDFZf16R+KaXygIeBzcDFWuuQUqrXa1lPTB2MBqqvm10JTYkldVweaDj+5i7yFk8CINQUoPzpDRRfP6/f10s4GjwJSElrE3AL8Gul1DAgDzjY9th0oBBYQesFZ5RS6mta63sNqVTEtdyXSwhOKurjGeZd5jsj08L0WQ7+/FAjX7jVC0BLi+bB+xu5+HLZ9FWIWGlbaOEfwNe01ps6PNTXtaybhAhGfenpJljCUvxSFsXkOy9h6/ee48iKrTjzvFSt20fuORPIOWfCgM4h4ejkSEBKLlrrFUqpS5VS7wB24DZguVKqQGv9M2AmgFJqJK3juiUUiZPm2Fne62Nm3wPpnp+mc9MN1bz5bz/jJ9pZ+3aASVPsfPrGVKNLEyLhKaU+Q+vIhddo/dDulx1GFN3V07VMax3u7XwJH4x6ImEpvnnH53PGXz5P9bp9tNT7Kbp6Likjswd1DglHJ08CUvLQWt86gOeUAZ+LQTkiSZl9g9jhRTaeW5nLmrcCHK4Ic9V1HqbNkN+PQkSL1vot4K22r//c4aHMXp7f77XshKQMRj3pepMsQcncrE47uYsmntI5JBydmo57IElIEkJEk9nDkc2mWLTU1f8ThRCmJsGoFxKUkoOEo6EhXSQhRLSZPRwJIeKfBKMB6njzLCEpsZz4+5SAdOokIHWnw2GaS0uIBPxGlyJE3JNwJMSp0VqzeUOQ48ciTJtpN7oc05FgdBKGKiTpcASUQllU/08WUSfdo6Ejw+xaBY5UcOjxR7ClpGJNkYnYwty01gCdtkIwIwlHQpycwxVhbv2vKlqCMGqMjXu+W2t0SaYjwegUnUxIai6vYd8f3qT6vVKU3ULe4smMuekc7F4Zn2w0CUdDL1m7SDoSoeKJR8lZehFpM+YAsPt/vm5wVUJ0Fwg3s7NhDUf9+9Bo8pyjmZS2ALfVa3RpvZJwJMTg3fn1Gs6/yM0Xb0tFKUV9XYQzpx0xuixTsRhdQCIpLc9t/9OblgY/H3zzKdJnjGDh87cy769fQFkVW7/3bPundcJYMlQyOgLFwfY/ycB3YD8Wp6s9FAlhRhEd4f2aF3BaUlic9xmW5n0erz2L96ufJ6xDRpfXJ8fOctL3BfCWaZxlDkrLc1l1dAJP18m/OSG6OlwRZs+uEP/1pdT2rnBausSAruS/SJT0FpCOvraNjOlFjLh6LlaXHUemh/FfPY9Qg5/6rYcMqFT0RMJRdCVDSIoEAlg9HqPLEKJPlYEybMrORO+Z2C0ubBYH41JPx2PN4Ih/r9Hl9UvCkRAD09
QYISVFYbebe6is0SQYRVnXLpKvvIa0KcM7PUcphXfyMJrLa4woUfRiVNFxCUgxkKghyT1yNP6KcoJV8jMkzKspXEuGvaDbvKIMRwFNofiYfyDhSIj+jR5rI6LhvXcD7cdkpFJ3MscohkrLc/GnjyL44Q4Kr5zVflyHI9R9dIjhl0w3sDrRG5l3FDtdw1E8z0uyutzkXHApBx/5LRlnLJTFF4QppdqyOOzbi9a6UziqCVZQ6J5sYGWD49hZTu5O4OKxyJwjIbqzWhXf/1E6X/9SDVdd62HUWBuvv+IzuizTkY5RjKUsnEXN9uNsuW8LwZpmfBW17PzZK7gK0vBOHmZ0eaIX0jkyRrx3kzLmnknhp/6LcGMD/gP7jS5HiG5yHCNQSrGt/i184Qb84SZ21K8hEGmiwDXG6PIGLfflEukcCdGLsxe7eOyfOYTDmvXvBDh7sSz61ZV0jGLM4nZR8N0vUvv0CtZ9+hGU3U7KmTOZcc8s0y+RGg+C1U1Uri0BNNlnjsWZPXSf0kvnyFjx2k1yDR+Ba/gIAOq3bDC4GiE6U8rC3MxL2dP4Hmsr/4FGU+Aaw+mZV2BRVqPLOym5L5eQPqmI8sUuwztH9XUR/v2aH1+z5qxFTopHyW2XMNaoMTa+eWd6+/c/vLPOwGrMR/6FGsCWnUHOF6/tdOxgVev/Smfi5B15dSslv3uTrPljUCj2P/wOY246h2EXTRuy95BwZB7xGpSEMBu7xcWUtHOYknaO0aUMGcfOcoowNhytWe3nW7fWcPqZTtLTLTxwbwPXfdrDl7+eFrMahBCDI8HIZE7cdEtAGhz/sXpKfv8Ws+6/Hk9RFgC+QzVsuvVxMmcV4ypI7+cMA3fi70YCkrn0NNxOwpIQyetEOKob46T0nNiGI58vwh1fqeW+P2Yx5wwnALffEebayyuZv9DJnHnOqNcghBg8mWNkUv3thyQ6q3x7D7lnj28PRQDuwkxyF03g+Nt7ovKeEl7Nr+McpXieqySEODknVqzLWW2P6Zyjd98OMGmKrT0UAWRlW7lmuYeX/yUT3oUwK+kYmZx0kAZGhyIoe/fx8Ba7FR0KR+19ZWhd/OkrHEmHSYjE49hZTjpFQOw6Ry0tYHd0nzfscCjC5t43V4ikJh2jOCE3333LXjiW42/tIlDV2H4sWNPEsTd2kbNwXFTfW/Y7Shw9dZik6yRE/OvYOTq8pjDqnaMFZzvZsjHI3l0t7ceamyP84/Fmll4oK4EJYVbSMYoj0j3qnacoi6Kr5rDpS4+Rf/4UAI6+vp3Cj83CU5wdkxqke5Q8JBwJEX86do4OU8iqha3Ho9E58qZZuPOH6Xzmk1VccoWbtHTFS8/7OH2+k7MWyfwiIcxKglEckoDUs+Lr5pE1bwyVb+9Ga5j244+TOjYvpjXIwgxCCGFeXcPRIyNzYU50wtGlV3qYMcvByy/48DVrfvSLDGaf7pCtOYQwMQlGcUwCUnepY3JJHWN8KJHukRBCmFPHcAQOHmFh1MLRiJE2vnibd8jPK4SIDpljlADkBtycJLAKIYQ5nZhz5C3TOMscPLJxIbfsvs7osoQQBpNglCBkeW9zkoUZhBDCnLqGo9LyXAlHQiQ5GUqXYErLc+VG3ISSdWidjkTwb9uLb+tuLB4XKWfOwp4Xm8UwhBCiP12H1ZWSyy1cx5L83THZCFbE3paNQf79qh+bDZZd6mbSFLvRJQkTkY5RApLukTklW/dIRyJU/u4Jav7+ElZvCpFmP0d+8Fua3vvQ6NKEEKJdT52jWG0EK2LrF/9bx7duq8Htbl0A44s3VPGnBxv7eZVIJgPqGCml7gNOb3v+XVrrl3p4zseBy7TWnxvaEsXJku6ROcXbynWB/eU0rd2MDgRxT5+Ie/YUlKX/z1Sa3/+IUGUNw+66DWVv/VWTunA2R3/2R9wzJmFxymaq4uTINUkMtZ46R7HYCFYM3v6SEM8+1Ux1VZg585xcfJkbp6v/lf4++iDIK//y8+yrua
Slt17Drv10Ch+74BjnX+SicIQMohID6BgppZYBY7TW84HLgd8qpSxdnpMB3B6dEsWpkO6RecVDaG14/V2O//ovWLweHMXDqXvpTY7/9m/oSKTf1zZv3kHquWe0hyIAR/Fw7CMKCOzaH82yRQKTa5KIFukcmd+q13x8+upKlILpsxy89Gwzn7u2kubm/q9Jb6z0c/nH3e2hCCC/wMqSC1ysfiMQzbJFHBnIULrZwEoArXUFcAwo7vKcHwM/6e0ESqmblFIblFIbwg1NJ1urOAUSjszJzMPrwg1N1P7zVQruvJmMy5fiPe9MCu68mXBVLb5N2/t9vbJb0cGWbsd1sAVs1miULJLDkF6TghFf1AoV8UfCkXm1tGh+8N913P9wFl/7Thqf/FQKD/0tm5xcK0891tzv6+12hd+vux33+8Eu04xEm4EEIwfQ8crRRGuvGQCl1GJaL0y93ilprR/UWs/VWs+1OVNxHnAM+I8YOtI9Mi8zhiP/jhKcE0djy81qP6ZsNlLPmovvw539vj5l3kwaXn+XcON/PgzxfbiLcHUdrgmjo1KzSApDek1yWNxRK1TEJwlH5rRrewuZWRZmzP7PvaFSiquv87B6lb/f1190mZt//dPHwbJQ+7Ed21pY85afpctcUalZxJ+BDKhsATr+xLiBAIBSyg18E/g4UDDk1UG/4ShQHIzG2yY0mXtkTmabe6QcDiJN3T9NDzc3o5zOHl7Rmeu0cXjOmEbFd3+Fe/pEwg1NBEsPkXvbcpR0jMTJM/SaJJKDzDkyH7dHUV+viUQ0Fst/5hTV10Vwe/qfYzRqjI2vfNPLJy85zlmLXbQENevXBrjnZxlkZsk1SbQaSMdoE7AUQCk1DMgDDrY9Nh0oBFYATwIXKqW+FoU6eyWdppNjlptv0Z1Zhte5TxtH6FgVzVt2tB8LVdbQuGo9KQtm9ft6pRSZVy2j4Pu34Bw/ktSFsyn8+bdxjR8VxapFEjD1NUkkDukcmcuYcTaysy088Zf/DJurrY3wxwcaufwqz4DO8cnlKbywKo/5Cx0sPt/FK2/nc8HF0jUW/9Fvx0hrvUIpdalS6h3ADtwGLFdKFWitfwbMBFBKjQTu1lrfG9WKB6hrOJLOUncnwpEZbsJFd0bvfaTsNnJvvYHj9/+V+rxsLClu/Dv3k3HVBThHFw34PPa8bNm7SAyZeL0mifgknSPzUErx8/szueVzVTz/j2aGF1l5790AV12bwgUXD3woXG6elauuTYlipSKeDWhtQq31rQN4Thlg2mVRJSj1TobWmZfRw+uc44op/MUd+LftJRIIkv35q7F65YKSLPpaFlspdRnw323fvqu1/nqs6kqEa5KIHxKOzGPkaBsv/DuP998NUlUV5tv/k8bwQllmO9kopZYDl2itr+vhsUzgNWC51npXl8deAH6ptX6rt3Mn7U+TBKXOJByZm5EBSdlsuGdMivn7CmN1XBZbKTUcWKuUekVrfWJd3J8B87TWDUqpN5RSM7XWW4yrWIjokXBkHlarYv5Z/c9zFYlHKZVF66qkw4E3e3j8m8BNwIgeHrsGmNzfewxkjlFSkPlJMu8oHphl/pFICr0ui62UsgFfbQtFHiCd1tXhhEhYMudICGNprau11nOAa3t5/Bda6wnAuo7H2wLV1cDj/b1H0naM+tIxHCVbJ0nmHcUHo4fYnYyIL0Dz5u3oQBD31PGdlgEXhslRSm3o8P2DWusH2752ANUdHmtfFltrHQJWKqXOAx4GPgRKo1+uEMaSzlHiCPg1b63yU10VYc4ZDsZPlM2MTKCva9Kp+CnwfeCa/p4owagfyRqSZGhdfIiXgOTbvpfKBx7HOW4klhQ3tc+8ivf8hWRcsdTo0uKaNQjesu4bFg5CpdZ6bi+P9bUstgNI0Vq/rpQaBTwEfAF44FSKESIeOHaWk7sTuHgsEo7i055dLdz86SpGjrZRVGzj979uYNFSF3f9JL3TUuBi8KyBU7ou9XVNOilKqQuACq
31TqX6/7uVYDQIJ0JSsgQkCUfxw8wBKRJsofL3T5L75U/hmjwWgHB9I4fvuR/X5LG4JoyKWS3Bg0fwfbAT5bDhOX06tsy0mL13HNoE3AL8uodlsQuBp5RS87XWYaVUrVFFCmGU3JdLJBzFIa013/5KDbd+M42PfaJ1me/m5gifu6aKF5/1DXjp76FQfiDE6yv8RCKwdJmLkaPltjwKzgfOVEotAkYBVyqlrtda7+jpyTLH6CQk03wkM95oi96ZcQ6Sf/te7MPz2kMRgDUtFe/SM2laF7u5+jVPr+DYLx8mXNdA8OARDn/vXpre+zBm7x9vtNYrgANty2I/x3+Wxb5Da70feAZY1/Z4PvCIcdUKYYzcl0tkzlGcKdkdoqlRc+XV/9m/yOOx8PmbU3np+e6bmkfLU481cc1llRwoDVFxKMSnPlbJnx5qjNn7Jwql1GeUUnf09rjW+lta67O01ouBPwG39xaKQDpGpywZukjSOYo/puoghcIoR/ex28pug1A4JiX495TStO4Dhv3odqyprcuNBy9YyNGf/AH3aeOxpMgGfz3pa1lsrfVPaR23LURSy325hPRJRZQvdknnKA60tIDTqeg6rMrlUrQET2lo8oBVHArx//6vnqdezKWouPVW/AtfDnP1RcdZtMTF6LFye96XtuW232r7+s89PL64l9fd09+5pWM0RBK9g2SKG2wxaGboILmmjCO4t4yWimPtxyLBFhpXv497Vr8rZw6J5vc/wrvojPZQBOAYMQznxNH4PtrVxyuFEKJ/jp3lFL3hl85RHJgw2Ybfr3n37UD7sXBY88RfmlhywcA3ij0Vq171c96F7vZQBJBfYOWSK92sfCV2XSvRnUTSIZbIHSTpHMUvIztIFo+LzE9dzpGf/IGUBbOwpnhofHczzjEjYrc/Umw+BBRCJDHHznKKKKJujJPSc6RzZFZWq+KH/5fBN2+t4fyLXBQVW3ntZT+pqYpPXC8bmCc76RhFSaJ2kKRzFN+M6iAAks5mAAAgAElEQVSlLpxNwZ1fwuJ2EWn2k3XDFWTf+AmUJTa/gjynT6PhrfcINza3HwsePEJg137c0ybGpAYhROI7sddRzmq7dI5MbP5ZTp5ZkcvwIiuVxyPcdGsqf/hrNk5XbFakW7LMxesrfJQfCLUfO3okzEvP+Tjvwth0rUTPpGMUZYnYQZLOUfzr+PcXq7BrL8gh48rzYvJeXbkmjCJl3gwO33kvnnnT0f4AzRu3kfXZj8v8IiHEkOq419HhkYWsWth6XDpH5pJfYOULX/Ya8t7DC2189VtpfPLS41x4qRuLFVb8y8+Nt6QyZpzsp2QkCUYxkmgBScJR4jDVQg1RlPmJC0mZP7N1ue6cTIZ97AJZrlsIERWdwhESjkR319yQwsJFTla+4icS0fzt2RxZrtsE5G8gxowKSJFAkMbV7+P7aDcWl5PUhbNPeX6HhKPEYkQXKdYcIwpwjCgwugwhkl5ER6jw7eZoYB8KRYFrLMNc47utFBbPuoajR0bmwhwJR+I/ioptfO6LqUaXITqQYGQQ5wFHzMKRDrZw7OcPY0lx4110OuHGZqoff5GU0kNkXLH0lM4t4SgxJUsXSQgRe1prPqxbiT/cxEjPNDSa0qYPqAqWMy19idHlDamO4QgcPMJCCUdCmJgEIwPFqnvUtP4DlMNO7u2faf80zjNzMhXf/WXrEsYZpzbGVsJR4kqGLpIQIraqgxU0tFSxMOcaLMoKQL5rNG8ff5K6lmOk2/MMrnBoSTgSIn7IqnQmEO0V7Pw7SkiZP6PTEAVruhfnxNH495QOyXvITXPiO7GinYRgIcSpqA4eIt81tj0UAViVnXzXaKqDhwysLHpOrFbnLdM4yxw8snEht+y+zuiyhBBdSDAykWiFI0uqh1B1Xbfj4eo6rKmeIXsfCUfJQwKSEOJkOSwu/OHGbsf94UbsKnGXKu4ajkrLcyUcCWEyEoxMJhrdo9Sz59Kwah3BAxVA6/juhl
XriASCOCeOHtL3knCUXKSLJIQYrGHu8RwPlFEZONh+7Ji/lJqWwxS4xhpYWfT1Fo5kryMhzEHmGJnUUC7O4BgxjKzrL+Xozx/GlpNJpMmHctrJu/0zUdlgU+YcJSeZjySEGAiHxc3MjGV8VPc6dosLjSasW5iVcSE2S+JtjC6EiB8SjExsKBdnSJk/E/fs0wjuL8ficmIvHhbVZVElHCW3rn/3EpSEEB1lOws5J/cG6lqOoVCk23NRSgaxCCGMlXTBSGtN8749NG77AJTCO2U67jHm3jthqLpHFocd1xAPneuLhCNxgnSThOhdXctRKny7CesQOc5i8p2jkyIkWJSFTIfsKyaEMI+kC0bHVzxP0+4dZMw9E4CjLz5DyoQp5F10hcGV9S2W+x4JEU3STRLiP0qbPmB/0xaKPadhU072NW7isG8PMzMuSIpwJIQQZpJUwch/uJzGbR8w8tZvY3W5AUibPY/S+39G+qzTcRYMN7jCvsVq36OhJF0j0R8JSiJZBcLN7G18n4U51+C2tu4nN8IzhXVV/+RYoIx8V+w6/EIIIZJsVbrmPTvxTp3ZHooArG433qkzadq9w8DKBieaex5FQ7Ld6IZ9QarW76Nm8wF0OGJ0OXGn40p3EqpFIqsOHiLLUdgeigAsyspw90SOB8oMrEwkkmBAs3a1n7ff9OP3a6PLEcLUkqpjpBwOwlWV3Y5HfD5UVrYBFZ28eBtalyydo6P/3sHe+1eROi6PcHOQYHUjU/7nctImDzO6tLjV089NsoVtkZisyk5LJNDteEgHsCq7ARWJWHDsLCedIsAJOKAoeu+1bk2Ab3+lhhHFVmw2xXdvr+UH/5fBkgsSd78oIU5FUgUj72kzqXrjNfwVB3ENHwGA/9BBGndtJeeCSw2ubvAkHJlL84EqSn73BjN/dQ0po3MAqFy7l213Pc+8x27E4kiqf25RJWFJJIJsZxHb6t/kmL+UPNcoAJpD9Rxs3sbszEuMLU5EVcdwdJhCVi1sPX51+sYhe4+62gjfuKWGX/0uk3kLnAB8uCXIzZ+u4rnX8sgrsA7ZewmRKGJ+p2YNgrds4K3chpFDt1qczZtGwZXXUP7nP7QFI42/opyCK6/Dlurt9/VmFG/hKJEdfX0HBcumtocigJwF4zj07Gaq1u8n9+zxBlaX+HoL3RKYhFlZlY2ZGReypXYF+5vSsVmc1AQPM8E7j3S7/Nwmuq7h6JGRuTBn6MLR6yt8zFvgaA9FANNnOjj/Ijcvv+DjszelDsn7CJFITP8Rdm8h6mQDU+rkaYwZM4Gmkl0opfCMmYDF6ez/hSYWT4syJHLXKNQUwFWQ3u24I8NDuLn7cBkRG/39vElwEkbKdBSwKPcGqoLlhHUL09KX4LDIMKdk0XVY3SMsHLJw1NCgycrpPpU8K8dCQ4PMfxWiJ6YPRr3pKTANNCxZnE68U6YPdUmGi5fuUaKGo8y5oyj781oKPzYLi611iEKwppnqDaWM/sLZBlcnejPQn0UJUCJaLMpKrnOk0WUIg0QrHC08x8kXPtXIV78VwZvWGpB8vgivvODjJ/dmnnrhQiSguA1GPekaloZyGF68kHBknOwzRnNkxVa2fO3vDLt4GmFfC4ee3UThx2fjykszujxxiobi51XWGRNC9KSncLSqaAIPTHjipM85fqKdCy9zc/2VlVz/2RSsVvj7Y03MOcPBzDmyuIcQPUmoYNRVx6CUTCEpXsJRolFWC6f9z2Ucf2sXVe/uw+K0MeHrF5A5q9jo0oQQQphc13BUSi63cN0phaM7/ieNt98I8Mq/fOgIfOmrXpZc4EKp5LknEmIwEjoYdZRsISkewlEido2U1ULeksnkLZlsdClCCCHiTG/haEn+7pMaWqeU4pwlLs5ZIvPWhBiIpAlGHSVLSJJwJIQQQsSXnsLRqrbHhnI5byFEd0kZjDo6EZLMHpCClccIVlfizBuGPWPgkyYlHAkhhBhqvnADjaFq3NY0Um0ykX+oST
gSwhhJH4xOMGsXKRIIcPiZv+EvL8OZPxx/xUG8U6aTd+lVKOvANmeLh3AUL84v2AnAyiOTDK5ECCFiL6IjbK9fzVF/CWn2PBpDVaTb85iefj42i0zoH0oSjoSIPQlGPTBTF+n4qy9gcToZ8/Xvo2w2IoEAhx5/hJq1b5J19tIBn8fs4cjsXaMTgajr9xKQhBDJpLTpA5rDdSzK/TQ2i52IDrO17g12Nqxhavq5RpeXcCQcCRFb3Xf+Eu28ZbrXDWZjQYfD1H+4idxll6NsrRnW4nSSe8El1G16b9DnO7ERrFnF4z4xXQOTEEIksnLfDiZ657d3hyzKykTvAg779xDRYYOrS0yOneWk7wvgLdM4yxyUluey6ugEnq6bY3RpQiQcCUYDYFRA0uEQRMJY3Z5Ox62paUT8vpM6p9nDkVn11Rk6v2CnBCQhRFII6QBOS0qnY3aLC60jRHTEoKoSn4QjIWJDgtEgxDocWRxOnMOKaNj2Qafj9Vs24Bk74aTPa+ZwFI9doxMkIIlEo5S6Tym1Tim1QSl1SZfHLmt7bJ1S6ndKKbmeJIFsRxGHfJ1/z7XON8qROUZRJuFIiFZKqeVKqW4bfCml5rZdk9YopZ5WSjnbjv9YKfVe22PX9HVumWM0SLGef5S77HIqHn+YwNEKXIXFNJfspnHnVkZ8/ssxeX8xeDL/SCQCpdQyYIzWer5SajiwVin1itY6olp3h7wXmKe1rlJKPQNcCLxsZM0i+sannsH66mfxh5vIdhZR33Kcg83bmZ15kdGlJQWZcySSmVIqC1gJDAfe7OEp9wG3aa3fV0r9BrhRKbUKOA+YB6QC24C/9/YeEoxOUrQCUri5iaq3VtK4YyvKovBOnUXRZ79E/ZYN1G/ZgLNgOCNv/jo2b9opvY+ZF2Mw+0IMA2W2gNR8sJqG3Udx5XlJm1ooO5+L/sym9QKE1rpCKXUMKAZKab0ru1trXdX23HpgYMtkirgS1iH2NW5qn0OU5xrF3MzLOOIvocK3G481jfnZV5FiSze61KSRKOHoyOEwm94PkpGhOGOBE5tNrkmib1rramCOUmoRcHMPTxmjtX6/7esVwCeBFwAHrf9gsoDavt5DgtEp8pbpIQtHOhSi/M+/xzm8iMLlN0IkTNXqf3Pslecp+uzNQ34jK+EoNs4v2GloONLhCLt+9RrV7+0nY3oRTaVVWJ02pv7oYziyUvo/gTAtq1+TXhI4lVPkKKU2dPj+Qa31g21fO4DqDo810XphQWvtBx4DUErdDIwAXjmVQoT5aK3ZUvsqCgszMi7AqmwcaN7KltrXWJBzNVYlQ+eMEs/hSGvNb37RwJN/aWLeAieHK8LU1dbxwKNZjBknP1PxzhrQpO876etSX9ekgeg40fHENesQrV2iXUAa8P2+TiDBaAgMVfeocedWLE4X+Zd/sj0EDbt6OWUP/AJfaQme0eNOudauzByOEomR3aNDz23Gf6SeeY/diNVpR2vN/offYff/W8nUH1wZ83qEqVRqref28lgL4OrwvRtov9oppfKAh4HNwMVa61DUqhSGqA8dozFUw9k512Npm0I2Je1sNta8yGHfXoo8kw2uMLnFazh6Y2WA11/x8/JbeWRmtTaan/pbE9/4cg3/XJEroxmSW1/XpIHoONf1xDXrBiAMjKY1GK1TSr2std7X3wnEKTrVxRn8FeV4xk7o9EtBWSx4xowncPjQqZbXK7MuxmC2hRiGItQYsUDD0de3M+rTZ2J1tn4Sp5Ri5PL51G45SEuDP6a1iLiyCVgKoJQaBuQBB9u+twD/AO7SWv+P1lo+XUlA9S2VZDsK20PRCdmOEdSHKg2qSnQUjwsyvPBMM5//Ump7KAL4xPUefE2aXTvk8xVxSvYrpWa3fX0BsAGwA9Va6wjQCDTTR/6RjtEQO5XukT0rm+a9u7odDxw+FJVukTBOLDtIYX8LNq+r0zGLw4ayWYgE5SIkeqa1XqGUulQp9Q6tF5bbgOVKqQLgNW
A68MsOH+TcpbVebUy1Ihrc1jQONm9Ha93pA7v6lkrS7DkGViY6cuwsJ3cncPFY4qFz5PNp0tI63yMppUhLt9DcJEu+i8FRSn0GKNBa/wy4FbhPKdUCVALfBRTwqFJqLa3Xsie11nt7O58Eoyg5mblHadNmUf3mSmrWrSZ97gKIhKlZ8yahpkZSJkyJUqWtzDqkLpHmGvUkFgEp6/TRHH7pQ8bftrT9WOU7e3DlemWOkeiT1vrWPh7OjFkhwhDZjiI0EXY3rmNsyhwsysoh306qggeZlLbQ6PJEF7kvl8RFODpnsZNnnmhm8fkuLJbW+6RtHwY5cjjM1OnmHMEizEVr/RbwVtvXf+5wfAPQ0y+n6wZ6bglGUTTYcGRxuij67M0ce/EZKle2rnrrGTOeos98EWWN/oJPZg1HZrLyyKSoDIXreM6hDknF189jy9eeZNvdz5N1xmia9ldy7I2dnHb3FTKWWwjRK6UUczMvY3v9at44/icA0mx5zM28DIfF1feLhSFyXy4hfVIR5Ytdpg1HV12XwooX/Xz+2iouvtxNxaEwzzzZzPd+lI7DKdckYSwJRlE22KF1jpw8ij77JSIBPyiFxeGMZnndmDEcJXrXqKuh7iI5MjzM/u1yjq7cTt22Clx5acx+YDmuvFNb8l0IkficVg+zMi8kFGkBNDaLfKJvdo6d5RRh3nDkcikefjybFS/6eH9dkPQMxZ+eymbseFmRThhPglGMnEz3SPyHmcJRtLpGXQ1lQLJ5HBReMfOUzyOESE42i9y0xpMT4ahujJPSc8wXjhxOxeVXebj8Ko/RpQjRiQSjGBrKPY+iyYxdo2Rmto1ihRBCmF/H5bzNGI6EMCNZrjvGTnVJ71gx4xLeZlq+24iQYsRS30IIIeLXieW8c1bbObym0PRLeQthNOkYGSBeOkfCnKK5UEOy0VpTu/kAlWv2oqwWcs+dSPqU4UaXJYQQQ6Zj5+gwhaxqW7NLOkfmtPWDIC897yMY1Jx7nouzFjlloaQYko6RQeKhcyRdI/OTLtLJ01qz9/5V7PnNv3HlpWHP8LDjRy9S+td3jS5NCCGGVMeNYKVzZF5/erCRr3yhGm+ahRHFNn7xo3q+981atDb/PWOiGFDHSCl1H3B62/Pv0lq/1OGxy4A7277dDHy5bXdZ0Y946BzJfKPexWoRhoGQLtLgNew4TPX6fcx58DPYPK0fAgy7aBobbvwT+Usn4x6eYXCFojdyTRJi8Lp2jh4ZmQtzpHNkFkcOh/nDfQ08tzKP/ILWLVquucHDJy+pZP2aIPPPiu0qxcmq346RUmoZMEZrPR+4HPitUsrS9pgC7gUuaXs8D7gwivUmnHjoHJmNdI36Jl2kgalat4+8pVPaQxGAI9NDzsJxVK3fZ2Bloi9yTRLi5HXsHDnLHDyycaF0jkxi7eoAZy92tYciALfbwuUfd/Pmv/0GVpZcBtIxmg2sBNBaVyiljgHFQCngBO7WWle1PbceiP5OpAnG7J2j3rpGwfIj1L/6Ni3lR7Hl55C27Cyco4sMqNA4ZuoadSVdpL5ZXHZaqpu6HQ81B7G6ZGliE5NrkuiRL9zA/qYt1LUcxWlJodgzlRznCKPLMp2OnaPASFh1dAIgnSOjudyKxvruze3GRo3bbd57xEQzkDlGDsDX4fsmWi8+aK39WuvHAJRSNwMjgFe6nkApdZNSaoNSakPI1/1GRJi/c9R1vlFg30GO/uwh7AW5ZC2/HOfYERz71aP4tu81qELRlxNdJLOGOCPknTuRo6t20FRW1X6sfudhajaWkbNwnIGViX4M6TUpGPF1fVjEoeZQPeuqnsGqbEzynkWecxRb696gvFl+54n4sGiJkw82t7BhfaD92MGyEM8+1cwlV7gNrCy5DKRj1AJ03G3UDbT/rSml8oCHaR3LfbHWOtT1BFrrB4EHAdK8RTq9JND1Kd3UjU2+sZRm7xx1VPvPlWRcvQzvojMAcI4txpaVTu0/VuC+69aov7
+ZNnyNN9JJauUensG4Ly1m81eeIGN6ETocoX7HYSbdcSH2NLkImdiQXpPS7Xnm/lRKDMj+ps0UuicxwTsfgExHAWn2XDbWvMhw93gsShqHwtxSUi384v5Mbv9iDZOn2vF4FOvXBvj6d9MYN1FGMcTKQILRJuAW4NdKqWG0jtk+CNA2rvsfwNe01puGsrCewlMyhCUzh6OOQ+oCe8vI+dK1nR53z5rC8d8+jg6FULbkWQnezMPp+pPsm8fmnz+FrHmjqdlYhrJamPy9SzvNORKmZMg1SZhbbcsRpqYv7nQszZ6DRdloDteTass0qDIhBu7Ms528tjaPNW8FCAQ0d/80ncwsCfWx1O/dq9Z6hVLqUqXUO4AduA1YrpQqAF4DpgO/7LDG+l1a69XRKLZrWErUoGTmcHSCNcNL6Egl1rHF7cdCx6uxpLjBGpt/xNI1Gjpdg10yBSV7mpu8xcnz/zfememaJMzDafHQHKoj3Z7XfiwUCdIS8eOwSAdYxA+Px8L5F8nPrFEG9LG+1rqvsVGGfQzTMSglWkgyazg60TXyLplP9d/+Re5XbsCWkUa4sYnqvzyHd/E82YgsAchwO2FmZr0mCeMUe6axs+EdvPYcUm2ZhCItbK9/m1znSBwWV/8nEEIIBhiM4kEihiQzhyPveQsINzRx+L9/hTUrnXB1HSkLZ5N+xdKY1mKWrlE8D6frj9lCUuB4A6V/eZeaDfuxuh3kX3AaIz4xF2WV/aqFSFZ5rlH4wg2sr3oWh8VNINJMrrOY09LONbo0keDq6yL87tcN/PtVPxYFyy51c9NtqaSkyDUpHiVMMOroREhKlIBkRspiIfOqZaRfvIhQZQ3WrHSsKR6jyxJRZnRIaqn3sfn2J8lbMokZv7yGljof+x99h+ayKibdcVHM6xFCmMfIlGkUeSbTHKrFYfHgtMo1qTcdl+wuPSeXVW3HZcnuwWlp0dx4fRWTTrPzwKNZRCLw0G8bueWz1Tz692wsFvN9uC36lpDB6IRECEhm7hoFioNY3C4cI4YZXY4wgBHzko68spWMaUWM+a+zgdaV5ab+4ErWL38I36Ea3IUyikqIZGZVNrz2HKPLiAsSjk7dGyv9OJyKe36W3j6N4Ge/zuCqi46zfk2QM8+O3/vPZJUUfb70kkCPq9zFC7PvcWS00vJco0sAzDHMzEgd90qK1rDCxpJjZM4Z2emY1WUnfWohjSXGD6kUQoh44thZTvq+ADmr7RxeU8iqoxN4um6O0WXFjR3bWlhwjrPT3GqLRXHmWU52bm8xsDJxspIiGJ0QzwHJjOGo66avQnQUjaDkKkinYc/RTsd0OELj3uO4hqUPyXsIIUQyORGOvGVawtEgjSi2sWNr9wC0fWsLhSNkme14lFTB6IR4DkiiZ2bpGoneDUVQGnbJdI6t2sHRf+9AhyO0NPjZe/8qnPlpeMfnD3HFQgiRHCQcnZxll7rYvrWFv/yxkYBf4/NF+MN9DRw7Eubc82Q1xHiU0HOM+pNeEoir+UdmnG/UcdNXkdir0w21nv479Tcc0ZWfxtQfXMne373J7ntXohRkLxzHaXdfHq0yhRAiKXScc3SYQh4ZmQtzZM5RX1JSLDz8eDb/+/06fvPzBgDOWODgj3/LxuEw1/2aGJikDkYQfws0mDEcCTFUBhKW0qYMZ/Z91xNq9KPsVqxOe6zKE0KIhNYxHIGDR1go4agfo8bYeOhv2TQ1RkAhy3THuaQPRifEW/dIdGeWPY3E0OqtA5fsi10IIUQ09BSOVhVN4IEJTxhdmqmlpEogSgQSjDqIl3Bktq5RPA6nCxxvoOrdErBYyFk4FkdmitEliUGSwCSESBTBiJ9j/n2EdZhc50g8tjRD6+kajkrJ5Rauk3AkEp4Eoy4kHCW+ihe2sP/RNWSfORYdjrD/j6sZd+sS8s+b0uPzm8qqqN1Uhi3VRfbCcdg8fa/GJ/OMjNXff3sJTkIIMznm389HdavIdhZhUw72Nr7PqJQZjE
3tefEDX7iB44EyFBbyXaNxWNxRqau3cLQkf7cMrRMJS4JRD+Jt3pEZmKVr1N9wuubyGkr/vJbZDyzH3ba8c1NZFVtuf5KMWcU4s1Pbn6u1puR3b3LsjZ3kLBhHsLqJkj+8yWl3X0H61MKo/38R0TGY0CohSggRTaFIkI/qVjE361LS7a0ra45Pncfaqn+Q4xxBuj2v0/NLmz6gpHEDea7RhHWI3Q3vclr6uRS4xkalvp7CkWwEKxKZBKM+mL17JF2jwatcvZu8JZPaQxFAyshssuePofKdvRReMbP9eNW7JdRsKuOMRz+PLbX156Bq/T52/O+LzHvsCyirjCdOdEPd+XtrSM8mhIh3xwNlZDiGtYciAKfVwwjPFA779nQKRg0tlexv2szCnGtwWVs/xKtvqeT96ufJchTisERneWgJRyKZyJ1dP2S/o4GLhw1fdSSCsnXfdM1it6LDkU7Hjr+5i6KPzW4PRQDZ88Zgz/BQt60i6rUKIYRIbJoIlh5uxSxY0XTe2P2wv4RC96T2UASQZs8hy1HI8UBpVOvsuM+Rs8z813ohTpYEowEwczjylun+n5Rk+trsNXvBOI6t2kGwtrn9WOB4A8ff2UP2mZ2HIuhQBGXvJUSFwn3WIEOwhBBC9CfHWUxV8BBNodr2Y6FIkHLfDvJdozs9VxNB9RSilJWIjnQ7LoQYPBlKN0BmH1YnBiZ1TC7DLp7Oxi/+lfzzp6DDEY6+vp3i6+d1Gl4HrRuHHnp2M3nnTsTiaP2nUr/jML6KWpljJIQQ4pQ5LG4meRewruqfDHdPwKbsVPh3k+ccRaZ9eKfn5jtHs6V2JaNSZmC3tN6PNIfqqQwcYKJ3gRHlC5FwJBgNglnDkZnmGpllEYa+jPrMAnIWjuP4O3uwOKzM+MUnSRmZ3e15eedOpGrtXjZ+6TFyz51IsKqJ46t3MfFbF7YHJSESmVLqPuB0Wq8Vd2mtX+ryeCbwGrBca73LgBKFiHtFnslkOoZzxL+HsA4zI/18MhwF3Z6X4SigwDWWNZV/Z7h7IhEdosK/m/HeebissuWESB5KqeXAJVrr67ocnwvcD4SBw8CngBbgt8A0WifK/VBr/UJv55a7u0GScJQYUsflkTour8/nKKuFyd+7lNrNB6jZWIarII05v78BV56x+0sIEQtKqWXAGK31fKXUcGCtUuoVrVvH7CilvgncBIwwsk4hEkGKLZ2xqXP7fd6ktAUUuMZyLFCKzeLgjKwrSbVlxqBCIYynlMoCVgLDgTd7eMp9wG1a6/eVUr8BbgQOAila67OUUnnAe0CvwUjmGJ0EM885Eq36mmc0GEopMmePZMwXzqH42jMGFYpknpGIAzlKqQ0d/tzU4bHZtF6A0FpXAMeA4hMPaq1/obWeAKyLacVCJLkMRz4TvPMYl3q6hCKRaPq6JqG1rtZazwGu7eX1Y7TW77d9vYLWEQ+HgV+2HWsAuk8e70A6RgnELF2jeBhOJ0QiUP4gjp3lp3KKSq11bx9TO4DqDt830ToMQQiR5ErLcyF/t9FlCBM6xetSX9ekgei4CkkT4DwRlNq6TX8CftjXCSQYnSSzDqnrKFhdRcOHG4kEAqSMn4R79DiUMj44CSHiQgvQcWMUNyDtcnHSAuFmKvy7CISbyXIMJ9c5EqVk4Eq86Lqf0SMshDmyl5EwlY6/UNqvWUqpi4A7gXu01isHegIxSGYeUlf/4SYOPPj/CDc3YXE6OfriMxx99gl0JHmW9Byq4XRCJKlNwFIApdQwII/WsdpCDFp18BDvVD5JU6gWh8VNSdNGNtS8SFiHjC5NDELX/Ywe2biQp+vmGF2WECfsV0rNbvv6AmCDUmoO8FXg/P5CEUjH6JSZrXPkLdPU5gc49tIzjPj8rTjzhwGQufBcDjz4a5p2bSd18tSo1yHD6YSIb1rrFUqpS5VS7wB24DZguVKqQOYYhUkAABKoSURBVGv9M4PLE3FEa81HdW8wPWMpuc6RAIxOmcmm2p
c52LyNUSkzDK5QDEZPnaNVRRN4YMITRpcmkpBS6jPAievSrcB9SqkWoBL4LvB/wEjg5bZRUz6t9cW9nU+CUQLy7d+La3hxeygCsNgdZMw9k8adW2MSjESrlUcmcX7BTqPLEOKkaK1vHcBzFseiFhG/GkNVKBQ5jva1O1DKQrFnGvubNkswikNdw1EpudzCdRKORExord8C3mr7+s8djm8AFnZ5+lcHc24ZSjcEzDakzlNlQYdauh2PhEIoq/yVCyGEiB2lLER0uNvxiA6j5DYkbnUdVldansstu6+ToXUirknHaIgYNaSuuaqCIx+uwld1CIc3i/ypi/AOG0dwzZM079uDZ8x4AMJNjdS99w75V/a2wuHQk+F0QgiRXJpD9exr2kRN8DBOq5sR7qkUuMZitzg55NtJkWcyAGHdwv6mzRS5pxhcsTgVPXWOVrU9JosyiHgU82Ck/HKjPFSaK8vZ8+qDFMxYyrCZ59FcWU7p6icpmnc5wz5xA4ef+guu4tFYPR6adm4j/YyFeEaPM7rsmCotz2VU0XGjyxBCiITnCzewvvqfFLmnMDPjfJrD9exuWIc/0sD09PPYWPMSh/178FjTOB4oI9s5gkK37PcW7yQciURiSMeor/XNg5OKYljJ0Ip116hi82sMn30huZMXAODOLMDhzaLs7b9z2ujv4PranTTu3EokECDrnPNwZOXErDYhhBDJpbTpA4a5JjDeewYAXnsOafZc1lY+xYjcqZyT+ymOBUoJhJsp9kzFa5drUqKQcCQShemG0nUMTfEckmKhufIgxWd+rNOx1PzRhPyNhIM+LM4U0macyj5ZYijIAgxCiGRQ13KMCd75nY65rV5cVi9N4RrS7XkUuMYaVJ2INglHIhGYLhh1FI8hKZZdI3tKBr7aozhSM9uPtTTVgrJgtZtjCXGZZySEEMnBZU2lMVRNlmN4+7GwbsEfbsBpSTGwMhErEo5EvIub5WD6Gn6XrPJPO5vydc/jrz0GQEtzPaVvP0XuxPkoixVvmTa4QiGEEMlipGcaJY0bqQ0eBaAlEmBb3WqynSNwWSUYJYueVqtbdXSCrFYn4oKpO0ZdnQhHZu8exaprlDV2Ni2+Rna99FssNjvhoJ/sCWcwfM6FUX/veCILMAghRPRlOoYxybuALbWvApqQbiHPOZqpabLVVbKRzpGIV6YMRsGIj5ZIAI81DaW6N7XiJSDFQv7Uc8idvICW5jpsrlTTDKETQohE0RIJEIg047GmYVFWo8sxtWHu8RS4xuILN2K3OLFb5JqUrHoKR+TvNrosIfpkqmAUjPjZVvcGVcEKbMoBaCalLex1sqZjZ7lpw1Es5xpZrDac3uweH/OWaRpGqpjUIYQQiSSsQ+yof5sj/hIcFhch3cK41NMp9kw1ujRTU8qCx5ZmdBlCCDFopgpGH9SuJMWWweK8T2NVdmqCh9lcuwK31Uu6Pa/H15g5HIlWsgCDECIe7ax/h5ZIgEW5y7FbXDSGqtlU8wpOi4d81xijyxNCCDHETLP4QlOolsZQFZO8C7AqO9A6Xnl0ykwONm/r87VmXZghvSRgdAnCJFYekU0MhYgnoUiQw/69nJZ+LnaLC4BUWxYTvPMpa/7I4OqEEEJEg2mCkT/chMea3m38doo1E3+4qd/XmzUcCSGEiD8tOoBV2XG0haITUqyZBAZwTRJCCBF/TBOMvPZsGkLV3ULQscB+Mhz5AzqHhKOeybLdrSvTie589S08f89H/HD+q9w9+xWe/MYmag41G12WEIY7se9OXcuxTsdbr0kFRpQkRMILBjT3/rSes2cdYcaYCm66oYod21qMLkskEdMEI4fFxaiU6Wyo+RdH/CXUtRxlR/07VAXLBzXR1WzhSIbTCbPSWvPXL7+P1pqvvrCIO948j/wJXh7+3DoCTSGjyxPCUBZlYYJ3HptrVnDIt4v6luPsbdxAWfOHjE6ZZXR5QsSl/vYz+v63aynZE+KxZ3NYt62A85a5uGl5FR
Xlck0SsWGqxRfGpszFY83gQPNW/n97dx9b113fcfz9tX397Dh27DwnLimhGQt9dJqQFrU0Wls1iwahopQ+TBVbV7UgmEBoQ7BMndhENQ2V0IYFJLa0qGVrKSqtCLRrm46UBCel0KwEKHEckubJcRInfrbvd3+cm+Ymse+9TnzvOT7385KudO89J+d8fz7275vvPb/7+w0l+2ksn8vSxtWUl1SN6ziakCF6NAFD9LS3ddF/Yoi/WPMBzIKZCz/8NwvZ/5tufvXcPq6+rSXkCEXCNadqERUl1XT0vsnukZPUJ6aztPGj1JRNDTs0kUlhPOsZ7d0zzOZNA7y4ZQaVlUFO+vidNezpGOaJDb18/kua6VDyL1KFkZkxu2ohs6sWXvCxVByJZNbZfpL5lze8WxSd0nJlA4d3nQwpKpFoaaqYT1PF/LDDEJm0ci2OdrcPs+j9Ze8WRadc0VrOD76vId5SGJEZShdnGk4nUdS8oJY9bxzF/czvoHW8fpTmi2tDikpEROKmfOde6ncNUNfhVHSUs3tv8znD6t6zoIydbw3T15c849/+ctsgC96bKHTIUqRiXRhF7ftGYdIEDHK2i1obqaxL8MM1b3L8QB+9xwd5ad3veeet41y2ck7Y4YmISIxkK47mzCvj2usq+MIDR+loH6anJ8mTj/Xw7NN93H53dcjRS7GI1FA6KR7DR46R7B8gMasZK4l1fR5ZZsZTG8r5+kNHWLt6PyODSRZdP4NP/ccyKmrUNYhI8RgY6WUw2UdN2dRzlg2RiZNtWN2DD01l3cMnuOvWTo4fS7L8QxWsf3was+cqJ0lhxP43Td81ipbhI8fo/M5/MbT3ICXVlfjwCI13fYTqy7UAaqHdOuV1oIQ1X61nzVdPv/9Utz6ZE5HiMJQcYMfxl+ka3Ed5SRVDPsD7apcxt/pPwg4ttrIVR5/94hQ++0VNtCDhiH1hFBX1fxjg+MUVYYcRKnfn0MP/SXXrYmZ8/lNYWSn9O3dx+JHvkfjSfSRmxXutoRcOLOLPZu4MO4ysgoLptKe6rwwpEhGR/Npx/CXKS6q4fvrdlFqCE0NH2H70eapK65hWoQ9VRYpNUYxh0neNomHg7Q4YHqF+1Q1YWTBUoXLRAmqvu5qTm9pCjq64nF38ZNs3/SEiEgf9IyfpGtzPoinXUmrBl/vrEtO4uLaVPb07Qo5ORMKgO0ZSMCPHTlA2o+mc6aETM6bR/9bbeT//7r3NXDT3cN7PE3UXWtzojpKIxMFAso/K0hpK7cz/CtWU1bOvT9NDixQjFUZSMHWVC+j63Q8Y6emltCb4Hou707v9/6i69JKQo5PzpUJJRCaj2rIG+kd66Bk+dsaivQf725mamBliZCISlqIpjDQJQzBl94kWy75jniTqG6j9UCuHHvoO9atuoKSumpOvbmO48yg1y68ILa5iUoihcKOdQ8WSiERNqZXx3tolbDv6HAtrl1BVWs/B/j9wsH8Xy6Z9LOzwRCQERVMYSTRMve0Wen7+Bide3kKyb4CqSy+h8ZOrKKks7okp4k7FkohEUUvNB6gum8Ke3h0MjPTSUD6bZdNWU1laE3ZoIhICFUYFpJnpgrVzapdfQa3uEBW9se5eqWASkUJqrmihuaIl7DCKylhTdt9avz3kyKTYqTASibgje3rY+mQHne0nmX5xHUs/2ULD7PiuNaSCSUQkugZGetnTu4PjQ4eoKq1jfvVi6hLTxn2c0Yqj+7mdG2b8TgWShEaFkUiE7dtxjA33t3HV6nlcfVsLu7d38a1PbOaeby9l5iXFtQBepu9HqWgSEcm/3uFuftH1DM0VFzG/ejEnhjtpO/osl9avoKli/riPl22xV5FCy6kwMrO1wJLU/mvc/fm0ba3AN4ERYD9wh7sP5CHWC6YJGGSy+cnXd3Lj5xZx1ep5ACy6fgYNc6p44eHfctejS0KOLjqyTSqhwun8RLXvj2pcInG3q2c7c6oWsbBuKQDTuYgpiWZ2ntjMNeXzzl
mOIxcqjiRXWfr+FcCDQCnwa+B+dx82s1uAf0rttsXdH8h0jqyFkZndBCxw92VmNht4zcx+7O7J1C5rgc+4e5uZfQP4K+CRcbVURM6RTDrtbUe4e92ZBdBlK+ew8V9/E1JUk9N4Z+NTIRXdvj+qcYkUgyOD+7iqYeUZ7zWVz+fXIy8ymOyjovT8hnmrOJJsMvX9ZlYCrAeWununmT0J3GFmzwBfAa5x934z+5yZNbj70bHOk8sdoyuBFwDc/R0zOwTMB3anti9w97bU843Ax8+jvSJyFjOorEvQfWiAxrmnk033oX6q6stDjCz+8jWt+ZfzctS8iWrfH9W4RGIvYRUMjPRQW9bw7nvDPkiSJKWWuKBjqziSLDL1/U1At7t3pvZ9FVgOdANvA0+YWRPweKaiCHIrjMqBrrTXPQS/tackM2wDwMzuBe5NvRzYeODRHTmcNz8OhHbmwCs0AZ1Z94uvUNvfEdaJUzadR/v/7eaXR33/y4ufH/X9TCLwH/Ni//2f0JWMu4cP/2TjgUebLuAQlWa2Le31endfn3p+wX1/nsQrJ4Wv2P8mQT+DcbW/7eizo77/4qFvX3gkB4BXTr/sADYBay78yJkU+/WPUl4635x0GJhiZu8Hfg+sAg4Cs4BrCIbf9QM/M7PN7j5mn59LYTQEVKa9rgLSx2uXZNgGQKpR6wHMbJu7t+Zw3lhS+9V+tb+42z+Rx3P3myfyeGe54L4/T5STJlCxtx/0M1D71f6JPF4e89KYfb+7u5ndRTCUGmALQY3TB7zs7kcAzOynwBXAmIVRyVgb0rwOrEgdcBYwHfhj2vZ2Mzs1IP9GYEJ/wCIiEoqo9v1RjUtERPInW9+/ArjF3VcAMwiG3f0vsMTMys2sFPggGYoiyOGOkbtvNLM/N7OfAQngM8CdZjbT3b8GfBpYa2ZDBLci/3587RQRkaiJat8f1bhERCR/cuj7O4CtZtYNbAX+J3Un6d+BXxB83+g5d/9lpvPkNF23u386w7ZtBOP3crU++y6xpvYXN7W/uE2q9k9w3z9hlJMmVLG3H/QzUPuL26Rpf5a+fwOwYZT3H2EcM5Oau59fdCIiIiIiIjGRy3eMREREREREYk2FkYiIiIiIFL28FkZmttbMtpjZNjNbeda21tS2zWb2lJkVag2MgsnS/lWpbVvMbF1q1d5YydT+tH1Wm9l3Cx1bIWS5/tPMbKOZbTWz18zsPWHFmS9Z2n9dqu2bzOyx1GwxsWNmd5rZE6O8H/v+L4qUk5STlJOUk5STlJMycve8PICbgOdTz2cTrExbkrb958CS1PNvAA/kK5YwHpnaDxjBSrzTUq+fJphiMPS4C3X9U+9PJVid+Lthx1vo9gOPA8tTz5cDN4Udc4HbvxX407SfxcqwY57g9jcC24H9wBOjbI91/xfFh3KScpJyknJShvYrJ8W4/xvPI5+fCF1JMIc47v4OcAiYn7Z9gbu3pZ5vJFiVNk4ytb8C+EdPLThFMIVg3D6dyHb9Af4Z+JcCx1Uo2dq/GPiYmb0C3EvQKcVJtvZ3AU2pT6UbgSPnHGESc/cud78K+MQYu8S9/4si5STlJOUklJOUk0YV9/4vZ/ksjMoJVpw9pYeg8z0lmWFbHIzZfnfvd/fHAczsPmAe8OOCR5hfGa+/mX2YoGN6q8BxFcqY7TezauAyYLO7X0/wSe1XCh1gnmX7+18LPAf8FqgDflW40CIh7v1fFCknKScpJ52mnKSclC7u/V/O8lkYDQGVaa+rgIExzn32tjjI2H4zm25mPyK4pXuLuw8XOL58G7P9ZlYFfIH4fjIHma//ADAIPJN6/TTBp1lxkun61wDrgPe5+0LgNeDvCh5huOLe/0WRcpJyknLSacpJyknp4t7/5SyfhdHrwAoAM5sFTAf+mLa93cxO/eHdCGzLYyxhGLP9qVu1/w2scfd/cPfB0KLMn0zX/1JgDsHt2ieBm83sb8MIMo/GbL+7jxCswvzB1L7XAm+GEGM+Zb
r+nnocT70+RPyG7WQT9/4vipSTlJOUk1BOUk4aVdz7v5zldYFXM/smcDmQAB4EmoCZ7v41M2sFHiao4juBO9w9VhXqWO0Hfgq8BLyRtvsad3+14EHmUabrn7ZPC8HY9nvCiTJ/svz+XwI8BvQSjG3+67Tx/bGQpf33AA8QtL8buNvdu0ILNk/M7DrgPne/3cz+kiLq/6JIOUk5CeUk5STlJOWkDPJaGImIiIiIiEwGsVunQEREREREZLxUGImIiIiISNFTYSQiIiIiIkVPhZGIiIiIiBQ9FUYiIiIiIlL0VBiJiIiIiEjRU2EkIiIiIiJF7/8Bbn+OD/9WQlYAAAAASUVORK5CYII=\n",
"text/plain": [
"<Figure size 864x360 with 4 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
   "source": [
    "# Evaluate the reference fields on a uniform grid, draw random training\n",
    "# locations, and plot activation times and conduction velocity side by side.\n",
    "N_grid = 50 \n",
    "# Column vectors (N_grid, 1) spanning the unit interval\n",
    "x = y = np.linspace(0,1,N_grid)[:,None]\n",
    "\n",
    "N_train = 50\n",
    "\n",
    "# Flatten the grid to (N_grid**2, 1) columns for pointwise evaluation\n",
    "X_m, Y_m = np.meshgrid(x,y)\n",
    "X = X_m.flatten()[:,None]\n",
    "Y = Y_m.flatten()[:,None]\n",
    "# `exact`/`CVexact` are defined elsewhere in the notebook -- presumably the\n",
    "# analytical activation-time and conduction-velocity fields; confirm there\n",
    "T = exact(X,Y)\n",
    "CV = CVexact(X,Y)\n",
    "\n",
    "# Training points: `lhs(2, N_train)` -- presumably Latin hypercube samples\n",
    "# in [0,1]^2 (e.g. pyDOE); verify against the import cell\n",
    "X_train_all = lhs(2, N_train)\n",
    "X_train = X_train_all[:,:1]\n",
    "Y_train = X_train_all[:,1:]\n",
    "T_train = exact(X_train, Y_train)\n",
    "\n",
    "fig = plt.figure()\n",
    "fig.set_size_inches((12,5))\n",
    "# Left panel: exact activation times with training points overlaid\n",
    "plt.subplot(121)\n",
    "plt.contourf(X_m, Y_m, T.reshape(X_m.shape))\n",
    "plt.colorbar()\n",
    "plt.scatter(X_train, Y_train, facecolors = 'none', edgecolor = 'k') \n",
    "plt.xlim([0,1])\n",
    "plt.ylim([0,1])\n",
    "plt.title('exact activation times')\n",
    "\n",
    "# Right panel: exact conduction velocity with the same training points\n",
    "plt.subplot(122)\n",
    "plt.contourf(X_m, Y_m, CV.reshape(X_m.shape))\n",
    "plt.colorbar()\n",
    "plt.scatter(X_train, Y_train, facecolors = 'none', edgecolor = 'k') \n",
    "plt.xlim([0,1])\n",
    "plt.ylim([0,1])\n",
    "plt.title('exact conduction velocity')\n",
    " \n",
    "plt.tight_layout()\n"
   ]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
    "We define the architecture of the networks, the collocation points for training, and some parameters:"
]
},
{
"cell_type": "code",
"execution_count": 20,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Device mapping:\n",
"/job:localhost/replica:0/task:0/device:XLA_CPU:0 -> device: XLA_CPU device\n",
"\n"
]
}
],
   "source": [
    "# Layer widths for the two networks: 2 inputs (x, y) -> 1 output each.\n",
    "# `layers` parameterizes the activation-time network, `CVlayers` the\n",
    "# conduction-velocity network.\n",
    "layers = [2,20,20,20,20,20,1]\n",
    "CVlayers = [2,5,5,5,5,1]\n",
    "\n",
    "# collocation points\n",
    "# The PDE residual is enforced on the full evaluation grid from above\n",
    "X_pde = X\n",
    "Y_pde = Y\n",
    "\n",
    "# maximum value for the conduction velocity\n",
    "CVmax = 1.5\n",
    "\n",
    "# `Eikonal2DnetCV2` is defined elsewhere in the notebook; `alpha`/`alphaL2`\n",
    "# are presumably regularization weights -- confirm in the class definition\n",
    "model = Eikonal2DnetCV2(X_pde, Y_pde, X_train, Y_train, T_train, \n",
    "                        layers,CVlayers, C = CVmax, alpha = 1e-7, alphaL2 = 1e-9)\n",
    "                        "
   ]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
    "We first train the model with ADAM and then refine the result with BFGS."
]
},
{
"cell_type": "code",
"execution_count": 18,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 0, Loss: 2.717e+00, C: 4.482, Time: 1.27\n",
"It: 10, Loss: 8.794e-01, C: 4.482, Time: 0.03\n",
"It: 20, Loss: 4.381e-01, C: 4.482, Time: 0.03\n",
"It: 30, Loss: 4.092e-01, C: 4.482, Time: 0.03\n",
"It: 40, Loss: 3.336e-01, C: 4.482, Time: 0.03\n",
"It: 50, Loss: 2.726e-01, C: 4.482, Time: 0.03\n",
"It: 60, Loss: 2.491e-01, C: 4.482, Time: 0.03\n",
"It: 70, Loss: 2.188e-01, C: 4.482, Time: 0.03\n",
"It: 80, Loss: 2.061e-01, C: 4.482, Time: 0.03\n",
"It: 90, Loss: 1.908e-01, C: 4.482, Time: 0.03\n",
"It: 100, Loss: 1.833e-01, C: 4.482, Time: 0.04\n",
"It: 110, Loss: 1.739e-01, C: 4.482, Time: 0.03\n",
"It: 120, Loss: 1.616e-01, C: 4.482, Time: 0.03\n",
"It: 130, Loss: 1.529e-01, C: 4.482, Time: 0.03\n",
"It: 140, Loss: 1.451e-01, C: 4.482, Time: 0.03\n",
"It: 150, Loss: 1.397e-01, C: 4.482, Time: 0.03\n",
"It: 160, Loss: 1.338e-01, C: 4.482, Time: 0.03\n",
"It: 170, Loss: 1.292e-01, C: 4.482, Time: 0.04\n",
"It: 180, Loss: 1.251e-01, C: 4.482, Time: 0.03\n",
"It: 190, Loss: 1.199e-01, C: 4.482, Time: 0.03\n",
"It: 200, Loss: 1.160e-01, C: 4.482, Time: 0.03\n",
"It: 210, Loss: 1.115e-01, C: 4.482, Time: 0.03\n",
"It: 220, Loss: 1.080e-01, C: 4.482, Time: 0.03\n",
"It: 230, Loss: 1.047e-01, C: 4.482, Time: 0.03\n",
"It: 240, Loss: 1.018e-01, C: 4.482, Time: 0.03\n",
"It: 250, Loss: 1.006e-01, C: 4.482, Time: 0.03\n",
"It: 260, Loss: 9.785e-02, C: 4.482, Time: 0.03\n",
"It: 270, Loss: 9.561e-02, C: 4.482, Time: 0.03\n",
"It: 280, Loss: 9.322e-02, C: 4.482, Time: 0.03\n",
"It: 290, Loss: 9.166e-02, C: 4.482, Time: 0.03\n",
"It: 300, Loss: 8.936e-02, C: 4.482, Time: 0.03\n",
"It: 310, Loss: 8.796e-02, C: 4.482, Time: 0.03\n",
"It: 320, Loss: 8.643e-02, C: 4.482, Time: 0.03\n",
"It: 330, Loss: 8.461e-02, C: 4.482, Time: 0.03\n",
"It: 340, Loss: 8.307e-02, C: 4.482, Time: 0.03\n",
"It: 350, Loss: 8.160e-02, C: 4.482, Time: 0.03\n",
"It: 360, Loss: 7.980e-02, C: 4.482, Time: 0.03\n",
"It: 370, Loss: 7.868e-02, C: 4.482, Time: 0.03\n",
"It: 380, Loss: 7.740e-02, C: 4.482, Time: 0.03\n",
"It: 390, Loss: 7.603e-02, C: 4.482, Time: 0.03\n",
"It: 400, Loss: 7.473e-02, C: 4.482, Time: 0.03\n",
"It: 410, Loss: 7.343e-02, C: 4.482, Time: 0.03\n",
"It: 420, Loss: 7.179e-02, C: 4.482, Time: 0.03\n",
"It: 430, Loss: 7.128e-02, C: 4.482, Time: 0.03\n",
"It: 440, Loss: 6.968e-02, C: 4.482, Time: 0.03\n",
"It: 450, Loss: 6.883e-02, C: 4.482, Time: 0.03\n",
"It: 460, Loss: 6.732e-02, C: 4.482, Time: 0.03\n",
"It: 470, Loss: 6.662e-02, C: 4.482, Time: 0.03\n",
"It: 480, Loss: 6.569e-02, C: 4.482, Time: 0.03\n",
"It: 490, Loss: 6.484e-02, C: 4.482, Time: 0.03\n",
"It: 500, Loss: 6.414e-02, C: 4.482, Time: 0.03\n",
"It: 510, Loss: 6.355e-02, C: 4.482, Time: 0.03\n",
"It: 520, Loss: 6.248e-02, C: 4.482, Time: 0.03\n",
"It: 530, Loss: 6.183e-02, C: 4.482, Time: 0.03\n",
"It: 540, Loss: 6.079e-02, C: 4.482, Time: 0.03\n",
"It: 550, Loss: 6.031e-02, C: 4.482, Time: 0.03\n",
"It: 560, Loss: 5.968e-02, C: 4.482, Time: 0.03\n",
"It: 570, Loss: 5.936e-02, C: 4.482, Time: 0.03\n",
"It: 580, Loss: 5.772e-02, C: 4.482, Time: 0.03\n",
"It: 590, Loss: 5.758e-02, C: 4.482, Time: 0.03\n",
"It: 600, Loss: 5.650e-02, C: 4.482, Time: 0.03\n",
"It: 610, Loss: 5.607e-02, C: 4.482, Time: 0.03\n",
"It: 620, Loss: 5.533e-02, C: 4.482, Time: 0.03\n",
"It: 630, Loss: 5.446e-02, C: 4.482, Time: 0.03\n",
"It: 640, Loss: 5.388e-02, C: 4.482, Time: 0.03\n",
"It: 650, Loss: 5.273e-02, C: 4.482, Time: 0.03\n",
"It: 660, Loss: 5.198e-02, C: 4.482, Time: 0.03\n",
"It: 670, Loss: 5.142e-02, C: 4.482, Time: 0.03\n",
"It: 680, Loss: 4.991e-02, C: 4.482, Time: 0.03\n",
"It: 690, Loss: 4.906e-02, C: 4.482, Time: 0.03\n",
"It: 700, Loss: 4.742e-02, C: 4.482, Time: 0.03\n",
"It: 710, Loss: 4.627e-02, C: 4.482, Time: 0.03\n",
"It: 720, Loss: 4.478e-02, C: 4.482, Time: 0.03\n",
"It: 730, Loss: 4.359e-02, C: 4.482, Time: 0.03\n",
"It: 740, Loss: 4.229e-02, C: 4.482, Time: 0.03\n",
"It: 750, Loss: 4.101e-02, C: 4.482, Time: 0.03\n",
"It: 760, Loss: 3.985e-02, C: 4.482, Time: 0.03\n",
"It: 770, Loss: 3.900e-02, C: 4.482, Time: 0.03\n",
"It: 780, Loss: 3.881e-02, C: 4.482, Time: 0.03\n",
"It: 790, Loss: 3.781e-02, C: 4.482, Time: 0.03\n",
"It: 800, Loss: 3.711e-02, C: 4.482, Time: 0.03\n",
"It: 810, Loss: 3.697e-02, C: 4.482, Time: 0.03\n",
"It: 820, Loss: 3.724e-02, C: 4.482, Time: 0.03\n",
"It: 830, Loss: 3.689e-02, C: 4.482, Time: 0.03\n",
"It: 840, Loss: 3.616e-02, C: 4.482, Time: 0.03\n",
"It: 850, Loss: 3.596e-02, C: 4.482, Time: 0.03\n",
"It: 860, Loss: 3.646e-02, C: 4.482, Time: 0.03\n",
"It: 870, Loss: 3.595e-02, C: 4.482, Time: 0.03\n",
"It: 880, Loss: 3.576e-02, C: 4.482, Time: 0.03\n",
"It: 890, Loss: 3.539e-02, C: 4.482, Time: 0.03\n",
"It: 900, Loss: 3.516e-02, C: 4.482, Time: 0.03\n",
"It: 910, Loss: 3.538e-02, C: 4.482, Time: 0.03\n",
"It: 920, Loss: 3.462e-02, C: 4.482, Time: 0.03\n",
"It: 930, Loss: 3.559e-02, C: 4.482, Time: 0.03\n",
"It: 940, Loss: 3.487e-02, C: 4.482, Time: 0.03\n",
"It: 950, Loss: 3.500e-02, C: 4.482, Time: 0.03\n",
"It: 960, Loss: 3.495e-02, C: 4.482, Time: 0.03\n",
"It: 970, Loss: 3.467e-02, C: 4.482, Time: 0.03\n",
"It: 980, Loss: 3.465e-02, C: 4.482, Time: 0.03\n",
"It: 990, Loss: 3.492e-02, C: 4.482, Time: 0.03\n",
"It: 1000, Loss: 3.398e-02, C: 4.482, Time: 0.03\n",
"It: 1010, Loss: 3.442e-02, C: 4.482, Time: 0.03\n",
"It: 1020, Loss: 3.381e-02, C: 4.482, Time: 0.03\n",
"It: 1030, Loss: 3.443e-02, C: 4.482, Time: 0.03\n",
"It: 1040, Loss: 3.453e-02, C: 4.482, Time: 0.03\n",
"It: 1050, Loss: 3.395e-02, C: 4.482, Time: 0.03\n",
"It: 1060, Loss: 3.431e-02, C: 4.482, Time: 0.03\n",
"It: 1070, Loss: 3.326e-02, C: 4.482, Time: 0.03\n",
"It: 1080, Loss: 3.313e-02, C: 4.482, Time: 0.03\n",
"It: 1090, Loss: 3.327e-02, C: 4.482, Time: 0.03\n",
"It: 1100, Loss: 3.396e-02, C: 4.482, Time: 0.03\n",
"It: 1110, Loss: 3.321e-02, C: 4.482, Time: 0.03\n",
"It: 1120, Loss: 3.361e-02, C: 4.482, Time: 0.03\n",
"It: 1130, Loss: 3.357e-02, C: 4.482, Time: 0.03\n",
"It: 1140, Loss: 3.347e-02, C: 4.482, Time: 0.03\n",
"It: 1150, Loss: 3.356e-02, C: 4.482, Time: 0.03\n",
"It: 1160, Loss: 3.353e-02, C: 4.482, Time: 0.03\n",
"It: 1170, Loss: 3.253e-02, C: 4.482, Time: 0.03\n",
"It: 1180, Loss: 3.309e-02, C: 4.482, Time: 0.03\n",
"It: 1190, Loss: 3.309e-02, C: 4.482, Time: 0.03\n",
"It: 1200, Loss: 3.306e-02, C: 4.482, Time: 0.03\n",
"It: 1210, Loss: 3.244e-02, C: 4.482, Time: 0.03\n",
"It: 1220, Loss: 3.251e-02, C: 4.482, Time: 0.03\n",
"It: 1230, Loss: 3.280e-02, C: 4.482, Time: 0.03\n",
"It: 1240, Loss: 3.261e-02, C: 4.482, Time: 0.03\n",
"It: 1250, Loss: 3.303e-02, C: 4.482, Time: 0.03\n",
"It: 1260, Loss: 3.217e-02, C: 4.482, Time: 0.03\n",
"It: 1270, Loss: 3.304e-02, C: 4.482, Time: 0.03\n",
"It: 1280, Loss: 3.268e-02, C: 4.482, Time: 0.03\n",
"It: 1290, Loss: 3.215e-02, C: 4.482, Time: 0.03\n",
"It: 1300, Loss: 3.228e-02, C: 4.482, Time: 0.03\n",
"It: 1310, Loss: 3.196e-02, C: 4.482, Time: 0.03\n",
"It: 1320, Loss: 3.199e-02, C: 4.482, Time: 0.03\n",
"It: 1330, Loss: 3.212e-02, C: 4.482, Time: 0.03\n",
"It: 1340, Loss: 3.171e-02, C: 4.482, Time: 0.03\n",
"It: 1350, Loss: 3.174e-02, C: 4.482, Time: 0.03\n",
"It: 1360, Loss: 3.272e-02, C: 4.482, Time: 0.03\n",
"It: 1370, Loss: 3.204e-02, C: 4.482, Time: 0.03\n",
"It: 1380, Loss: 3.186e-02, C: 4.482, Time: 0.03\n",
"It: 1390, Loss: 3.165e-02, C: 4.482, Time: 0.03\n",
"It: 1400, Loss: 3.207e-02, C: 4.482, Time: 0.04\n",
"It: 1410, Loss: 3.225e-02, C: 4.482, Time: 0.03\n",
"It: 1420, Loss: 3.156e-02, C: 4.482, Time: 0.03\n",
"It: 1430, Loss: 3.190e-02, C: 4.482, Time: 0.03\n",
"It: 1440, Loss: 3.090e-02, C: 4.482, Time: 0.03\n",
"It: 1450, Loss: 3.138e-02, C: 4.482, Time: 0.03\n",
"It: 1460, Loss: 3.222e-02, C: 4.482, Time: 0.03\n",
"It: 1470, Loss: 3.126e-02, C: 4.482, Time: 0.04\n",
"It: 1480, Loss: 3.164e-02, C: 4.482, Time: 0.03\n",
"It: 1490, Loss: 3.165e-02, C: 4.482, Time: 0.03\n",
"It: 1500, Loss: 3.108e-02, C: 4.482, Time: 0.04\n",
"It: 1510, Loss: 3.136e-02, C: 4.482, Time: 0.03\n",
"It: 1520, Loss: 3.134e-02, C: 4.482, Time: 0.03\n",
"It: 1530, Loss: 3.085e-02, C: 4.482, Time: 0.03\n",
"It: 1540, Loss: 3.156e-02, C: 4.482, Time: 0.03\n",
"It: 1550, Loss: 3.062e-02, C: 4.482, Time: 0.03\n",
"It: 1560, Loss: 3.137e-02, C: 4.482, Time: 0.03\n",
"It: 1570, Loss: 3.095e-02, C: 4.482, Time: 0.03\n",
"It: 1580, Loss: 3.071e-02, C: 4.482, Time: 0.03\n",
"It: 1590, Loss: 3.107e-02, C: 4.482, Time: 0.04\n",
"It: 1600, Loss: 3.143e-02, C: 4.482, Time: 0.04\n",
"It: 1610, Loss: 3.056e-02, C: 4.482, Time: 0.04\n",
"It: 1620, Loss: 3.057e-02, C: 4.482, Time: 0.03\n",
"It: 1630, Loss: 3.004e-02, C: 4.482, Time: 0.03\n",
"It: 1640, Loss: 3.119e-02, C: 4.482, Time: 0.03\n",
"It: 1650, Loss: 2.919e-02, C: 4.482, Time: 0.03\n",
"It: 1660, Loss: 3.035e-02, C: 4.482, Time: 0.03\n",
"It: 1670, Loss: 2.955e-02, C: 4.482, Time: 0.03\n",
"It: 1680, Loss: 2.963e-02, C: 4.482, Time: 0.03\n",
"It: 1690, Loss: 2.991e-02, C: 4.482, Time: 0.03\n",
"It: 1700, Loss: 2.954e-02, C: 4.482, Time: 0.03\n",
"It: 1710, Loss: 2.971e-02, C: 4.482, Time: 0.03\n",
"It: 1720, Loss: 2.996e-02, C: 4.482, Time: 0.03\n",
"It: 1730, Loss: 3.018e-02, C: 4.482, Time: 0.04\n",
"It: 1740, Loss: 2.902e-02, C: 4.482, Time: 0.04\n",
"It: 1750, Loss: 2.961e-02, C: 4.482, Time: 0.03\n",
"It: 1760, Loss: 3.027e-02, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 1770, Loss: 2.901e-02, C: 4.482, Time: 0.03\n",
"It: 1780, Loss: 2.815e-02, C: 4.482, Time: 0.03\n",
"It: 1790, Loss: 2.918e-02, C: 4.482, Time: 0.03\n",
"It: 1800, Loss: 2.910e-02, C: 4.482, Time: 0.03\n",
"It: 1810, Loss: 2.846e-02, C: 4.482, Time: 0.03\n",
"It: 1820, Loss: 2.904e-02, C: 4.482, Time: 0.03\n",
"It: 1830, Loss: 2.901e-02, C: 4.482, Time: 0.03\n",
"It: 1840, Loss: 2.799e-02, C: 4.482, Time: 0.03\n",
"It: 1850, Loss: 2.827e-02, C: 4.482, Time: 0.03\n",
"It: 1860, Loss: 2.911e-02, C: 4.482, Time: 0.03\n",
"It: 1870, Loss: 2.883e-02, C: 4.482, Time: 0.03\n",
"It: 1880, Loss: 2.906e-02, C: 4.482, Time: 0.03\n",
"It: 1890, Loss: 2.759e-02, C: 4.482, Time: 0.03\n",
"It: 1900, Loss: 2.763e-02, C: 4.482, Time: 0.03\n",
"It: 1910, Loss: 2.772e-02, C: 4.482, Time: 0.03\n",
"It: 1920, Loss: 2.867e-02, C: 4.482, Time: 0.03\n",
"It: 1930, Loss: 2.803e-02, C: 4.482, Time: 0.03\n",
"It: 1940, Loss: 2.928e-02, C: 4.482, Time: 0.03\n",
"It: 1950, Loss: 2.777e-02, C: 4.482, Time: 0.03\n",
"It: 1960, Loss: 2.795e-02, C: 4.482, Time: 0.03\n",
"It: 1970, Loss: 2.667e-02, C: 4.482, Time: 0.03\n",
"It: 1980, Loss: 2.650e-02, C: 4.482, Time: 0.03\n",
"It: 1990, Loss: 2.809e-02, C: 4.482, Time: 0.03\n",
"It: 2000, Loss: 2.725e-02, C: 4.482, Time: 0.03\n",
"It: 2010, Loss: 2.779e-02, C: 4.482, Time: 0.03\n",
"It: 2020, Loss: 2.693e-02, C: 4.482, Time: 0.03\n",
"It: 2030, Loss: 2.888e-02, C: 4.482, Time: 0.03\n",
"It: 2040, Loss: 2.744e-02, C: 4.482, Time: 0.03\n",
"It: 2050, Loss: 2.812e-02, C: 4.482, Time: 0.03\n",
"It: 2060, Loss: 2.699e-02, C: 4.482, Time: 0.03\n",
"It: 2070, Loss: 2.642e-02, C: 4.482, Time: 0.03\n",
"It: 2080, Loss: 2.790e-02, C: 4.482, Time: 0.03\n",
"It: 2090, Loss: 2.633e-02, C: 4.482, Time: 0.03\n",
"It: 2100, Loss: 2.791e-02, C: 4.482, Time: 0.03\n",
"It: 2110, Loss: 2.661e-02, C: 4.482, Time: 0.03\n",
"It: 2120, Loss: 2.687e-02, C: 4.482, Time: 0.03\n",
"It: 2130, Loss: 2.713e-02, C: 4.482, Time: 0.03\n",
"It: 2140, Loss: 2.727e-02, C: 4.482, Time: 0.03\n",
"It: 2150, Loss: 2.504e-02, C: 4.482, Time: 0.03\n",
"It: 2160, Loss: 2.732e-02, C: 4.482, Time: 0.03\n",
"It: 2170, Loss: 2.690e-02, C: 4.482, Time: 0.03\n",
"It: 2180, Loss: 2.534e-02, C: 4.482, Time: 0.03\n",
"It: 2190, Loss: 2.690e-02, C: 4.482, Time: 0.03\n",
"It: 2200, Loss: 2.723e-02, C: 4.482, Time: 0.03\n",
"It: 2210, Loss: 2.711e-02, C: 4.482, Time: 0.03\n",
"It: 2220, Loss: 2.606e-02, C: 4.482, Time: 0.03\n",
"It: 2230, Loss: 2.498e-02, C: 4.482, Time: 0.03\n",
"It: 2240, Loss: 2.388e-02, C: 4.482, Time: 0.03\n",
"It: 2250, Loss: 2.543e-02, C: 4.482, Time: 0.03\n",
"It: 2260, Loss: 2.401e-02, C: 4.482, Time: 0.04\n",
"It: 2270, Loss: 2.461e-02, C: 4.482, Time: 0.03\n",
"It: 2280, Loss: 2.447e-02, C: 4.482, Time: 0.03\n",
"It: 2290, Loss: 2.357e-02, C: 4.482, Time: 0.03\n",
"It: 2300, Loss: 2.336e-02, C: 4.482, Time: 0.03\n",
"It: 2310, Loss: 2.261e-02, C: 4.482, Time: 0.03\n",
"It: 2320, Loss: 2.292e-02, C: 4.482, Time: 0.03\n",
"It: 2330, Loss: 2.401e-02, C: 4.482, Time: 0.03\n",
"It: 2340, Loss: 2.400e-02, C: 4.482, Time: 0.03\n",
"It: 2350, Loss: 1.942e-02, C: 4.482, Time: 0.03\n",
"It: 2360, Loss: 2.224e-02, C: 4.482, Time: 0.03\n",
"It: 2370, Loss: 1.859e-02, C: 4.482, Time: 0.03\n",
"It: 2380, Loss: 2.133e-02, C: 4.482, Time: 0.03\n",
"It: 2390, Loss: 2.163e-02, C: 4.482, Time: 0.03\n",
"It: 2400, Loss: 1.899e-02, C: 4.482, Time: 0.03\n",
"It: 2410, Loss: 1.814e-02, C: 4.482, Time: 0.03\n",
"It: 2420, Loss: 2.331e-02, C: 4.482, Time: 0.03\n",
"It: 2430, Loss: 2.027e-02, C: 4.482, Time: 0.03\n",
"It: 2440, Loss: 2.099e-02, C: 4.482, Time: 0.03\n",
"It: 2450, Loss: 2.514e-02, C: 4.482, Time: 0.03\n",
"It: 2460, Loss: 1.619e-02, C: 4.482, Time: 0.03\n",
"It: 2470, Loss: 1.735e-02, C: 4.482, Time: 0.03\n",
"It: 2480, Loss: 1.910e-02, C: 4.482, Time: 0.03\n",
"It: 2490, Loss: 1.933e-02, C: 4.482, Time: 0.03\n",
"It: 2500, Loss: 1.716e-02, C: 4.482, Time: 0.03\n",
"It: 2510, Loss: 2.141e-02, C: 4.482, Time: 0.03\n",
"It: 2520, Loss: 1.742e-02, C: 4.482, Time: 0.03\n",
"It: 2530, Loss: 2.058e-02, C: 4.482, Time: 0.03\n",
"It: 2540, Loss: 1.700e-02, C: 4.482, Time: 0.04\n",
"It: 2550, Loss: 1.920e-02, C: 4.482, Time: 0.03\n",
"It: 2560, Loss: 2.042e-02, C: 4.482, Time: 0.03\n",
"It: 2570, Loss: 2.126e-02, C: 4.482, Time: 0.03\n",
"It: 2580, Loss: 1.792e-02, C: 4.482, Time: 0.03\n",
"It: 2590, Loss: 1.906e-02, C: 4.482, Time: 0.03\n",
"It: 2600, Loss: 1.856e-02, C: 4.482, Time: 0.03\n",
"It: 2610, Loss: 1.334e-02, C: 4.482, Time: 0.03\n",
"It: 2620, Loss: 1.539e-02, C: 4.482, Time: 0.03\n",
"It: 2630, Loss: 2.106e-02, C: 4.482, Time: 0.03\n",
"It: 2640, Loss: 2.195e-02, C: 4.482, Time: 0.03\n",
"It: 2650, Loss: 1.795e-02, C: 4.482, Time: 0.03\n",
"It: 2660, Loss: 1.801e-02, C: 4.482, Time: 0.03\n",
"It: 2670, Loss: 1.593e-02, C: 4.482, Time: 0.03\n",
"It: 2680, Loss: 2.019e-02, C: 4.482, Time: 0.03\n",
"It: 2690, Loss: 1.599e-02, C: 4.482, Time: 0.03\n",
"It: 2700, Loss: 1.425e-02, C: 4.482, Time: 0.03\n",
"It: 2710, Loss: 1.440e-02, C: 4.482, Time: 0.03\n",
"It: 2720, Loss: 1.698e-02, C: 4.482, Time: 0.03\n",
"It: 2730, Loss: 1.609e-02, C: 4.482, Time: 0.03\n",
"It: 2740, Loss: 1.519e-02, C: 4.482, Time: 0.03\n",
"It: 2750, Loss: 1.470e-02, C: 4.482, Time: 0.03\n",
"It: 2760, Loss: 1.778e-02, C: 4.482, Time: 0.03\n",
"It: 2770, Loss: 1.946e-02, C: 4.482, Time: 0.03\n",
"It: 2780, Loss: 1.760e-02, C: 4.482, Time: 0.03\n",
"It: 2790, Loss: 1.499e-02, C: 4.482, Time: 0.03\n",
"It: 2800, Loss: 1.759e-02, C: 4.482, Time: 0.03\n",
"It: 2810, Loss: 2.090e-02, C: 4.482, Time: 0.03\n",
"It: 2820, Loss: 1.421e-02, C: 4.482, Time: 0.03\n",
"It: 2830, Loss: 1.579e-02, C: 4.482, Time: 0.03\n",
"It: 2840, Loss: 1.576e-02, C: 4.482, Time: 0.03\n",
"It: 2850, Loss: 1.714e-02, C: 4.482, Time: 0.03\n",
"It: 2860, Loss: 1.648e-02, C: 4.482, Time: 0.03\n",
"It: 2870, Loss: 1.821e-02, C: 4.482, Time: 0.03\n",
"It: 2880, Loss: 1.654e-02, C: 4.482, Time: 0.03\n",
"It: 2890, Loss: 1.720e-02, C: 4.482, Time: 0.03\n",
"It: 2900, Loss: 1.259e-02, C: 4.482, Time: 0.03\n",
"It: 2910, Loss: 1.540e-02, C: 4.482, Time: 0.03\n",
"It: 2920, Loss: 1.610e-02, C: 4.482, Time: 0.03\n",
"It: 2930, Loss: 1.551e-02, C: 4.482, Time: 0.03\n",
"It: 2940, Loss: 1.410e-02, C: 4.482, Time: 0.03\n",
"It: 2950, Loss: 1.220e-02, C: 4.482, Time: 0.03\n",
"It: 2960, Loss: 1.970e-02, C: 4.482, Time: 0.03\n",
"It: 2970, Loss: 1.699e-02, C: 4.482, Time: 0.03\n",
"It: 2980, Loss: 1.434e-02, C: 4.482, Time: 0.03\n",
"It: 2990, Loss: 1.986e-02, C: 4.482, Time: 0.03\n",
"It: 3000, Loss: 1.315e-02, C: 4.482, Time: 0.03\n",
"It: 3010, Loss: 1.895e-02, C: 4.482, Time: 0.03\n",
"It: 3020, Loss: 1.398e-02, C: 4.482, Time: 0.03\n",
"It: 3030, Loss: 1.645e-02, C: 4.482, Time: 0.03\n",
"It: 3040, Loss: 1.251e-02, C: 4.482, Time: 0.03\n",
"It: 3050, Loss: 1.203e-02, C: 4.482, Time: 0.03\n",
"It: 3060, Loss: 1.201e-02, C: 4.482, Time: 0.03\n",
"It: 3070, Loss: 1.571e-02, C: 4.482, Time: 0.03\n",
"It: 3080, Loss: 1.180e-02, C: 4.482, Time: 0.03\n",
"It: 3090, Loss: 1.621e-02, C: 4.482, Time: 0.03\n",
"It: 3100, Loss: 1.315e-02, C: 4.482, Time: 0.03\n",
"It: 3110, Loss: 1.693e-02, C: 4.482, Time: 0.03\n",
"It: 3120, Loss: 1.876e-02, C: 4.482, Time: 0.03\n",
"It: 3130, Loss: 1.544e-02, C: 4.482, Time: 0.03\n",
"It: 3140, Loss: 1.580e-02, C: 4.482, Time: 0.03\n",
"It: 3150, Loss: 1.174e-02, C: 4.482, Time: 0.03\n",
"It: 3160, Loss: 1.590e-02, C: 4.482, Time: 0.03\n",
"It: 3170, Loss: 1.262e-02, C: 4.482, Time: 0.03\n",
"It: 3180, Loss: 1.716e-02, C: 4.482, Time: 0.03\n",
"It: 3190, Loss: 1.523e-02, C: 4.482, Time: 0.03\n",
"It: 3200, Loss: 1.644e-02, C: 4.482, Time: 0.04\n",
"It: 3210, Loss: 1.544e-02, C: 4.482, Time: 0.03\n",
"It: 3220, Loss: 1.393e-02, C: 4.482, Time: 0.03\n",
"It: 3230, Loss: 1.154e-02, C: 4.482, Time: 0.03\n",
"It: 3240, Loss: 1.306e-02, C: 4.482, Time: 0.03\n",
"It: 3250, Loss: 1.181e-02, C: 4.482, Time: 0.03\n",
"It: 3260, Loss: 1.373e-02, C: 4.482, Time: 0.03\n",
"It: 3270, Loss: 2.663e-02, C: 4.482, Time: 0.04\n",
"It: 3280, Loss: 1.513e-02, C: 4.482, Time: 0.03\n",
"It: 3290, Loss: 1.255e-02, C: 4.482, Time: 0.03\n",
"It: 3300, Loss: 1.228e-02, C: 4.482, Time: 0.03\n",
"It: 3310, Loss: 1.008e-02, C: 4.482, Time: 0.03\n",
"It: 3320, Loss: 1.035e-02, C: 4.482, Time: 0.03\n",
"It: 3330, Loss: 9.980e-03, C: 4.482, Time: 0.03\n",
"It: 3340, Loss: 1.451e-02, C: 4.482, Time: 0.03\n",
"It: 3350, Loss: 1.221e-02, C: 4.482, Time: 0.03\n",
"It: 3360, Loss: 1.318e-02, C: 4.482, Time: 0.03\n",
"It: 3370, Loss: 1.784e-02, C: 4.482, Time: 0.03\n",
"It: 3380, Loss: 1.072e-02, C: 4.482, Time: 0.03\n",
"It: 3390, Loss: 1.061e-02, C: 4.482, Time: 0.03\n",
"It: 3400, Loss: 1.967e-02, C: 4.482, Time: 0.03\n",
"It: 3410, Loss: 1.707e-02, C: 4.482, Time: 0.03\n",
"It: 3420, Loss: 1.742e-02, C: 4.482, Time: 0.03\n",
"It: 3430, Loss: 1.336e-02, C: 4.482, Time: 0.03\n",
"It: 3440, Loss: 1.502e-02, C: 4.482, Time: 0.03\n",
"It: 3450, Loss: 1.039e-02, C: 4.482, Time: 0.03\n",
"It: 3460, Loss: 1.134e-02, C: 4.482, Time: 0.03\n",
"It: 3470, Loss: 1.292e-02, C: 4.482, Time: 0.03\n",
"It: 3480, Loss: 1.634e-02, C: 4.482, Time: 0.03\n",
"It: 3490, Loss: 1.143e-02, C: 4.482, Time: 0.03\n",
"It: 3500, Loss: 1.319e-02, C: 4.482, Time: 0.03\n",
"It: 3510, Loss: 1.291e-02, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 3520, Loss: 1.180e-02, C: 4.482, Time: 0.03\n",
"It: 3530, Loss: 1.328e-02, C: 4.482, Time: 0.03\n",
"It: 3540, Loss: 1.384e-02, C: 4.482, Time: 0.03\n",
"It: 3550, Loss: 2.073e-02, C: 4.482, Time: 0.03\n",
"It: 3560, Loss: 1.635e-02, C: 4.482, Time: 0.03\n",
"It: 3570, Loss: 1.789e-02, C: 4.482, Time: 0.03\n",
"It: 3580, Loss: 1.185e-02, C: 4.482, Time: 0.03\n",
"It: 3590, Loss: 1.488e-02, C: 4.482, Time: 0.03\n",
"It: 3600, Loss: 1.777e-02, C: 4.482, Time: 0.03\n",
"It: 3610, Loss: 1.632e-02, C: 4.482, Time: 0.03\n",
"It: 3620, Loss: 1.496e-02, C: 4.482, Time: 0.03\n",
"It: 3630, Loss: 1.352e-02, C: 4.482, Time: 0.03\n",
"It: 3640, Loss: 1.156e-02, C: 4.482, Time: 0.03\n",
"It: 3650, Loss: 1.292e-02, C: 4.482, Time: 0.03\n",
"It: 3660, Loss: 1.253e-02, C: 4.482, Time: 0.03\n",
"It: 3670, Loss: 9.945e-03, C: 4.482, Time: 0.03\n",
"It: 3680, Loss: 1.042e-02, C: 4.482, Time: 0.03\n",
"It: 3690, Loss: 1.385e-02, C: 4.482, Time: 0.03\n",
"It: 3700, Loss: 1.104e-02, C: 4.482, Time: 0.04\n",
"It: 3710, Loss: 1.601e-02, C: 4.482, Time: 0.03\n",
"It: 3720, Loss: 1.090e-02, C: 4.482, Time: 0.03\n",
"It: 3730, Loss: 1.402e-02, C: 4.482, Time: 0.03\n",
"It: 3740, Loss: 1.289e-02, C: 4.482, Time: 0.03\n",
"It: 3750, Loss: 1.066e-02, C: 4.482, Time: 0.03\n",
"It: 3760, Loss: 1.595e-02, C: 4.482, Time: 0.03\n",
"It: 3770, Loss: 9.577e-03, C: 4.482, Time: 0.03\n",
"It: 3780, Loss: 1.059e-02, C: 4.482, Time: 0.03\n",
"It: 3790, Loss: 1.579e-02, C: 4.482, Time: 0.03\n",
"It: 3800, Loss: 1.052e-02, C: 4.482, Time: 0.03\n",
"It: 3810, Loss: 1.426e-02, C: 4.482, Time: 0.03\n",
"It: 3820, Loss: 1.137e-02, C: 4.482, Time: 0.03\n",
"It: 3830, Loss: 1.138e-02, C: 4.482, Time: 0.03\n",
"It: 3840, Loss: 1.033e-02, C: 4.482, Time: 0.03\n",
"It: 3850, Loss: 9.088e-03, C: 4.482, Time: 0.03\n",
"It: 3860, Loss: 1.157e-02, C: 4.482, Time: 0.03\n",
"It: 3870, Loss: 7.900e-03, C: 4.482, Time: 0.03\n",
"It: 3880, Loss: 9.893e-03, C: 4.482, Time: 0.03\n",
"It: 3890, Loss: 1.020e-02, C: 4.482, Time: 0.03\n",
"It: 3900, Loss: 9.400e-03, C: 4.482, Time: 0.03\n",
"It: 3910, Loss: 6.853e-03, C: 4.482, Time: 0.03\n",
"It: 3920, Loss: 9.228e-03, C: 4.482, Time: 0.03\n",
"It: 3930, Loss: 9.066e-03, C: 4.482, Time: 0.03\n",
"It: 3940, Loss: 7.302e-03, C: 4.482, Time: 0.03\n",
"It: 3950, Loss: 7.385e-03, C: 4.482, Time: 0.03\n",
"It: 3960, Loss: 8.189e-03, C: 4.482, Time: 0.03\n",
"It: 3970, Loss: 1.140e-02, C: 4.482, Time: 0.03\n",
"It: 3980, Loss: 8.158e-03, C: 4.482, Time: 0.03\n",
"It: 3990, Loss: 1.034e-02, C: 4.482, Time: 0.03\n",
"It: 4000, Loss: 1.118e-02, C: 4.482, Time: 0.03\n",
"It: 4010, Loss: 7.124e-03, C: 4.482, Time: 0.03\n",
"It: 4020, Loss: 9.261e-03, C: 4.482, Time: 0.03\n",
"It: 4030, Loss: 1.253e-02, C: 4.482, Time: 0.03\n",
"It: 4040, Loss: 7.074e-03, C: 4.482, Time: 0.03\n",
"It: 4050, Loss: 8.469e-03, C: 4.482, Time: 0.03\n",
"It: 4060, Loss: 8.901e-03, C: 4.482, Time: 0.03\n",
"It: 4070, Loss: 7.591e-03, C: 4.482, Time: 0.03\n",
"It: 4080, Loss: 8.579e-03, C: 4.482, Time: 0.03\n",
"It: 4090, Loss: 1.032e-02, C: 4.482, Time: 0.03\n",
"It: 4100, Loss: 7.709e-03, C: 4.482, Time: 0.03\n",
"It: 4110, Loss: 1.069e-02, C: 4.482, Time: 0.03\n",
"It: 4120, Loss: 9.823e-03, C: 4.482, Time: 0.03\n",
"It: 4130, Loss: 1.301e-02, C: 4.482, Time: 0.03\n",
"It: 4140, Loss: 8.959e-03, C: 4.482, Time: 0.03\n",
"It: 4150, Loss: 7.336e-03, C: 4.482, Time: 0.03\n",
"It: 4160, Loss: 9.050e-03, C: 4.482, Time: 0.03\n",
"It: 4170, Loss: 7.112e-03, C: 4.482, Time: 0.03\n",
"It: 4180, Loss: 8.254e-03, C: 4.482, Time: 0.03\n",
"It: 4190, Loss: 8.784e-03, C: 4.482, Time: 0.03\n",
"It: 4200, Loss: 7.483e-03, C: 4.482, Time: 0.03\n",
"It: 4210, Loss: 9.189e-03, C: 4.482, Time: 0.03\n",
"It: 4220, Loss: 1.282e-02, C: 4.482, Time: 0.03\n",
"It: 4230, Loss: 8.057e-03, C: 4.482, Time: 0.03\n",
"It: 4240, Loss: 8.922e-03, C: 4.482, Time: 0.03\n",
"It: 4250, Loss: 1.046e-02, C: 4.482, Time: 0.03\n",
"It: 4260, Loss: 1.198e-02, C: 4.482, Time: 0.03\n",
"It: 4270, Loss: 7.128e-03, C: 4.482, Time: 0.03\n",
"It: 4280, Loss: 8.288e-03, C: 4.482, Time: 0.03\n",
"It: 4290, Loss: 5.702e-03, C: 4.482, Time: 0.03\n",
"It: 4300, Loss: 7.691e-03, C: 4.482, Time: 0.03\n",
"It: 4310, Loss: 9.539e-03, C: 4.482, Time: 0.03\n",
"It: 4320, Loss: 8.896e-03, C: 4.482, Time: 0.03\n",
"It: 4330, Loss: 8.926e-03, C: 4.482, Time: 0.03\n",
"It: 4340, Loss: 5.514e-03, C: 4.482, Time: 0.03\n",
"It: 4350, Loss: 7.541e-03, C: 4.482, Time: 0.03\n",
"It: 4360, Loss: 5.921e-03, C: 4.482, Time: 0.03\n",
"It: 4370, Loss: 1.017e-02, C: 4.482, Time: 0.03\n",
"It: 4380, Loss: 9.462e-03, C: 4.482, Time: 0.03\n",
"It: 4390, Loss: 9.610e-03, C: 4.482, Time: 0.03\n",
"It: 4400, Loss: 9.180e-03, C: 4.482, Time: 0.03\n",
"It: 4410, Loss: 7.945e-03, C: 4.482, Time: 0.03\n",
"It: 4420, Loss: 6.995e-03, C: 4.482, Time: 0.03\n",
"It: 4430, Loss: 6.097e-03, C: 4.482, Time: 0.03\n",
"It: 4440, Loss: 7.664e-03, C: 4.482, Time: 0.03\n",
"It: 4450, Loss: 1.275e-02, C: 4.482, Time: 0.03\n",
"It: 4460, Loss: 1.157e-02, C: 4.482, Time: 0.03\n",
"It: 4470, Loss: 5.841e-03, C: 4.482, Time: 0.03\n",
"It: 4480, Loss: 6.603e-03, C: 4.482, Time: 0.03\n",
"It: 4490, Loss: 8.802e-03, C: 4.482, Time: 0.03\n",
"It: 4500, Loss: 6.365e-03, C: 4.482, Time: 0.03\n",
"It: 4510, Loss: 8.918e-03, C: 4.482, Time: 0.03\n",
"It: 4520, Loss: 8.742e-03, C: 4.482, Time: 0.03\n",
"It: 4530, Loss: 8.101e-03, C: 4.482, Time: 0.03\n",
"It: 4540, Loss: 9.055e-03, C: 4.482, Time: 0.03\n",
"It: 4550, Loss: 6.812e-03, C: 4.482, Time: 0.03\n",
"It: 4560, Loss: 5.561e-03, C: 4.482, Time: 0.03\n",
"It: 4570, Loss: 7.907e-03, C: 4.482, Time: 0.03\n",
"It: 4580, Loss: 6.837e-03, C: 4.482, Time: 0.03\n",
"It: 4590, Loss: 6.255e-03, C: 4.482, Time: 0.03\n",
"It: 4600, Loss: 7.101e-03, C: 4.482, Time: 0.03\n",
"It: 4610, Loss: 9.134e-03, C: 4.482, Time: 0.03\n",
"It: 4620, Loss: 6.321e-03, C: 4.482, Time: 0.03\n",
"It: 4630, Loss: 8.165e-03, C: 4.482, Time: 0.03\n",
"It: 4640, Loss: 8.478e-03, C: 4.482, Time: 0.03\n",
"It: 4650, Loss: 7.612e-03, C: 4.482, Time: 0.03\n",
"It: 4660, Loss: 6.451e-03, C: 4.482, Time: 0.03\n",
"It: 4670, Loss: 7.506e-03, C: 4.482, Time: 0.03\n",
"It: 4680, Loss: 6.586e-03, C: 4.482, Time: 0.03\n",
"It: 4690, Loss: 9.428e-03, C: 4.482, Time: 0.03\n",
"It: 4700, Loss: 7.473e-03, C: 4.482, Time: 0.03\n",
"It: 4710, Loss: 9.132e-03, C: 4.482, Time: 0.03\n",
"It: 4720, Loss: 7.085e-03, C: 4.482, Time: 0.03\n",
"It: 4730, Loss: 8.203e-03, C: 4.482, Time: 0.03\n",
"It: 4740, Loss: 9.558e-03, C: 4.482, Time: 0.03\n",
"It: 4750, Loss: 7.380e-03, C: 4.482, Time: 0.03\n",
"It: 4760, Loss: 7.579e-03, C: 4.482, Time: 0.03\n",
"It: 4770, Loss: 5.225e-03, C: 4.482, Time: 0.03\n",
"It: 4780, Loss: 5.524e-03, C: 4.482, Time: 0.03\n",
"It: 4790, Loss: 6.031e-03, C: 4.482, Time: 0.03\n",
"It: 4800, Loss: 7.934e-03, C: 4.482, Time: 0.03\n",
"It: 4810, Loss: 7.223e-03, C: 4.482, Time: 0.03\n",
"It: 4820, Loss: 5.112e-03, C: 4.482, Time: 0.03\n",
"It: 4830, Loss: 8.219e-03, C: 4.482, Time: 0.03\n",
"It: 4840, Loss: 8.969e-03, C: 4.482, Time: 0.03\n",
"It: 4850, Loss: 7.228e-03, C: 4.482, Time: 0.03\n",
"It: 4860, Loss: 6.250e-03, C: 4.482, Time: 0.03\n",
"It: 4870, Loss: 7.335e-03, C: 4.482, Time: 0.03\n",
"It: 4880, Loss: 6.411e-03, C: 4.482, Time: 0.03\n",
"It: 4890, Loss: 8.461e-03, C: 4.482, Time: 0.04\n",
"It: 4900, Loss: 6.622e-03, C: 4.482, Time: 0.03\n",
"It: 4910, Loss: 1.062e-02, C: 4.482, Time: 0.03\n",
"It: 4920, Loss: 1.029e-02, C: 4.482, Time: 0.03\n",
"It: 4930, Loss: 6.518e-03, C: 4.482, Time: 0.03\n",
"It: 4940, Loss: 8.694e-03, C: 4.482, Time: 0.03\n",
"It: 4950, Loss: 6.448e-03, C: 4.482, Time: 0.03\n",
"It: 4960, Loss: 8.191e-03, C: 4.482, Time: 0.03\n",
"It: 4970, Loss: 9.666e-03, C: 4.482, Time: 0.03\n",
"It: 4980, Loss: 9.760e-03, C: 4.482, Time: 0.03\n",
"It: 4990, Loss: 8.377e-03, C: 4.482, Time: 0.03\n",
"It: 5000, Loss: 1.005e-02, C: 4.482, Time: 0.03\n",
"It: 5010, Loss: 7.201e-03, C: 4.482, Time: 0.03\n",
"It: 5020, Loss: 7.155e-03, C: 4.482, Time: 0.03\n",
"It: 5030, Loss: 5.834e-03, C: 4.482, Time: 0.03\n",
"It: 5040, Loss: 5.887e-03, C: 4.482, Time: 0.03\n",
"It: 5050, Loss: 7.029e-03, C: 4.482, Time: 0.03\n",
"It: 5060, Loss: 8.829e-03, C: 4.482, Time: 0.03\n",
"It: 5070, Loss: 7.770e-03, C: 4.482, Time: 0.03\n",
"It: 5080, Loss: 7.815e-03, C: 4.482, Time: 0.03\n",
"It: 5090, Loss: 6.345e-03, C: 4.482, Time: 0.03\n",
"It: 5100, Loss: 5.657e-03, C: 4.482, Time: 0.03\n",
"It: 5110, Loss: 5.938e-03, C: 4.482, Time: 0.03\n",
"It: 5120, Loss: 7.154e-03, C: 4.482, Time: 0.03\n",
"It: 5130, Loss: 7.253e-03, C: 4.482, Time: 0.03\n",
"It: 5140, Loss: 7.321e-03, C: 4.482, Time: 0.03\n",
"It: 5150, Loss: 5.754e-03, C: 4.482, Time: 0.03\n",
"It: 5160, Loss: 5.778e-03, C: 4.482, Time: 0.03\n",
"It: 5170, Loss: 9.872e-03, C: 4.482, Time: 0.03\n",
"It: 5180, Loss: 8.302e-03, C: 4.482, Time: 0.03\n",
"It: 5190, Loss: 5.356e-03, C: 4.482, Time: 0.03\n",
"It: 5200, Loss: 6.782e-03, C: 4.482, Time: 0.03\n",
"It: 5210, Loss: 5.991e-03, C: 4.482, Time: 0.03\n",
"It: 5220, Loss: 8.851e-03, C: 4.482, Time: 0.03\n",
"It: 5230, Loss: 7.179e-03, C: 4.482, Time: 0.03\n",
"It: 5240, Loss: 6.592e-03, C: 4.482, Time: 0.03\n",
"It: 5250, Loss: 8.474e-03, C: 4.482, Time: 0.04\n",
"It: 5260, Loss: 8.873e-03, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 5270, Loss: 7.215e-03, C: 4.482, Time: 0.03\n",
"It: 5280, Loss: 6.415e-03, C: 4.482, Time: 0.03\n",
"It: 5290, Loss: 6.574e-03, C: 4.482, Time: 0.03\n",
"It: 5300, Loss: 8.568e-03, C: 4.482, Time: 0.03\n",
"It: 5310, Loss: 7.355e-03, C: 4.482, Time: 0.03\n",
"It: 5320, Loss: 6.277e-03, C: 4.482, Time: 0.03\n",
"It: 5330, Loss: 8.406e-03, C: 4.482, Time: 0.03\n",
"It: 5340, Loss: 7.585e-03, C: 4.482, Time: 0.03\n",
"It: 5350, Loss: 8.834e-03, C: 4.482, Time: 0.03\n",
"It: 5360, Loss: 8.862e-03, C: 4.482, Time: 0.03\n",
"It: 5370, Loss: 8.015e-03, C: 4.482, Time: 0.03\n",
"It: 5380, Loss: 6.615e-03, C: 4.482, Time: 0.03\n",
"It: 5390, Loss: 9.565e-03, C: 4.482, Time: 0.03\n",
"It: 5400, Loss: 7.995e-03, C: 4.482, Time: 0.03\n",
"It: 5410, Loss: 6.193e-03, C: 4.482, Time: 0.03\n",
"It: 5420, Loss: 5.730e-03, C: 4.482, Time: 0.03\n",
"It: 5430, Loss: 5.327e-03, C: 4.482, Time: 0.03\n",
"It: 5440, Loss: 5.299e-03, C: 4.482, Time: 0.03\n",
"It: 5450, Loss: 9.128e-03, C: 4.482, Time: 0.03\n",
"It: 5460, Loss: 9.040e-03, C: 4.482, Time: 0.03\n",
"It: 5470, Loss: 6.015e-03, C: 4.482, Time: 0.03\n",
"It: 5480, Loss: 5.035e-03, C: 4.482, Time: 0.03\n",
"It: 5490, Loss: 5.629e-03, C: 4.482, Time: 0.03\n",
"It: 5500, Loss: 5.980e-03, C: 4.482, Time: 0.03\n",
"It: 5510, Loss: 5.328e-03, C: 4.482, Time: 0.03\n",
"It: 5520, Loss: 8.478e-03, C: 4.482, Time: 0.03\n",
"It: 5530, Loss: 5.066e-03, C: 4.482, Time: 0.03\n",
"It: 5540, Loss: 1.106e-02, C: 4.482, Time: 0.04\n",
"It: 5550, Loss: 7.708e-03, C: 4.482, Time: 0.03\n",
"It: 5560, Loss: 7.497e-03, C: 4.482, Time: 0.03\n",
"It: 5570, Loss: 5.943e-03, C: 4.482, Time: 0.03\n",
"It: 5580, Loss: 5.060e-03, C: 4.482, Time: 0.03\n",
"It: 5590, Loss: 6.220e-03, C: 4.482, Time: 0.03\n",
"It: 5600, Loss: 5.074e-03, C: 4.482, Time: 0.03\n",
"It: 5610, Loss: 5.958e-03, C: 4.482, Time: 0.03\n",
"It: 5620, Loss: 5.668e-03, C: 4.482, Time: 0.03\n",
"It: 5630, Loss: 6.901e-03, C: 4.482, Time: 0.03\n",
"It: 5640, Loss: 4.948e-03, C: 4.482, Time: 0.03\n",
"It: 5650, Loss: 5.757e-03, C: 4.482, Time: 0.03\n",
"It: 5660, Loss: 6.092e-03, C: 4.482, Time: 0.03\n",
"It: 5670, Loss: 5.227e-03, C: 4.482, Time: 0.03\n",
"It: 5680, Loss: 6.247e-03, C: 4.482, Time: 0.03\n",
"It: 5690, Loss: 5.108e-03, C: 4.482, Time: 0.03\n",
"It: 5700, Loss: 7.041e-03, C: 4.482, Time: 0.03\n",
"It: 5710, Loss: 5.407e-03, C: 4.482, Time: 0.03\n",
"It: 5720, Loss: 7.649e-03, C: 4.482, Time: 0.03\n",
"It: 5730, Loss: 5.534e-03, C: 4.482, Time: 0.03\n",
"It: 5740, Loss: 5.042e-03, C: 4.482, Time: 0.03\n",
"It: 5750, Loss: 5.186e-03, C: 4.482, Time: 0.03\n",
"It: 5760, Loss: 6.198e-03, C: 4.482, Time: 0.03\n",
"It: 5770, Loss: 4.892e-03, C: 4.482, Time: 0.03\n",
"It: 5780, Loss: 8.237e-03, C: 4.482, Time: 0.03\n",
"It: 5790, Loss: 4.990e-03, C: 4.482, Time: 0.03\n",
"It: 5800, Loss: 6.531e-03, C: 4.482, Time: 0.03\n",
"It: 5810, Loss: 6.226e-03, C: 4.482, Time: 0.03\n",
"It: 5820, Loss: 8.355e-03, C: 4.482, Time: 0.03\n",
"It: 5830, Loss: 5.588e-03, C: 4.482, Time: 0.03\n",
"It: 5840, Loss: 6.246e-03, C: 4.482, Time: 0.03\n",
"It: 5850, Loss: 6.500e-03, C: 4.482, Time: 0.03\n",
"It: 5860, Loss: 5.472e-03, C: 4.482, Time: 0.03\n",
"It: 5870, Loss: 7.649e-03, C: 4.482, Time: 0.03\n",
"It: 5880, Loss: 5.528e-03, C: 4.482, Time: 0.03\n",
"It: 5890, Loss: 5.491e-03, C: 4.482, Time: 0.03\n",
"It: 5900, Loss: 7.074e-03, C: 4.482, Time: 0.03\n",
"It: 5910, Loss: 6.146e-03, C: 4.482, Time: 0.03\n",
"It: 5920, Loss: 6.791e-03, C: 4.482, Time: 0.03\n",
"It: 5930, Loss: 1.089e-02, C: 4.482, Time: 0.04\n",
"It: 5940, Loss: 6.836e-03, C: 4.482, Time: 0.03\n",
"It: 5950, Loss: 1.031e-02, C: 4.482, Time: 0.03\n",
"It: 5960, Loss: 6.534e-03, C: 4.482, Time: 0.03\n",
"It: 5970, Loss: 8.742e-03, C: 4.482, Time: 0.03\n",
"It: 5980, Loss: 6.769e-03, C: 4.482, Time: 0.03\n",
"It: 5990, Loss: 5.571e-03, C: 4.482, Time: 0.03\n",
"It: 6000, Loss: 5.630e-03, C: 4.482, Time: 0.03\n",
"It: 6010, Loss: 5.825e-03, C: 4.482, Time: 0.03\n",
"It: 6020, Loss: 6.259e-03, C: 4.482, Time: 0.03\n",
"It: 6030, Loss: 6.474e-03, C: 4.482, Time: 0.03\n",
"It: 6040, Loss: 7.148e-03, C: 4.482, Time: 0.03\n",
"It: 6050, Loss: 1.019e-02, C: 4.482, Time: 0.04\n",
"It: 6060, Loss: 5.072e-03, C: 4.482, Time: 0.03\n",
"It: 6070, Loss: 5.185e-03, C: 4.482, Time: 0.03\n",
"It: 6080, Loss: 5.789e-03, C: 4.482, Time: 0.04\n",
"It: 6090, Loss: 7.894e-03, C: 4.482, Time: 0.03\n",
"It: 6100, Loss: 6.243e-03, C: 4.482, Time: 0.03\n",
"It: 6110, Loss: 5.175e-03, C: 4.482, Time: 0.03\n",
"It: 6120, Loss: 6.010e-03, C: 4.482, Time: 0.03\n",
"It: 6130, Loss: 7.111e-03, C: 4.482, Time: 0.03\n",
"It: 6140, Loss: 4.908e-03, C: 4.482, Time: 0.03\n",
"It: 6150, Loss: 6.558e-03, C: 4.482, Time: 0.04\n",
"It: 6160, Loss: 4.947e-03, C: 4.482, Time: 0.04\n",
"It: 6170, Loss: 4.720e-03, C: 4.482, Time: 0.04\n",
"It: 6180, Loss: 5.420e-03, C: 4.482, Time: 0.03\n",
"It: 6190, Loss: 5.119e-03, C: 4.482, Time: 0.03\n",
"It: 6200, Loss: 4.673e-03, C: 4.482, Time: 0.03\n",
"It: 6210, Loss: 7.706e-03, C: 4.482, Time: 0.03\n",
"It: 6220, Loss: 5.129e-03, C: 4.482, Time: 0.03\n",
"It: 6230, Loss: 5.165e-03, C: 4.482, Time: 0.03\n",
"It: 6240, Loss: 6.170e-03, C: 4.482, Time: 0.03\n",
"It: 6250, Loss: 7.799e-03, C: 4.482, Time: 0.03\n",
"It: 6260, Loss: 5.290e-03, C: 4.482, Time: 0.03\n",
"It: 6270, Loss: 6.414e-03, C: 4.482, Time: 0.03\n",
"It: 6280, Loss: 6.150e-03, C: 4.482, Time: 0.03\n",
"It: 6290, Loss: 6.684e-03, C: 4.482, Time: 0.03\n",
"It: 6300, Loss: 5.479e-03, C: 4.482, Time: 0.03\n",
"It: 6310, Loss: 4.893e-03, C: 4.482, Time: 0.03\n",
"It: 6320, Loss: 5.611e-03, C: 4.482, Time: 0.03\n",
"It: 6330, Loss: 7.637e-03, C: 4.482, Time: 0.03\n",
"It: 6340, Loss: 6.920e-03, C: 4.482, Time: 0.03\n",
"It: 6350, Loss: 8.276e-03, C: 4.482, Time: 0.04\n",
"It: 6360, Loss: 5.766e-03, C: 4.482, Time: 0.04\n",
"It: 6370, Loss: 6.837e-03, C: 4.482, Time: 0.03\n",
"It: 6380, Loss: 8.432e-03, C: 4.482, Time: 0.03\n",
"It: 6390, Loss: 5.958e-03, C: 4.482, Time: 0.03\n",
"It: 6400, Loss: 6.373e-03, C: 4.482, Time: 0.04\n",
"It: 6410, Loss: 7.159e-03, C: 4.482, Time: 0.04\n",
"It: 6420, Loss: 1.220e-02, C: 4.482, Time: 0.04\n",
"It: 6430, Loss: 6.169e-03, C: 4.482, Time: 0.04\n",
"It: 6440, Loss: 1.054e-02, C: 4.482, Time: 0.04\n",
"It: 6450, Loss: 6.178e-03, C: 4.482, Time: 0.03\n",
"It: 6460, Loss: 8.071e-03, C: 4.482, Time: 0.03\n",
"It: 6470, Loss: 5.728e-03, C: 4.482, Time: 0.04\n",
"It: 6480, Loss: 5.761e-03, C: 4.482, Time: 0.04\n",
"It: 6490, Loss: 6.146e-03, C: 4.482, Time: 0.03\n",
"It: 6500, Loss: 6.673e-03, C: 4.482, Time: 0.03\n",
"It: 6510, Loss: 6.216e-03, C: 4.482, Time: 0.03\n",
"It: 6520, Loss: 7.417e-03, C: 4.482, Time: 0.03\n",
"It: 6530, Loss: 7.652e-03, C: 4.482, Time: 0.03\n",
"It: 6540, Loss: 8.448e-03, C: 4.482, Time: 0.03\n",
"It: 6550, Loss: 6.047e-03, C: 4.482, Time: 0.03\n",
"It: 6560, Loss: 5.954e-03, C: 4.482, Time: 0.03\n",
"It: 6570, Loss: 6.161e-03, C: 4.482, Time: 0.03\n",
"It: 6580, Loss: 5.362e-03, C: 4.482, Time: 0.03\n",
"It: 6590, Loss: 8.064e-03, C: 4.482, Time: 0.04\n",
"It: 6600, Loss: 8.300e-03, C: 4.482, Time: 0.03\n",
"It: 6610, Loss: 6.469e-03, C: 4.482, Time: 0.04\n",
"It: 6620, Loss: 6.198e-03, C: 4.482, Time: 0.03\n",
"It: 6630, Loss: 5.446e-03, C: 4.482, Time: 0.04\n",
"It: 6640, Loss: 5.804e-03, C: 4.482, Time: 0.03\n",
"It: 6650, Loss: 6.363e-03, C: 4.482, Time: 0.03\n",
"It: 6660, Loss: 7.431e-03, C: 4.482, Time: 0.04\n",
"It: 6670, Loss: 6.339e-03, C: 4.482, Time: 0.04\n",
"It: 6680, Loss: 5.446e-03, C: 4.482, Time: 0.03\n",
"It: 6690, Loss: 4.978e-03, C: 4.482, Time: 0.03\n",
"It: 6700, Loss: 5.655e-03, C: 4.482, Time: 0.03\n",
"It: 6710, Loss: 6.134e-03, C: 4.482, Time: 0.03\n",
"It: 6720, Loss: 8.655e-03, C: 4.482, Time: 0.03\n",
"It: 6730, Loss: 6.830e-03, C: 4.482, Time: 0.04\n",
"It: 6740, Loss: 6.049e-03, C: 4.482, Time: 0.03\n",
"It: 6750, Loss: 5.763e-03, C: 4.482, Time: 0.03\n",
"It: 6760, Loss: 6.963e-03, C: 4.482, Time: 0.03\n",
"It: 6770, Loss: 7.793e-03, C: 4.482, Time: 0.03\n",
"It: 6780, Loss: 5.072e-03, C: 4.482, Time: 0.04\n",
"It: 6790, Loss: 7.833e-03, C: 4.482, Time: 0.04\n",
"It: 6800, Loss: 5.812e-03, C: 4.482, Time: 0.03\n",
"It: 6810, Loss: 6.530e-03, C: 4.482, Time: 0.03\n",
"It: 6820, Loss: 4.959e-03, C: 4.482, Time: 0.03\n",
"It: 6830, Loss: 5.314e-03, C: 4.482, Time: 0.03\n",
"It: 6840, Loss: 6.502e-03, C: 4.482, Time: 0.04\n",
"It: 6850, Loss: 9.954e-03, C: 4.482, Time: 0.03\n",
"It: 6860, Loss: 4.983e-03, C: 4.482, Time: 0.03\n",
"It: 6870, Loss: 5.469e-03, C: 4.482, Time: 0.03\n",
"It: 6880, Loss: 6.326e-03, C: 4.482, Time: 0.03\n",
"It: 6890, Loss: 7.423e-03, C: 4.482, Time: 0.03\n",
"It: 6900, Loss: 4.624e-03, C: 4.482, Time: 0.03\n",
"It: 6910, Loss: 4.925e-03, C: 4.482, Time: 0.04\n",
"It: 6920, Loss: 7.624e-03, C: 4.482, Time: 0.03\n",
"It: 6930, Loss: 8.292e-03, C: 4.482, Time: 0.03\n",
"It: 6940, Loss: 4.850e-03, C: 4.482, Time: 0.03\n",
"It: 6950, Loss: 8.606e-03, C: 4.482, Time: 0.03\n",
"It: 6960, Loss: 5.831e-03, C: 4.482, Time: 0.03\n",
"It: 6970, Loss: 8.291e-03, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 6980, Loss: 5.638e-03, C: 4.482, Time: 0.04\n",
"It: 6990, Loss: 5.164e-03, C: 4.482, Time: 0.03\n",
"It: 7000, Loss: 5.095e-03, C: 4.482, Time: 0.03\n",
"It: 7010, Loss: 4.670e-03, C: 4.482, Time: 0.03\n",
"It: 7020, Loss: 5.209e-03, C: 4.482, Time: 0.03\n",
"It: 7030, Loss: 6.953e-03, C: 4.482, Time: 0.03\n",
"It: 7040, Loss: 5.262e-03, C: 4.482, Time: 0.03\n",
"It: 7050, Loss: 5.319e-03, C: 4.482, Time: 0.04\n",
"It: 7060, Loss: 5.493e-03, C: 4.482, Time: 0.04\n",
"It: 7070, Loss: 6.798e-03, C: 4.482, Time: 0.03\n",
"It: 7080, Loss: 5.253e-03, C: 4.482, Time: 0.03\n",
"It: 7090, Loss: 4.408e-03, C: 4.482, Time: 0.03\n",
"It: 7100, Loss: 5.171e-03, C: 4.482, Time: 0.04\n",
"It: 7110, Loss: 6.738e-03, C: 4.482, Time: 0.04\n",
"It: 7120, Loss: 7.239e-03, C: 4.482, Time: 0.03\n",
"It: 7130, Loss: 7.094e-03, C: 4.482, Time: 0.03\n",
"It: 7140, Loss: 6.518e-03, C: 4.482, Time: 0.03\n",
"It: 7150, Loss: 6.476e-03, C: 4.482, Time: 0.03\n",
"It: 7160, Loss: 4.854e-03, C: 4.482, Time: 0.04\n",
"It: 7170, Loss: 5.524e-03, C: 4.482, Time: 0.04\n",
"It: 7180, Loss: 6.465e-03, C: 4.482, Time: 0.03\n",
"It: 7190, Loss: 7.389e-03, C: 4.482, Time: 0.04\n",
"It: 7200, Loss: 4.685e-03, C: 4.482, Time: 0.03\n",
"It: 7210, Loss: 5.368e-03, C: 4.482, Time: 0.03\n",
"It: 7220, Loss: 6.420e-03, C: 4.482, Time: 0.04\n",
"It: 7230, Loss: 5.815e-03, C: 4.482, Time: 0.04\n",
"It: 7240, Loss: 6.673e-03, C: 4.482, Time: 0.03\n",
"It: 7250, Loss: 6.260e-03, C: 4.482, Time: 0.03\n",
"It: 7260, Loss: 6.332e-03, C: 4.482, Time: 0.03\n",
"It: 7270, Loss: 7.768e-03, C: 4.482, Time: 0.03\n",
"It: 7280, Loss: 5.600e-03, C: 4.482, Time: 0.03\n",
"It: 7290, Loss: 5.989e-03, C: 4.482, Time: 0.03\n",
"It: 7300, Loss: 5.439e-03, C: 4.482, Time: 0.03\n",
"It: 7310, Loss: 7.045e-03, C: 4.482, Time: 0.03\n",
"It: 7320, Loss: 6.311e-03, C: 4.482, Time: 0.03\n",
"It: 7330, Loss: 5.394e-03, C: 4.482, Time: 0.03\n",
"It: 7340, Loss: 6.360e-03, C: 4.482, Time: 0.03\n",
"It: 7350, Loss: 6.500e-03, C: 4.482, Time: 0.04\n",
"It: 7360, Loss: 5.978e-03, C: 4.482, Time: 0.03\n",
"It: 7370, Loss: 5.371e-03, C: 4.482, Time: 0.03\n",
"It: 7380, Loss: 4.894e-03, C: 4.482, Time: 0.04\n",
"It: 7390, Loss: 5.797e-03, C: 4.482, Time: 0.03\n",
"It: 7400, Loss: 5.625e-03, C: 4.482, Time: 0.03\n",
"It: 7410, Loss: 4.912e-03, C: 4.482, Time: 0.04\n",
"It: 7420, Loss: 5.152e-03, C: 4.482, Time: 0.03\n",
"It: 7430, Loss: 7.056e-03, C: 4.482, Time: 0.03\n",
"It: 7440, Loss: 6.496e-03, C: 4.482, Time: 0.03\n",
"It: 7450, Loss: 6.547e-03, C: 4.482, Time: 0.03\n",
"It: 7460, Loss: 5.695e-03, C: 4.482, Time: 0.03\n",
"It: 7470, Loss: 7.439e-03, C: 4.482, Time: 0.03\n",
"It: 7480, Loss: 4.684e-03, C: 4.482, Time: 0.03\n",
"It: 7490, Loss: 5.375e-03, C: 4.482, Time: 0.03\n",
"It: 7500, Loss: 4.899e-03, C: 4.482, Time: 0.03\n",
"It: 7510, Loss: 4.958e-03, C: 4.482, Time: 0.03\n",
"It: 7520, Loss: 7.995e-03, C: 4.482, Time: 0.03\n",
"It: 7530, Loss: 7.077e-03, C: 4.482, Time: 0.03\n",
"It: 7540, Loss: 6.025e-03, C: 4.482, Time: 0.03\n",
"It: 7550, Loss: 7.096e-03, C: 4.482, Time: 0.03\n",
"It: 7560, Loss: 5.497e-03, C: 4.482, Time: 0.03\n",
"It: 7570, Loss: 4.229e-03, C: 4.482, Time: 0.03\n",
"It: 7580, Loss: 7.146e-03, C: 4.482, Time: 0.04\n",
"It: 7590, Loss: 4.938e-03, C: 4.482, Time: 0.04\n",
"It: 7600, Loss: 5.378e-03, C: 4.482, Time: 0.03\n",
"It: 7610, Loss: 4.895e-03, C: 4.482, Time: 0.03\n",
"It: 7620, Loss: 5.615e-03, C: 4.482, Time: 0.03\n",
"It: 7630, Loss: 4.870e-03, C: 4.482, Time: 0.03\n",
"It: 7640, Loss: 5.374e-03, C: 4.482, Time: 0.04\n",
"It: 7650, Loss: 7.261e-03, C: 4.482, Time: 0.03\n",
"It: 7660, Loss: 6.962e-03, C: 4.482, Time: 0.04\n",
"It: 7670, Loss: 4.913e-03, C: 4.482, Time: 0.03\n",
"It: 7680, Loss: 6.220e-03, C: 4.482, Time: 0.03\n",
"It: 7690, Loss: 5.468e-03, C: 4.482, Time: 0.03\n",
"It: 7700, Loss: 5.371e-03, C: 4.482, Time: 0.03\n",
"It: 7710, Loss: 4.586e-03, C: 4.482, Time: 0.03\n",
"It: 7720, Loss: 5.681e-03, C: 4.482, Time: 0.04\n",
"It: 7730, Loss: 6.363e-03, C: 4.482, Time: 0.03\n",
"It: 7740, Loss: 6.760e-03, C: 4.482, Time: 0.03\n",
"It: 7750, Loss: 5.717e-03, C: 4.482, Time: 0.04\n",
"It: 7760, Loss: 4.863e-03, C: 4.482, Time: 0.03\n",
"It: 7770, Loss: 4.936e-03, C: 4.482, Time: 0.03\n",
"It: 7780, Loss: 4.433e-03, C: 4.482, Time: 0.03\n",
"It: 7790, Loss: 5.473e-03, C: 4.482, Time: 0.03\n",
"It: 7800, Loss: 5.203e-03, C: 4.482, Time: 0.03\n",
"It: 7810, Loss: 6.830e-03, C: 4.482, Time: 0.03\n",
"It: 7820, Loss: 8.485e-03, C: 4.482, Time: 0.03\n",
"It: 7830, Loss: 5.174e-03, C: 4.482, Time: 0.03\n",
"It: 7840, Loss: 4.777e-03, C: 4.482, Time: 0.04\n",
"It: 7850, Loss: 4.913e-03, C: 4.482, Time: 0.04\n",
"It: 7860, Loss: 4.727e-03, C: 4.482, Time: 0.03\n",
"It: 7870, Loss: 6.010e-03, C: 4.482, Time: 0.04\n",
"It: 7880, Loss: 6.275e-03, C: 4.482, Time: 0.04\n",
"It: 7890, Loss: 5.012e-03, C: 4.482, Time: 0.03\n",
"It: 7900, Loss: 6.113e-03, C: 4.482, Time: 0.03\n",
"It: 7910, Loss: 6.536e-03, C: 4.482, Time: 0.03\n",
"It: 7920, Loss: 6.094e-03, C: 4.482, Time: 0.03\n",
"It: 7930, Loss: 4.928e-03, C: 4.482, Time: 0.03\n",
"It: 7940, Loss: 5.825e-03, C: 4.482, Time: 0.03\n",
"It: 7950, Loss: 4.939e-03, C: 4.482, Time: 0.03\n",
"It: 7960, Loss: 4.892e-03, C: 4.482, Time: 0.03\n",
"It: 7970, Loss: 4.407e-03, C: 4.482, Time: 0.03\n",
"It: 7980, Loss: 4.332e-03, C: 4.482, Time: 0.03\n",
"It: 7990, Loss: 4.538e-03, C: 4.482, Time: 0.03\n",
"It: 8000, Loss: 5.917e-03, C: 4.482, Time: 0.03\n",
"It: 8010, Loss: 4.823e-03, C: 4.482, Time: 0.03\n",
"It: 8020, Loss: 7.401e-03, C: 4.482, Time: 0.03\n",
"It: 8030, Loss: 6.798e-03, C: 4.482, Time: 0.03\n",
"It: 8040, Loss: 6.599e-03, C: 4.482, Time: 0.03\n",
"It: 8050, Loss: 8.859e-03, C: 4.482, Time: 0.03\n",
"It: 8060, Loss: 6.681e-03, C: 4.482, Time: 0.03\n",
"It: 8070, Loss: 5.778e-03, C: 4.482, Time: 0.03\n",
"It: 8080, Loss: 5.198e-03, C: 4.482, Time: 0.03\n",
"It: 8090, Loss: 7.966e-03, C: 4.482, Time: 0.03\n",
"It: 8100, Loss: 5.326e-03, C: 4.482, Time: 0.03\n",
"It: 8110, Loss: 7.359e-03, C: 4.482, Time: 0.03\n",
"It: 8120, Loss: 5.070e-03, C: 4.482, Time: 0.03\n",
"It: 8130, Loss: 6.119e-03, C: 4.482, Time: 0.03\n",
"It: 8140, Loss: 5.261e-03, C: 4.482, Time: 0.03\n",
"It: 8150, Loss: 6.358e-03, C: 4.482, Time: 0.03\n",
"It: 8160, Loss: 5.641e-03, C: 4.482, Time: 0.04\n",
"It: 8170, Loss: 4.256e-03, C: 4.482, Time: 0.04\n",
"It: 8180, Loss: 5.178e-03, C: 4.482, Time: 0.04\n",
"It: 8190, Loss: 5.388e-03, C: 4.482, Time: 0.03\n",
"It: 8200, Loss: 5.774e-03, C: 4.482, Time: 0.03\n",
"It: 8210, Loss: 6.031e-03, C: 4.482, Time: 0.03\n",
"It: 8220, Loss: 5.265e-03, C: 4.482, Time: 0.03\n",
"It: 8230, Loss: 6.958e-03, C: 4.482, Time: 0.03\n",
"It: 8240, Loss: 5.110e-03, C: 4.482, Time: 0.03\n",
"It: 8250, Loss: 4.892e-03, C: 4.482, Time: 0.03\n",
"It: 8260, Loss: 6.891e-03, C: 4.482, Time: 0.03\n",
"It: 8270, Loss: 7.859e-03, C: 4.482, Time: 0.03\n",
"It: 8280, Loss: 4.698e-03, C: 4.482, Time: 0.03\n",
"It: 8290, Loss: 1.032e-02, C: 4.482, Time: 0.03\n",
"It: 8300, Loss: 5.165e-03, C: 4.482, Time: 0.03\n",
"It: 8310, Loss: 5.058e-03, C: 4.482, Time: 0.03\n",
"It: 8320, Loss: 5.143e-03, C: 4.482, Time: 0.03\n",
"It: 8330, Loss: 6.252e-03, C: 4.482, Time: 0.03\n",
"It: 8340, Loss: 5.271e-03, C: 4.482, Time: 0.03\n",
"It: 8350, Loss: 4.770e-03, C: 4.482, Time: 0.03\n",
"It: 8360, Loss: 8.304e-03, C: 4.482, Time: 0.04\n",
"It: 8370, Loss: 4.290e-03, C: 4.482, Time: 0.03\n",
"It: 8380, Loss: 7.234e-03, C: 4.482, Time: 0.03\n",
"It: 8390, Loss: 4.968e-03, C: 4.482, Time: 0.03\n",
"It: 8400, Loss: 5.554e-03, C: 4.482, Time: 0.03\n",
"It: 8410, Loss: 6.558e-03, C: 4.482, Time: 0.03\n",
"It: 8420, Loss: 6.284e-03, C: 4.482, Time: 0.03\n",
"It: 8430, Loss: 4.807e-03, C: 4.482, Time: 0.03\n",
"It: 8440, Loss: 4.320e-03, C: 4.482, Time: 0.03\n",
"It: 8450, Loss: 4.324e-03, C: 4.482, Time: 0.03\n",
"It: 8460, Loss: 4.253e-03, C: 4.482, Time: 0.03\n",
"It: 8470, Loss: 5.249e-03, C: 4.482, Time: 0.03\n",
"It: 8480, Loss: 5.637e-03, C: 4.482, Time: 0.03\n",
"It: 8490, Loss: 5.005e-03, C: 4.482, Time: 0.03\n",
"It: 8500, Loss: 5.134e-03, C: 4.482, Time: 0.03\n",
"It: 8510, Loss: 4.312e-03, C: 4.482, Time: 0.03\n",
"It: 8520, Loss: 7.363e-03, C: 4.482, Time: 0.03\n",
"It: 8530, Loss: 6.150e-03, C: 4.482, Time: 0.03\n",
"It: 8540, Loss: 5.815e-03, C: 4.482, Time: 0.03\n",
"It: 8550, Loss: 4.908e-03, C: 4.482, Time: 0.03\n",
"It: 8560, Loss: 4.766e-03, C: 4.482, Time: 0.03\n",
"It: 8570, Loss: 4.852e-03, C: 4.482, Time: 0.03\n",
"It: 8580, Loss: 4.314e-03, C: 4.482, Time: 0.03\n",
"It: 8590, Loss: 4.777e-03, C: 4.482, Time: 0.03\n",
"It: 8600, Loss: 5.218e-03, C: 4.482, Time: 0.03\n",
"It: 8610, Loss: 4.298e-03, C: 4.482, Time: 0.03\n",
"It: 8620, Loss: 5.244e-03, C: 4.482, Time: 0.03\n",
"It: 8630, Loss: 5.001e-03, C: 4.482, Time: 0.03\n",
"It: 8640, Loss: 4.767e-03, C: 4.482, Time: 0.03\n",
"It: 8650, Loss: 5.890e-03, C: 4.482, Time: 0.03\n",
"It: 8660, Loss: 4.075e-03, C: 4.482, Time: 0.03\n",
"It: 8670, Loss: 4.802e-03, C: 4.482, Time: 0.03\n",
"It: 8680, Loss: 4.145e-03, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 8690, Loss: 5.456e-03, C: 4.482, Time: 0.03\n",
"It: 8700, Loss: 6.707e-03, C: 4.482, Time: 0.03\n",
"It: 8710, Loss: 4.730e-03, C: 4.482, Time: 0.03\n",
"It: 8720, Loss: 4.973e-03, C: 4.482, Time: 0.03\n",
"It: 8730, Loss: 5.564e-03, C: 4.482, Time: 0.03\n",
"It: 8740, Loss: 6.852e-03, C: 4.482, Time: 0.03\n",
"It: 8750, Loss: 5.892e-03, C: 4.482, Time: 0.03\n",
"It: 8760, Loss: 4.028e-03, C: 4.482, Time: 0.04\n",
"It: 8770, Loss: 5.176e-03, C: 4.482, Time: 0.03\n",
"It: 8780, Loss: 5.074e-03, C: 4.482, Time: 0.03\n",
"It: 8790, Loss: 5.075e-03, C: 4.482, Time: 0.03\n",
"It: 8800, Loss: 4.183e-03, C: 4.482, Time: 0.03\n",
"It: 8810, Loss: 5.228e-03, C: 4.482, Time: 0.03\n",
"It: 8820, Loss: 4.791e-03, C: 4.482, Time: 0.03\n",
"It: 8830, Loss: 4.296e-03, C: 4.482, Time: 0.03\n",
"It: 8840, Loss: 4.763e-03, C: 4.482, Time: 0.03\n",
"It: 8850, Loss: 4.066e-03, C: 4.482, Time: 0.03\n",
"It: 8860, Loss: 4.639e-03, C: 4.482, Time: 0.03\n",
"It: 8870, Loss: 5.909e-03, C: 4.482, Time: 0.03\n",
"It: 8880, Loss: 7.413e-03, C: 4.482, Time: 0.03\n",
"It: 8890, Loss: 4.353e-03, C: 4.482, Time: 0.03\n",
"It: 8900, Loss: 5.384e-03, C: 4.482, Time: 0.03\n",
"It: 8910, Loss: 4.239e-03, C: 4.482, Time: 0.03\n",
"It: 8920, Loss: 5.597e-03, C: 4.482, Time: 0.03\n",
"It: 8930, Loss: 5.326e-03, C: 4.482, Time: 0.03\n",
"It: 8940, Loss: 5.589e-03, C: 4.482, Time: 0.03\n",
"It: 8950, Loss: 4.747e-03, C: 4.482, Time: 0.03\n",
"It: 8960, Loss: 5.789e-03, C: 4.482, Time: 0.03\n",
"It: 8970, Loss: 4.186e-03, C: 4.482, Time: 0.03\n",
"It: 8980, Loss: 5.412e-03, C: 4.482, Time: 0.03\n",
"It: 8990, Loss: 4.573e-03, C: 4.482, Time: 0.03\n",
"It: 9000, Loss: 3.933e-03, C: 4.482, Time: 0.03\n",
"It: 9010, Loss: 6.439e-03, C: 4.482, Time: 0.03\n",
"It: 9020, Loss: 4.152e-03, C: 4.482, Time: 0.03\n",
"It: 9030, Loss: 5.265e-03, C: 4.482, Time: 0.03\n",
"It: 9040, Loss: 4.804e-03, C: 4.482, Time: 0.03\n",
"It: 9050, Loss: 5.547e-03, C: 4.482, Time: 0.03\n",
"It: 9060, Loss: 7.443e-03, C: 4.482, Time: 0.03\n",
"It: 9070, Loss: 5.450e-03, C: 4.482, Time: 0.03\n",
"It: 9080, Loss: 7.108e-03, C: 4.482, Time: 0.03\n",
"It: 9090, Loss: 6.575e-03, C: 4.482, Time: 0.03\n",
"It: 9100, Loss: 5.892e-03, C: 4.482, Time: 0.03\n",
"It: 9110, Loss: 4.498e-03, C: 4.482, Time: 0.03\n",
"It: 9120, Loss: 4.168e-03, C: 4.482, Time: 0.03\n",
"It: 9130, Loss: 4.886e-03, C: 4.482, Time: 0.03\n",
"It: 9140, Loss: 5.276e-03, C: 4.482, Time: 0.03\n",
"It: 9150, Loss: 5.162e-03, C: 4.482, Time: 0.03\n",
"It: 9160, Loss: 4.503e-03, C: 4.482, Time: 0.03\n",
"It: 9170, Loss: 5.299e-03, C: 4.482, Time: 0.03\n",
"It: 9180, Loss: 4.953e-03, C: 4.482, Time: 0.03\n",
"It: 9190, Loss: 5.780e-03, C: 4.482, Time: 0.03\n",
"It: 9200, Loss: 8.452e-03, C: 4.482, Time: 0.03\n",
"It: 9210, Loss: 5.478e-03, C: 4.482, Time: 0.03\n",
"It: 9220, Loss: 5.352e-03, C: 4.482, Time: 0.03\n",
"It: 9230, Loss: 7.815e-03, C: 4.482, Time: 0.03\n",
"It: 9240, Loss: 6.510e-03, C: 4.482, Time: 0.03\n",
"It: 9250, Loss: 6.085e-03, C: 4.482, Time: 0.03\n",
"It: 9260, Loss: 5.106e-03, C: 4.482, Time: 0.03\n",
"It: 9270, Loss: 4.240e-03, C: 4.482, Time: 0.03\n",
"It: 9280, Loss: 5.451e-03, C: 4.482, Time: 0.04\n",
"It: 9290, Loss: 4.349e-03, C: 4.482, Time: 0.03\n",
"It: 9300, Loss: 5.727e-03, C: 4.482, Time: 0.03\n",
"It: 9310, Loss: 6.286e-03, C: 4.482, Time: 0.04\n",
"It: 9320, Loss: 7.733e-03, C: 4.482, Time: 0.04\n",
"It: 9330, Loss: 6.595e-03, C: 4.482, Time: 0.03\n",
"It: 9340, Loss: 6.768e-03, C: 4.482, Time: 0.03\n",
"It: 9350, Loss: 4.871e-03, C: 4.482, Time: 0.03\n",
"It: 9360, Loss: 6.051e-03, C: 4.482, Time: 0.03\n",
"It: 9370, Loss: 6.005e-03, C: 4.482, Time: 0.03\n",
"It: 9380, Loss: 9.061e-03, C: 4.482, Time: 0.04\n",
"It: 9390, Loss: 5.346e-03, C: 4.482, Time: 0.03\n",
"It: 9400, Loss: 8.092e-03, C: 4.482, Time: 0.03\n",
"It: 9410, Loss: 4.926e-03, C: 4.482, Time: 0.03\n",
"It: 9420, Loss: 8.929e-03, C: 4.482, Time: 0.03\n",
"It: 9430, Loss: 5.535e-03, C: 4.482, Time: 0.03\n",
"It: 9440, Loss: 5.336e-03, C: 4.482, Time: 0.03\n",
"It: 9450, Loss: 5.818e-03, C: 4.482, Time: 0.03\n",
"It: 9460, Loss: 5.527e-03, C: 4.482, Time: 0.03\n",
"It: 9470, Loss: 6.043e-03, C: 4.482, Time: 0.03\n",
"It: 9480, Loss: 4.831e-03, C: 4.482, Time: 0.03\n",
"It: 9490, Loss: 5.969e-03, C: 4.482, Time: 0.03\n",
"It: 9500, Loss: 6.095e-03, C: 4.482, Time: 0.03\n",
"It: 9510, Loss: 6.975e-03, C: 4.482, Time: 0.03\n",
"It: 9520, Loss: 5.314e-03, C: 4.482, Time: 0.03\n",
"It: 9530, Loss: 4.837e-03, C: 4.482, Time: 0.03\n",
"It: 9540, Loss: 4.266e-03, C: 4.482, Time: 0.03\n",
"It: 9550, Loss: 3.980e-03, C: 4.482, Time: 0.03\n",
"It: 9560, Loss: 4.077e-03, C: 4.482, Time: 0.03\n",
"It: 9570, Loss: 4.165e-03, C: 4.482, Time: 0.03\n",
"It: 9580, Loss: 5.340e-03, C: 4.482, Time: 0.03\n",
"It: 9590, Loss: 8.996e-03, C: 4.482, Time: 0.03\n",
"It: 9600, Loss: 6.239e-03, C: 4.482, Time: 0.03\n",
"It: 9610, Loss: 6.234e-03, C: 4.482, Time: 0.03\n",
"It: 9620, Loss: 5.507e-03, C: 4.482, Time: 0.03\n",
"It: 9630, Loss: 5.997e-03, C: 4.482, Time: 0.03\n",
"It: 9640, Loss: 4.781e-03, C: 4.482, Time: 0.03\n",
"It: 9650, Loss: 4.988e-03, C: 4.482, Time: 0.03\n",
"It: 9660, Loss: 5.085e-03, C: 4.482, Time: 0.03\n",
"It: 9670, Loss: 5.250e-03, C: 4.482, Time: 0.03\n",
"It: 9680, Loss: 4.531e-03, C: 4.482, Time: 0.03\n",
"It: 9690, Loss: 7.544e-03, C: 4.482, Time: 0.03\n",
"It: 9700, Loss: 4.771e-03, C: 4.482, Time: 0.03\n",
"It: 9710, Loss: 6.076e-03, C: 4.482, Time: 0.03\n",
"It: 9720, Loss: 6.372e-03, C: 4.482, Time: 0.03\n",
"It: 9730, Loss: 5.548e-03, C: 4.482, Time: 0.03\n",
"It: 9740, Loss: 6.795e-03, C: 4.482, Time: 0.03\n",
"It: 9750, Loss: 5.608e-03, C: 4.482, Time: 0.03\n",
"It: 9760, Loss: 4.116e-03, C: 4.482, Time: 0.03\n",
"It: 9770, Loss: 4.424e-03, C: 4.482, Time: 0.03\n",
"It: 9780, Loss: 4.444e-03, C: 4.482, Time: 0.03\n",
"It: 9790, Loss: 4.601e-03, C: 4.482, Time: 0.03\n",
"It: 9800, Loss: 5.522e-03, C: 4.482, Time: 0.03\n",
"It: 9810, Loss: 4.964e-03, C: 4.482, Time: 0.03\n",
"It: 9820, Loss: 5.612e-03, C: 4.482, Time: 0.03\n",
"It: 9830, Loss: 5.549e-03, C: 4.482, Time: 0.03\n",
"It: 9840, Loss: 4.019e-03, C: 4.482, Time: 0.03\n",
"It: 9850, Loss: 3.862e-03, C: 4.482, Time: 0.03\n",
"It: 9860, Loss: 4.852e-03, C: 4.482, Time: 0.03\n",
"It: 9870, Loss: 5.717e-03, C: 4.482, Time: 0.03\n",
"It: 9880, Loss: 4.726e-03, C: 4.482, Time: 0.03\n",
"It: 9890, Loss: 8.102e-03, C: 4.482, Time: 0.03\n",
"It: 9900, Loss: 5.454e-03, C: 4.482, Time: 0.03\n",
"It: 9910, Loss: 5.137e-03, C: 4.482, Time: 0.03\n",
"It: 9920, Loss: 3.777e-03, C: 4.482, Time: 0.03\n",
"It: 9930, Loss: 3.927e-03, C: 4.482, Time: 0.03\n",
"It: 9940, Loss: 4.023e-03, C: 4.482, Time: 0.03\n",
"It: 9950, Loss: 5.565e-03, C: 4.482, Time: 0.03\n",
"It: 9960, Loss: 3.728e-03, C: 4.482, Time: 0.03\n",
"It: 9970, Loss: 4.645e-03, C: 4.482, Time: 0.03\n",
"It: 9980, Loss: 5.792e-03, C: 4.482, Time: 0.03\n",
"It: 9990, Loss: 3.972e-03, C: 4.482, Time: 0.03\n",
"It: 10000, Loss: 4.074e-03, C: 4.482, Time: 0.03\n",
"It: 10010, Loss: 7.169e-03, C: 4.482, Time: 0.03\n",
"It: 10020, Loss: 3.874e-03, C: 4.482, Time: 0.03\n",
"It: 10030, Loss: 5.157e-03, C: 4.482, Time: 0.03\n",
"It: 10040, Loss: 4.511e-03, C: 4.482, Time: 0.03\n",
"It: 10050, Loss: 4.672e-03, C: 4.482, Time: 0.03\n",
"It: 10060, Loss: 4.297e-03, C: 4.482, Time: 0.03\n",
"It: 10070, Loss: 4.867e-03, C: 4.482, Time: 0.03\n",
"It: 10080, Loss: 4.412e-03, C: 4.482, Time: 0.03\n",
"It: 10090, Loss: 4.724e-03, C: 4.482, Time: 0.03\n",
"It: 10100, Loss: 4.322e-03, C: 4.482, Time: 0.03\n",
"It: 10110, Loss: 3.937e-03, C: 4.482, Time: 0.03\n",
"It: 10120, Loss: 6.418e-03, C: 4.482, Time: 0.03\n",
"It: 10130, Loss: 4.689e-03, C: 4.482, Time: 0.03\n",
"It: 10140, Loss: 5.729e-03, C: 4.482, Time: 0.03\n",
"It: 10150, Loss: 6.117e-03, C: 4.482, Time: 0.03\n",
"It: 10160, Loss: 6.887e-03, C: 4.482, Time: 0.03\n",
"It: 10170, Loss: 5.030e-03, C: 4.482, Time: 0.03\n",
"It: 10180, Loss: 4.115e-03, C: 4.482, Time: 0.03\n",
"It: 10190, Loss: 5.311e-03, C: 4.482, Time: 0.03\n",
"It: 10200, Loss: 4.710e-03, C: 4.482, Time: 0.03\n",
"It: 10210, Loss: 4.548e-03, C: 4.482, Time: 0.04\n",
"It: 10220, Loss: 4.876e-03, C: 4.482, Time: 0.03\n",
"It: 10230, Loss: 5.446e-03, C: 4.482, Time: 0.03\n",
"It: 10240, Loss: 4.707e-03, C: 4.482, Time: 0.03\n",
"It: 10250, Loss: 5.168e-03, C: 4.482, Time: 0.03\n",
"It: 10260, Loss: 4.059e-03, C: 4.482, Time: 0.03\n",
"It: 10270, Loss: 4.403e-03, C: 4.482, Time: 0.03\n",
"It: 10280, Loss: 4.759e-03, C: 4.482, Time: 0.04\n",
"It: 10290, Loss: 4.810e-03, C: 4.482, Time: 0.03\n",
"It: 10300, Loss: 5.394e-03, C: 4.482, Time: 0.03\n",
"It: 10310, Loss: 4.684e-03, C: 4.482, Time: 0.03\n",
"It: 10320, Loss: 5.426e-03, C: 4.482, Time: 0.03\n",
"It: 10330, Loss: 5.055e-03, C: 4.482, Time: 0.03\n",
"It: 10340, Loss: 3.906e-03, C: 4.482, Time: 0.03\n",
"It: 10350, Loss: 6.188e-03, C: 4.482, Time: 0.03\n",
"It: 10360, Loss: 4.257e-03, C: 4.482, Time: 0.03\n",
"It: 10370, Loss: 4.034e-03, C: 4.482, Time: 0.03\n",
"It: 10380, Loss: 5.127e-03, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 10390, Loss: 4.128e-03, C: 4.482, Time: 0.03\n",
"It: 10400, Loss: 4.459e-03, C: 4.482, Time: 0.03\n",
"It: 10410, Loss: 7.812e-03, C: 4.482, Time: 0.03\n",
"It: 10420, Loss: 4.701e-03, C: 4.482, Time: 0.03\n",
"It: 10430, Loss: 4.327e-03, C: 4.482, Time: 0.03\n",
"It: 10440, Loss: 3.947e-03, C: 4.482, Time: 0.03\n",
"It: 10450, Loss: 7.363e-03, C: 4.482, Time: 0.03\n",
"It: 10460, Loss: 5.296e-03, C: 4.482, Time: 0.03\n",
"It: 10470, Loss: 5.338e-03, C: 4.482, Time: 0.03\n",
"It: 10480, Loss: 4.771e-03, C: 4.482, Time: 0.03\n",
"It: 10490, Loss: 5.487e-03, C: 4.482, Time: 0.03\n",
"It: 10500, Loss: 4.140e-03, C: 4.482, Time: 0.03\n",
"It: 10510, Loss: 5.191e-03, C: 4.482, Time: 0.03\n",
"It: 10520, Loss: 4.003e-03, C: 4.482, Time: 0.03\n",
"It: 10530, Loss: 4.294e-03, C: 4.482, Time: 0.03\n",
"It: 10540, Loss: 4.430e-03, C: 4.482, Time: 0.03\n",
"It: 10550, Loss: 3.905e-03, C: 4.482, Time: 0.03\n",
"It: 10560, Loss: 5.391e-03, C: 4.482, Time: 0.03\n",
"It: 10570, Loss: 4.505e-03, C: 4.482, Time: 0.03\n",
"It: 10580, Loss: 4.818e-03, C: 4.482, Time: 0.03\n",
"It: 10590, Loss: 7.049e-03, C: 4.482, Time: 0.04\n",
"It: 10600, Loss: 4.120e-03, C: 4.482, Time: 0.03\n",
"It: 10610, Loss: 3.817e-03, C: 4.482, Time: 0.03\n",
"It: 10620, Loss: 4.784e-03, C: 4.482, Time: 0.03\n",
"It: 10630, Loss: 7.334e-03, C: 4.482, Time: 0.04\n",
"It: 10640, Loss: 5.727e-03, C: 4.482, Time: 0.04\n",
"It: 10650, Loss: 4.537e-03, C: 4.482, Time: 0.03\n",
"It: 10660, Loss: 4.048e-03, C: 4.482, Time: 0.03\n",
"It: 10670, Loss: 3.809e-03, C: 4.482, Time: 0.03\n",
"It: 10680, Loss: 7.542e-03, C: 4.482, Time: 0.03\n",
"It: 10690, Loss: 3.433e-03, C: 4.482, Time: 0.03\n",
"It: 10700, Loss: 4.622e-03, C: 4.482, Time: 0.03\n",
"It: 10710, Loss: 4.455e-03, C: 4.482, Time: 0.03\n",
"It: 10720, Loss: 3.573e-03, C: 4.482, Time: 0.03\n",
"It: 10730, Loss: 6.336e-03, C: 4.482, Time: 0.10\n",
"It: 10740, Loss: 1.294e-02, C: 4.482, Time: 0.03\n",
"It: 10750, Loss: 5.962e-03, C: 4.482, Time: 0.03\n",
"It: 10760, Loss: 5.589e-03, C: 4.482, Time: 0.03\n",
"It: 10770, Loss: 4.743e-03, C: 4.482, Time: 0.03\n",
"It: 10780, Loss: 4.848e-03, C: 4.482, Time: 0.03\n",
"It: 10790, Loss: 5.247e-03, C: 4.482, Time: 0.03\n",
"It: 10800, Loss: 4.879e-03, C: 4.482, Time: 0.03\n",
"It: 10810, Loss: 5.108e-03, C: 4.482, Time: 0.03\n",
"It: 10820, Loss: 5.798e-03, C: 4.482, Time: 0.03\n",
"It: 10830, Loss: 5.301e-03, C: 4.482, Time: 0.03\n",
"It: 10840, Loss: 3.963e-03, C: 4.482, Time: 0.03\n",
"It: 10850, Loss: 4.449e-03, C: 4.482, Time: 0.04\n",
"It: 10860, Loss: 4.092e-03, C: 4.482, Time: 0.03\n",
"It: 10870, Loss: 4.812e-03, C: 4.482, Time: 0.03\n",
"It: 10880, Loss: 4.071e-03, C: 4.482, Time: 0.03\n",
"It: 10890, Loss: 4.361e-03, C: 4.482, Time: 0.03\n",
"It: 10900, Loss: 6.119e-03, C: 4.482, Time: 0.03\n",
"It: 10910, Loss: 4.719e-03, C: 4.482, Time: 0.03\n",
"It: 10920, Loss: 3.876e-03, C: 4.482, Time: 0.03\n",
"It: 10930, Loss: 5.173e-03, C: 4.482, Time: 0.03\n",
"It: 10940, Loss: 6.048e-03, C: 4.482, Time: 0.03\n",
"It: 10950, Loss: 5.601e-03, C: 4.482, Time: 0.03\n",
"It: 10960, Loss: 4.180e-03, C: 4.482, Time: 0.03\n",
"It: 10970, Loss: 5.735e-03, C: 4.482, Time: 0.03\n",
"It: 10980, Loss: 4.694e-03, C: 4.482, Time: 0.03\n",
"It: 10990, Loss: 4.167e-03, C: 4.482, Time: 0.03\n",
"It: 11000, Loss: 3.949e-03, C: 4.482, Time: 0.03\n",
"It: 11010, Loss: 4.485e-03, C: 4.482, Time: 0.03\n",
"It: 11020, Loss: 4.927e-03, C: 4.482, Time: 0.03\n",
"It: 11030, Loss: 4.837e-03, C: 4.482, Time: 0.03\n",
"It: 11040, Loss: 3.969e-03, C: 4.482, Time: 0.03\n",
"It: 11050, Loss: 3.620e-03, C: 4.482, Time: 0.03\n",
"It: 11060, Loss: 3.814e-03, C: 4.482, Time: 0.03\n",
"It: 11070, Loss: 3.677e-03, C: 4.482, Time: 0.03\n",
"It: 11080, Loss: 4.806e-03, C: 4.482, Time: 0.03\n",
"It: 11090, Loss: 4.315e-03, C: 4.482, Time: 0.03\n",
"It: 11100, Loss: 5.923e-03, C: 4.482, Time: 0.03\n",
"It: 11110, Loss: 7.554e-03, C: 4.482, Time: 0.03\n",
"It: 11120, Loss: 6.310e-03, C: 4.482, Time: 0.03\n",
"It: 11130, Loss: 4.331e-03, C: 4.482, Time: 0.03\n",
"It: 11140, Loss: 4.036e-03, C: 4.482, Time: 0.03\n",
"It: 11150, Loss: 4.241e-03, C: 4.482, Time: 0.03\n",
"It: 11160, Loss: 3.972e-03, C: 4.482, Time: 0.03\n",
"It: 11170, Loss: 3.572e-03, C: 4.482, Time: 0.03\n",
"It: 11180, Loss: 3.855e-03, C: 4.482, Time: 0.03\n",
"It: 11190, Loss: 6.107e-03, C: 4.482, Time: 0.03\n",
"It: 11200, Loss: 5.740e-03, C: 4.482, Time: 0.03\n",
"It: 11210, Loss: 4.970e-03, C: 4.482, Time: 0.03\n",
"It: 11220, Loss: 6.088e-03, C: 4.482, Time: 0.03\n",
"It: 11230, Loss: 5.279e-03, C: 4.482, Time: 0.03\n",
"It: 11240, Loss: 4.185e-03, C: 4.482, Time: 0.03\n",
"It: 11250, Loss: 4.378e-03, C: 4.482, Time: 0.03\n",
"It: 11260, Loss: 3.846e-03, C: 4.482, Time: 0.03\n",
"It: 11270, Loss: 4.878e-03, C: 4.482, Time: 0.03\n",
"It: 11280, Loss: 4.204e-03, C: 4.482, Time: 0.03\n",
"It: 11290, Loss: 4.482e-03, C: 4.482, Time: 0.03\n",
"It: 11300, Loss: 4.524e-03, C: 4.482, Time: 0.03\n",
"It: 11310, Loss: 6.455e-03, C: 4.482, Time: 0.03\n",
"It: 11320, Loss: 3.990e-03, C: 4.482, Time: 0.03\n",
"It: 11330, Loss: 6.944e-03, C: 4.482, Time: 0.03\n",
"It: 11340, Loss: 6.031e-03, C: 4.482, Time: 0.03\n",
"It: 11350, Loss: 3.833e-03, C: 4.482, Time: 0.03\n",
"It: 11360, Loss: 3.799e-03, C: 4.482, Time: 0.03\n",
"It: 11370, Loss: 4.034e-03, C: 4.482, Time: 0.03\n",
"It: 11380, Loss: 3.951e-03, C: 4.482, Time: 0.03\n",
"It: 11390, Loss: 3.630e-03, C: 4.482, Time: 0.03\n",
"It: 11400, Loss: 3.605e-03, C: 4.482, Time: 0.03\n",
"It: 11410, Loss: 4.062e-03, C: 4.482, Time: 0.03\n",
"It: 11420, Loss: 5.026e-03, C: 4.482, Time: 0.03\n",
"It: 11430, Loss: 3.852e-03, C: 4.482, Time: 0.03\n",
"It: 11440, Loss: 4.765e-03, C: 4.482, Time: 0.03\n",
"It: 11450, Loss: 4.307e-03, C: 4.482, Time: 0.03\n",
"It: 11460, Loss: 3.870e-03, C: 4.482, Time: 0.03\n",
"It: 11470, Loss: 4.275e-03, C: 4.482, Time: 0.03\n",
"It: 11480, Loss: 4.193e-03, C: 4.482, Time: 0.03\n",
"It: 11490, Loss: 4.041e-03, C: 4.482, Time: 0.03\n",
"It: 11500, Loss: 4.547e-03, C: 4.482, Time: 0.03\n",
"It: 11510, Loss: 3.740e-03, C: 4.482, Time: 0.03\n",
"It: 11520, Loss: 3.333e-03, C: 4.482, Time: 0.03\n",
"It: 11530, Loss: 4.487e-03, C: 4.482, Time: 0.03\n",
"It: 11540, Loss: 4.561e-03, C: 4.482, Time: 0.03\n",
"It: 11550, Loss: 4.276e-03, C: 4.482, Time: 0.03\n",
"It: 11560, Loss: 3.560e-03, C: 4.482, Time: 0.03\n",
"It: 11570, Loss: 4.719e-03, C: 4.482, Time: 0.03\n",
"It: 11580, Loss: 5.491e-03, C: 4.482, Time: 0.03\n",
"It: 11590, Loss: 4.945e-03, C: 4.482, Time: 0.03\n",
"It: 11600, Loss: 4.072e-03, C: 4.482, Time: 0.03\n",
"It: 11610, Loss: 3.747e-03, C: 4.482, Time: 0.03\n",
"It: 11620, Loss: 3.542e-03, C: 4.482, Time: 0.03\n",
"It: 11630, Loss: 3.817e-03, C: 4.482, Time: 0.03\n",
"It: 11640, Loss: 5.316e-03, C: 4.482, Time: 0.03\n",
"It: 11650, Loss: 5.925e-03, C: 4.482, Time: 0.03\n",
"It: 11660, Loss: 4.529e-03, C: 4.482, Time: 0.03\n",
"It: 11670, Loss: 6.129e-03, C: 4.482, Time: 0.03\n",
"It: 11680, Loss: 3.878e-03, C: 4.482, Time: 0.03\n",
"It: 11690, Loss: 4.190e-03, C: 4.482, Time: 0.03\n",
"It: 11700, Loss: 4.338e-03, C: 4.482, Time: 0.03\n",
"It: 11710, Loss: 4.835e-03, C: 4.482, Time: 0.03\n",
"It: 11720, Loss: 3.923e-03, C: 4.482, Time: 0.03\n",
"It: 11730, Loss: 5.366e-03, C: 4.482, Time: 0.03\n",
"It: 11740, Loss: 3.954e-03, C: 4.482, Time: 0.03\n",
"It: 11750, Loss: 4.354e-03, C: 4.482, Time: 0.03\n",
"It: 11760, Loss: 5.068e-03, C: 4.482, Time: 0.03\n",
"It: 11770, Loss: 4.446e-03, C: 4.482, Time: 0.03\n",
"It: 11780, Loss: 4.094e-03, C: 4.482, Time: 0.03\n",
"It: 11790, Loss: 4.158e-03, C: 4.482, Time: 0.03\n",
"It: 11800, Loss: 5.340e-03, C: 4.482, Time: 0.03\n",
"It: 11810, Loss: 4.008e-03, C: 4.482, Time: 0.03\n",
"It: 11820, Loss: 4.823e-03, C: 4.482, Time: 0.03\n",
"It: 11830, Loss: 4.859e-03, C: 4.482, Time: 0.03\n",
"It: 11840, Loss: 5.788e-03, C: 4.482, Time: 0.03\n",
"It: 11850, Loss: 5.547e-03, C: 4.482, Time: 0.03\n",
"It: 11860, Loss: 4.913e-03, C: 4.482, Time: 0.03\n",
"It: 11870, Loss: 3.805e-03, C: 4.482, Time: 0.03\n",
"It: 11880, Loss: 4.019e-03, C: 4.482, Time: 0.03\n",
"It: 11890, Loss: 3.791e-03, C: 4.482, Time: 0.03\n",
"It: 11900, Loss: 4.244e-03, C: 4.482, Time: 0.03\n",
"It: 11910, Loss: 4.173e-03, C: 4.482, Time: 0.03\n",
"It: 11920, Loss: 3.856e-03, C: 4.482, Time: 0.03\n",
"It: 11930, Loss: 3.745e-03, C: 4.482, Time: 0.03\n",
"It: 11940, Loss: 3.816e-03, C: 4.482, Time: 0.03\n",
"It: 11950, Loss: 3.282e-03, C: 4.482, Time: 0.03\n",
"It: 11960, Loss: 3.950e-03, C: 4.482, Time: 0.03\n",
"It: 11970, Loss: 5.723e-03, C: 4.482, Time: 0.03\n",
"It: 11980, Loss: 3.505e-03, C: 4.482, Time: 0.03\n",
"It: 11990, Loss: 3.751e-03, C: 4.482, Time: 0.03\n",
"It: 12000, Loss: 3.599e-03, C: 4.482, Time: 0.03\n",
"It: 12010, Loss: 3.553e-03, C: 4.482, Time: 0.03\n",
"It: 12020, Loss: 3.965e-03, C: 4.482, Time: 0.03\n",
"It: 12030, Loss: 3.176e-03, C: 4.482, Time: 0.03\n",
"It: 12040, Loss: 3.686e-03, C: 4.482, Time: 0.03\n",
"It: 12050, Loss: 7.489e-03, C: 4.482, Time: 0.03\n",
"It: 12060, Loss: 5.869e-03, C: 4.482, Time: 0.03\n",
"It: 12070, Loss: 5.165e-03, C: 4.482, Time: 0.03\n",
"It: 12080, Loss: 4.760e-03, C: 4.482, Time: 0.03\n",
"It: 12090, Loss: 3.858e-03, C: 4.482, Time: 0.03\n",
"It: 12100, Loss: 4.825e-03, C: 4.482, Time: 0.03\n",
"It: 12110, Loss: 3.466e-03, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 12120, Loss: 3.955e-03, C: 4.482, Time: 0.03\n",
"It: 12130, Loss: 4.150e-03, C: 4.482, Time: 0.03\n",
"It: 12140, Loss: 3.662e-03, C: 4.482, Time: 0.03\n",
"It: 12150, Loss: 7.063e-03, C: 4.482, Time: 0.03\n",
"It: 12160, Loss: 3.945e-03, C: 4.482, Time: 0.03\n",
"It: 12170, Loss: 3.453e-03, C: 4.482, Time: 0.03\n",
"It: 12180, Loss: 5.552e-03, C: 4.482, Time: 0.03\n",
"It: 12190, Loss: 3.401e-03, C: 4.482, Time: 0.03\n",
"It: 12200, Loss: 4.221e-03, C: 4.482, Time: 0.03\n",
"It: 12210, Loss: 3.830e-03, C: 4.482, Time: 0.03\n",
"It: 12220, Loss: 3.508e-03, C: 4.482, Time: 0.03\n",
"It: 12230, Loss: 4.783e-03, C: 4.482, Time: 0.03\n",
"It: 12240, Loss: 5.733e-03, C: 4.482, Time: 0.03\n",
"It: 12250, Loss: 3.785e-03, C: 4.482, Time: 0.03\n",
"It: 12260, Loss: 3.881e-03, C: 4.482, Time: 0.03\n",
"It: 12270, Loss: 4.371e-03, C: 4.482, Time: 0.03\n",
"It: 12280, Loss: 3.486e-03, C: 4.482, Time: 0.04\n",
"It: 12290, Loss: 3.535e-03, C: 4.482, Time: 0.04\n",
"It: 12300, Loss: 2.863e-03, C: 4.482, Time: 0.03\n",
"It: 12310, Loss: 3.102e-03, C: 4.482, Time: 0.03\n",
"It: 12320, Loss: 3.314e-03, C: 4.482, Time: 0.03\n",
"It: 12330, Loss: 3.406e-03, C: 4.482, Time: 0.03\n",
"It: 12340, Loss: 3.568e-03, C: 4.482, Time: 0.03\n",
"It: 12350, Loss: 3.655e-03, C: 4.482, Time: 0.03\n",
"It: 12360, Loss: 2.948e-03, C: 4.482, Time: 0.03\n",
"It: 12370, Loss: 3.391e-03, C: 4.482, Time: 0.03\n",
"It: 12380, Loss: 3.026e-03, C: 4.482, Time: 0.04\n",
"It: 12390, Loss: 3.623e-03, C: 4.482, Time: 0.04\n",
"It: 12400, Loss: 3.556e-03, C: 4.482, Time: 0.03\n",
"It: 12410, Loss: 3.963e-03, C: 4.482, Time: 0.03\n",
"It: 12420, Loss: 4.089e-03, C: 4.482, Time: 0.03\n",
"It: 12430, Loss: 3.914e-03, C: 4.482, Time: 0.03\n",
"It: 12440, Loss: 5.520e-03, C: 4.482, Time: 0.03\n",
"It: 12450, Loss: 3.779e-03, C: 4.482, Time: 0.03\n",
"It: 12460, Loss: 3.361e-03, C: 4.482, Time: 0.03\n",
"It: 12470, Loss: 3.601e-03, C: 4.482, Time: 0.03\n",
"It: 12480, Loss: 3.379e-03, C: 4.482, Time: 0.03\n",
"It: 12490, Loss: 4.133e-03, C: 4.482, Time: 0.03\n",
"It: 12500, Loss: 3.440e-03, C: 4.482, Time: 0.03\n",
"It: 12510, Loss: 3.783e-03, C: 4.482, Time: 0.03\n",
"It: 12520, Loss: 3.148e-03, C: 4.482, Time: 0.03\n",
"It: 12530, Loss: 2.955e-03, C: 4.482, Time: 0.03\n",
"It: 12540, Loss: 4.051e-03, C: 4.482, Time: 0.03\n",
"It: 12550, Loss: 3.973e-03, C: 4.482, Time: 0.03\n",
"It: 12560, Loss: 3.516e-03, C: 4.482, Time: 0.03\n",
"It: 12570, Loss: 4.017e-03, C: 4.482, Time: 0.03\n",
"It: 12580, Loss: 4.983e-03, C: 4.482, Time: 0.03\n",
"It: 12590, Loss: 4.295e-03, C: 4.482, Time: 0.03\n",
"It: 12600, Loss: 3.251e-03, C: 4.482, Time: 0.03\n",
"It: 12610, Loss: 3.588e-03, C: 4.482, Time: 0.03\n",
"It: 12620, Loss: 4.818e-03, C: 4.482, Time: 0.03\n",
"It: 12630, Loss: 3.595e-03, C: 4.482, Time: 0.03\n",
"It: 12640, Loss: 3.761e-03, C: 4.482, Time: 0.03\n",
"It: 12650, Loss: 3.804e-03, C: 4.482, Time: 0.03\n",
"It: 12660, Loss: 3.527e-03, C: 4.482, Time: 0.03\n",
"It: 12670, Loss: 3.170e-03, C: 4.482, Time: 0.03\n",
"It: 12680, Loss: 3.060e-03, C: 4.482, Time: 0.03\n",
"It: 12690, Loss: 3.289e-03, C: 4.482, Time: 0.03\n",
"It: 12700, Loss: 4.126e-03, C: 4.482, Time: 0.03\n",
"It: 12710, Loss: 3.618e-03, C: 4.482, Time: 0.03\n",
"It: 12720, Loss: 3.463e-03, C: 4.482, Time: 0.03\n",
"It: 12730, Loss: 2.816e-03, C: 4.482, Time: 0.03\n",
"It: 12740, Loss: 2.513e-03, C: 4.482, Time: 0.03\n",
"It: 12750, Loss: 3.712e-03, C: 4.482, Time: 0.04\n",
"It: 12760, Loss: 2.559e-03, C: 4.482, Time: 0.04\n",
"It: 12770, Loss: 4.059e-03, C: 4.482, Time: 0.03\n",
"It: 12780, Loss: 4.265e-03, C: 4.482, Time: 0.03\n",
"It: 12790, Loss: 3.938e-03, C: 4.482, Time: 0.03\n",
"It: 12800, Loss: 4.416e-03, C: 4.482, Time: 0.03\n",
"It: 12810, Loss: 5.884e-03, C: 4.482, Time: 0.03\n",
"It: 12820, Loss: 4.023e-03, C: 4.482, Time: 0.03\n",
"It: 12830, Loss: 3.237e-03, C: 4.482, Time: 0.03\n",
"It: 12840, Loss: 3.552e-03, C: 4.482, Time: 0.03\n",
"It: 12850, Loss: 4.030e-03, C: 4.482, Time: 0.03\n",
"It: 12860, Loss: 4.461e-03, C: 4.482, Time: 0.03\n",
"It: 12870, Loss: 3.646e-03, C: 4.482, Time: 0.04\n",
"It: 12880, Loss: 3.246e-03, C: 4.482, Time: 0.03\n",
"It: 12890, Loss: 3.160e-03, C: 4.482, Time: 0.03\n",
"It: 12900, Loss: 2.780e-03, C: 4.482, Time: 0.03\n",
"It: 12910, Loss: 3.235e-03, C: 4.482, Time: 0.03\n",
"It: 12920, Loss: 2.971e-03, C: 4.482, Time: 0.03\n",
"It: 12930, Loss: 2.740e-03, C: 4.482, Time: 0.03\n",
"It: 12940, Loss: 2.731e-03, C: 4.482, Time: 0.03\n",
"It: 12950, Loss: 3.634e-03, C: 4.482, Time: 0.03\n",
"It: 12960, Loss: 3.112e-03, C: 4.482, Time: 0.03\n",
"It: 12970, Loss: 3.109e-03, C: 4.482, Time: 0.03\n",
"It: 12980, Loss: 3.883e-03, C: 4.482, Time: 0.03\n",
"It: 12990, Loss: 3.283e-03, C: 4.482, Time: 0.03\n",
"It: 13000, Loss: 2.947e-03, C: 4.482, Time: 0.03\n",
"It: 13010, Loss: 3.576e-03, C: 4.482, Time: 0.03\n",
"It: 13020, Loss: 6.016e-03, C: 4.482, Time: 0.03\n",
"It: 13030, Loss: 3.957e-03, C: 4.482, Time: 0.03\n",
"It: 13040, Loss: 4.143e-03, C: 4.482, Time: 0.03\n",
"It: 13050, Loss: 3.725e-03, C: 4.482, Time: 0.03\n",
"It: 13060, Loss: 3.563e-03, C: 4.482, Time: 0.03\n",
"It: 13070, Loss: 3.509e-03, C: 4.482, Time: 0.03\n",
"It: 13080, Loss: 3.318e-03, C: 4.482, Time: 0.03\n",
"It: 13090, Loss: 3.326e-03, C: 4.482, Time: 0.03\n",
"It: 13100, Loss: 3.407e-03, C: 4.482, Time: 0.04\n",
"It: 13110, Loss: 3.069e-03, C: 4.482, Time: 0.03\n",
"It: 13120, Loss: 4.019e-03, C: 4.482, Time: 0.03\n",
"It: 13130, Loss: 3.832e-03, C: 4.482, Time: 0.03\n",
"It: 13140, Loss: 2.945e-03, C: 4.482, Time: 0.03\n",
"It: 13150, Loss: 3.665e-03, C: 4.482, Time: 0.03\n",
"It: 13160, Loss: 3.600e-03, C: 4.482, Time: 0.04\n",
"It: 13170, Loss: 3.548e-03, C: 4.482, Time: 0.04\n",
"It: 13180, Loss: 3.583e-03, C: 4.482, Time: 0.03\n",
"It: 13190, Loss: 3.246e-03, C: 4.482, Time: 0.03\n",
"It: 13200, Loss: 4.231e-03, C: 4.482, Time: 0.03\n",
"It: 13210, Loss: 5.736e-03, C: 4.482, Time: 0.03\n",
"It: 13220, Loss: 2.633e-03, C: 4.482, Time: 0.03\n",
"It: 13230, Loss: 3.743e-03, C: 4.482, Time: 0.04\n",
"It: 13240, Loss: 3.901e-03, C: 4.482, Time: 0.03\n",
"It: 13250, Loss: 3.530e-03, C: 4.482, Time: 0.03\n",
"It: 13260, Loss: 3.668e-03, C: 4.482, Time: 0.03\n",
"It: 13270, Loss: 4.901e-03, C: 4.482, Time: 0.04\n",
"It: 13280, Loss: 3.831e-03, C: 4.482, Time: 0.03\n",
"It: 13290, Loss: 3.673e-03, C: 4.482, Time: 0.03\n",
"It: 13300, Loss: 3.822e-03, C: 4.482, Time: 0.03\n",
"It: 13310, Loss: 3.887e-03, C: 4.482, Time: 0.04\n",
"It: 13320, Loss: 2.174e-03, C: 4.482, Time: 0.04\n",
"It: 13330, Loss: 5.012e-03, C: 4.482, Time: 0.04\n",
"It: 13340, Loss: 2.755e-03, C: 4.482, Time: 0.04\n",
"It: 13350, Loss: 1.992e-03, C: 4.482, Time: 0.04\n",
"It: 13360, Loss: 3.377e-03, C: 4.482, Time: 0.03\n",
"It: 13370, Loss: 2.791e-03, C: 4.482, Time: 0.03\n",
"It: 13380, Loss: 2.293e-03, C: 4.482, Time: 0.04\n",
"It: 13390, Loss: 4.899e-03, C: 4.482, Time: 0.04\n",
"It: 13400, Loss: 3.365e-03, C: 4.482, Time: 0.03\n",
"It: 13410, Loss: 4.174e-03, C: 4.482, Time: 0.04\n",
"It: 13420, Loss: 2.854e-03, C: 4.482, Time: 0.03\n",
"It: 13430, Loss: 4.611e-03, C: 4.482, Time: 0.03\n",
"It: 13440, Loss: 3.599e-03, C: 4.482, Time: 0.03\n",
"It: 13450, Loss: 3.896e-03, C: 4.482, Time: 0.03\n",
"It: 13460, Loss: 2.390e-03, C: 4.482, Time: 0.03\n",
"It: 13470, Loss: 3.552e-03, C: 4.482, Time: 0.03\n",
"It: 13480, Loss: 5.452e-03, C: 4.482, Time: 0.03\n",
"It: 13490, Loss: 3.690e-03, C: 4.482, Time: 0.03\n",
"It: 13500, Loss: 3.013e-03, C: 4.482, Time: 0.03\n",
"It: 13510, Loss: 3.294e-03, C: 4.482, Time: 0.03\n",
"It: 13520, Loss: 3.066e-03, C: 4.482, Time: 0.03\n",
"It: 13530, Loss: 3.374e-03, C: 4.482, Time: 0.04\n",
"It: 13540, Loss: 3.370e-03, C: 4.482, Time: 0.04\n",
"It: 13550, Loss: 3.656e-03, C: 4.482, Time: 0.04\n",
"It: 13560, Loss: 2.978e-03, C: 4.482, Time: 0.03\n",
"It: 13570, Loss: 2.574e-03, C: 4.482, Time: 0.03\n",
"It: 13580, Loss: 2.955e-03, C: 4.482, Time: 0.04\n",
"It: 13590, Loss: 2.515e-03, C: 4.482, Time: 0.03\n",
"It: 13600, Loss: 2.671e-03, C: 4.482, Time: 0.03\n",
"It: 13610, Loss: 2.816e-03, C: 4.482, Time: 0.03\n",
"It: 13620, Loss: 3.669e-03, C: 4.482, Time: 0.03\n",
"It: 13630, Loss: 4.309e-03, C: 4.482, Time: 0.03\n",
"It: 13640, Loss: 5.719e-03, C: 4.482, Time: 0.03\n",
"It: 13650, Loss: 4.280e-03, C: 4.482, Time: 0.04\n",
"It: 13660, Loss: 2.668e-03, C: 4.482, Time: 0.04\n",
"It: 13670, Loss: 3.369e-03, C: 4.482, Time: 0.03\n",
"It: 13680, Loss: 2.860e-03, C: 4.482, Time: 0.04\n",
"It: 13690, Loss: 2.695e-03, C: 4.482, Time: 0.03\n",
"It: 13700, Loss: 3.347e-03, C: 4.482, Time: 0.03\n",
"It: 13710, Loss: 3.174e-03, C: 4.482, Time: 0.03\n",
"It: 13720, Loss: 4.046e-03, C: 4.482, Time: 0.03\n",
"It: 13730, Loss: 2.344e-03, C: 4.482, Time: 0.03\n",
"It: 13740, Loss: 3.269e-03, C: 4.482, Time: 0.03\n",
"It: 13750, Loss: 3.432e-03, C: 4.482, Time: 0.03\n",
"It: 13760, Loss: 2.298e-03, C: 4.482, Time: 0.03\n",
"It: 13770, Loss: 2.724e-03, C: 4.482, Time: 0.03\n",
"It: 13780, Loss: 2.501e-03, C: 4.482, Time: 0.03\n",
"It: 13790, Loss: 2.974e-03, C: 4.482, Time: 0.03\n",
"It: 13800, Loss: 2.985e-03, C: 4.482, Time: 0.03\n",
"It: 13810, Loss: 2.548e-03, C: 4.482, Time: 0.03\n",
"It: 13820, Loss: 6.011e-03, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 13830, Loss: 3.120e-03, C: 4.482, Time: 0.04\n",
"It: 13840, Loss: 2.212e-03, C: 4.482, Time: 0.03\n",
"It: 13850, Loss: 2.980e-03, C: 4.482, Time: 0.04\n",
"It: 13860, Loss: 3.279e-03, C: 4.482, Time: 0.03\n",
"It: 13870, Loss: 2.468e-03, C: 4.482, Time: 0.03\n",
"It: 13880, Loss: 2.631e-03, C: 4.482, Time: 0.03\n",
"It: 13890, Loss: 3.295e-03, C: 4.482, Time: 0.04\n",
"It: 13900, Loss: 5.117e-03, C: 4.482, Time: 0.03\n",
"It: 13910, Loss: 3.251e-03, C: 4.482, Time: 0.03\n",
"It: 13920, Loss: 2.482e-03, C: 4.482, Time: 0.03\n",
"It: 13930, Loss: 3.453e-03, C: 4.482, Time: 0.04\n",
"It: 13940, Loss: 3.652e-03, C: 4.482, Time: 0.03\n",
"It: 13950, Loss: 3.898e-03, C: 4.482, Time: 0.04\n",
"It: 13960, Loss: 2.974e-03, C: 4.482, Time: 0.03\n",
"It: 13970, Loss: 3.980e-03, C: 4.482, Time: 0.03\n",
"It: 13980, Loss: 2.276e-03, C: 4.482, Time: 0.04\n",
"It: 13990, Loss: 2.811e-03, C: 4.482, Time: 0.03\n",
"It: 14000, Loss: 2.258e-03, C: 4.482, Time: 0.03\n",
"It: 14010, Loss: 2.472e-03, C: 4.482, Time: 0.03\n",
"It: 14020, Loss: 3.490e-03, C: 4.482, Time: 0.03\n",
"It: 14030, Loss: 4.294e-03, C: 4.482, Time: 0.04\n",
"It: 14040, Loss: 3.057e-03, C: 4.482, Time: 0.03\n",
"It: 14050, Loss: 2.440e-03, C: 4.482, Time: 0.03\n",
"It: 14060, Loss: 3.236e-03, C: 4.482, Time: 0.03\n",
"It: 14070, Loss: 3.086e-03, C: 4.482, Time: 0.03\n",
"It: 14080, Loss: 2.210e-03, C: 4.482, Time: 0.04\n",
"It: 14090, Loss: 3.321e-03, C: 4.482, Time: 0.04\n",
"It: 14100, Loss: 4.278e-03, C: 4.482, Time: 0.03\n",
"It: 14110, Loss: 2.146e-03, C: 4.482, Time: 0.03\n",
"It: 14120, Loss: 2.770e-03, C: 4.482, Time: 0.03\n",
"It: 14130, Loss: 2.684e-03, C: 4.482, Time: 0.03\n",
"It: 14140, Loss: 3.933e-03, C: 4.482, Time: 0.04\n",
"It: 14150, Loss: 2.706e-03, C: 4.482, Time: 0.04\n",
"It: 14160, Loss: 2.897e-03, C: 4.482, Time: 0.04\n",
"It: 14170, Loss: 3.033e-03, C: 4.482, Time: 0.03\n",
"It: 14180, Loss: 2.144e-03, C: 4.482, Time: 0.03\n",
"It: 14190, Loss: 2.490e-03, C: 4.482, Time: 0.04\n",
"It: 14200, Loss: 2.304e-03, C: 4.482, Time: 0.03\n",
"It: 14210, Loss: 2.740e-03, C: 4.482, Time: 0.03\n",
"It: 14220, Loss: 2.369e-03, C: 4.482, Time: 0.03\n",
"It: 14230, Loss: 3.815e-03, C: 4.482, Time: 0.04\n",
"It: 14240, Loss: 3.052e-03, C: 4.482, Time: 0.03\n",
"It: 14250, Loss: 2.690e-03, C: 4.482, Time: 0.03\n",
"It: 14260, Loss: 2.607e-03, C: 4.482, Time: 0.03\n",
"It: 14270, Loss: 2.642e-03, C: 4.482, Time: 0.03\n",
"It: 14280, Loss: 3.239e-03, C: 4.482, Time: 0.04\n",
"It: 14290, Loss: 2.953e-03, C: 4.482, Time: 0.03\n",
"It: 14300, Loss: 2.149e-03, C: 4.482, Time: 0.04\n",
"It: 14310, Loss: 4.687e-03, C: 4.482, Time: 0.03\n",
"It: 14320, Loss: 4.114e-03, C: 4.482, Time: 0.04\n",
"It: 14330, Loss: 2.487e-03, C: 4.482, Time: 0.03\n",
"It: 14340, Loss: 2.377e-03, C: 4.482, Time: 0.03\n",
"It: 14350, Loss: 2.370e-03, C: 4.482, Time: 0.03\n",
"It: 14360, Loss: 2.512e-03, C: 4.482, Time: 0.03\n",
"It: 14370, Loss: 2.678e-03, C: 4.482, Time: 0.03\n",
"It: 14380, Loss: 2.651e-03, C: 4.482, Time: 0.03\n",
"It: 14390, Loss: 2.230e-03, C: 4.482, Time: 0.03\n",
"It: 14400, Loss: 4.082e-03, C: 4.482, Time: 0.04\n",
"It: 14410, Loss: 4.335e-03, C: 4.482, Time: 0.03\n",
"It: 14420, Loss: 1.899e-03, C: 4.482, Time: 0.03\n",
"It: 14430, Loss: 1.708e-03, C: 4.482, Time: 0.03\n",
"It: 14440, Loss: 3.541e-03, C: 4.482, Time: 0.04\n",
"It: 14450, Loss: 2.052e-03, C: 4.482, Time: 0.04\n",
"It: 14460, Loss: 2.041e-03, C: 4.482, Time: 0.04\n",
"It: 14470, Loss: 2.345e-03, C: 4.482, Time: 0.04\n",
"It: 14480, Loss: 3.318e-03, C: 4.482, Time: 0.04\n",
"It: 14490, Loss: 2.806e-03, C: 4.482, Time: 0.04\n",
"It: 14500, Loss: 2.677e-03, C: 4.482, Time: 0.04\n",
"It: 14510, Loss: 2.854e-03, C: 4.482, Time: 0.03\n",
"It: 14520, Loss: 2.379e-03, C: 4.482, Time: 0.03\n",
"It: 14530, Loss: 2.411e-03, C: 4.482, Time: 0.03\n",
"It: 14540, Loss: 2.581e-03, C: 4.482, Time: 0.03\n",
"It: 14550, Loss: 3.015e-03, C: 4.482, Time: 0.03\n",
"It: 14560, Loss: 3.089e-03, C: 4.482, Time: 0.03\n",
"It: 14570, Loss: 3.943e-03, C: 4.482, Time: 0.03\n",
"It: 14580, Loss: 3.559e-03, C: 4.482, Time: 0.03\n",
"It: 14590, Loss: 2.742e-03, C: 4.482, Time: 0.03\n",
"It: 14600, Loss: 3.360e-03, C: 4.482, Time: 0.04\n",
"It: 14610, Loss: 3.165e-03, C: 4.482, Time: 0.03\n",
"It: 14620, Loss: 2.374e-03, C: 4.482, Time: 0.03\n",
"It: 14630, Loss: 2.478e-03, C: 4.482, Time: 0.04\n",
"It: 14640, Loss: 3.708e-03, C: 4.482, Time: 0.04\n",
"It: 14650, Loss: 4.111e-03, C: 4.482, Time: 0.04\n",
"It: 14660, Loss: 2.697e-03, C: 4.482, Time: 0.04\n",
"It: 14670, Loss: 3.317e-03, C: 4.482, Time: 0.04\n",
"It: 14680, Loss: 4.622e-03, C: 4.482, Time: 0.04\n",
"It: 14690, Loss: 3.181e-03, C: 4.482, Time: 0.04\n",
"It: 14700, Loss: 2.868e-03, C: 4.482, Time: 0.04\n",
"It: 14710, Loss: 1.831e-03, C: 4.482, Time: 0.04\n",
"It: 14720, Loss: 2.387e-03, C: 4.482, Time: 0.03\n",
"It: 14730, Loss: 2.840e-03, C: 4.482, Time: 0.04\n",
"It: 14740, Loss: 2.893e-03, C: 4.482, Time: 0.04\n",
"It: 14750, Loss: 2.502e-03, C: 4.482, Time: 0.03\n",
"It: 14760, Loss: 3.948e-03, C: 4.482, Time: 0.04\n",
"It: 14770, Loss: 2.417e-03, C: 4.482, Time: 0.04\n",
"It: 14780, Loss: 3.944e-03, C: 4.482, Time: 0.04\n",
"It: 14790, Loss: 3.299e-03, C: 4.482, Time: 0.04\n",
"It: 14800, Loss: 2.100e-03, C: 4.482, Time: 0.04\n",
"It: 14810, Loss: 2.540e-03, C: 4.482, Time: 0.04\n",
"It: 14820, Loss: 2.627e-03, C: 4.482, Time: 0.04\n",
"It: 14830, Loss: 2.468e-03, C: 4.482, Time: 0.04\n",
"It: 14840, Loss: 3.445e-03, C: 4.482, Time: 0.03\n",
"It: 14850, Loss: 4.369e-03, C: 4.482, Time: 0.03\n",
"It: 14860, Loss: 3.107e-03, C: 4.482, Time: 0.03\n",
"It: 14870, Loss: 3.984e-03, C: 4.482, Time: 0.04\n",
"It: 14880, Loss: 2.045e-03, C: 4.482, Time: 0.04\n",
"It: 14890, Loss: 3.596e-03, C: 4.482, Time: 0.04\n",
"It: 14900, Loss: 3.723e-03, C: 4.482, Time: 0.04\n",
"It: 14910, Loss: 2.971e-03, C: 4.482, Time: 0.04\n",
"It: 14920, Loss: 2.350e-03, C: 4.482, Time: 0.03\n",
"It: 14930, Loss: 2.852e-03, C: 4.482, Time: 0.03\n",
"It: 14940, Loss: 3.943e-03, C: 4.482, Time: 0.04\n",
"It: 14950, Loss: 3.491e-03, C: 4.482, Time: 0.04\n",
"It: 14960, Loss: 2.521e-03, C: 4.482, Time: 0.03\n",
"It: 14970, Loss: 2.349e-03, C: 4.482, Time: 0.03\n",
"It: 14980, Loss: 2.946e-03, C: 4.482, Time: 0.04\n",
"It: 14990, Loss: 2.482e-03, C: 4.482, Time: 0.04\n",
"It: 15000, Loss: 3.264e-03, C: 4.482, Time: 0.04\n",
"It: 15010, Loss: 3.255e-03, C: 4.482, Time: 0.03\n",
"It: 15020, Loss: 2.333e-03, C: 4.482, Time: 0.03\n",
"It: 15030, Loss: 3.275e-03, C: 4.482, Time: 0.03\n",
"It: 15040, Loss: 2.188e-03, C: 4.482, Time: 0.03\n",
"It: 15050, Loss: 3.646e-03, C: 4.482, Time: 0.03\n",
"It: 15060, Loss: 1.983e-03, C: 4.482, Time: 0.04\n",
"It: 15070, Loss: 2.057e-03, C: 4.482, Time: 0.04\n",
"It: 15080, Loss: 1.743e-03, C: 4.482, Time: 0.04\n",
"It: 15090, Loss: 3.484e-03, C: 4.482, Time: 0.04\n",
"It: 15100, Loss: 2.639e-03, C: 4.482, Time: 0.03\n",
"It: 15110, Loss: 2.564e-03, C: 4.482, Time: 0.04\n",
"It: 15120, Loss: 2.115e-03, C: 4.482, Time: 0.04\n",
"It: 15130, Loss: 2.448e-03, C: 4.482, Time: 0.04\n",
"It: 15140, Loss: 1.938e-03, C: 4.482, Time: 0.04\n",
"It: 15150, Loss: 2.072e-03, C: 4.482, Time: 0.04\n",
"It: 15160, Loss: 2.614e-03, C: 4.482, Time: 0.03\n",
"It: 15170, Loss: 3.505e-03, C: 4.482, Time: 0.03\n",
"It: 15180, Loss: 2.464e-03, C: 4.482, Time: 0.04\n",
"It: 15190, Loss: 2.097e-03, C: 4.482, Time: 0.04\n",
"It: 15200, Loss: 2.632e-03, C: 4.482, Time: 0.03\n",
"It: 15210, Loss: 1.873e-03, C: 4.482, Time: 0.03\n",
"It: 15220, Loss: 2.682e-03, C: 4.482, Time: 0.04\n",
"It: 15230, Loss: 3.267e-03, C: 4.482, Time: 0.03\n",
"It: 15240, Loss: 3.251e-03, C: 4.482, Time: 0.04\n",
"It: 15250, Loss: 3.316e-03, C: 4.482, Time: 0.04\n",
"It: 15260, Loss: 3.397e-03, C: 4.482, Time: 0.04\n",
"It: 15270, Loss: 3.685e-03, C: 4.482, Time: 0.04\n",
"It: 15280, Loss: 2.677e-03, C: 4.482, Time: 0.05\n",
"It: 15290, Loss: 2.789e-03, C: 4.482, Time: 0.05\n",
"It: 15300, Loss: 3.672e-03, C: 4.482, Time: 0.04\n",
"It: 15310, Loss: 1.954e-03, C: 4.482, Time: 0.04\n",
"It: 15320, Loss: 3.449e-03, C: 4.482, Time: 0.04\n",
"It: 15330, Loss: 3.276e-03, C: 4.482, Time: 0.04\n",
"It: 15340, Loss: 3.257e-03, C: 4.482, Time: 0.04\n",
"It: 15350, Loss: 3.017e-03, C: 4.482, Time: 0.04\n",
"It: 15360, Loss: 2.869e-03, C: 4.482, Time: 0.04\n",
"It: 15370, Loss: 1.852e-03, C: 4.482, Time: 0.03\n",
"It: 15380, Loss: 2.126e-03, C: 4.482, Time: 0.04\n",
"It: 15390, Loss: 2.919e-03, C: 4.482, Time: 0.03\n",
"It: 15400, Loss: 2.887e-03, C: 4.482, Time: 0.04\n",
"It: 15410, Loss: 2.322e-03, C: 4.482, Time: 0.04\n",
"It: 15420, Loss: 2.278e-03, C: 4.482, Time: 0.04\n",
"It: 15430, Loss: 1.724e-03, C: 4.482, Time: 0.04\n",
"It: 15440, Loss: 3.247e-03, C: 4.482, Time: 0.04\n",
"It: 15450, Loss: 1.829e-03, C: 4.482, Time: 0.04\n",
"It: 15460, Loss: 2.251e-03, C: 4.482, Time: 0.04\n",
"It: 15470, Loss: 2.674e-03, C: 4.482, Time: 0.04\n",
"It: 15480, Loss: 4.185e-03, C: 4.482, Time: 0.03\n",
"It: 15490, Loss: 2.838e-03, C: 4.482, Time: 0.03\n",
"It: 15500, Loss: 2.715e-03, C: 4.482, Time: 0.03\n",
"It: 15510, Loss: 2.206e-03, C: 4.482, Time: 0.03\n",
"It: 15520, Loss: 2.570e-03, C: 4.482, Time: 0.03\n",
"It: 15530, Loss: 2.865e-03, C: 4.482, Time: 0.03\n",
"It: 15540, Loss: 2.061e-03, C: 4.482, Time: 0.03\n",
"It: 15550, Loss: 2.811e-03, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 15560, Loss: 2.090e-03, C: 4.482, Time: 0.04\n",
"It: 15570, Loss: 2.331e-03, C: 4.482, Time: 0.04\n",
"It: 15580, Loss: 2.476e-03, C: 4.482, Time: 0.03\n",
"It: 15590, Loss: 2.582e-03, C: 4.482, Time: 0.03\n",
"It: 15600, Loss: 2.165e-03, C: 4.482, Time: 0.03\n",
"It: 15610, Loss: 2.012e-03, C: 4.482, Time: 0.03\n",
"It: 15620, Loss: 2.507e-03, C: 4.482, Time: 0.03\n",
"It: 15630, Loss: 4.013e-03, C: 4.482, Time: 0.03\n",
"It: 15640, Loss: 2.936e-03, C: 4.482, Time: 0.03\n",
"It: 15650, Loss: 2.263e-03, C: 4.482, Time: 0.03\n",
"It: 15660, Loss: 2.615e-03, C: 4.482, Time: 0.03\n",
"It: 15670, Loss: 1.849e-03, C: 4.482, Time: 0.03\n",
"It: 15680, Loss: 2.957e-03, C: 4.482, Time: 0.03\n",
"It: 15690, Loss: 2.518e-03, C: 4.482, Time: 0.03\n",
"It: 15700, Loss: 2.051e-03, C: 4.482, Time: 0.03\n",
"It: 15710, Loss: 2.633e-03, C: 4.482, Time: 0.03\n",
"It: 15720, Loss: 3.637e-03, C: 4.482, Time: 0.03\n",
"It: 15730, Loss: 3.946e-03, C: 4.482, Time: 0.03\n",
"It: 15740, Loss: 2.159e-03, C: 4.482, Time: 0.03\n",
"It: 15750, Loss: 3.204e-03, C: 4.482, Time: 0.03\n",
"It: 15760, Loss: 3.636e-03, C: 4.482, Time: 0.03\n",
"It: 15770, Loss: 2.121e-03, C: 4.482, Time: 0.03\n",
"It: 15780, Loss: 2.606e-03, C: 4.482, Time: 0.03\n",
"It: 15790, Loss: 2.752e-03, C: 4.482, Time: 0.03\n",
"It: 15800, Loss: 2.290e-03, C: 4.482, Time: 0.03\n",
"It: 15810, Loss: 3.681e-03, C: 4.482, Time: 0.03\n",
"It: 15820, Loss: 2.208e-03, C: 4.482, Time: 0.03\n",
"It: 15830, Loss: 2.081e-03, C: 4.482, Time: 0.03\n",
"It: 15840, Loss: 3.159e-03, C: 4.482, Time: 0.04\n",
"It: 15850, Loss: 2.296e-03, C: 4.482, Time: 0.03\n",
"It: 15860, Loss: 2.983e-03, C: 4.482, Time: 0.04\n",
"It: 15870, Loss: 1.799e-03, C: 4.482, Time: 0.03\n",
"It: 15880, Loss: 3.312e-03, C: 4.482, Time: 0.03\n",
"It: 15890, Loss: 2.901e-03, C: 4.482, Time: 0.03\n",
"It: 15900, Loss: 1.841e-03, C: 4.482, Time: 0.03\n",
"It: 15910, Loss: 3.151e-03, C: 4.482, Time: 0.04\n",
"It: 15920, Loss: 3.218e-03, C: 4.482, Time: 0.04\n",
"It: 15930, Loss: 6.279e-03, C: 4.482, Time: 0.04\n",
"It: 15940, Loss: 3.772e-03, C: 4.482, Time: 0.04\n",
"It: 15950, Loss: 3.512e-03, C: 4.482, Time: 0.03\n",
"It: 15960, Loss: 2.650e-03, C: 4.482, Time: 0.03\n",
"It: 15970, Loss: 2.354e-03, C: 4.482, Time: 0.03\n",
"It: 15980, Loss: 3.456e-03, C: 4.482, Time: 0.04\n",
"It: 15990, Loss: 2.608e-03, C: 4.482, Time: 0.04\n",
"It: 16000, Loss: 2.309e-03, C: 4.482, Time: 0.03\n",
"It: 16010, Loss: 2.982e-03, C: 4.482, Time: 0.03\n",
"It: 16020, Loss: 2.491e-03, C: 4.482, Time: 0.04\n",
"It: 16030, Loss: 2.457e-03, C: 4.482, Time: 0.04\n",
"It: 16040, Loss: 2.050e-03, C: 4.482, Time: 0.04\n",
"It: 16050, Loss: 2.006e-03, C: 4.482, Time: 0.04\n",
"It: 16060, Loss: 1.803e-03, C: 4.482, Time: 0.04\n",
"It: 16070, Loss: 1.663e-03, C: 4.482, Time: 0.04\n",
"It: 16080, Loss: 1.604e-03, C: 4.482, Time: 0.04\n",
"It: 16090, Loss: 3.123e-03, C: 4.482, Time: 0.04\n",
"It: 16100, Loss: 2.753e-03, C: 4.482, Time: 0.04\n",
"It: 16110, Loss: 4.064e-03, C: 4.482, Time: 0.03\n",
"It: 16120, Loss: 3.000e-03, C: 4.482, Time: 0.03\n",
"It: 16130, Loss: 2.909e-03, C: 4.482, Time: 0.03\n",
"It: 16140, Loss: 1.824e-03, C: 4.482, Time: 0.03\n",
"It: 16150, Loss: 2.286e-03, C: 4.482, Time: 0.03\n",
"It: 16160, Loss: 4.324e-03, C: 4.482, Time: 0.04\n",
"It: 16170, Loss: 3.272e-03, C: 4.482, Time: 0.04\n",
"It: 16180, Loss: 3.088e-03, C: 4.482, Time: 0.04\n",
"It: 16190, Loss: 3.328e-03, C: 4.482, Time: 0.03\n",
"It: 16200, Loss: 2.479e-03, C: 4.482, Time: 0.03\n",
"It: 16210, Loss: 2.102e-03, C: 4.482, Time: 0.04\n",
"It: 16220, Loss: 1.921e-03, C: 4.482, Time: 0.03\n",
"It: 16230, Loss: 2.436e-03, C: 4.482, Time: 0.03\n",
"It: 16240, Loss: 1.977e-03, C: 4.482, Time: 0.03\n",
"It: 16250, Loss: 1.962e-03, C: 4.482, Time: 0.04\n",
"It: 16260, Loss: 1.817e-03, C: 4.482, Time: 0.03\n",
"It: 16270, Loss: 2.272e-03, C: 4.482, Time: 0.03\n",
"It: 16280, Loss: 1.670e-03, C: 4.482, Time: 0.04\n",
"It: 16290, Loss: 3.155e-03, C: 4.482, Time: 0.04\n",
"It: 16300, Loss: 3.379e-03, C: 4.482, Time: 0.04\n",
"It: 16310, Loss: 1.865e-03, C: 4.482, Time: 0.04\n",
"It: 16320, Loss: 2.334e-03, C: 4.482, Time: 0.04\n",
"It: 16330, Loss: 2.999e-03, C: 4.482, Time: 0.04\n",
"It: 16340, Loss: 3.310e-03, C: 4.482, Time: 0.03\n",
"It: 16350, Loss: 3.227e-03, C: 4.482, Time: 0.03\n",
"It: 16360, Loss: 2.238e-03, C: 4.482, Time: 0.03\n",
"It: 16370, Loss: 3.264e-03, C: 4.482, Time: 0.03\n",
"It: 16380, Loss: 2.990e-03, C: 4.482, Time: 0.03\n",
"It: 16390, Loss: 2.540e-03, C: 4.482, Time: 0.03\n",
"It: 16400, Loss: 2.484e-03, C: 4.482, Time: 0.04\n",
"It: 16410, Loss: 3.230e-03, C: 4.482, Time: 0.04\n",
"It: 16420, Loss: 1.717e-03, C: 4.482, Time: 0.04\n",
"It: 16430, Loss: 2.082e-03, C: 4.482, Time: 0.04\n",
"It: 16440, Loss: 2.793e-03, C: 4.482, Time: 0.04\n",
"It: 16450, Loss: 2.842e-03, C: 4.482, Time: 0.03\n",
"It: 16460, Loss: 2.802e-03, C: 4.482, Time: 0.03\n",
"It: 16470, Loss: 2.463e-03, C: 4.482, Time: 0.03\n",
"It: 16480, Loss: 2.587e-03, C: 4.482, Time: 0.04\n",
"It: 16490, Loss: 1.941e-03, C: 4.482, Time: 0.03\n",
"It: 16500, Loss: 3.510e-03, C: 4.482, Time: 0.03\n",
"It: 16510, Loss: 3.390e-03, C: 4.482, Time: 0.03\n",
"It: 16520, Loss: 3.762e-03, C: 4.482, Time: 0.04\n",
"It: 16530, Loss: 2.821e-03, C: 4.482, Time: 0.03\n",
"It: 16540, Loss: 2.470e-03, C: 4.482, Time: 0.04\n",
"It: 16550, Loss: 2.640e-03, C: 4.482, Time: 0.03\n",
"It: 16560, Loss: 2.861e-03, C: 4.482, Time: 0.03\n",
"It: 16570, Loss: 2.299e-03, C: 4.482, Time: 0.03\n",
"It: 16580, Loss: 2.240e-03, C: 4.482, Time: 0.03\n",
"It: 16590, Loss: 1.562e-03, C: 4.482, Time: 0.03\n",
"It: 16600, Loss: 3.990e-03, C: 4.482, Time: 0.04\n",
"It: 16610, Loss: 2.508e-03, C: 4.482, Time: 0.04\n",
"It: 16620, Loss: 2.426e-03, C: 4.482, Time: 0.03\n",
"It: 16630, Loss: 2.506e-03, C: 4.482, Time: 0.04\n",
"It: 16640, Loss: 3.217e-03, C: 4.482, Time: 0.04\n",
"It: 16650, Loss: 2.159e-03, C: 4.482, Time: 0.04\n",
"It: 16660, Loss: 2.571e-03, C: 4.482, Time: 0.04\n",
"It: 16670, Loss: 2.635e-03, C: 4.482, Time: 0.04\n",
"It: 16680, Loss: 2.844e-03, C: 4.482, Time: 0.04\n",
"It: 16690, Loss: 1.917e-03, C: 4.482, Time: 0.04\n",
"It: 16700, Loss: 2.070e-03, C: 4.482, Time: 0.04\n",
"It: 16710, Loss: 1.591e-03, C: 4.482, Time: 0.04\n",
"It: 16720, Loss: 2.139e-03, C: 4.482, Time: 0.03\n",
"It: 16730, Loss: 2.963e-03, C: 4.482, Time: 0.03\n",
"It: 16740, Loss: 2.943e-03, C: 4.482, Time: 0.04\n",
"It: 16750, Loss: 1.651e-03, C: 4.482, Time: 0.04\n",
"It: 16760, Loss: 2.457e-03, C: 4.482, Time: 0.03\n",
"It: 16770, Loss: 3.887e-03, C: 4.482, Time: 0.04\n",
"It: 16780, Loss: 3.061e-03, C: 4.482, Time: 0.03\n",
"It: 16790, Loss: 3.248e-03, C: 4.482, Time: 0.03\n",
"It: 16800, Loss: 3.309e-03, C: 4.482, Time: 0.03\n",
"It: 16810, Loss: 3.456e-03, C: 4.482, Time: 0.03\n",
"It: 16820, Loss: 2.489e-03, C: 4.482, Time: 0.03\n",
"It: 16830, Loss: 2.497e-03, C: 4.482, Time: 0.04\n",
"It: 16840, Loss: 2.243e-03, C: 4.482, Time: 0.03\n",
"It: 16850, Loss: 2.467e-03, C: 4.482, Time: 0.03\n",
"It: 16860, Loss: 3.419e-03, C: 4.482, Time: 0.04\n",
"It: 16870, Loss: 3.456e-03, C: 4.482, Time: 0.04\n",
"It: 16880, Loss: 2.164e-03, C: 4.482, Time: 0.04\n",
"It: 16890, Loss: 2.214e-03, C: 4.482, Time: 0.04\n",
"It: 16900, Loss: 2.353e-03, C: 4.482, Time: 0.04\n",
"It: 16910, Loss: 1.583e-03, C: 4.482, Time: 0.03\n",
"It: 16920, Loss: 3.045e-03, C: 4.482, Time: 0.03\n",
"It: 16930, Loss: 2.123e-03, C: 4.482, Time: 0.04\n",
"It: 16940, Loss: 3.095e-03, C: 4.482, Time: 0.04\n",
"It: 16950, Loss: 3.600e-03, C: 4.482, Time: 0.04\n",
"It: 16960, Loss: 2.472e-03, C: 4.482, Time: 0.04\n",
"It: 16970, Loss: 2.845e-03, C: 4.482, Time: 0.04\n",
"It: 16980, Loss: 2.669e-03, C: 4.482, Time: 0.04\n",
"It: 16990, Loss: 2.461e-03, C: 4.482, Time: 0.03\n",
"It: 17000, Loss: 2.780e-03, C: 4.482, Time: 0.03\n",
"It: 17010, Loss: 1.411e-03, C: 4.482, Time: 0.04\n",
"It: 17020, Loss: 1.773e-03, C: 4.482, Time: 0.03\n",
"It: 17030, Loss: 2.926e-03, C: 4.482, Time: 0.03\n",
"It: 17040, Loss: 2.469e-03, C: 4.482, Time: 0.03\n",
"It: 17050, Loss: 2.113e-03, C: 4.482, Time: 0.04\n",
"It: 17060, Loss: 2.240e-03, C: 4.482, Time: 0.03\n",
"It: 17070, Loss: 1.994e-03, C: 4.482, Time: 0.03\n",
"It: 17080, Loss: 1.325e-03, C: 4.482, Time: 0.03\n",
"It: 17090, Loss: 2.483e-03, C: 4.482, Time: 0.03\n",
"It: 17100, Loss: 1.568e-03, C: 4.482, Time: 0.03\n",
"It: 17110, Loss: 1.779e-03, C: 4.482, Time: 0.03\n",
"It: 17120, Loss: 2.141e-03, C: 4.482, Time: 0.03\n",
"It: 17130, Loss: 1.778e-03, C: 4.482, Time: 0.03\n",
"It: 17140, Loss: 2.129e-03, C: 4.482, Time: 0.03\n",
"It: 17150, Loss: 1.249e-03, C: 4.482, Time: 0.03\n",
"It: 17160, Loss: 1.768e-03, C: 4.482, Time: 0.04\n",
"It: 17170, Loss: 2.166e-03, C: 4.482, Time: 0.04\n",
"It: 17180, Loss: 1.950e-03, C: 4.482, Time: 0.03\n",
"It: 17190, Loss: 1.817e-03, C: 4.482, Time: 0.03\n",
"It: 17200, Loss: 2.520e-03, C: 4.482, Time: 0.03\n",
"It: 17210, Loss: 1.548e-03, C: 4.482, Time: 0.03\n",
"It: 17220, Loss: 3.219e-03, C: 4.482, Time: 0.03\n",
"It: 17230, Loss: 2.151e-03, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 17240, Loss: 2.110e-03, C: 4.482, Time: 0.03\n",
"It: 17250, Loss: 2.610e-03, C: 4.482, Time: 0.03\n",
"It: 17260, Loss: 1.740e-03, C: 4.482, Time: 0.04\n",
"It: 17270, Loss: 3.208e-03, C: 4.482, Time: 0.03\n",
"It: 17280, Loss: 2.132e-03, C: 4.482, Time: 0.03\n",
"It: 17290, Loss: 2.344e-03, C: 4.482, Time: 0.03\n",
"It: 17300, Loss: 2.096e-03, C: 4.482, Time: 0.03\n",
"It: 17310, Loss: 2.106e-03, C: 4.482, Time: 0.03\n",
"It: 17320, Loss: 2.352e-03, C: 4.482, Time: 0.03\n",
"It: 17330, Loss: 1.578e-03, C: 4.482, Time: 0.04\n",
"It: 17340, Loss: 2.212e-03, C: 4.482, Time: 0.03\n",
"It: 17350, Loss: 2.828e-03, C: 4.482, Time: 0.03\n",
"It: 17360, Loss: 2.156e-03, C: 4.482, Time: 0.03\n",
"It: 17370, Loss: 1.648e-03, C: 4.482, Time: 0.03\n",
"It: 17380, Loss: 1.856e-03, C: 4.482, Time: 0.03\n",
"It: 17390, Loss: 3.581e-03, C: 4.482, Time: 0.03\n",
"It: 17400, Loss: 1.553e-03, C: 4.482, Time: 0.03\n",
"It: 17410, Loss: 2.557e-03, C: 4.482, Time: 0.04\n",
"It: 17420, Loss: 1.680e-03, C: 4.482, Time: 0.04\n",
"It: 17430, Loss: 2.881e-03, C: 4.482, Time: 0.04\n",
"It: 17440, Loss: 2.897e-03, C: 4.482, Time: 0.03\n",
"It: 17450, Loss: 2.915e-03, C: 4.482, Time: 0.03\n",
"It: 17460, Loss: 2.167e-03, C: 4.482, Time: 0.03\n",
"It: 17470, Loss: 2.731e-03, C: 4.482, Time: 0.03\n",
"It: 17480, Loss: 3.015e-03, C: 4.482, Time: 0.03\n",
"It: 17490, Loss: 3.391e-03, C: 4.482, Time: 0.04\n",
"It: 17500, Loss: 2.157e-03, C: 4.482, Time: 0.03\n",
"It: 17510, Loss: 4.140e-03, C: 4.482, Time: 0.03\n",
"It: 17520, Loss: 3.436e-03, C: 4.482, Time: 0.03\n",
"It: 17530, Loss: 2.822e-03, C: 4.482, Time: 0.03\n",
"It: 17540, Loss: 3.212e-03, C: 4.482, Time: 0.03\n",
"It: 17550, Loss: 1.495e-03, C: 4.482, Time: 0.03\n",
"It: 17560, Loss: 1.635e-03, C: 4.482, Time: 0.03\n",
"It: 17570, Loss: 2.316e-03, C: 4.482, Time: 0.03\n",
"It: 17580, Loss: 1.592e-03, C: 4.482, Time: 0.03\n",
"It: 17590, Loss: 2.038e-03, C: 4.482, Time: 0.03\n",
"It: 17600, Loss: 2.163e-03, C: 4.482, Time: 0.03\n",
"It: 17610, Loss: 1.956e-03, C: 4.482, Time: 0.03\n",
"It: 17620, Loss: 1.509e-03, C: 4.482, Time: 0.04\n",
"It: 17630, Loss: 1.436e-03, C: 4.482, Time: 0.03\n",
"It: 17640, Loss: 2.559e-03, C: 4.482, Time: 0.03\n",
"It: 17650, Loss: 2.079e-03, C: 4.482, Time: 0.03\n",
"It: 17660, Loss: 2.598e-03, C: 4.482, Time: 0.04\n",
"It: 17670, Loss: 2.272e-03, C: 4.482, Time: 0.03\n",
"It: 17680, Loss: 1.650e-03, C: 4.482, Time: 0.04\n",
"It: 17690, Loss: 1.831e-03, C: 4.482, Time: 0.03\n",
"It: 17700, Loss: 2.684e-03, C: 4.482, Time: 0.03\n",
"It: 17710, Loss: 1.790e-03, C: 4.482, Time: 0.04\n",
"It: 17720, Loss: 3.138e-03, C: 4.482, Time: 0.04\n",
"It: 17730, Loss: 1.626e-03, C: 4.482, Time: 0.04\n",
"It: 17740, Loss: 2.480e-03, C: 4.482, Time: 0.03\n",
"It: 17750, Loss: 1.571e-03, C: 4.482, Time: 0.03\n",
"It: 17760, Loss: 2.331e-03, C: 4.482, Time: 0.03\n",
"It: 17770, Loss: 1.738e-03, C: 4.482, Time: 0.03\n",
"It: 17780, Loss: 1.626e-03, C: 4.482, Time: 0.03\n",
"It: 17790, Loss: 1.754e-03, C: 4.482, Time: 0.03\n",
"It: 17800, Loss: 1.539e-03, C: 4.482, Time: 0.03\n",
"It: 17810, Loss: 2.871e-03, C: 4.482, Time: 0.03\n",
"It: 17820, Loss: 2.155e-03, C: 4.482, Time: 0.03\n",
"It: 17830, Loss: 1.859e-03, C: 4.482, Time: 0.03\n",
"It: 17840, Loss: 3.202e-03, C: 4.482, Time: 0.03\n",
"It: 17850, Loss: 4.102e-03, C: 4.482, Time: 0.03\n",
"It: 17860, Loss: 3.329e-03, C: 4.482, Time: 0.03\n",
"It: 17870, Loss: 2.978e-03, C: 4.482, Time: 0.03\n",
"It: 17880, Loss: 3.566e-03, C: 4.482, Time: 0.03\n",
"It: 17890, Loss: 1.464e-03, C: 4.482, Time: 0.03\n",
"It: 17900, Loss: 1.584e-03, C: 4.482, Time: 0.03\n",
"It: 17910, Loss: 3.336e-03, C: 4.482, Time: 0.03\n",
"It: 17920, Loss: 2.008e-03, C: 4.482, Time: 0.04\n",
"It: 17930, Loss: 2.892e-03, C: 4.482, Time: 0.04\n",
"It: 17940, Loss: 2.039e-03, C: 4.482, Time: 0.04\n",
"It: 17950, Loss: 2.382e-03, C: 4.482, Time: 0.03\n",
"It: 17960, Loss: 4.636e-03, C: 4.482, Time: 0.04\n",
"It: 17970, Loss: 1.865e-03, C: 4.482, Time: 0.03\n",
"It: 17980, Loss: 1.819e-03, C: 4.482, Time: 0.03\n",
"It: 17990, Loss: 1.624e-03, C: 4.482, Time: 0.04\n",
"It: 18000, Loss: 3.397e-03, C: 4.482, Time: 0.03\n",
"It: 18010, Loss: 1.520e-03, C: 4.482, Time: 0.03\n",
"It: 18020, Loss: 1.384e-03, C: 4.482, Time: 0.03\n",
"It: 18030, Loss: 1.347e-03, C: 4.482, Time: 0.04\n",
"It: 18040, Loss: 2.513e-03, C: 4.482, Time: 0.03\n",
"It: 18050, Loss: 1.701e-03, C: 4.482, Time: 0.03\n",
"It: 18060, Loss: 2.350e-03, C: 4.482, Time: 0.03\n",
"It: 18070, Loss: 3.355e-03, C: 4.482, Time: 0.03\n",
"It: 18080, Loss: 3.435e-03, C: 4.482, Time: 0.03\n",
"It: 18090, Loss: 1.463e-03, C: 4.482, Time: 0.03\n",
"It: 18100, Loss: 3.691e-03, C: 4.482, Time: 0.04\n",
"It: 18110, Loss: 2.160e-03, C: 4.482, Time: 0.04\n",
"It: 18120, Loss: 3.386e-03, C: 4.482, Time: 0.04\n",
"It: 18130, Loss: 2.135e-03, C: 4.482, Time: 0.03\n",
"It: 18140, Loss: 1.689e-03, C: 4.482, Time: 0.03\n",
"It: 18150, Loss: 1.801e-03, C: 4.482, Time: 0.03\n",
"It: 18160, Loss: 2.128e-03, C: 4.482, Time: 0.03\n",
"It: 18170, Loss: 1.153e-03, C: 4.482, Time: 0.03\n",
"It: 18180, Loss: 2.283e-03, C: 4.482, Time: 0.03\n",
"It: 18190, Loss: 1.803e-03, C: 4.482, Time: 0.03\n",
"It: 18200, Loss: 2.779e-03, C: 4.482, Time: 0.04\n",
"It: 18210, Loss: 3.173e-03, C: 4.482, Time: 0.03\n",
"It: 18220, Loss: 1.441e-03, C: 4.482, Time: 0.03\n",
"It: 18230, Loss: 2.320e-03, C: 4.482, Time: 0.03\n",
"It: 18240, Loss: 2.979e-03, C: 4.482, Time: 0.03\n",
"It: 18250, Loss: 1.933e-03, C: 4.482, Time: 0.03\n",
"It: 18260, Loss: 1.768e-03, C: 4.482, Time: 0.03\n",
"It: 18270, Loss: 2.031e-03, C: 4.482, Time: 0.03\n",
"It: 18280, Loss: 1.685e-03, C: 4.482, Time: 0.03\n",
"It: 18290, Loss: 1.358e-03, C: 4.482, Time: 0.03\n",
"It: 18300, Loss: 2.579e-03, C: 4.482, Time: 0.03\n",
"It: 18310, Loss: 2.361e-03, C: 4.482, Time: 0.03\n",
"It: 18320, Loss: 1.856e-03, C: 4.482, Time: 0.03\n",
"It: 18330, Loss: 1.424e-03, C: 4.482, Time: 0.03\n",
"It: 18340, Loss: 1.685e-03, C: 4.482, Time: 0.03\n",
"It: 18350, Loss: 1.909e-03, C: 4.482, Time: 0.03\n",
"It: 18360, Loss: 2.339e-03, C: 4.482, Time: 0.03\n",
"It: 18370, Loss: 2.466e-03, C: 4.482, Time: 0.03\n",
"It: 18380, Loss: 3.533e-03, C: 4.482, Time: 0.03\n",
"It: 18390, Loss: 3.002e-03, C: 4.482, Time: 0.03\n",
"It: 18400, Loss: 2.394e-03, C: 4.482, Time: 0.03\n",
"It: 18410, Loss: 2.873e-03, C: 4.482, Time: 0.03\n",
"It: 18420, Loss: 3.051e-03, C: 4.482, Time: 0.03\n",
"It: 18430, Loss: 2.289e-03, C: 4.482, Time: 0.03\n",
"It: 18440, Loss: 1.721e-03, C: 4.482, Time: 0.03\n",
"It: 18450, Loss: 4.897e-03, C: 4.482, Time: 0.03\n",
"It: 18460, Loss: 1.833e-03, C: 4.482, Time: 0.03\n",
"It: 18470, Loss: 1.964e-03, C: 4.482, Time: 0.03\n",
"It: 18480, Loss: 1.550e-03, C: 4.482, Time: 0.03\n",
"It: 18490, Loss: 2.242e-03, C: 4.482, Time: 0.03\n",
"It: 18500, Loss: 2.316e-03, C: 4.482, Time: 0.03\n",
"It: 18510, Loss: 2.727e-03, C: 4.482, Time: 0.03\n",
"It: 18520, Loss: 2.136e-03, C: 4.482, Time: 0.03\n",
"It: 18530, Loss: 1.191e-03, C: 4.482, Time: 0.03\n",
"It: 18540, Loss: 1.655e-03, C: 4.482, Time: 0.03\n",
"It: 18550, Loss: 2.001e-03, C: 4.482, Time: 0.04\n",
"It: 18560, Loss: 2.012e-03, C: 4.482, Time: 0.04\n",
"It: 18570, Loss: 2.185e-03, C: 4.482, Time: 0.03\n",
"It: 18580, Loss: 2.054e-03, C: 4.482, Time: 0.04\n",
"It: 18590, Loss: 2.952e-03, C: 4.482, Time: 0.04\n",
"It: 18600, Loss: 1.153e-03, C: 4.482, Time: 0.04\n",
"It: 18610, Loss: 1.442e-03, C: 4.482, Time: 0.03\n",
"It: 18620, Loss: 1.453e-03, C: 4.482, Time: 0.03\n",
"It: 18630, Loss: 1.717e-03, C: 4.482, Time: 0.03\n",
"It: 18640, Loss: 2.375e-03, C: 4.482, Time: 0.03\n",
"It: 18650, Loss: 3.118e-03, C: 4.482, Time: 0.03\n",
"It: 18660, Loss: 2.147e-03, C: 4.482, Time: 0.03\n",
"It: 18670, Loss: 1.744e-03, C: 4.482, Time: 0.03\n",
"It: 18680, Loss: 2.122e-03, C: 4.482, Time: 0.03\n",
"It: 18690, Loss: 1.695e-03, C: 4.482, Time: 0.04\n",
"It: 18700, Loss: 3.423e-03, C: 4.482, Time: 0.04\n",
"It: 18710, Loss: 3.840e-03, C: 4.482, Time: 0.03\n",
"It: 18720, Loss: 1.909e-03, C: 4.482, Time: 0.03\n",
"It: 18730, Loss: 3.872e-03, C: 4.482, Time: 0.04\n",
"It: 18740, Loss: 1.852e-03, C: 4.482, Time: 0.03\n",
"It: 18750, Loss: 3.000e-03, C: 4.482, Time: 0.03\n",
"It: 18760, Loss: 1.229e-03, C: 4.482, Time: 0.03\n",
"It: 18770, Loss: 2.494e-03, C: 4.482, Time: 0.03\n",
"It: 18780, Loss: 2.095e-03, C: 4.482, Time: 0.03\n",
"It: 18790, Loss: 1.610e-03, C: 4.482, Time: 0.03\n",
"It: 18800, Loss: 1.760e-03, C: 4.482, Time: 0.04\n",
"It: 18810, Loss: 1.576e-03, C: 4.482, Time: 0.04\n",
"It: 18820, Loss: 1.625e-03, C: 4.482, Time: 0.04\n",
"It: 18830, Loss: 1.130e-03, C: 4.482, Time: 0.04\n",
"It: 18840, Loss: 1.461e-03, C: 4.482, Time: 0.04\n",
"It: 18850, Loss: 2.220e-03, C: 4.482, Time: 0.04\n",
"It: 18860, Loss: 1.900e-03, C: 4.482, Time: 0.04\n",
"It: 18870, Loss: 1.679e-03, C: 4.482, Time: 0.03\n",
"It: 18880, Loss: 2.018e-03, C: 4.482, Time: 0.03\n",
"It: 18890, Loss: 1.765e-03, C: 4.482, Time: 0.03\n",
"It: 18900, Loss: 2.187e-03, C: 4.482, Time: 0.03\n",
"It: 18910, Loss: 1.858e-03, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 18920, Loss: 1.695e-03, C: 4.482, Time: 0.04\n",
"It: 18930, Loss: 1.283e-03, C: 4.482, Time: 0.03\n",
"It: 18940, Loss: 2.761e-03, C: 4.482, Time: 0.03\n",
"It: 18950, Loss: 1.283e-03, C: 4.482, Time: 0.04\n",
"It: 18960, Loss: 2.546e-03, C: 4.482, Time: 0.03\n",
"It: 18970, Loss: 2.216e-03, C: 4.482, Time: 0.03\n",
"It: 18980, Loss: 2.437e-03, C: 4.482, Time: 0.04\n",
"It: 18990, Loss: 1.940e-03, C: 4.482, Time: 0.04\n",
"It: 19000, Loss: 2.480e-03, C: 4.482, Time: 0.03\n",
"It: 19010, Loss: 1.550e-03, C: 4.482, Time: 0.03\n",
"It: 19020, Loss: 1.894e-03, C: 4.482, Time: 0.03\n",
"It: 19030, Loss: 1.994e-03, C: 4.482, Time: 0.03\n",
"It: 19040, Loss: 2.081e-03, C: 4.482, Time: 0.03\n",
"It: 19050, Loss: 1.083e-03, C: 4.482, Time: 0.03\n",
"It: 19060, Loss: 1.459e-03, C: 4.482, Time: 0.03\n",
"It: 19070, Loss: 1.453e-03, C: 4.482, Time: 0.03\n",
"It: 19080, Loss: 2.155e-03, C: 4.482, Time: 0.03\n",
"It: 19090, Loss: 2.200e-03, C: 4.482, Time: 0.03\n",
"It: 19100, Loss: 8.998e-04, C: 4.482, Time: 0.03\n",
"It: 19110, Loss: 3.580e-03, C: 4.482, Time: 0.03\n",
"It: 19120, Loss: 2.133e-03, C: 4.482, Time: 0.03\n",
"It: 19130, Loss: 2.911e-03, C: 4.482, Time: 0.03\n",
"It: 19140, Loss: 1.353e-03, C: 4.482, Time: 0.03\n",
"It: 19150, Loss: 2.018e-03, C: 4.482, Time: 0.03\n",
"It: 19160, Loss: 3.046e-03, C: 4.482, Time: 0.04\n",
"It: 19170, Loss: 2.866e-03, C: 4.482, Time: 0.03\n",
"It: 19180, Loss: 2.511e-03, C: 4.482, Time: 0.03\n",
"It: 19190, Loss: 1.462e-03, C: 4.482, Time: 0.03\n",
"It: 19200, Loss: 1.842e-03, C: 4.482, Time: 0.03\n",
"It: 19210, Loss: 1.663e-03, C: 4.482, Time: 0.03\n",
"It: 19220, Loss: 1.929e-03, C: 4.482, Time: 0.04\n",
"It: 19230, Loss: 3.003e-03, C: 4.482, Time: 0.04\n",
"It: 19240, Loss: 1.534e-03, C: 4.482, Time: 0.04\n",
"It: 19250, Loss: 2.802e-03, C: 4.482, Time: 0.04\n",
"It: 19260, Loss: 2.444e-03, C: 4.482, Time: 0.03\n",
"It: 19270, Loss: 1.548e-03, C: 4.482, Time: 0.03\n",
"It: 19280, Loss: 1.667e-03, C: 4.482, Time: 0.04\n",
"It: 19290, Loss: 2.328e-03, C: 4.482, Time: 0.03\n",
"It: 19300, Loss: 2.278e-03, C: 4.482, Time: 0.04\n",
"It: 19310, Loss: 1.687e-03, C: 4.482, Time: 0.04\n",
"It: 19320, Loss: 1.825e-03, C: 4.482, Time: 0.03\n",
"It: 19330, Loss: 2.650e-03, C: 4.482, Time: 0.03\n",
"It: 19340, Loss: 9.019e-04, C: 4.482, Time: 0.03\n",
"It: 19350, Loss: 9.443e-04, C: 4.482, Time: 0.03\n",
"It: 19360, Loss: 1.151e-03, C: 4.482, Time: 0.03\n",
"It: 19370, Loss: 2.465e-03, C: 4.482, Time: 0.03\n",
"It: 19380, Loss: 2.141e-03, C: 4.482, Time: 0.03\n",
"It: 19390, Loss: 1.078e-03, C: 4.482, Time: 0.04\n",
"It: 19400, Loss: 1.429e-03, C: 4.482, Time: 0.03\n",
"It: 19410, Loss: 1.717e-03, C: 4.482, Time: 0.03\n",
"It: 19420, Loss: 1.778e-03, C: 4.482, Time: 0.03\n",
"It: 19430, Loss: 1.377e-03, C: 4.482, Time: 0.03\n",
"It: 19440, Loss: 2.705e-03, C: 4.482, Time: 0.03\n",
"It: 19450, Loss: 2.060e-03, C: 4.482, Time: 0.03\n",
"It: 19460, Loss: 3.398e-03, C: 4.482, Time: 0.04\n",
"It: 19470, Loss: 2.269e-03, C: 4.482, Time: 0.03\n",
"It: 19480, Loss: 1.579e-03, C: 4.482, Time: 0.03\n",
"It: 19490, Loss: 1.081e-03, C: 4.482, Time: 0.03\n",
"It: 19500, Loss: 1.635e-03, C: 4.482, Time: 0.03\n",
"It: 19510, Loss: 1.927e-03, C: 4.482, Time: 0.03\n",
"It: 19520, Loss: 2.912e-03, C: 4.482, Time: 0.03\n",
"It: 19530, Loss: 1.638e-03, C: 4.482, Time: 0.04\n",
"It: 19540, Loss: 9.912e-04, C: 4.482, Time: 0.04\n",
"It: 19550, Loss: 1.540e-03, C: 4.482, Time: 0.03\n",
"It: 19560, Loss: 1.069e-03, C: 4.482, Time: 0.04\n",
"It: 19570, Loss: 1.393e-03, C: 4.482, Time: 0.03\n",
"It: 19580, Loss: 1.080e-03, C: 4.482, Time: 0.04\n",
"It: 19590, Loss: 1.391e-03, C: 4.482, Time: 0.04\n",
"It: 19600, Loss: 2.270e-03, C: 4.482, Time: 0.04\n",
"It: 19610, Loss: 1.915e-03, C: 4.482, Time: 0.03\n",
"It: 19620, Loss: 1.695e-03, C: 4.482, Time: 0.03\n",
"It: 19630, Loss: 2.909e-03, C: 4.482, Time: 0.04\n",
"It: 19640, Loss: 1.073e-03, C: 4.482, Time: 0.04\n",
"It: 19650, Loss: 1.321e-03, C: 4.482, Time: 0.04\n",
"It: 19660, Loss: 1.798e-03, C: 4.482, Time: 0.03\n",
"It: 19670, Loss: 1.873e-03, C: 4.482, Time: 0.04\n",
"It: 19680, Loss: 1.427e-03, C: 4.482, Time: 0.04\n",
"It: 19690, Loss: 3.108e-03, C: 4.482, Time: 0.03\n",
"It: 19700, Loss: 1.337e-03, C: 4.482, Time: 0.03\n",
"It: 19710, Loss: 1.255e-03, C: 4.482, Time: 0.04\n",
"It: 19720, Loss: 9.255e-04, C: 4.482, Time: 0.04\n",
"It: 19730, Loss: 1.168e-03, C: 4.482, Time: 0.04\n",
"It: 19740, Loss: 8.768e-04, C: 4.482, Time: 0.03\n",
"It: 19750, Loss: 1.495e-03, C: 4.482, Time: 0.03\n",
"It: 19760, Loss: 2.434e-03, C: 4.482, Time: 0.03\n",
"It: 19770, Loss: 1.655e-03, C: 4.482, Time: 0.04\n",
"It: 19780, Loss: 1.015e-03, C: 4.482, Time: 0.04\n",
"It: 19790, Loss: 1.628e-03, C: 4.482, Time: 0.04\n",
"It: 19800, Loss: 1.679e-03, C: 4.482, Time: 0.04\n",
"It: 19810, Loss: 1.204e-03, C: 4.482, Time: 0.04\n",
"It: 19820, Loss: 1.158e-03, C: 4.482, Time: 0.04\n",
"It: 19830, Loss: 1.547e-03, C: 4.482, Time: 0.03\n",
"It: 19840, Loss: 1.270e-03, C: 4.482, Time: 0.04\n",
"It: 19850, Loss: 3.493e-03, C: 4.482, Time: 0.03\n",
"It: 19860, Loss: 1.757e-03, C: 4.482, Time: 0.03\n",
"It: 19870, Loss: 9.633e-04, C: 4.482, Time: 0.03\n",
"It: 19880, Loss: 1.860e-03, C: 4.482, Time: 0.03\n",
"It: 19890, Loss: 3.636e-03, C: 4.482, Time: 0.04\n",
"It: 19900, Loss: 1.667e-03, C: 4.482, Time: 0.03\n",
"It: 19910, Loss: 1.454e-03, C: 4.482, Time: 0.03\n",
"It: 19920, Loss: 1.478e-03, C: 4.482, Time: 0.03\n",
"It: 19930, Loss: 6.882e-04, C: 4.482, Time: 0.04\n",
"It: 19940, Loss: 1.845e-03, C: 4.482, Time: 0.04\n",
"It: 19950, Loss: 1.499e-03, C: 4.482, Time: 0.03\n",
"It: 19960, Loss: 2.518e-03, C: 4.482, Time: 0.04\n",
"It: 19970, Loss: 2.039e-03, C: 4.482, Time: 0.03\n",
"It: 19980, Loss: 9.815e-04, C: 4.482, Time: 0.03\n",
"It: 19990, Loss: 1.380e-03, C: 4.482, Time: 0.03\n",
"It: 20000, Loss: 1.451e-03, C: 4.482, Time: 0.03\n",
"It: 20010, Loss: 1.527e-03, C: 4.482, Time: 0.03\n",
"It: 20020, Loss: 1.022e-03, C: 4.482, Time: 0.04\n",
"It: 20030, Loss: 1.838e-03, C: 4.482, Time: 0.04\n",
"It: 20040, Loss: 1.410e-03, C: 4.482, Time: 0.04\n",
"It: 20050, Loss: 3.123e-03, C: 4.482, Time: 0.04\n",
"It: 20060, Loss: 1.212e-03, C: 4.482, Time: 0.04\n",
"It: 20070, Loss: 1.354e-03, C: 4.482, Time: 0.03\n",
"It: 20080, Loss: 1.065e-03, C: 4.482, Time: 0.03\n",
"It: 20090, Loss: 1.843e-03, C: 4.482, Time: 0.03\n",
"It: 20100, Loss: 1.360e-03, C: 4.482, Time: 0.03\n",
"It: 20110, Loss: 1.749e-03, C: 4.482, Time: 0.03\n",
"It: 20120, Loss: 1.850e-03, C: 4.482, Time: 0.04\n",
"It: 20130, Loss: 8.696e-04, C: 4.482, Time: 0.03\n",
"It: 20140, Loss: 3.540e-03, C: 4.482, Time: 0.03\n",
"It: 20150, Loss: 7.974e-04, C: 4.482, Time: 0.03\n",
"It: 20160, Loss: 1.513e-03, C: 4.482, Time: 0.03\n",
"It: 20170, Loss: 1.142e-03, C: 4.482, Time: 0.03\n",
"It: 20180, Loss: 2.353e-03, C: 4.482, Time: 0.04\n",
"It: 20190, Loss: 1.802e-03, C: 4.482, Time: 0.03\n",
"It: 20200, Loss: 1.161e-03, C: 4.482, Time: 0.03\n",
"It: 20210, Loss: 1.294e-03, C: 4.482, Time: 0.03\n",
"It: 20220, Loss: 1.940e-03, C: 4.482, Time: 0.04\n",
"It: 20230, Loss: 1.075e-03, C: 4.482, Time: 0.04\n",
"It: 20240, Loss: 1.941e-03, C: 4.482, Time: 0.04\n",
"It: 20250, Loss: 1.437e-03, C: 4.482, Time: 0.03\n",
"It: 20260, Loss: 1.715e-03, C: 4.482, Time: 0.04\n",
"It: 20270, Loss: 2.512e-03, C: 4.482, Time: 0.04\n",
"It: 20280, Loss: 3.339e-03, C: 4.482, Time: 0.04\n",
"It: 20290, Loss: 8.586e-04, C: 4.482, Time: 0.04\n",
"It: 20300, Loss: 2.136e-03, C: 4.482, Time: 0.04\n",
"It: 20310, Loss: 2.415e-03, C: 4.482, Time: 0.04\n",
"It: 20320, Loss: 2.027e-03, C: 4.482, Time: 0.04\n",
"It: 20330, Loss: 9.264e-04, C: 4.482, Time: 0.04\n",
"It: 20340, Loss: 2.491e-03, C: 4.482, Time: 0.04\n",
"It: 20350, Loss: 1.944e-03, C: 4.482, Time: 0.03\n",
"It: 20360, Loss: 1.514e-03, C: 4.482, Time: 0.03\n",
"It: 20370, Loss: 1.030e-03, C: 4.482, Time: 0.04\n",
"It: 20380, Loss: 1.455e-03, C: 4.482, Time: 0.04\n",
"It: 20390, Loss: 1.192e-03, C: 4.482, Time: 0.03\n",
"It: 20400, Loss: 1.345e-03, C: 4.482, Time: 0.04\n",
"It: 20410, Loss: 1.538e-03, C: 4.482, Time: 0.04\n",
"It: 20420, Loss: 1.197e-03, C: 4.482, Time: 0.04\n",
"It: 20430, Loss: 1.226e-03, C: 4.482, Time: 0.04\n",
"It: 20440, Loss: 1.081e-03, C: 4.482, Time: 0.04\n",
"It: 20450, Loss: 1.093e-03, C: 4.482, Time: 0.04\n",
"It: 20460, Loss: 2.368e-03, C: 4.482, Time: 0.03\n",
"It: 20470, Loss: 1.899e-03, C: 4.482, Time: 0.03\n",
"It: 20480, Loss: 1.598e-03, C: 4.482, Time: 0.04\n",
"It: 20490, Loss: 1.671e-03, C: 4.482, Time: 0.04\n",
"It: 20500, Loss: 2.104e-03, C: 4.482, Time: 0.03\n",
"It: 20510, Loss: 1.718e-03, C: 4.482, Time: 0.03\n",
"It: 20520, Loss: 1.074e-03, C: 4.482, Time: 0.03\n",
"It: 20530, Loss: 1.088e-03, C: 4.482, Time: 0.04\n",
"It: 20540, Loss: 1.808e-03, C: 4.482, Time: 0.03\n",
"It: 20550, Loss: 2.124e-03, C: 4.482, Time: 0.04\n",
"It: 20560, Loss: 8.767e-04, C: 4.482, Time: 0.03\n",
"It: 20570, Loss: 1.471e-03, C: 4.482, Time: 0.04\n",
"It: 20580, Loss: 2.397e-03, C: 4.482, Time: 0.04\n",
"It: 20590, Loss: 1.947e-03, C: 4.482, Time: 0.04\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 20600, Loss: 1.213e-03, C: 4.482, Time: 0.04\n",
"It: 20610, Loss: 2.122e-03, C: 4.482, Time: 0.04\n",
"It: 20620, Loss: 7.427e-04, C: 4.482, Time: 0.03\n",
"It: 20630, Loss: 1.424e-03, C: 4.482, Time: 0.04\n",
"It: 20640, Loss: 2.237e-03, C: 4.482, Time: 0.04\n",
"It: 20650, Loss: 1.399e-03, C: 4.482, Time: 0.04\n",
"It: 20660, Loss: 1.607e-03, C: 4.482, Time: 0.03\n",
"It: 20670, Loss: 2.290e-03, C: 4.482, Time: 0.04\n",
"It: 20680, Loss: 3.441e-03, C: 4.482, Time: 0.04\n",
"It: 20690, Loss: 1.045e-03, C: 4.482, Time: 0.04\n",
"It: 20700, Loss: 1.051e-03, C: 4.482, Time: 0.03\n",
"It: 20710, Loss: 1.295e-03, C: 4.482, Time: 0.04\n",
"It: 20720, Loss: 1.644e-03, C: 4.482, Time: 0.04\n",
"It: 20730, Loss: 1.250e-03, C: 4.482, Time: 0.04\n",
"It: 20740, Loss: 6.127e-04, C: 4.482, Time: 0.03\n",
"It: 20750, Loss: 1.285e-03, C: 4.482, Time: 0.03\n",
"It: 20760, Loss: 1.143e-03, C: 4.482, Time: 0.03\n",
"It: 20770, Loss: 1.518e-03, C: 4.482, Time: 0.03\n",
"It: 20780, Loss: 1.241e-03, C: 4.482, Time: 0.03\n",
"It: 20790, Loss: 6.065e-04, C: 4.482, Time: 0.04\n",
"It: 20800, Loss: 1.687e-03, C: 4.482, Time: 0.04\n",
"It: 20810, Loss: 7.353e-04, C: 4.482, Time: 0.04\n",
"It: 20820, Loss: 1.928e-03, C: 4.482, Time: 0.03\n",
"It: 20830, Loss: 1.387e-03, C: 4.482, Time: 0.04\n",
"It: 20840, Loss: 1.424e-03, C: 4.482, Time: 0.04\n",
"It: 20850, Loss: 2.983e-03, C: 4.482, Time: 0.04\n",
"It: 20860, Loss: 1.924e-03, C: 4.482, Time: 0.04\n",
"It: 20870, Loss: 2.166e-03, C: 4.482, Time: 0.04\n",
"It: 20880, Loss: 1.233e-03, C: 4.482, Time: 0.04\n",
"It: 20890, Loss: 1.018e-03, C: 4.482, Time: 0.04\n",
"It: 20900, Loss: 9.171e-04, C: 4.482, Time: 0.04\n",
"It: 20910, Loss: 1.253e-03, C: 4.482, Time: 0.04\n",
"It: 20920, Loss: 1.056e-03, C: 4.482, Time: 0.04\n",
"It: 20930, Loss: 2.296e-03, C: 4.482, Time: 0.04\n",
"It: 20940, Loss: 1.412e-03, C: 4.482, Time: 0.04\n",
"It: 20950, Loss: 1.162e-03, C: 4.482, Time: 0.04\n",
"It: 20960, Loss: 9.621e-04, C: 4.482, Time: 0.03\n",
"It: 20970, Loss: 1.834e-03, C: 4.482, Time: 0.04\n",
"It: 20980, Loss: 1.170e-03, C: 4.482, Time: 0.04\n",
"It: 20990, Loss: 1.254e-03, C: 4.482, Time: 0.03\n",
"It: 21000, Loss: 1.210e-03, C: 4.482, Time: 0.03\n",
"It: 21010, Loss: 1.590e-03, C: 4.482, Time: 0.03\n",
"It: 21020, Loss: 9.433e-04, C: 4.482, Time: 0.03\n",
"It: 21030, Loss: 9.514e-04, C: 4.482, Time: 0.04\n",
"It: 21040, Loss: 1.475e-03, C: 4.482, Time: 0.04\n",
"It: 21050, Loss: 1.078e-03, C: 4.482, Time: 0.04\n",
"It: 21060, Loss: 1.280e-03, C: 4.482, Time: 0.03\n",
"It: 21070, Loss: 1.868e-03, C: 4.482, Time: 0.03\n",
"It: 21080, Loss: 5.778e-04, C: 4.482, Time: 0.03\n",
"It: 21090, Loss: 1.239e-03, C: 4.482, Time: 0.03\n",
"It: 21100, Loss: 1.472e-03, C: 4.482, Time: 0.03\n",
"It: 21110, Loss: 2.354e-03, C: 4.482, Time: 0.03\n",
"It: 21120, Loss: 2.933e-03, C: 4.482, Time: 0.03\n",
"It: 21130, Loss: 1.271e-03, C: 4.482, Time: 0.03\n",
"It: 21140, Loss: 9.558e-04, C: 4.482, Time: 0.03\n",
"It: 21150, Loss: 1.527e-03, C: 4.482, Time: 0.03\n",
"It: 21160, Loss: 6.883e-04, C: 4.482, Time: 0.03\n",
"It: 21170, Loss: 2.663e-03, C: 4.482, Time: 0.04\n",
"It: 21180, Loss: 1.690e-03, C: 4.482, Time: 0.04\n",
"It: 21190, Loss: 2.583e-03, C: 4.482, Time: 0.04\n",
"It: 21200, Loss: 1.535e-03, C: 4.482, Time: 0.03\n",
"It: 21210, Loss: 6.007e-04, C: 4.482, Time: 0.04\n",
"It: 21220, Loss: 8.385e-04, C: 4.482, Time: 0.03\n",
"It: 21230, Loss: 6.242e-04, C: 4.482, Time: 0.03\n",
"It: 21240, Loss: 9.570e-04, C: 4.482, Time: 0.03\n",
"It: 21250, Loss: 1.429e-03, C: 4.482, Time: 0.04\n",
"It: 21260, Loss: 8.854e-04, C: 4.482, Time: 0.03\n",
"It: 21270, Loss: 6.971e-04, C: 4.482, Time: 0.03\n",
"It: 21280, Loss: 6.707e-04, C: 4.482, Time: 0.03\n",
"It: 21290, Loss: 8.555e-04, C: 4.482, Time: 0.04\n",
"It: 21300, Loss: 1.153e-03, C: 4.482, Time: 0.04\n",
"It: 21310, Loss: 1.885e-03, C: 4.482, Time: 0.04\n",
"It: 21320, Loss: 2.106e-03, C: 4.482, Time: 0.04\n",
"It: 21330, Loss: 2.343e-03, C: 4.482, Time: 0.04\n",
"It: 21340, Loss: 1.238e-03, C: 4.482, Time: 0.03\n",
"It: 21350, Loss: 1.675e-03, C: 4.482, Time: 0.03\n",
"It: 21360, Loss: 9.993e-04, C: 4.482, Time: 0.03\n",
"It: 21370, Loss: 2.054e-03, C: 4.482, Time: 0.03\n",
"It: 21380, Loss: 1.219e-03, C: 4.482, Time: 0.03\n",
"It: 21390, Loss: 1.084e-03, C: 4.482, Time: 0.03\n",
"It: 21400, Loss: 1.812e-03, C: 4.482, Time: 0.03\n",
"It: 21410, Loss: 1.550e-03, C: 4.482, Time: 0.03\n",
"It: 21420, Loss: 2.069e-03, C: 4.482, Time: 0.04\n",
"It: 21430, Loss: 2.300e-03, C: 4.482, Time: 0.04\n",
"It: 21440, Loss: 9.575e-04, C: 4.482, Time: 0.04\n",
"It: 21450, Loss: 2.005e-03, C: 4.482, Time: 0.04\n",
"It: 21460, Loss: 1.419e-03, C: 4.482, Time: 0.04\n",
"It: 21470, Loss: 6.005e-04, C: 4.482, Time: 0.04\n",
"It: 21480, Loss: 1.051e-03, C: 4.482, Time: 0.04\n",
"It: 21490, Loss: 1.861e-03, C: 4.482, Time: 0.03\n",
"It: 21500, Loss: 1.233e-03, C: 4.482, Time: 0.04\n",
"It: 21510, Loss: 2.804e-03, C: 4.482, Time: 0.04\n",
"It: 21520, Loss: 6.948e-04, C: 4.482, Time: 0.04\n",
"It: 21530, Loss: 1.361e-03, C: 4.482, Time: 0.04\n",
"It: 21540, Loss: 8.587e-04, C: 4.482, Time: 0.04\n",
"It: 21550, Loss: 1.320e-03, C: 4.482, Time: 0.03\n",
"It: 21560, Loss: 1.054e-03, C: 4.482, Time: 0.04\n",
"It: 21570, Loss: 1.705e-03, C: 4.482, Time: 0.04\n",
"It: 21580, Loss: 1.978e-03, C: 4.482, Time: 0.04\n",
"It: 21590, Loss: 8.791e-04, C: 4.482, Time: 0.04\n",
"It: 21600, Loss: 1.214e-03, C: 4.482, Time: 0.04\n",
"It: 21610, Loss: 1.657e-03, C: 4.482, Time: 0.04\n",
"It: 21620, Loss: 1.397e-03, C: 4.482, Time: 0.03\n",
"It: 21630, Loss: 6.369e-04, C: 4.482, Time: 0.04\n",
"It: 21640, Loss: 1.362e-03, C: 4.482, Time: 0.04\n",
"It: 21650, Loss: 1.321e-03, C: 4.482, Time: 0.04\n",
"It: 21660, Loss: 1.183e-03, C: 4.482, Time: 0.04\n",
"It: 21670, Loss: 6.810e-04, C: 4.482, Time: 0.04\n",
"It: 21680, Loss: 2.259e-03, C: 4.482, Time: 0.04\n",
"It: 21690, Loss: 1.758e-03, C: 4.482, Time: 0.04\n",
"It: 21700, Loss: 1.481e-03, C: 4.482, Time: 0.04\n",
"It: 21710, Loss: 8.635e-04, C: 4.482, Time: 0.04\n",
"It: 21720, Loss: 1.119e-03, C: 4.482, Time: 0.04\n",
"It: 21730, Loss: 1.708e-03, C: 4.482, Time: 0.04\n",
"It: 21740, Loss: 1.700e-03, C: 4.482, Time: 0.04\n",
"It: 21750, Loss: 1.373e-03, C: 4.482, Time: 0.04\n",
"It: 21760, Loss: 1.618e-03, C: 4.482, Time: 0.04\n",
"It: 21770, Loss: 6.749e-04, C: 4.482, Time: 0.04\n",
"It: 21780, Loss: 1.828e-03, C: 4.482, Time: 0.04\n",
"It: 21790, Loss: 1.382e-03, C: 4.482, Time: 0.04\n",
"It: 21800, Loss: 8.981e-04, C: 4.482, Time: 0.04\n",
"It: 21810, Loss: 1.093e-03, C: 4.482, Time: 0.04\n",
"It: 21820, Loss: 1.050e-03, C: 4.482, Time: 0.04\n",
"It: 21830, Loss: 6.008e-04, C: 4.482, Time: 0.04\n",
"It: 21840, Loss: 2.095e-03, C: 4.482, Time: 0.04\n",
"It: 21850, Loss: 3.005e-03, C: 4.482, Time: 0.04\n",
"It: 21860, Loss: 9.966e-04, C: 4.482, Time: 0.04\n",
"It: 21870, Loss: 9.518e-04, C: 4.482, Time: 0.03\n",
"It: 21880, Loss: 8.009e-04, C: 4.482, Time: 0.03\n",
"It: 21890, Loss: 1.202e-03, C: 4.482, Time: 0.03\n",
"It: 21900, Loss: 1.759e-03, C: 4.482, Time: 0.04\n",
"It: 21910, Loss: 2.344e-03, C: 4.482, Time: 0.04\n",
"It: 21920, Loss: 1.843e-03, C: 4.482, Time: 0.04\n",
"It: 21930, Loss: 8.529e-04, C: 4.482, Time: 0.03\n",
"It: 21940, Loss: 2.031e-03, C: 4.482, Time: 0.04\n",
"It: 21950, Loss: 1.508e-03, C: 4.482, Time: 0.04\n",
"It: 21960, Loss: 2.730e-03, C: 4.482, Time: 0.04\n",
"It: 21970, Loss: 2.969e-03, C: 4.482, Time: 0.04\n",
"It: 21980, Loss: 2.012e-03, C: 4.482, Time: 0.03\n",
"It: 21990, Loss: 8.831e-04, C: 4.482, Time: 0.04\n",
"It: 22000, Loss: 7.263e-04, C: 4.482, Time: 0.04\n",
"It: 22010, Loss: 2.609e-03, C: 4.482, Time: 0.04\n",
"It: 22020, Loss: 6.670e-04, C: 4.482, Time: 0.04\n",
"It: 22030, Loss: 6.054e-04, C: 4.482, Time: 0.04\n",
"It: 22040, Loss: 2.300e-03, C: 4.482, Time: 0.04\n",
"It: 22050, Loss: 1.198e-03, C: 4.482, Time: 0.04\n",
"It: 22060, Loss: 9.681e-04, C: 4.482, Time: 0.03\n",
"It: 22070, Loss: 1.324e-03, C: 4.482, Time: 0.03\n",
"It: 22080, Loss: 7.926e-04, C: 4.482, Time: 0.03\n",
"It: 22090, Loss: 1.051e-03, C: 4.482, Time: 0.03\n",
"It: 22100, Loss: 9.338e-04, C: 4.482, Time: 0.03\n",
"It: 22110, Loss: 1.453e-03, C: 4.482, Time: 0.04\n",
"It: 22120, Loss: 1.578e-03, C: 4.482, Time: 0.03\n",
"It: 22130, Loss: 1.549e-03, C: 4.482, Time: 0.04\n",
"It: 22140, Loss: 6.346e-04, C: 4.482, Time: 0.04\n",
"It: 22150, Loss: 3.692e-03, C: 4.482, Time: 0.03\n",
"It: 22160, Loss: 6.436e-04, C: 4.482, Time: 0.03\n",
"It: 22170, Loss: 1.747e-03, C: 4.482, Time: 0.04\n",
"It: 22180, Loss: 1.756e-03, C: 4.482, Time: 0.03\n",
"It: 22190, Loss: 1.294e-03, C: 4.482, Time: 0.04\n",
"It: 22200, Loss: 1.277e-03, C: 4.482, Time: 0.04\n",
"It: 22210, Loss: 1.465e-03, C: 4.482, Time: 0.04\n",
"It: 22220, Loss: 1.664e-03, C: 4.482, Time: 0.04\n",
"It: 22230, Loss: 1.679e-03, C: 4.482, Time: 0.04\n",
"It: 22240, Loss: 2.273e-03, C: 4.482, Time: 0.04\n",
"It: 22250, Loss: 1.538e-03, C: 4.482, Time: 0.04\n",
"It: 22260, Loss: 9.775e-04, C: 4.482, Time: 0.04\n",
"It: 22270, Loss: 7.485e-04, C: 4.482, Time: 0.04\n",
"It: 22280, Loss: 9.970e-04, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 22290, Loss: 1.144e-03, C: 4.482, Time: 0.04\n",
"It: 22300, Loss: 7.127e-04, C: 4.482, Time: 0.04\n",
"It: 22310, Loss: 6.082e-04, C: 4.482, Time: 0.04\n",
"It: 22320, Loss: 6.554e-04, C: 4.482, Time: 0.04\n",
"It: 22330, Loss: 1.499e-03, C: 4.482, Time: 0.04\n",
"It: 22340, Loss: 9.976e-04, C: 4.482, Time: 0.04\n",
"It: 22350, Loss: 1.513e-03, C: 4.482, Time: 0.04\n",
"It: 22360, Loss: 7.987e-04, C: 4.482, Time: 0.04\n",
"It: 22370, Loss: 1.713e-03, C: 4.482, Time: 0.04\n",
"It: 22380, Loss: 1.630e-03, C: 4.482, Time: 0.04\n",
"It: 22390, Loss: 1.021e-03, C: 4.482, Time: 0.03\n",
"It: 22400, Loss: 8.019e-04, C: 4.482, Time: 0.04\n",
"It: 22410, Loss: 1.596e-03, C: 4.482, Time: 0.04\n",
"It: 22420, Loss: 1.748e-03, C: 4.482, Time: 0.03\n",
"It: 22430, Loss: 9.184e-04, C: 4.482, Time: 0.03\n",
"It: 22440, Loss: 2.356e-03, C: 4.482, Time: 0.03\n",
"It: 22450, Loss: 1.489e-03, C: 4.482, Time: 0.03\n",
"It: 22460, Loss: 1.977e-03, C: 4.482, Time: 0.03\n",
"It: 22470, Loss: 2.381e-03, C: 4.482, Time: 0.04\n",
"It: 22480, Loss: 6.351e-04, C: 4.482, Time: 0.03\n",
"It: 22490, Loss: 1.067e-03, C: 4.482, Time: 0.04\n",
"It: 22500, Loss: 1.572e-03, C: 4.482, Time: 0.04\n",
"It: 22510, Loss: 8.212e-04, C: 4.482, Time: 0.04\n",
"It: 22520, Loss: 1.619e-03, C: 4.482, Time: 0.04\n",
"It: 22530, Loss: 6.644e-04, C: 4.482, Time: 0.04\n",
"It: 22540, Loss: 1.275e-03, C: 4.482, Time: 0.04\n",
"It: 22550, Loss: 1.222e-03, C: 4.482, Time: 0.04\n",
"It: 22560, Loss: 4.921e-04, C: 4.482, Time: 0.03\n",
"It: 22570, Loss: 1.130e-03, C: 4.482, Time: 0.04\n",
"It: 22580, Loss: 1.130e-03, C: 4.482, Time: 0.04\n",
"It: 22590, Loss: 1.698e-03, C: 4.482, Time: 0.04\n",
"It: 22600, Loss: 8.169e-04, C: 4.482, Time: 0.04\n",
"It: 22610, Loss: 2.132e-03, C: 4.482, Time: 0.04\n",
"It: 22620, Loss: 1.704e-03, C: 4.482, Time: 0.04\n",
"It: 22630, Loss: 6.505e-04, C: 4.482, Time: 0.04\n",
"It: 22640, Loss: 1.672e-03, C: 4.482, Time: 0.03\n",
"It: 22650, Loss: 2.175e-03, C: 4.482, Time: 0.03\n",
"It: 22660, Loss: 1.430e-03, C: 4.482, Time: 0.04\n",
"It: 22670, Loss: 7.918e-04, C: 4.482, Time: 0.04\n",
"It: 22680, Loss: 1.930e-03, C: 4.482, Time: 0.04\n",
"It: 22690, Loss: 9.998e-04, C: 4.482, Time: 0.04\n",
"It: 22700, Loss: 1.547e-03, C: 4.482, Time: 0.04\n",
"It: 22710, Loss: 2.206e-03, C: 4.482, Time: 0.04\n",
"It: 22720, Loss: 1.442e-03, C: 4.482, Time: 0.04\n",
"It: 22730, Loss: 1.078e-03, C: 4.482, Time: 0.03\n",
"It: 22740, Loss: 2.533e-03, C: 4.482, Time: 0.04\n",
"It: 22750, Loss: 1.331e-03, C: 4.482, Time: 0.04\n",
"It: 22760, Loss: 1.216e-03, C: 4.482, Time: 0.03\n",
"It: 22770, Loss: 1.617e-03, C: 4.482, Time: 0.04\n",
"It: 22780, Loss: 9.756e-04, C: 4.482, Time: 0.04\n",
"It: 22790, Loss: 9.368e-04, C: 4.482, Time: 0.04\n",
"It: 22800, Loss: 7.869e-04, C: 4.482, Time: 0.03\n",
"It: 22810, Loss: 1.477e-03, C: 4.482, Time: 0.03\n",
"It: 22820, Loss: 6.355e-04, C: 4.482, Time: 0.03\n",
"It: 22830, Loss: 1.589e-03, C: 4.482, Time: 0.03\n",
"It: 22840, Loss: 4.414e-04, C: 4.482, Time: 0.03\n",
"It: 22850, Loss: 1.096e-03, C: 4.482, Time: 0.03\n",
"It: 22860, Loss: 5.981e-04, C: 4.482, Time: 0.04\n",
"It: 22870, Loss: 1.478e-03, C: 4.482, Time: 0.04\n",
"It: 22880, Loss: 1.542e-03, C: 4.482, Time: 0.04\n",
"It: 22890, Loss: 6.737e-04, C: 4.482, Time: 0.03\n",
"It: 22900, Loss: 2.116e-03, C: 4.482, Time: 0.04\n",
"It: 22910, Loss: 1.064e-03, C: 4.482, Time: 0.04\n",
"It: 22920, Loss: 1.272e-03, C: 4.482, Time: 0.04\n",
"It: 22930, Loss: 1.126e-03, C: 4.482, Time: 0.04\n",
"It: 22940, Loss: 1.178e-03, C: 4.482, Time: 0.04\n",
"It: 22950, Loss: 7.314e-04, C: 4.482, Time: 0.04\n",
"It: 22960, Loss: 9.473e-04, C: 4.482, Time: 0.04\n",
"It: 22970, Loss: 6.918e-04, C: 4.482, Time: 0.03\n",
"It: 22980, Loss: 1.626e-03, C: 4.482, Time: 0.04\n",
"It: 22990, Loss: 9.180e-04, C: 4.482, Time: 0.03\n",
"It: 23000, Loss: 1.076e-03, C: 4.482, Time: 0.03\n",
"It: 23010, Loss: 5.679e-04, C: 4.482, Time: 0.04\n",
"It: 23020, Loss: 2.569e-03, C: 4.482, Time: 0.04\n",
"It: 23030, Loss: 9.178e-04, C: 4.482, Time: 0.04\n",
"It: 23040, Loss: 1.170e-03, C: 4.482, Time: 0.03\n",
"It: 23050, Loss: 1.267e-03, C: 4.482, Time: 0.03\n",
"It: 23060, Loss: 6.458e-04, C: 4.482, Time: 0.03\n",
"It: 23070, Loss: 6.622e-04, C: 4.482, Time: 0.03\n",
"It: 23080, Loss: 9.374e-04, C: 4.482, Time: 0.04\n",
"It: 23090, Loss: 1.724e-03, C: 4.482, Time: 0.03\n",
"It: 23100, Loss: 1.234e-03, C: 4.482, Time: 0.04\n",
"It: 23110, Loss: 6.995e-04, C: 4.482, Time: 0.04\n",
"It: 23120, Loss: 3.453e-03, C: 4.482, Time: 0.04\n",
"It: 23130, Loss: 6.342e-04, C: 4.482, Time: 0.04\n",
"It: 23140, Loss: 2.887e-03, C: 4.482, Time: 0.04\n",
"It: 23150, Loss: 1.191e-03, C: 4.482, Time: 0.04\n",
"It: 23160, Loss: 4.646e-04, C: 4.482, Time: 0.04\n",
"It: 23170, Loss: 1.108e-03, C: 4.482, Time: 0.04\n",
"It: 23180, Loss: 9.445e-04, C: 4.482, Time: 0.04\n",
"It: 23190, Loss: 8.355e-04, C: 4.482, Time: 0.04\n",
"It: 23200, Loss: 1.072e-03, C: 4.482, Time: 0.04\n",
"It: 23210, Loss: 9.833e-04, C: 4.482, Time: 0.04\n",
"It: 23220, Loss: 9.287e-04, C: 4.482, Time: 0.03\n",
"It: 23230, Loss: 6.496e-04, C: 4.482, Time: 0.04\n",
"It: 23240, Loss: 1.150e-03, C: 4.482, Time: 0.04\n",
"It: 23250, Loss: 5.690e-04, C: 4.482, Time: 0.04\n",
"It: 23260, Loss: 1.368e-03, C: 4.482, Time: 0.04\n",
"It: 23270, Loss: 9.224e-04, C: 4.482, Time: 0.04\n",
"It: 23280, Loss: 6.935e-04, C: 4.482, Time: 0.03\n",
"It: 23290, Loss: 8.524e-04, C: 4.482, Time: 0.04\n",
"It: 23300, Loss: 1.157e-03, C: 4.482, Time: 0.03\n",
"It: 23310, Loss: 1.513e-03, C: 4.482, Time: 0.04\n",
"It: 23320, Loss: 1.682e-03, C: 4.482, Time: 0.04\n",
"It: 23330, Loss: 5.980e-04, C: 4.482, Time: 0.04\n",
"It: 23340, Loss: 4.962e-04, C: 4.482, Time: 0.03\n",
"It: 23350, Loss: 7.344e-04, C: 4.482, Time: 0.03\n",
"It: 23360, Loss: 1.428e-03, C: 4.482, Time: 0.03\n",
"It: 23370, Loss: 1.048e-03, C: 4.482, Time: 0.03\n",
"It: 23380, Loss: 4.679e-04, C: 4.482, Time: 0.03\n",
"It: 23390, Loss: 1.335e-03, C: 4.482, Time: 0.03\n",
"It: 23400, Loss: 4.759e-04, C: 4.482, Time: 0.03\n",
"It: 23410, Loss: 3.386e-03, C: 4.482, Time: 0.04\n",
"It: 23420, Loss: 9.763e-04, C: 4.482, Time: 0.04\n",
"It: 23430, Loss: 7.375e-04, C: 4.482, Time: 0.03\n",
"It: 23440, Loss: 8.504e-04, C: 4.482, Time: 0.04\n",
"It: 23450, Loss: 9.473e-04, C: 4.482, Time: 0.04\n",
"It: 23460, Loss: 1.268e-03, C: 4.482, Time: 0.03\n",
"It: 23470, Loss: 1.472e-03, C: 4.482, Time: 0.03\n",
"It: 23480, Loss: 1.159e-03, C: 4.482, Time: 0.03\n",
"It: 23490, Loss: 2.066e-03, C: 4.482, Time: 0.03\n",
"It: 23500, Loss: 1.507e-03, C: 4.482, Time: 0.03\n",
"It: 23510, Loss: 7.320e-04, C: 4.482, Time: 0.04\n",
"It: 23520, Loss: 1.565e-03, C: 4.482, Time: 0.03\n",
"It: 23530, Loss: 1.382e-03, C: 4.482, Time: 0.03\n",
"It: 23540, Loss: 5.917e-04, C: 4.482, Time: 0.03\n",
"It: 23550, Loss: 1.402e-03, C: 4.482, Time: 0.03\n",
"It: 23560, Loss: 7.009e-04, C: 4.482, Time: 0.03\n",
"It: 23570, Loss: 7.000e-04, C: 4.482, Time: 0.04\n",
"It: 23580, Loss: 1.569e-03, C: 4.482, Time: 0.03\n",
"It: 23590, Loss: 1.017e-03, C: 4.482, Time: 0.03\n",
"It: 23600, Loss: 9.151e-04, C: 4.482, Time: 0.04\n",
"It: 23610, Loss: 1.292e-03, C: 4.482, Time: 0.04\n",
"It: 23620, Loss: 7.127e-04, C: 4.482, Time: 0.04\n",
"It: 23630, Loss: 1.025e-03, C: 4.482, Time: 0.04\n",
"It: 23640, Loss: 1.020e-03, C: 4.482, Time: 0.03\n",
"It: 23650, Loss: 8.294e-04, C: 4.482, Time: 0.04\n",
"It: 23660, Loss: 1.620e-03, C: 4.482, Time: 0.03\n",
"It: 23670, Loss: 5.837e-04, C: 4.482, Time: 0.04\n",
"It: 23680, Loss: 1.274e-03, C: 4.482, Time: 0.04\n",
"It: 23690, Loss: 1.590e-03, C: 4.482, Time: 0.04\n",
"It: 23700, Loss: 9.442e-04, C: 4.482, Time: 0.04\n",
"It: 23710, Loss: 2.296e-03, C: 4.482, Time: 0.04\n",
"It: 23720, Loss: 3.234e-03, C: 4.482, Time: 0.04\n",
"It: 23730, Loss: 1.088e-03, C: 4.482, Time: 0.04\n",
"It: 23740, Loss: 5.713e-04, C: 4.482, Time: 0.03\n",
"It: 23750, Loss: 7.343e-04, C: 4.482, Time: 0.04\n",
"It: 23760, Loss: 2.636e-03, C: 4.482, Time: 0.04\n",
"It: 23770, Loss: 6.570e-04, C: 4.482, Time: 0.03\n",
"It: 23780, Loss: 6.189e-04, C: 4.482, Time: 0.04\n",
"It: 23790, Loss: 4.878e-04, C: 4.482, Time: 0.03\n",
"It: 23800, Loss: 1.160e-03, C: 4.482, Time: 0.03\n",
"It: 23810, Loss: 1.290e-03, C: 4.482, Time: 0.03\n",
"It: 23820, Loss: 1.072e-03, C: 4.482, Time: 0.03\n",
"It: 23830, Loss: 3.651e-04, C: 4.482, Time: 0.03\n",
"It: 23840, Loss: 2.150e-03, C: 4.482, Time: 0.03\n",
"It: 23850, Loss: 9.557e-04, C: 4.482, Time: 0.03\n",
"It: 23860, Loss: 1.086e-03, C: 4.482, Time: 0.04\n",
"It: 23870, Loss: 1.797e-03, C: 4.482, Time: 0.03\n",
"It: 23880, Loss: 2.310e-03, C: 4.482, Time: 0.03\n",
"It: 23890, Loss: 1.416e-03, C: 4.482, Time: 0.04\n",
"It: 23900, Loss: 1.502e-03, C: 4.482, Time: 0.03\n",
"It: 23910, Loss: 9.274e-04, C: 4.482, Time: 0.03\n",
"It: 23920, Loss: 7.509e-04, C: 4.482, Time: 0.04\n",
"It: 23930, Loss: 1.112e-03, C: 4.482, Time: 0.04\n",
"It: 23940, Loss: 9.473e-04, C: 4.482, Time: 0.03\n",
"It: 23950, Loss: 1.025e-03, C: 4.482, Time: 0.03\n",
"It: 23960, Loss: 2.375e-03, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 23970, Loss: 6.090e-04, C: 4.482, Time: 0.03\n",
"It: 23980, Loss: 8.641e-04, C: 4.482, Time: 0.04\n",
"It: 23990, Loss: 1.608e-03, C: 4.482, Time: 0.03\n",
"It: 24000, Loss: 7.698e-04, C: 4.482, Time: 0.04\n",
"It: 24010, Loss: 7.639e-04, C: 4.482, Time: 0.03\n",
"It: 24020, Loss: 1.464e-03, C: 4.482, Time: 0.03\n",
"It: 24030, Loss: 9.178e-04, C: 4.482, Time: 0.03\n",
"It: 24040, Loss: 5.195e-04, C: 4.482, Time: 0.04\n",
"It: 24050, Loss: 2.566e-03, C: 4.482, Time: 0.04\n",
"It: 24060, Loss: 1.124e-03, C: 4.482, Time: 0.04\n",
"It: 24070, Loss: 2.388e-03, C: 4.482, Time: 0.03\n",
"It: 24080, Loss: 1.469e-03, C: 4.482, Time: 0.03\n",
"It: 24090, Loss: 8.607e-04, C: 4.482, Time: 0.04\n",
"It: 24100, Loss: 8.203e-04, C: 4.482, Time: 0.04\n",
"It: 24110, Loss: 1.872e-03, C: 4.482, Time: 0.03\n",
"It: 24120, Loss: 7.057e-04, C: 4.482, Time: 0.03\n",
"It: 24130, Loss: 1.015e-03, C: 4.482, Time: 0.03\n",
"It: 24140, Loss: 1.957e-03, C: 4.482, Time: 0.03\n",
"It: 24150, Loss: 1.355e-03, C: 4.482, Time: 0.03\n",
"It: 24160, Loss: 1.149e-03, C: 4.482, Time: 0.04\n",
"It: 24170, Loss: 1.077e-03, C: 4.482, Time: 0.04\n",
"It: 24180, Loss: 7.949e-04, C: 4.482, Time: 0.04\n",
"It: 24190, Loss: 8.963e-04, C: 4.482, Time: 0.04\n",
"It: 24200, Loss: 1.128e-03, C: 4.482, Time: 0.04\n",
"It: 24210, Loss: 5.680e-04, C: 4.482, Time: 0.04\n",
"It: 24220, Loss: 7.676e-04, C: 4.482, Time: 0.04\n",
"It: 24230, Loss: 9.838e-04, C: 4.482, Time: 0.04\n",
"It: 24240, Loss: 6.463e-04, C: 4.482, Time: 0.04\n",
"It: 24250, Loss: 9.550e-04, C: 4.482, Time: 0.04\n",
"It: 24260, Loss: 1.129e-03, C: 4.482, Time: 0.04\n",
"It: 24270, Loss: 8.619e-04, C: 4.482, Time: 0.04\n",
"It: 24280, Loss: 5.620e-04, C: 4.482, Time: 0.04\n",
"It: 24290, Loss: 1.169e-03, C: 4.482, Time: 0.04\n",
"It: 24300, Loss: 6.528e-04, C: 4.482, Time: 0.04\n",
"It: 24310, Loss: 1.291e-03, C: 4.482, Time: 0.03\n",
"It: 24320, Loss: 2.299e-03, C: 4.482, Time: 0.03\n",
"It: 24330, Loss: 9.176e-04, C: 4.482, Time: 0.03\n",
"It: 24340, Loss: 7.867e-04, C: 4.482, Time: 0.04\n",
"It: 24350, Loss: 1.159e-03, C: 4.482, Time: 0.04\n",
"It: 24360, Loss: 1.872e-03, C: 4.482, Time: 0.04\n",
"It: 24370, Loss: 1.270e-03, C: 4.482, Time: 0.04\n",
"It: 24380, Loss: 3.065e-03, C: 4.482, Time: 0.04\n",
"It: 24390, Loss: 1.520e-03, C: 4.482, Time: 0.04\n",
"It: 24400, Loss: 1.417e-03, C: 4.482, Time: 0.04\n",
"It: 24410, Loss: 9.677e-04, C: 4.482, Time: 0.03\n",
"It: 24420, Loss: 1.571e-03, C: 4.482, Time: 0.04\n",
"It: 24430, Loss: 1.955e-03, C: 4.482, Time: 0.04\n",
"It: 24440, Loss: 1.579e-03, C: 4.482, Time: 0.03\n",
"It: 24450, Loss: 1.280e-03, C: 4.482, Time: 0.03\n",
"It: 24460, Loss: 5.789e-04, C: 4.482, Time: 0.04\n",
"It: 24470, Loss: 8.404e-04, C: 4.482, Time: 0.04\n",
"It: 24480, Loss: 7.632e-04, C: 4.482, Time: 0.03\n",
"It: 24490, Loss: 2.717e-03, C: 4.482, Time: 0.04\n",
"It: 24500, Loss: 1.120e-03, C: 4.482, Time: 0.04\n",
"It: 24510, Loss: 4.955e-04, C: 4.482, Time: 0.04\n",
"It: 24520, Loss: 6.213e-04, C: 4.482, Time: 0.04\n",
"It: 24530, Loss: 2.302e-03, C: 4.482, Time: 0.03\n",
"It: 24540, Loss: 1.635e-03, C: 4.482, Time: 0.04\n",
"It: 24550, Loss: 1.076e-03, C: 4.482, Time: 0.03\n",
"It: 24560, Loss: 1.601e-03, C: 4.482, Time: 0.04\n",
"It: 24570, Loss: 1.194e-03, C: 4.482, Time: 0.04\n",
"It: 24580, Loss: 1.613e-03, C: 4.482, Time: 0.04\n",
"It: 24590, Loss: 1.157e-03, C: 4.482, Time: 0.03\n",
"It: 24600, Loss: 1.153e-03, C: 4.482, Time: 0.04\n",
"It: 24610, Loss: 1.118e-03, C: 4.482, Time: 0.04\n",
"It: 24620, Loss: 1.349e-03, C: 4.482, Time: 0.03\n",
"It: 24630, Loss: 6.572e-04, C: 4.482, Time: 0.03\n",
"It: 24640, Loss: 5.183e-04, C: 4.482, Time: 0.04\n",
"It: 24650, Loss: 7.039e-04, C: 4.482, Time: 0.03\n",
"It: 24660, Loss: 1.741e-03, C: 4.482, Time: 0.04\n",
"It: 24670, Loss: 6.896e-04, C: 4.482, Time: 0.04\n",
"It: 24680, Loss: 8.706e-04, C: 4.482, Time: 0.03\n",
"It: 24690, Loss: 2.339e-03, C: 4.482, Time: 0.03\n",
"It: 24700, Loss: 1.908e-03, C: 4.482, Time: 0.03\n",
"It: 24710, Loss: 1.286e-03, C: 4.482, Time: 0.03\n",
"It: 24720, Loss: 1.412e-03, C: 4.482, Time: 0.03\n",
"It: 24730, Loss: 1.097e-03, C: 4.482, Time: 0.03\n",
"It: 24740, Loss: 1.304e-03, C: 4.482, Time: 0.03\n",
"It: 24750, Loss: 1.998e-03, C: 4.482, Time: 0.04\n",
"It: 24760, Loss: 1.571e-03, C: 4.482, Time: 0.04\n",
"It: 24770, Loss: 1.164e-03, C: 4.482, Time: 0.04\n",
"It: 24780, Loss: 1.345e-03, C: 4.482, Time: 0.03\n",
"It: 24790, Loss: 7.346e-04, C: 4.482, Time: 0.03\n",
"It: 24800, Loss: 2.435e-03, C: 4.482, Time: 0.03\n",
"It: 24810, Loss: 2.184e-03, C: 4.482, Time: 0.04\n",
"It: 24820, Loss: 6.422e-04, C: 4.482, Time: 0.04\n",
"It: 24830, Loss: 3.598e-04, C: 4.482, Time: 0.03\n",
"It: 24840, Loss: 1.136e-03, C: 4.482, Time: 0.03\n",
"It: 24850, Loss: 2.069e-03, C: 4.482, Time: 0.04\n",
"It: 24860, Loss: 2.105e-03, C: 4.482, Time: 0.03\n",
"It: 24870, Loss: 7.242e-04, C: 4.482, Time: 0.03\n",
"It: 24880, Loss: 5.755e-04, C: 4.482, Time: 0.04\n",
"It: 24890, Loss: 9.709e-04, C: 4.482, Time: 0.03\n",
"It: 24900, Loss: 8.180e-04, C: 4.482, Time: 0.03\n",
"It: 24910, Loss: 2.235e-03, C: 4.482, Time: 0.03\n",
"It: 24920, Loss: 7.944e-04, C: 4.482, Time: 0.04\n",
"It: 24930, Loss: 1.470e-03, C: 4.482, Time: 0.04\n",
"It: 24940, Loss: 8.595e-04, C: 4.482, Time: 0.03\n",
"It: 24950, Loss: 1.597e-03, C: 4.482, Time: 0.04\n",
"It: 24960, Loss: 1.017e-03, C: 4.482, Time: 0.04\n",
"It: 24970, Loss: 3.151e-03, C: 4.482, Time: 0.03\n",
"It: 24980, Loss: 2.004e-03, C: 4.482, Time: 0.03\n",
"It: 24990, Loss: 1.484e-03, C: 4.482, Time: 0.03\n",
"It: 25000, Loss: 1.419e-03, C: 4.482, Time: 0.04\n",
"It: 25010, Loss: 5.369e-04, C: 4.482, Time: 0.03\n",
"It: 25020, Loss: 1.248e-03, C: 4.482, Time: 0.03\n",
"It: 25030, Loss: 4.431e-04, C: 4.482, Time: 0.03\n",
"It: 25040, Loss: 1.514e-03, C: 4.482, Time: 0.04\n",
"It: 25050, Loss: 1.256e-03, C: 4.482, Time: 0.04\n",
"It: 25060, Loss: 2.475e-03, C: 4.482, Time: 0.04\n",
"It: 25070, Loss: 1.015e-03, C: 4.482, Time: 0.03\n",
"It: 25080, Loss: 1.400e-03, C: 4.482, Time: 0.04\n",
"It: 25090, Loss: 6.535e-04, C: 4.482, Time: 0.03\n",
"It: 25100, Loss: 1.625e-03, C: 4.482, Time: 0.04\n",
"It: 25110, Loss: 1.030e-03, C: 4.482, Time: 0.04\n",
"It: 25120, Loss: 7.047e-04, C: 4.482, Time: 0.04\n",
"It: 25130, Loss: 1.058e-03, C: 4.482, Time: 0.04\n",
"It: 25140, Loss: 1.581e-03, C: 4.482, Time: 0.03\n",
"It: 25150, Loss: 1.247e-03, C: 4.482, Time: 0.04\n",
"It: 25160, Loss: 1.501e-03, C: 4.482, Time: 0.03\n",
"It: 25170, Loss: 9.094e-04, C: 4.482, Time: 0.04\n",
"It: 25180, Loss: 7.447e-04, C: 4.482, Time: 0.04\n",
"It: 25190, Loss: 1.191e-03, C: 4.482, Time: 0.04\n",
"It: 25200, Loss: 2.015e-03, C: 4.482, Time: 0.03\n",
"It: 25210, Loss: 9.437e-04, C: 4.482, Time: 0.03\n",
"It: 25220, Loss: 7.484e-04, C: 4.482, Time: 0.03\n",
"It: 25230, Loss: 5.846e-04, C: 4.482, Time: 0.03\n",
"It: 25240, Loss: 1.380e-03, C: 4.482, Time: 0.03\n",
"It: 25250, Loss: 1.222e-03, C: 4.482, Time: 0.03\n",
"It: 25260, Loss: 2.515e-03, C: 4.482, Time: 0.03\n",
"It: 25270, Loss: 1.351e-03, C: 4.482, Time: 0.03\n",
"It: 25280, Loss: 9.907e-04, C: 4.482, Time: 0.04\n",
"It: 25290, Loss: 9.106e-04, C: 4.482, Time: 0.04\n",
"It: 25300, Loss: 1.594e-03, C: 4.482, Time: 0.03\n",
"It: 25310, Loss: 1.198e-03, C: 4.482, Time: 0.04\n",
"It: 25320, Loss: 9.853e-04, C: 4.482, Time: 0.03\n",
"It: 25330, Loss: 1.077e-03, C: 4.482, Time: 0.03\n",
"It: 25340, Loss: 2.009e-03, C: 4.482, Time: 0.04\n",
"It: 25350, Loss: 2.213e-03, C: 4.482, Time: 0.04\n",
"It: 25360, Loss: 1.213e-03, C: 4.482, Time: 0.04\n",
"It: 25370, Loss: 6.814e-04, C: 4.482, Time: 0.04\n",
"It: 25380, Loss: 1.643e-03, C: 4.482, Time: 0.04\n",
"It: 25390, Loss: 7.719e-04, C: 4.482, Time: 0.03\n",
"It: 25400, Loss: 8.916e-04, C: 4.482, Time: 0.03\n",
"It: 25410, Loss: 6.383e-04, C: 4.482, Time: 0.03\n",
"It: 25420, Loss: 9.848e-04, C: 4.482, Time: 0.03\n",
"It: 25430, Loss: 4.849e-04, C: 4.482, Time: 0.03\n",
"It: 25440, Loss: 8.459e-04, C: 4.482, Time: 0.03\n",
"It: 25450, Loss: 2.768e-03, C: 4.482, Time: 0.04\n",
"It: 25460, Loss: 8.286e-04, C: 4.482, Time: 0.04\n",
"It: 25470, Loss: 2.641e-03, C: 4.482, Time: 0.04\n",
"It: 25480, Loss: 1.435e-03, C: 4.482, Time: 0.03\n",
"It: 25490, Loss: 2.068e-03, C: 4.482, Time: 0.03\n",
"It: 25500, Loss: 1.013e-03, C: 4.482, Time: 0.03\n",
"It: 25510, Loss: 1.265e-03, C: 4.482, Time: 0.04\n",
"It: 25520, Loss: 1.114e-03, C: 4.482, Time: 0.04\n",
"It: 25530, Loss: 9.024e-04, C: 4.482, Time: 0.04\n",
"It: 25540, Loss: 9.399e-04, C: 4.482, Time: 0.03\n",
"It: 25550, Loss: 8.685e-04, C: 4.482, Time: 0.03\n",
"It: 25560, Loss: 1.130e-03, C: 4.482, Time: 0.03\n",
"It: 25570, Loss: 8.551e-04, C: 4.482, Time: 0.03\n",
"It: 25580, Loss: 5.306e-04, C: 4.482, Time: 0.03\n",
"It: 25590, Loss: 9.904e-04, C: 4.482, Time: 0.03\n",
"It: 25600, Loss: 6.316e-04, C: 4.482, Time: 0.04\n",
"It: 25610, Loss: 1.136e-03, C: 4.482, Time: 0.03\n",
"It: 25620, Loss: 6.746e-04, C: 4.482, Time: 0.03\n",
"It: 25630, Loss: 1.049e-03, C: 4.482, Time: 0.03\n",
"It: 25640, Loss: 1.608e-03, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 25650, Loss: 1.006e-03, C: 4.482, Time: 0.04\n",
"It: 25660, Loss: 1.800e-03, C: 4.482, Time: 0.04\n",
"It: 25670, Loss: 1.149e-03, C: 4.482, Time: 0.04\n",
"It: 25680, Loss: 2.456e-03, C: 4.482, Time: 0.04\n",
"It: 25690, Loss: 2.906e-03, C: 4.482, Time: 0.04\n",
"It: 25700, Loss: 1.245e-03, C: 4.482, Time: 0.03\n",
"It: 25710, Loss: 1.563e-03, C: 4.482, Time: 0.03\n",
"It: 25720, Loss: 2.097e-03, C: 4.482, Time: 0.04\n",
"It: 25730, Loss: 8.286e-04, C: 4.482, Time: 0.03\n",
"It: 25740, Loss: 7.910e-04, C: 4.482, Time: 0.03\n",
"It: 25750, Loss: 5.440e-04, C: 4.482, Time: 0.04\n",
"It: 25760, Loss: 7.975e-04, C: 4.482, Time: 0.03\n",
"It: 25770, Loss: 8.475e-04, C: 4.482, Time: 0.03\n",
"It: 25780, Loss: 1.969e-03, C: 4.482, Time: 0.04\n",
"It: 25790, Loss: 8.220e-04, C: 4.482, Time: 0.03\n",
"It: 25800, Loss: 7.499e-04, C: 4.482, Time: 0.03\n",
"It: 25810, Loss: 7.145e-04, C: 4.482, Time: 0.04\n",
"It: 25820, Loss: 1.014e-03, C: 4.482, Time: 0.03\n",
"It: 25830, Loss: 1.343e-03, C: 4.482, Time: 0.03\n",
"It: 25840, Loss: 5.533e-04, C: 4.482, Time: 0.03\n",
"It: 25850, Loss: 9.765e-04, C: 4.482, Time: 0.03\n",
"It: 25860, Loss: 1.321e-03, C: 4.482, Time: 0.03\n",
"It: 25870, Loss: 4.131e-04, C: 4.482, Time: 0.04\n",
"It: 25880, Loss: 8.898e-04, C: 4.482, Time: 0.03\n",
"It: 25890, Loss: 2.331e-03, C: 4.482, Time: 0.04\n",
"It: 25900, Loss: 8.358e-04, C: 4.482, Time: 0.04\n",
"It: 25910, Loss: 8.078e-04, C: 4.482, Time: 0.03\n",
"It: 25920, Loss: 6.359e-04, C: 4.482, Time: 0.03\n",
"It: 25930, Loss: 8.823e-04, C: 4.482, Time: 0.04\n",
"It: 25940, Loss: 5.661e-04, C: 4.482, Time: 0.04\n",
"It: 25950, Loss: 5.169e-04, C: 4.482, Time: 0.03\n",
"It: 25960, Loss: 1.038e-03, C: 4.482, Time: 0.03\n",
"It: 25970, Loss: 5.286e-04, C: 4.482, Time: 0.03\n",
"It: 25980, Loss: 3.212e-03, C: 4.482, Time: 0.03\n",
"It: 25990, Loss: 2.045e-03, C: 4.482, Time: 0.03\n",
"It: 26000, Loss: 9.008e-04, C: 4.482, Time: 0.04\n",
"It: 26010, Loss: 1.263e-03, C: 4.482, Time: 0.04\n",
"It: 26020, Loss: 8.468e-04, C: 4.482, Time: 0.03\n",
"It: 26030, Loss: 1.059e-03, C: 4.482, Time: 0.04\n",
"It: 26040, Loss: 6.722e-04, C: 4.482, Time: 0.03\n",
"It: 26050, Loss: 1.230e-03, C: 4.482, Time: 0.03\n",
"It: 26060, Loss: 8.746e-04, C: 4.482, Time: 0.04\n",
"It: 26070, Loss: 2.303e-03, C: 4.482, Time: 0.03\n",
"It: 26080, Loss: 5.787e-04, C: 4.482, Time: 0.04\n",
"It: 26090, Loss: 1.113e-03, C: 4.482, Time: 0.03\n",
"It: 26100, Loss: 7.586e-04, C: 4.482, Time: 0.03\n",
"It: 26110, Loss: 3.680e-03, C: 4.482, Time: 0.03\n",
"It: 26120, Loss: 1.180e-03, C: 4.482, Time: 0.03\n",
"It: 26130, Loss: 1.667e-03, C: 4.482, Time: 0.03\n",
"It: 26140, Loss: 5.256e-04, C: 4.482, Time: 0.03\n",
"It: 26150, Loss: 1.082e-03, C: 4.482, Time: 0.03\n",
"It: 26160, Loss: 5.989e-04, C: 4.482, Time: 0.03\n",
"It: 26170, Loss: 2.689e-03, C: 4.482, Time: 0.04\n",
"It: 26180, Loss: 9.573e-04, C: 4.482, Time: 0.03\n",
"It: 26190, Loss: 7.232e-04, C: 4.482, Time: 0.03\n",
"It: 26200, Loss: 1.239e-03, C: 4.482, Time: 0.04\n",
"It: 26210, Loss: 7.862e-04, C: 4.482, Time: 0.04\n",
"It: 26220, Loss: 1.023e-03, C: 4.482, Time: 0.03\n",
"It: 26230, Loss: 5.452e-04, C: 4.482, Time: 0.04\n",
"It: 26240, Loss: 5.166e-04, C: 4.482, Time: 0.03\n",
"It: 26250, Loss: 1.273e-03, C: 4.482, Time: 0.04\n",
"It: 26260, Loss: 9.459e-04, C: 4.482, Time: 0.04\n",
"It: 26270, Loss: 8.552e-04, C: 4.482, Time: 0.04\n",
"It: 26280, Loss: 1.623e-03, C: 4.482, Time: 0.03\n",
"It: 26290, Loss: 1.157e-03, C: 4.482, Time: 0.04\n",
"It: 26300, Loss: 1.394e-03, C: 4.482, Time: 0.03\n",
"It: 26310, Loss: 1.015e-03, C: 4.482, Time: 0.04\n",
"It: 26320, Loss: 1.231e-03, C: 4.482, Time: 0.04\n",
"It: 26330, Loss: 7.930e-04, C: 4.482, Time: 0.03\n",
"It: 26340, Loss: 8.201e-04, C: 4.482, Time: 0.03\n",
"It: 26350, Loss: 1.014e-03, C: 4.482, Time: 0.03\n",
"It: 26360, Loss: 4.321e-04, C: 4.482, Time: 0.03\n",
"It: 26370, Loss: 1.450e-03, C: 4.482, Time: 0.03\n",
"It: 26380, Loss: 5.677e-04, C: 4.482, Time: 0.04\n",
"It: 26390, Loss: 9.188e-04, C: 4.482, Time: 0.04\n",
"It: 26400, Loss: 8.669e-04, C: 4.482, Time: 0.03\n",
"It: 26410, Loss: 1.230e-03, C: 4.482, Time: 0.03\n",
"It: 26420, Loss: 7.150e-04, C: 4.482, Time: 0.03\n",
"It: 26430, Loss: 4.541e-04, C: 4.482, Time: 0.03\n",
"It: 26440, Loss: 1.566e-03, C: 4.482, Time: 0.03\n",
"It: 26450, Loss: 2.289e-03, C: 4.482, Time: 0.04\n",
"It: 26460, Loss: 1.634e-03, C: 4.482, Time: 0.04\n",
"It: 26470, Loss: 8.949e-04, C: 4.482, Time: 0.03\n",
"It: 26480, Loss: 1.218e-03, C: 4.482, Time: 0.04\n",
"It: 26490, Loss: 1.999e-03, C: 4.482, Time: 0.04\n",
"It: 26500, Loss: 6.379e-04, C: 4.482, Time: 0.04\n",
"It: 26510, Loss: 6.852e-04, C: 4.482, Time: 0.04\n",
"It: 26520, Loss: 6.768e-04, C: 4.482, Time: 0.04\n",
"It: 26530, Loss: 2.015e-03, C: 4.482, Time: 0.03\n",
"It: 26540, Loss: 1.675e-03, C: 4.482, Time: 0.04\n",
"It: 26550, Loss: 2.623e-03, C: 4.482, Time: 0.04\n",
"It: 26560, Loss: 1.160e-03, C: 4.482, Time: 0.04\n",
"It: 26570, Loss: 8.256e-04, C: 4.482, Time: 0.03\n",
"It: 26580, Loss: 1.847e-03, C: 4.482, Time: 0.03\n",
"It: 26590, Loss: 6.819e-04, C: 4.482, Time: 0.03\n",
"It: 26600, Loss: 8.416e-04, C: 4.482, Time: 0.03\n",
"It: 26610, Loss: 6.615e-04, C: 4.482, Time: 0.04\n",
"It: 26620, Loss: 1.908e-03, C: 4.482, Time: 0.03\n",
"It: 26630, Loss: 1.238e-03, C: 4.482, Time: 0.04\n",
"It: 26640, Loss: 2.188e-03, C: 4.482, Time: 0.03\n",
"It: 26650, Loss: 5.716e-04, C: 4.482, Time: 0.03\n",
"It: 26660, Loss: 1.635e-03, C: 4.482, Time: 0.03\n",
"It: 26670, Loss: 1.522e-03, C: 4.482, Time: 0.03\n",
"It: 26680, Loss: 8.935e-04, C: 4.482, Time: 0.03\n",
"It: 26690, Loss: 9.908e-04, C: 4.482, Time: 0.03\n",
"It: 26700, Loss: 1.635e-03, C: 4.482, Time: 0.03\n",
"It: 26710, Loss: 1.115e-03, C: 4.482, Time: 0.04\n",
"It: 26720, Loss: 6.167e-04, C: 4.482, Time: 0.04\n",
"It: 26730, Loss: 1.427e-03, C: 4.482, Time: 0.04\n",
"It: 26740, Loss: 5.757e-04, C: 4.482, Time: 0.03\n",
"It: 26750, Loss: 2.386e-03, C: 4.482, Time: 0.04\n",
"It: 26760, Loss: 1.473e-03, C: 4.482, Time: 0.04\n",
"It: 26770, Loss: 6.307e-04, C: 4.482, Time: 0.04\n",
"It: 26780, Loss: 1.016e-03, C: 4.482, Time: 0.04\n",
"It: 26790, Loss: 6.514e-04, C: 4.482, Time: 0.03\n",
"It: 26800, Loss: 1.647e-03, C: 4.482, Time: 0.04\n",
"It: 26810, Loss: 1.407e-03, C: 4.482, Time: 0.03\n",
"It: 26820, Loss: 6.952e-04, C: 4.482, Time: 0.03\n",
"It: 26830, Loss: 5.788e-04, C: 4.482, Time: 0.03\n",
"It: 26840, Loss: 1.403e-03, C: 4.482, Time: 0.03\n",
"It: 26850, Loss: 6.927e-04, C: 4.482, Time: 0.04\n",
"It: 26860, Loss: 6.823e-04, C: 4.482, Time: 0.03\n",
"It: 26870, Loss: 7.649e-04, C: 4.482, Time: 0.04\n",
"It: 26880, Loss: 5.812e-04, C: 4.482, Time: 0.04\n",
"It: 26890, Loss: 7.602e-04, C: 4.482, Time: 0.03\n",
"It: 26900, Loss: 1.045e-03, C: 4.482, Time: 0.03\n",
"It: 26910, Loss: 1.321e-03, C: 4.482, Time: 0.04\n",
"It: 26920, Loss: 7.193e-04, C: 4.482, Time: 0.03\n",
"It: 26930, Loss: 1.619e-03, C: 4.482, Time: 0.04\n",
"It: 26940, Loss: 6.120e-04, C: 4.482, Time: 0.03\n",
"It: 26950, Loss: 1.911e-03, C: 4.482, Time: 0.04\n",
"It: 26960, Loss: 2.689e-03, C: 4.482, Time: 0.04\n",
"It: 26970, Loss: 1.635e-03, C: 4.482, Time: 0.03\n",
"It: 26980, Loss: 1.329e-03, C: 4.482, Time: 0.03\n",
"It: 26990, Loss: 9.340e-04, C: 4.482, Time: 0.03\n",
"It: 27000, Loss: 1.010e-03, C: 4.482, Time: 0.04\n",
"It: 27010, Loss: 7.485e-04, C: 4.482, Time: 0.03\n",
"It: 27020, Loss: 6.893e-04, C: 4.482, Time: 0.03\n",
"It: 27030, Loss: 1.106e-03, C: 4.482, Time: 0.04\n",
"It: 27040, Loss: 1.279e-03, C: 4.482, Time: 0.04\n",
"It: 27050, Loss: 1.590e-03, C: 4.482, Time: 0.03\n",
"It: 27060, Loss: 1.006e-03, C: 4.482, Time: 0.03\n",
"It: 27070, Loss: 6.246e-04, C: 4.482, Time: 0.04\n",
"It: 27080, Loss: 1.311e-03, C: 4.482, Time: 0.04\n",
"It: 27090, Loss: 6.998e-04, C: 4.482, Time: 0.04\n",
"It: 27100, Loss: 9.888e-04, C: 4.482, Time: 0.04\n",
"It: 27110, Loss: 3.000e-03, C: 4.482, Time: 0.03\n",
"It: 27120, Loss: 1.103e-03, C: 4.482, Time: 0.03\n",
"It: 27130, Loss: 8.676e-04, C: 4.482, Time: 0.04\n",
"It: 27140, Loss: 1.093e-03, C: 4.482, Time: 0.04\n",
"It: 27150, Loss: 7.025e-04, C: 4.482, Time: 0.03\n",
"It: 27160, Loss: 9.303e-04, C: 4.482, Time: 0.03\n",
"It: 27170, Loss: 1.587e-03, C: 4.482, Time: 0.03\n",
"It: 27180, Loss: 1.034e-03, C: 4.482, Time: 0.03\n",
"It: 27190, Loss: 6.750e-04, C: 4.482, Time: 0.03\n",
"It: 27200, Loss: 9.581e-04, C: 4.482, Time: 0.03\n",
"It: 27210, Loss: 3.799e-04, C: 4.482, Time: 0.03\n",
"It: 27220, Loss: 8.853e-04, C: 4.482, Time: 0.04\n",
"It: 27230, Loss: 7.249e-04, C: 4.482, Time: 0.03\n",
"It: 27240, Loss: 9.941e-04, C: 4.482, Time: 0.03\n",
"It: 27250, Loss: 4.362e-04, C: 4.482, Time: 0.03\n",
"It: 27260, Loss: 2.433e-03, C: 4.482, Time: 0.03\n",
"It: 27270, Loss: 3.365e-03, C: 4.482, Time: 0.03\n",
"It: 27280, Loss: 1.249e-03, C: 4.482, Time: 0.03\n",
"It: 27290, Loss: 6.947e-04, C: 4.482, Time: 0.04\n",
"It: 27300, Loss: 1.056e-03, C: 4.482, Time: 0.03\n",
"It: 27310, Loss: 8.134e-04, C: 4.482, Time: 0.03\n",
"It: 27320, Loss: 1.577e-03, C: 4.482, Time: 0.03\n",
"It: 27330, Loss: 1.975e-03, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 27340, Loss: 6.852e-04, C: 4.482, Time: 0.03\n",
"It: 27350, Loss: 8.863e-04, C: 4.482, Time: 0.04\n",
"It: 27360, Loss: 9.565e-04, C: 4.482, Time: 0.03\n",
"It: 27370, Loss: 1.117e-03, C: 4.482, Time: 0.03\n",
"It: 27380, Loss: 1.166e-03, C: 4.482, Time: 0.03\n",
"It: 27390, Loss: 1.980e-03, C: 4.482, Time: 0.04\n",
"It: 27400, Loss: 8.612e-04, C: 4.482, Time: 0.04\n",
"It: 27410, Loss: 4.856e-04, C: 4.482, Time: 0.04\n",
"It: 27420, Loss: 1.689e-03, C: 4.482, Time: 0.03\n",
"It: 27430, Loss: 9.178e-04, C: 4.482, Time: 0.04\n",
"It: 27440, Loss: 6.287e-04, C: 4.482, Time: 0.03\n",
"It: 27450, Loss: 2.253e-03, C: 4.482, Time: 0.03\n",
"It: 27460, Loss: 9.306e-04, C: 4.482, Time: 0.04\n",
"It: 27470, Loss: 1.580e-03, C: 4.482, Time: 0.03\n",
"It: 27480, Loss: 8.763e-04, C: 4.482, Time: 0.03\n",
"It: 27490, Loss: 1.923e-03, C: 4.482, Time: 0.03\n",
"It: 27500, Loss: 1.515e-03, C: 4.482, Time: 0.04\n",
"It: 27510, Loss: 1.197e-03, C: 4.482, Time: 0.03\n",
"It: 27520, Loss: 1.252e-03, C: 4.482, Time: 0.04\n",
"It: 27530, Loss: 1.004e-03, C: 4.482, Time: 0.04\n",
"It: 27540, Loss: 1.058e-03, C: 4.482, Time: 0.03\n",
"It: 27550, Loss: 7.553e-04, C: 4.482, Time: 0.03\n",
"It: 27560, Loss: 1.583e-03, C: 4.482, Time: 0.03\n",
"It: 27570, Loss: 1.147e-03, C: 4.482, Time: 0.04\n",
"It: 27580, Loss: 1.086e-03, C: 4.482, Time: 0.04\n",
"It: 27590, Loss: 1.581e-03, C: 4.482, Time: 0.03\n",
"It: 27600, Loss: 5.563e-04, C: 4.482, Time: 0.03\n",
"It: 27610, Loss: 1.390e-03, C: 4.482, Time: 0.04\n",
"It: 27620, Loss: 5.359e-04, C: 4.482, Time: 0.03\n",
"It: 27630, Loss: 6.760e-04, C: 4.482, Time: 0.03\n",
"It: 27640, Loss: 1.910e-03, C: 4.482, Time: 0.03\n",
"It: 27650, Loss: 1.324e-03, C: 4.482, Time: 0.04\n",
"It: 27660, Loss: 7.566e-04, C: 4.482, Time: 0.04\n",
"It: 27670, Loss: 1.241e-03, C: 4.482, Time: 0.03\n",
"It: 27680, Loss: 9.776e-04, C: 4.482, Time: 0.03\n",
"It: 27690, Loss: 1.446e-03, C: 4.482, Time: 0.04\n",
"It: 27700, Loss: 1.785e-03, C: 4.482, Time: 0.04\n",
"It: 27710, Loss: 2.575e-03, C: 4.482, Time: 0.04\n",
"It: 27720, Loss: 1.411e-03, C: 4.482, Time: 0.03\n",
"It: 27730, Loss: 1.090e-03, C: 4.482, Time: 0.04\n",
"It: 27740, Loss: 5.854e-04, C: 4.482, Time: 0.04\n",
"It: 27750, Loss: 2.021e-03, C: 4.482, Time: 0.03\n",
"It: 27760, Loss: 4.809e-04, C: 4.482, Time: 0.04\n",
"It: 27770, Loss: 5.316e-04, C: 4.482, Time: 0.03\n",
"It: 27780, Loss: 7.583e-04, C: 4.482, Time: 0.03\n",
"It: 27790, Loss: 7.457e-04, C: 4.482, Time: 0.03\n",
"It: 27800, Loss: 1.241e-03, C: 4.482, Time: 0.03\n",
"It: 27810, Loss: 4.966e-04, C: 4.482, Time: 0.03\n",
"It: 27820, Loss: 4.136e-04, C: 4.482, Time: 0.04\n",
"It: 27830, Loss: 6.504e-04, C: 4.482, Time: 0.03\n",
"It: 27840, Loss: 4.372e-04, C: 4.482, Time: 0.03\n",
"It: 27850, Loss: 2.047e-03, C: 4.482, Time: 0.03\n",
"It: 27860, Loss: 1.183e-03, C: 4.482, Time: 0.04\n",
"It: 27870, Loss: 1.231e-03, C: 4.482, Time: 0.03\n",
"It: 27880, Loss: 8.870e-04, C: 4.482, Time: 0.04\n",
"It: 27890, Loss: 3.717e-04, C: 4.482, Time: 0.03\n",
"It: 27900, Loss: 1.051e-03, C: 4.482, Time: 0.04\n",
"It: 27910, Loss: 1.862e-03, C: 4.482, Time: 0.03\n",
"It: 27920, Loss: 6.556e-04, C: 4.482, Time: 0.03\n",
"It: 27930, Loss: 1.046e-03, C: 4.482, Time: 0.03\n",
"It: 27940, Loss: 1.368e-03, C: 4.482, Time: 0.03\n",
"It: 27950, Loss: 8.908e-04, C: 4.482, Time: 0.04\n",
"It: 27960, Loss: 7.015e-04, C: 4.482, Time: 0.03\n",
"It: 27970, Loss: 1.143e-03, C: 4.482, Time: 0.03\n",
"It: 27980, Loss: 9.496e-04, C: 4.482, Time: 0.03\n",
"It: 27990, Loss: 1.456e-03, C: 4.482, Time: 0.03\n",
"It: 28000, Loss: 7.910e-04, C: 4.482, Time: 0.03\n",
"It: 28010, Loss: 5.789e-04, C: 4.482, Time: 0.04\n",
"It: 28020, Loss: 8.787e-04, C: 4.482, Time: 0.03\n",
"It: 28030, Loss: 1.064e-03, C: 4.482, Time: 0.03\n",
"It: 28040, Loss: 2.333e-03, C: 4.482, Time: 0.04\n",
"It: 28050, Loss: 5.918e-04, C: 4.482, Time: 0.03\n",
"It: 28060, Loss: 9.260e-04, C: 4.482, Time: 0.03\n",
"It: 28070, Loss: 1.893e-03, C: 4.482, Time: 0.03\n",
"It: 28080, Loss: 1.192e-03, C: 4.482, Time: 0.04\n",
"It: 28090, Loss: 6.323e-04, C: 4.482, Time: 0.03\n",
"It: 28100, Loss: 6.891e-04, C: 4.482, Time: 0.04\n",
"It: 28110, Loss: 6.264e-04, C: 4.482, Time: 0.03\n",
"It: 28120, Loss: 1.231e-03, C: 4.482, Time: 0.03\n",
"It: 28130, Loss: 1.679e-03, C: 4.482, Time: 0.03\n",
"It: 28140, Loss: 7.454e-04, C: 4.482, Time: 0.03\n",
"It: 28150, Loss: 9.112e-04, C: 4.482, Time: 0.03\n",
"It: 28160, Loss: 1.125e-03, C: 4.482, Time: 0.03\n",
"It: 28170, Loss: 1.249e-03, C: 4.482, Time: 0.03\n",
"It: 28180, Loss: 1.509e-03, C: 4.482, Time: 0.03\n",
"It: 28190, Loss: 1.885e-03, C: 4.482, Time: 0.03\n",
"It: 28200, Loss: 8.223e-04, C: 4.482, Time: 0.04\n",
"It: 28210, Loss: 4.815e-04, C: 4.482, Time: 0.04\n",
"It: 28220, Loss: 4.982e-04, C: 4.482, Time: 0.03\n",
"It: 28230, Loss: 1.093e-03, C: 4.482, Time: 0.03\n",
"It: 28240, Loss: 1.590e-03, C: 4.482, Time: 0.03\n",
"It: 28250, Loss: 7.687e-04, C: 4.482, Time: 0.03\n",
"It: 28260, Loss: 1.026e-03, C: 4.482, Time: 0.04\n",
"It: 28270, Loss: 7.026e-04, C: 4.482, Time: 0.03\n",
"It: 28280, Loss: 5.822e-04, C: 4.482, Time: 0.03\n",
"It: 28290, Loss: 9.651e-04, C: 4.482, Time: 0.03\n",
"It: 28300, Loss: 2.592e-03, C: 4.482, Time: 0.04\n",
"It: 28310, Loss: 1.807e-03, C: 4.482, Time: 0.03\n",
"It: 28320, Loss: 8.440e-04, C: 4.482, Time: 0.04\n",
"It: 28330, Loss: 1.454e-03, C: 4.482, Time: 0.03\n",
"It: 28340, Loss: 1.484e-03, C: 4.482, Time: 0.04\n",
"It: 28350, Loss: 4.323e-04, C: 4.482, Time: 0.04\n",
"It: 28360, Loss: 4.593e-04, C: 4.482, Time: 0.04\n",
"It: 28370, Loss: 8.986e-04, C: 4.482, Time: 0.03\n",
"It: 28380, Loss: 8.012e-04, C: 4.482, Time: 0.03\n",
"It: 28390, Loss: 1.195e-03, C: 4.482, Time: 0.03\n",
"It: 28400, Loss: 8.160e-04, C: 4.482, Time: 0.04\n",
"It: 28410, Loss: 1.703e-03, C: 4.482, Time: 0.03\n",
"It: 28420, Loss: 1.550e-03, C: 4.482, Time: 0.03\n",
"It: 28430, Loss: 1.047e-03, C: 4.482, Time: 0.04\n",
"It: 28440, Loss: 6.524e-04, C: 4.482, Time: 0.04\n",
"It: 28450, Loss: 1.247e-03, C: 4.482, Time: 0.04\n",
"It: 28460, Loss: 1.606e-03, C: 4.482, Time: 0.04\n",
"It: 28470, Loss: 8.898e-04, C: 4.482, Time: 0.04\n",
"It: 28480, Loss: 7.115e-04, C: 4.482, Time: 0.04\n",
"It: 28490, Loss: 1.890e-03, C: 4.482, Time: 0.03\n",
"It: 28500, Loss: 6.355e-04, C: 4.482, Time: 0.04\n",
"It: 28510, Loss: 1.161e-03, C: 4.482, Time: 0.04\n",
"It: 28520, Loss: 6.159e-04, C: 4.482, Time: 0.03\n",
"It: 28530, Loss: 8.526e-04, C: 4.482, Time: 0.03\n",
"It: 28540, Loss: 1.753e-03, C: 4.482, Time: 0.03\n",
"It: 28550, Loss: 7.401e-04, C: 4.482, Time: 0.04\n",
"It: 28560, Loss: 1.135e-03, C: 4.482, Time: 0.03\n",
"It: 28570, Loss: 8.369e-04, C: 4.482, Time: 0.04\n",
"It: 28580, Loss: 8.480e-04, C: 4.482, Time: 0.04\n",
"It: 28590, Loss: 1.848e-03, C: 4.482, Time: 0.04\n",
"It: 28600, Loss: 5.257e-04, C: 4.482, Time: 0.04\n",
"It: 28610, Loss: 2.315e-03, C: 4.482, Time: 0.04\n",
"It: 28620, Loss: 8.674e-04, C: 4.482, Time: 0.04\n",
"It: 28630, Loss: 8.613e-04, C: 4.482, Time: 0.04\n",
"It: 28640, Loss: 1.399e-03, C: 4.482, Time: 0.04\n",
"It: 28650, Loss: 2.497e-03, C: 4.482, Time: 0.04\n",
"It: 28660, Loss: 7.868e-04, C: 4.482, Time: 0.04\n",
"It: 28670, Loss: 1.226e-03, C: 4.482, Time: 0.04\n",
"It: 28680, Loss: 1.177e-03, C: 4.482, Time: 0.03\n",
"It: 28690, Loss: 1.883e-03, C: 4.482, Time: 0.03\n",
"It: 28700, Loss: 1.257e-03, C: 4.482, Time: 0.03\n",
"It: 28710, Loss: 6.348e-04, C: 4.482, Time: 0.03\n",
"It: 28720, Loss: 4.209e-04, C: 4.482, Time: 0.04\n",
"It: 28730, Loss: 6.025e-04, C: 4.482, Time: 0.04\n",
"It: 28740, Loss: 4.393e-04, C: 4.482, Time: 0.04\n",
"It: 28750, Loss: 5.536e-04, C: 4.482, Time: 0.04\n",
"It: 28760, Loss: 4.359e-04, C: 4.482, Time: 0.04\n",
"It: 28770, Loss: 1.077e-03, C: 4.482, Time: 0.04\n",
"It: 28780, Loss: 5.143e-04, C: 4.482, Time: 0.04\n",
"It: 28790, Loss: 7.973e-04, C: 4.482, Time: 0.04\n",
"It: 28800, Loss: 5.397e-04, C: 4.482, Time: 0.04\n",
"It: 28810, Loss: 9.217e-04, C: 4.482, Time: 0.04\n",
"It: 28820, Loss: 1.063e-03, C: 4.482, Time: 0.03\n",
"It: 28830, Loss: 5.237e-04, C: 4.482, Time: 0.04\n",
"It: 28840, Loss: 1.499e-03, C: 4.482, Time: 0.03\n",
"It: 28850, Loss: 6.983e-04, C: 4.482, Time: 0.03\n",
"It: 28860, Loss: 6.299e-04, C: 4.482, Time: 0.04\n",
"It: 28870, Loss: 7.449e-04, C: 4.482, Time: 0.03\n",
"It: 28880, Loss: 6.829e-04, C: 4.482, Time: 0.04\n",
"It: 28890, Loss: 1.881e-03, C: 4.482, Time: 0.03\n",
"It: 28900, Loss: 1.149e-03, C: 4.482, Time: 0.04\n",
"It: 28910, Loss: 8.099e-04, C: 4.482, Time: 0.03\n",
"It: 28920, Loss: 1.251e-03, C: 4.482, Time: 0.03\n",
"It: 28930, Loss: 9.060e-04, C: 4.482, Time: 0.04\n",
"It: 28940, Loss: 3.391e-04, C: 4.482, Time: 0.04\n",
"It: 28950, Loss: 8.002e-04, C: 4.482, Time: 0.04\n",
"It: 28960, Loss: 4.462e-04, C: 4.482, Time: 0.04\n",
"It: 28970, Loss: 1.366e-03, C: 4.482, Time: 0.03\n",
"It: 28980, Loss: 1.152e-03, C: 4.482, Time: 0.03\n",
"It: 28990, Loss: 1.385e-03, C: 4.482, Time: 0.04\n",
"It: 29000, Loss: 5.770e-04, C: 4.482, Time: 0.03\n",
"It: 29010, Loss: 8.241e-04, C: 4.482, Time: 0.04\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 29020, Loss: 6.268e-04, C: 4.482, Time: 0.03\n",
"It: 29030, Loss: 9.454e-04, C: 4.482, Time: 0.04\n",
"It: 29040, Loss: 8.093e-04, C: 4.482, Time: 0.03\n",
"It: 29050, Loss: 1.001e-03, C: 4.482, Time: 0.03\n",
"It: 29060, Loss: 6.524e-04, C: 4.482, Time: 0.03\n",
"It: 29070, Loss: 1.102e-03, C: 4.482, Time: 0.04\n",
"It: 29080, Loss: 6.364e-04, C: 4.482, Time: 0.04\n",
"It: 29090, Loss: 8.605e-04, C: 4.482, Time: 0.04\n",
"It: 29100, Loss: 1.370e-03, C: 4.482, Time: 0.04\n",
"It: 29110, Loss: 1.854e-03, C: 4.482, Time: 0.04\n",
"It: 29120, Loss: 1.553e-03, C: 4.482, Time: 0.04\n",
"It: 29130, Loss: 5.748e-04, C: 4.482, Time: 0.04\n",
"It: 29140, Loss: 1.111e-03, C: 4.482, Time: 0.03\n",
"It: 29150, Loss: 5.629e-04, C: 4.482, Time: 0.04\n",
"It: 29160, Loss: 5.806e-04, C: 4.482, Time: 0.03\n",
"It: 29170, Loss: 9.000e-04, C: 4.482, Time: 0.03\n",
"It: 29180, Loss: 2.030e-03, C: 4.482, Time: 0.03\n",
"It: 29190, Loss: 7.078e-04, C: 4.482, Time: 0.04\n",
"It: 29200, Loss: 1.252e-03, C: 4.482, Time: 0.04\n",
"It: 29210, Loss: 6.175e-04, C: 4.482, Time: 0.04\n",
"It: 29220, Loss: 1.760e-03, C: 4.482, Time: 0.03\n",
"It: 29230, Loss: 1.020e-03, C: 4.482, Time: 0.03\n",
"It: 29240, Loss: 5.952e-04, C: 4.482, Time: 0.03\n",
"It: 29250, Loss: 1.200e-03, C: 4.482, Time: 0.03\n",
"It: 29260, Loss: 7.841e-04, C: 4.482, Time: 0.03\n",
"It: 29270, Loss: 5.844e-04, C: 4.482, Time: 0.03\n",
"It: 29280, Loss: 6.640e-04, C: 4.482, Time: 0.04\n",
"It: 29290, Loss: 1.029e-03, C: 4.482, Time: 0.03\n",
"It: 29300, Loss: 5.530e-04, C: 4.482, Time: 0.03\n",
"It: 29310, Loss: 9.693e-04, C: 4.482, Time: 0.03\n",
"It: 29320, Loss: 2.329e-03, C: 4.482, Time: 0.03\n",
"It: 29330, Loss: 3.286e-04, C: 4.482, Time: 0.04\n",
"It: 29340, Loss: 1.157e-03, C: 4.482, Time: 0.03\n",
"It: 29350, Loss: 7.566e-04, C: 4.482, Time: 0.03\n",
"It: 29360, Loss: 9.616e-04, C: 4.482, Time: 0.03\n",
"It: 29370, Loss: 1.323e-03, C: 4.482, Time: 0.03\n",
"It: 29380, Loss: 7.043e-04, C: 4.482, Time: 0.03\n",
"It: 29390, Loss: 7.692e-04, C: 4.482, Time: 0.03\n",
"It: 29400, Loss: 8.456e-04, C: 4.482, Time: 0.04\n",
"It: 29410, Loss: 7.892e-04, C: 4.482, Time: 0.03\n",
"It: 29420, Loss: 1.778e-03, C: 4.482, Time: 0.03\n",
"It: 29430, Loss: 6.159e-04, C: 4.482, Time: 0.03\n",
"It: 29440, Loss: 6.458e-04, C: 4.482, Time: 0.03\n",
"It: 29450, Loss: 8.974e-04, C: 4.482, Time: 0.04\n",
"It: 29460, Loss: 1.583e-03, C: 4.482, Time: 0.04\n",
"It: 29470, Loss: 2.131e-03, C: 4.482, Time: 0.03\n",
"It: 29480, Loss: 2.563e-03, C: 4.482, Time: 0.04\n",
"It: 29490, Loss: 1.165e-03, C: 4.482, Time: 0.03\n",
"It: 29500, Loss: 5.636e-04, C: 4.482, Time: 0.03\n",
"It: 29510, Loss: 1.323e-03, C: 4.482, Time: 0.03\n",
"It: 29520, Loss: 5.154e-04, C: 4.482, Time: 0.03\n",
"It: 29530, Loss: 7.638e-04, C: 4.482, Time: 0.03\n",
"It: 29540, Loss: 1.552e-03, C: 4.482, Time: 0.03\n",
"It: 29550, Loss: 2.039e-03, C: 4.482, Time: 0.03\n",
"It: 29560, Loss: 1.088e-03, C: 4.482, Time: 0.04\n",
"It: 29570, Loss: 1.315e-03, C: 4.482, Time: 0.03\n",
"It: 29580, Loss: 9.038e-04, C: 4.482, Time: 0.03\n",
"It: 29590, Loss: 1.363e-03, C: 4.482, Time: 0.03\n",
"It: 29600, Loss: 1.202e-03, C: 4.482, Time: 0.03\n",
"It: 29610, Loss: 7.784e-04, C: 4.482, Time: 0.03\n",
"It: 29620, Loss: 1.583e-03, C: 4.482, Time: 0.03\n",
"It: 29630, Loss: 8.842e-04, C: 4.482, Time: 0.03\n",
"It: 29640, Loss: 1.470e-03, C: 4.482, Time: 0.03\n",
"It: 29650, Loss: 6.189e-04, C: 4.482, Time: 0.03\n",
"It: 29660, Loss: 1.716e-03, C: 4.482, Time: 0.03\n",
"It: 29670, Loss: 1.464e-03, C: 4.482, Time: 0.04\n",
"It: 29680, Loss: 6.983e-04, C: 4.482, Time: 0.04\n",
"It: 29690, Loss: 8.598e-04, C: 4.482, Time: 0.03\n",
"It: 29700, Loss: 1.363e-03, C: 4.482, Time: 0.03\n",
"It: 29710, Loss: 7.045e-04, C: 4.482, Time: 0.03\n",
"It: 29720, Loss: 5.631e-04, C: 4.482, Time: 0.03\n",
"It: 29730, Loss: 4.839e-04, C: 4.482, Time: 0.03\n",
"It: 29740, Loss: 1.080e-03, C: 4.482, Time: 0.03\n",
"It: 29750, Loss: 8.747e-04, C: 4.482, Time: 0.03\n",
"It: 29760, Loss: 1.708e-03, C: 4.482, Time: 0.03\n",
"It: 29770, Loss: 1.289e-03, C: 4.482, Time: 0.03\n",
"It: 29780, Loss: 8.826e-04, C: 4.482, Time: 0.03\n",
"It: 29790, Loss: 7.734e-04, C: 4.482, Time: 0.03\n",
"It: 29800, Loss: 8.059e-04, C: 4.482, Time: 0.04\n",
"It: 29810, Loss: 1.758e-03, C: 4.482, Time: 0.03\n",
"It: 29820, Loss: 9.865e-04, C: 4.482, Time: 0.03\n",
"It: 29830, Loss: 5.353e-04, C: 4.482, Time: 0.03\n",
"It: 29840, Loss: 6.158e-04, C: 4.482, Time: 0.03\n",
"It: 29850, Loss: 7.667e-04, C: 4.482, Time: 0.03\n",
"It: 29860, Loss: 1.014e-03, C: 4.482, Time: 0.03\n",
"It: 29870, Loss: 7.782e-04, C: 4.482, Time: 0.03\n",
"It: 29880, Loss: 8.284e-04, C: 4.482, Time: 0.04\n",
"It: 29890, Loss: 7.814e-04, C: 4.482, Time: 0.03\n",
"It: 29900, Loss: 5.131e-04, C: 4.482, Time: 0.03\n",
"It: 29910, Loss: 1.377e-03, C: 4.482, Time: 0.03\n",
"It: 29920, Loss: 9.822e-04, C: 4.482, Time: 0.03\n",
"It: 29930, Loss: 2.118e-03, C: 4.482, Time: 0.03\n",
"It: 29940, Loss: 1.735e-03, C: 4.482, Time: 0.03\n",
"It: 29950, Loss: 1.491e-03, C: 4.482, Time: 0.03\n",
"It: 29960, Loss: 1.506e-03, C: 4.482, Time: 0.03\n",
"It: 29970, Loss: 7.008e-04, C: 4.482, Time: 0.03\n",
"It: 29980, Loss: 1.142e-03, C: 4.482, Time: 0.03\n",
"It: 29990, Loss: 1.462e-03, C: 4.482, Time: 0.03\n",
"It: 30000, Loss: 9.656e-04, C: 4.482, Time: 0.03\n",
"It: 30010, Loss: 1.828e-03, C: 4.482, Time: 0.04\n",
"It: 30020, Loss: 1.365e-03, C: 4.482, Time: 0.03\n",
"It: 30030, Loss: 1.065e-03, C: 4.482, Time: 0.03\n",
"It: 30040, Loss: 1.051e-03, C: 4.482, Time: 0.03\n",
"It: 30050, Loss: 9.589e-04, C: 4.482, Time: 0.04\n",
"It: 30060, Loss: 6.410e-04, C: 4.482, Time: 0.03\n",
"It: 30070, Loss: 1.129e-03, C: 4.482, Time: 0.03\n",
"It: 30080, Loss: 1.014e-03, C: 4.482, Time: 0.03\n",
"It: 30090, Loss: 5.451e-04, C: 4.482, Time: 0.04\n",
"It: 30100, Loss: 7.068e-04, C: 4.482, Time: 0.03\n",
"It: 30110, Loss: 4.620e-04, C: 4.482, Time: 0.03\n",
"It: 30120, Loss: 6.963e-04, C: 4.482, Time: 0.03\n",
"It: 30130, Loss: 1.007e-03, C: 4.482, Time: 0.03\n",
"It: 30140, Loss: 1.081e-03, C: 4.482, Time: 0.03\n",
"It: 30150, Loss: 4.509e-04, C: 4.482, Time: 0.03\n",
"It: 30160, Loss: 5.361e-04, C: 4.482, Time: 0.03\n",
"It: 30170, Loss: 4.555e-04, C: 4.482, Time: 0.03\n",
"It: 30180, Loss: 1.083e-03, C: 4.482, Time: 0.03\n",
"It: 30190, Loss: 1.281e-03, C: 4.482, Time: 0.03\n",
"It: 30200, Loss: 1.171e-03, C: 4.482, Time: 0.04\n",
"It: 30210, Loss: 5.956e-04, C: 4.482, Time: 0.03\n",
"It: 30220, Loss: 5.689e-04, C: 4.482, Time: 0.03\n",
"It: 30230, Loss: 9.048e-04, C: 4.482, Time: 0.03\n",
"It: 30240, Loss: 6.524e-04, C: 4.482, Time: 0.03\n",
"It: 30250, Loss: 7.470e-04, C: 4.482, Time: 0.03\n",
"It: 30260, Loss: 1.224e-03, C: 4.482, Time: 0.04\n",
"It: 30270, Loss: 9.057e-04, C: 4.482, Time: 0.04\n",
"It: 30280, Loss: 6.268e-04, C: 4.482, Time: 0.03\n",
"It: 30290, Loss: 1.856e-03, C: 4.482, Time: 0.03\n",
"It: 30300, Loss: 6.311e-04, C: 4.482, Time: 0.03\n",
"It: 30310, Loss: 9.064e-04, C: 4.482, Time: 0.03\n",
"It: 30320, Loss: 1.521e-03, C: 4.482, Time: 0.03\n",
"It: 30330, Loss: 1.204e-03, C: 4.482, Time: 0.03\n",
"It: 30340, Loss: 1.293e-03, C: 4.482, Time: 0.04\n",
"It: 30350, Loss: 1.386e-03, C: 4.482, Time: 0.03\n",
"It: 30360, Loss: 4.284e-04, C: 4.482, Time: 0.03\n",
"It: 30370, Loss: 1.812e-03, C: 4.482, Time: 0.03\n",
"It: 30380, Loss: 2.112e-03, C: 4.482, Time: 0.03\n",
"It: 30390, Loss: 8.164e-04, C: 4.482, Time: 0.03\n",
"It: 30400, Loss: 5.384e-04, C: 4.482, Time: 0.04\n",
"It: 30410, Loss: 5.927e-04, C: 4.482, Time: 0.03\n",
"It: 30420, Loss: 1.386e-03, C: 4.482, Time: 0.03\n",
"It: 30430, Loss: 6.681e-04, C: 4.482, Time: 0.03\n",
"It: 30440, Loss: 7.198e-04, C: 4.482, Time: 0.03\n",
"It: 30450, Loss: 8.064e-04, C: 4.482, Time: 0.03\n",
"It: 30460, Loss: 8.678e-04, C: 4.482, Time: 0.04\n",
"It: 30470, Loss: 1.496e-03, C: 4.482, Time: 0.03\n",
"It: 30480, Loss: 1.050e-03, C: 4.482, Time: 0.03\n",
"It: 30490, Loss: 6.395e-04, C: 4.482, Time: 0.03\n",
"It: 30500, Loss: 1.027e-03, C: 4.482, Time: 0.04\n",
"It: 30510, Loss: 1.984e-03, C: 4.482, Time: 0.03\n",
"It: 30520, Loss: 1.216e-03, C: 4.482, Time: 0.03\n",
"It: 30530, Loss: 1.304e-03, C: 4.482, Time: 0.04\n",
"It: 30540, Loss: 9.101e-04, C: 4.482, Time: 0.03\n",
"It: 30550, Loss: 1.373e-03, C: 4.482, Time: 0.04\n",
"It: 30560, Loss: 1.397e-03, C: 4.482, Time: 0.03\n",
"It: 30570, Loss: 2.273e-03, C: 4.482, Time: 0.03\n",
"It: 30580, Loss: 1.621e-03, C: 4.482, Time: 0.03\n",
"It: 30590, Loss: 8.250e-04, C: 4.482, Time: 0.04\n",
"It: 30600, Loss: 1.098e-03, C: 4.482, Time: 0.03\n",
"It: 30610, Loss: 3.339e-04, C: 4.482, Time: 0.03\n",
"It: 30620, Loss: 1.036e-03, C: 4.482, Time: 0.03\n",
"It: 30630, Loss: 8.107e-04, C: 4.482, Time: 0.03\n",
"It: 30640, Loss: 6.012e-04, C: 4.482, Time: 0.04\n",
"It: 30650, Loss: 1.564e-03, C: 4.482, Time: 0.03\n",
"It: 30660, Loss: 3.783e-04, C: 4.482, Time: 0.04\n",
"It: 30670, Loss: 4.163e-04, C: 4.482, Time: 0.03\n",
"It: 30680, Loss: 3.489e-04, C: 4.482, Time: 0.04\n",
"It: 30690, Loss: 6.234e-04, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 30700, Loss: 4.865e-04, C: 4.482, Time: 0.03\n",
"It: 30710, Loss: 9.614e-04, C: 4.482, Time: 0.03\n",
"It: 30720, Loss: 6.060e-04, C: 4.482, Time: 0.03\n",
"It: 30730, Loss: 4.920e-04, C: 4.482, Time: 0.03\n",
"It: 30740, Loss: 8.470e-04, C: 4.482, Time: 0.03\n",
"It: 30750, Loss: 5.866e-04, C: 4.482, Time: 0.03\n",
"It: 30760, Loss: 1.006e-03, C: 4.482, Time: 0.03\n",
"It: 30770, Loss: 1.507e-03, C: 4.482, Time: 0.03\n",
"It: 30780, Loss: 8.564e-04, C: 4.482, Time: 0.03\n",
"It: 30790, Loss: 1.632e-03, C: 4.482, Time: 0.03\n",
"It: 30800, Loss: 5.928e-04, C: 4.482, Time: 0.03\n",
"It: 30810, Loss: 7.472e-04, C: 4.482, Time: 0.03\n",
"It: 30820, Loss: 4.770e-04, C: 4.482, Time: 0.03\n",
"It: 30830, Loss: 9.252e-04, C: 4.482, Time: 0.04\n",
"It: 30840, Loss: 1.150e-03, C: 4.482, Time: 0.03\n",
"It: 30850, Loss: 1.750e-03, C: 4.482, Time: 0.04\n",
"It: 30860, Loss: 1.078e-03, C: 4.482, Time: 0.03\n",
"It: 30870, Loss: 7.438e-04, C: 4.482, Time: 0.03\n",
"It: 30880, Loss: 1.407e-03, C: 4.482, Time: 0.04\n",
"It: 30890, Loss: 1.164e-03, C: 4.482, Time: 0.03\n",
"It: 30900, Loss: 9.179e-04, C: 4.482, Time: 0.04\n",
"It: 30910, Loss: 6.096e-04, C: 4.482, Time: 0.04\n",
"It: 30920, Loss: 9.032e-04, C: 4.482, Time: 0.03\n",
"It: 30930, Loss: 6.026e-04, C: 4.482, Time: 0.03\n",
"It: 30940, Loss: 2.968e-04, C: 4.482, Time: 0.03\n",
"It: 30950, Loss: 5.964e-04, C: 4.482, Time: 0.03\n",
"It: 30960, Loss: 4.175e-04, C: 4.482, Time: 0.03\n",
"It: 30970, Loss: 4.616e-04, C: 4.482, Time: 0.03\n",
"It: 30980, Loss: 7.567e-04, C: 4.482, Time: 0.03\n",
"It: 30990, Loss: 6.137e-04, C: 4.482, Time: 0.03\n",
"It: 31000, Loss: 4.852e-04, C: 4.482, Time: 0.03\n",
"It: 31010, Loss: 1.226e-03, C: 4.482, Time: 0.04\n",
"It: 31020, Loss: 1.142e-03, C: 4.482, Time: 0.03\n",
"It: 31030, Loss: 1.377e-03, C: 4.482, Time: 0.03\n",
"It: 31040, Loss: 7.880e-04, C: 4.482, Time: 0.03\n",
"It: 31050, Loss: 1.246e-03, C: 4.482, Time: 0.03\n",
"It: 31060, Loss: 4.728e-04, C: 4.482, Time: 0.04\n",
"It: 31070, Loss: 7.486e-04, C: 4.482, Time: 0.03\n",
"It: 31080, Loss: 4.897e-04, C: 4.482, Time: 0.04\n",
"It: 31090, Loss: 7.907e-04, C: 4.482, Time: 0.03\n",
"It: 31100, Loss: 6.262e-04, C: 4.482, Time: 0.03\n",
"It: 31110, Loss: 5.357e-04, C: 4.482, Time: 0.03\n",
"It: 31120, Loss: 2.173e-03, C: 4.482, Time: 0.03\n",
"It: 31130, Loss: 1.022e-03, C: 4.482, Time: 0.04\n",
"It: 31140, Loss: 5.397e-04, C: 4.482, Time: 0.03\n",
"It: 31150, Loss: 1.274e-03, C: 4.482, Time: 0.03\n",
"It: 31160, Loss: 9.927e-04, C: 4.482, Time: 0.03\n",
"It: 31170, Loss: 4.319e-04, C: 4.482, Time: 0.03\n",
"It: 31180, Loss: 4.744e-04, C: 4.482, Time: 0.03\n",
"It: 31190, Loss: 1.319e-03, C: 4.482, Time: 0.04\n",
"It: 31200, Loss: 3.140e-04, C: 4.482, Time: 0.04\n",
"It: 31210, Loss: 3.352e-04, C: 4.482, Time: 0.03\n",
"It: 31220, Loss: 7.599e-04, C: 4.482, Time: 0.03\n",
"It: 31230, Loss: 7.004e-04, C: 4.482, Time: 0.04\n",
"It: 31240, Loss: 5.343e-04, C: 4.482, Time: 0.03\n",
"It: 31250, Loss: 5.875e-04, C: 4.482, Time: 0.03\n",
"It: 31260, Loss: 9.522e-04, C: 4.482, Time: 0.04\n",
"It: 31270, Loss: 9.782e-04, C: 4.482, Time: 0.04\n",
"It: 31280, Loss: 4.224e-04, C: 4.482, Time: 0.03\n",
"It: 31290, Loss: 4.622e-04, C: 4.482, Time: 0.03\n",
"It: 31300, Loss: 6.163e-04, C: 4.482, Time: 0.03\n",
"It: 31310, Loss: 3.020e-03, C: 4.482, Time: 0.04\n",
"It: 31320, Loss: 1.744e-03, C: 4.482, Time: 0.03\n",
"It: 31330, Loss: 2.447e-03, C: 4.482, Time: 0.03\n",
"It: 31340, Loss: 1.774e-03, C: 4.482, Time: 0.04\n",
"It: 31350, Loss: 2.429e-03, C: 4.482, Time: 0.04\n",
"It: 31360, Loss: 1.276e-03, C: 4.482, Time: 0.04\n",
"It: 31370, Loss: 1.433e-03, C: 4.482, Time: 0.04\n",
"It: 31380, Loss: 1.163e-03, C: 4.482, Time: 0.04\n",
"It: 31390, Loss: 5.374e-04, C: 4.482, Time: 0.03\n",
"It: 31400, Loss: 1.189e-03, C: 4.482, Time: 0.04\n",
"It: 31410, Loss: 8.879e-04, C: 4.482, Time: 0.04\n",
"It: 31420, Loss: 1.541e-03, C: 4.482, Time: 0.04\n",
"It: 31430, Loss: 3.464e-03, C: 4.482, Time: 0.04\n",
"It: 31440, Loss: 1.015e-03, C: 4.482, Time: 0.03\n",
"It: 31450, Loss: 8.498e-04, C: 4.482, Time: 0.03\n",
"It: 31460, Loss: 8.741e-04, C: 4.482, Time: 0.04\n",
"It: 31470, Loss: 1.126e-03, C: 4.482, Time: 0.03\n",
"It: 31480, Loss: 1.536e-03, C: 4.482, Time: 0.03\n",
"It: 31490, Loss: 1.141e-03, C: 4.482, Time: 0.04\n",
"It: 31500, Loss: 6.140e-04, C: 4.482, Time: 0.03\n",
"It: 31510, Loss: 1.625e-03, C: 4.482, Time: 0.04\n",
"It: 31520, Loss: 7.781e-04, C: 4.482, Time: 0.03\n",
"It: 31530, Loss: 4.304e-04, C: 4.482, Time: 0.03\n",
"It: 31540, Loss: 7.805e-04, C: 4.482, Time: 0.03\n",
"It: 31550, Loss: 1.124e-03, C: 4.482, Time: 0.04\n",
"It: 31560, Loss: 1.054e-03, C: 4.482, Time: 0.03\n",
"It: 31570, Loss: 1.535e-03, C: 4.482, Time: 0.04\n",
"It: 31580, Loss: 9.957e-04, C: 4.482, Time: 0.03\n",
"It: 31590, Loss: 9.102e-04, C: 4.482, Time: 0.03\n",
"It: 31600, Loss: 1.314e-03, C: 4.482, Time: 0.04\n",
"It: 31610, Loss: 8.372e-04, C: 4.482, Time: 0.04\n",
"It: 31620, Loss: 6.513e-04, C: 4.482, Time: 0.03\n",
"It: 31630, Loss: 1.391e-03, C: 4.482, Time: 0.04\n",
"It: 31640, Loss: 5.817e-04, C: 4.482, Time: 0.04\n",
"It: 31650, Loss: 5.340e-04, C: 4.482, Time: 0.03\n",
"It: 31660, Loss: 7.714e-04, C: 4.482, Time: 0.03\n",
"It: 31670, Loss: 5.624e-04, C: 4.482, Time: 0.03\n",
"It: 31680, Loss: 1.283e-03, C: 4.482, Time: 0.03\n",
"It: 31690, Loss: 6.066e-04, C: 4.482, Time: 0.03\n",
"It: 31700, Loss: 9.605e-04, C: 4.482, Time: 0.04\n",
"It: 31710, Loss: 1.382e-03, C: 4.482, Time: 0.03\n",
"It: 31720, Loss: 5.368e-04, C: 4.482, Time: 0.03\n",
"It: 31730, Loss: 5.710e-04, C: 4.482, Time: 0.04\n",
"It: 31740, Loss: 8.508e-04, C: 4.482, Time: 0.04\n",
"It: 31750, Loss: 1.374e-03, C: 4.482, Time: 0.04\n",
"It: 31760, Loss: 5.573e-04, C: 4.482, Time: 0.04\n",
"It: 31770, Loss: 3.533e-04, C: 4.482, Time: 0.03\n",
"It: 31780, Loss: 5.871e-04, C: 4.482, Time: 0.03\n",
"It: 31790, Loss: 8.913e-04, C: 4.482, Time: 0.03\n",
"It: 31800, Loss: 6.076e-04, C: 4.482, Time: 0.03\n",
"It: 31810, Loss: 5.310e-04, C: 4.482, Time: 0.03\n",
"It: 31820, Loss: 6.041e-04, C: 4.482, Time: 0.03\n",
"It: 31830, Loss: 6.127e-04, C: 4.482, Time: 0.03\n",
"It: 31840, Loss: 1.521e-03, C: 4.482, Time: 0.03\n",
"It: 31850, Loss: 1.120e-03, C: 4.482, Time: 0.03\n",
"It: 31860, Loss: 1.190e-03, C: 4.482, Time: 0.03\n",
"It: 31870, Loss: 5.911e-04, C: 4.482, Time: 0.03\n",
"It: 31880, Loss: 8.934e-04, C: 4.482, Time: 0.03\n",
"It: 31890, Loss: 1.227e-03, C: 4.482, Time: 0.03\n",
"It: 31900, Loss: 2.461e-03, C: 4.482, Time: 0.03\n",
"It: 31910, Loss: 6.601e-04, C: 4.482, Time: 0.03\n",
"It: 31920, Loss: 1.226e-03, C: 4.482, Time: 0.04\n",
"It: 31930, Loss: 8.314e-04, C: 4.482, Time: 0.04\n",
"It: 31940, Loss: 4.676e-04, C: 4.482, Time: 0.04\n",
"It: 31950, Loss: 3.693e-04, C: 4.482, Time: 0.04\n",
"It: 31960, Loss: 5.210e-04, C: 4.482, Time: 0.03\n",
"It: 31970, Loss: 2.913e-04, C: 4.482, Time: 0.04\n",
"It: 31980, Loss: 5.237e-04, C: 4.482, Time: 0.03\n",
"It: 31990, Loss: 3.340e-04, C: 4.482, Time: 0.04\n",
"It: 32000, Loss: 1.308e-03, C: 4.482, Time: 0.03\n",
"It: 32010, Loss: 1.122e-03, C: 4.482, Time: 0.04\n",
"It: 32020, Loss: 8.049e-04, C: 4.482, Time: 0.03\n",
"It: 32030, Loss: 5.083e-04, C: 4.482, Time: 0.04\n",
"It: 32040, Loss: 6.617e-04, C: 4.482, Time: 0.03\n",
"It: 32050, Loss: 7.858e-04, C: 4.482, Time: 0.03\n",
"It: 32060, Loss: 8.729e-04, C: 4.482, Time: 0.03\n",
"It: 32070, Loss: 7.996e-04, C: 4.482, Time: 0.04\n",
"It: 32080, Loss: 6.453e-04, C: 4.482, Time: 0.04\n",
"It: 32090, Loss: 5.653e-04, C: 4.482, Time: 0.04\n",
"It: 32100, Loss: 8.059e-04, C: 4.482, Time: 0.04\n",
"It: 32110, Loss: 3.712e-04, C: 4.482, Time: 0.03\n",
"It: 32120, Loss: 5.446e-04, C: 4.482, Time: 0.03\n",
"It: 32130, Loss: 7.847e-04, C: 4.482, Time: 0.03\n",
"It: 32140, Loss: 1.880e-03, C: 4.482, Time: 0.04\n",
"It: 32150, Loss: 4.303e-04, C: 4.482, Time: 0.04\n",
"It: 32160, Loss: 4.895e-04, C: 4.482, Time: 0.03\n",
"It: 32170, Loss: 1.759e-03, C: 4.482, Time: 0.04\n",
"It: 32180, Loss: 4.905e-04, C: 4.482, Time: 0.03\n",
"It: 32190, Loss: 3.620e-04, C: 4.482, Time: 0.04\n",
"It: 32200, Loss: 1.505e-03, C: 4.482, Time: 0.03\n",
"It: 32210, Loss: 1.040e-03, C: 4.482, Time: 0.04\n",
"It: 32220, Loss: 5.383e-04, C: 4.482, Time: 0.04\n",
"It: 32230, Loss: 2.587e-03, C: 4.482, Time: 0.04\n",
"It: 32240, Loss: 4.139e-04, C: 4.482, Time: 0.04\n",
"It: 32250, Loss: 1.192e-03, C: 4.482, Time: 0.04\n",
"It: 32260, Loss: 8.584e-04, C: 4.482, Time: 0.04\n",
"It: 32270, Loss: 2.502e-03, C: 4.482, Time: 0.04\n",
"It: 32280, Loss: 5.828e-04, C: 4.482, Time: 0.03\n",
"It: 32290, Loss: 1.013e-03, C: 4.482, Time: 0.03\n",
"It: 32300, Loss: 6.776e-04, C: 4.482, Time: 0.03\n",
"It: 32310, Loss: 6.591e-04, C: 4.482, Time: 0.03\n",
"It: 32320, Loss: 1.293e-03, C: 4.482, Time: 0.04\n",
"It: 32330, Loss: 8.767e-04, C: 4.482, Time: 0.04\n",
"It: 32340, Loss: 5.984e-04, C: 4.482, Time: 0.04\n",
"It: 32350, Loss: 1.208e-03, C: 4.482, Time: 0.04\n",
"It: 32360, Loss: 3.675e-04, C: 4.482, Time: 0.04\n",
"It: 32370, Loss: 1.308e-03, C: 4.482, Time: 0.04\n",
"It: 32380, Loss: 1.357e-03, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 32390, Loss: 6.474e-04, C: 4.482, Time: 0.04\n",
"It: 32400, Loss: 1.159e-03, C: 4.482, Time: 0.03\n",
"It: 32410, Loss: 5.807e-04, C: 4.482, Time: 0.03\n",
"It: 32420, Loss: 3.376e-04, C: 4.482, Time: 0.03\n",
"It: 32430, Loss: 5.363e-04, C: 4.482, Time: 0.04\n",
"It: 32440, Loss: 4.937e-04, C: 4.482, Time: 0.04\n",
"It: 32450, Loss: 1.041e-03, C: 4.482, Time: 0.04\n",
"It: 32460, Loss: 1.252e-03, C: 4.482, Time: 0.04\n",
"It: 32470, Loss: 4.784e-04, C: 4.482, Time: 0.04\n",
"It: 32480, Loss: 5.238e-04, C: 4.482, Time: 0.04\n",
"It: 32490, Loss: 8.483e-04, C: 4.482, Time: 0.03\n",
"It: 32500, Loss: 6.660e-04, C: 4.482, Time: 0.03\n",
"It: 32510, Loss: 4.691e-04, C: 4.482, Time: 0.04\n",
"It: 32520, Loss: 1.094e-03, C: 4.482, Time: 0.04\n",
"It: 32530, Loss: 8.494e-04, C: 4.482, Time: 0.03\n",
"It: 32540, Loss: 8.785e-04, C: 4.482, Time: 0.03\n",
"It: 32550, Loss: 1.730e-03, C: 4.482, Time: 0.04\n",
"It: 32560, Loss: 9.605e-04, C: 4.482, Time: 0.03\n",
"It: 32570, Loss: 7.992e-04, C: 4.482, Time: 0.04\n",
"It: 32580, Loss: 1.049e-03, C: 4.482, Time: 0.04\n",
"It: 32590, Loss: 1.724e-03, C: 4.482, Time: 0.04\n",
"It: 32600, Loss: 4.177e-04, C: 4.482, Time: 0.03\n",
"It: 32610, Loss: 4.061e-04, C: 4.482, Time: 0.04\n",
"It: 32620, Loss: 6.702e-04, C: 4.482, Time: 0.04\n",
"It: 32630, Loss: 2.336e-03, C: 4.482, Time: 0.04\n",
"It: 32640, Loss: 6.123e-04, C: 4.482, Time: 0.04\n",
"It: 32650, Loss: 4.106e-04, C: 4.482, Time: 0.04\n",
"It: 32660, Loss: 4.913e-04, C: 4.482, Time: 0.04\n",
"It: 32670, Loss: 1.495e-03, C: 4.482, Time: 0.04\n",
"It: 32680, Loss: 1.132e-03, C: 4.482, Time: 0.04\n",
"It: 32690, Loss: 1.002e-03, C: 4.482, Time: 0.04\n",
"It: 32700, Loss: 7.380e-04, C: 4.482, Time: 0.03\n",
"It: 32710, Loss: 1.067e-03, C: 4.482, Time: 0.04\n",
"It: 32720, Loss: 1.585e-03, C: 4.482, Time: 0.04\n",
"It: 32730, Loss: 1.514e-03, C: 4.482, Time: 0.04\n",
"It: 32740, Loss: 1.179e-03, C: 4.482, Time: 0.04\n",
"It: 32750, Loss: 6.109e-04, C: 4.482, Time: 0.03\n",
"It: 32760, Loss: 2.122e-03, C: 4.482, Time: 0.04\n",
"It: 32770, Loss: 9.464e-04, C: 4.482, Time: 0.04\n",
"It: 32780, Loss: 8.763e-04, C: 4.482, Time: 0.03\n",
"It: 32790, Loss: 4.942e-04, C: 4.482, Time: 0.04\n",
"It: 32800, Loss: 6.302e-04, C: 4.482, Time: 0.03\n",
"It: 32810, Loss: 1.257e-03, C: 4.482, Time: 0.04\n",
"It: 32820, Loss: 3.761e-04, C: 4.482, Time: 0.03\n",
"It: 32830, Loss: 1.288e-03, C: 4.482, Time: 0.04\n",
"It: 32840, Loss: 1.356e-03, C: 4.482, Time: 0.04\n",
"It: 32850, Loss: 1.228e-03, C: 4.482, Time: 0.04\n",
"It: 32860, Loss: 1.713e-03, C: 4.482, Time: 0.03\n",
"It: 32870, Loss: 7.115e-04, C: 4.482, Time: 0.04\n",
"It: 32880, Loss: 7.691e-04, C: 4.482, Time: 0.04\n",
"It: 32890, Loss: 6.278e-04, C: 4.482, Time: 0.03\n",
"It: 32900, Loss: 4.126e-04, C: 4.482, Time: 0.03\n",
"It: 32910, Loss: 6.062e-04, C: 4.482, Time: 0.03\n",
"It: 32920, Loss: 4.309e-04, C: 4.482, Time: 0.04\n",
"It: 32930, Loss: 1.227e-03, C: 4.482, Time: 0.03\n",
"It: 32940, Loss: 8.533e-04, C: 4.482, Time: 0.04\n",
"It: 32950, Loss: 6.692e-04, C: 4.482, Time: 0.03\n",
"It: 32960, Loss: 8.480e-04, C: 4.482, Time: 0.04\n",
"It: 32970, Loss: 4.760e-04, C: 4.482, Time: 0.03\n",
"It: 32980, Loss: 5.616e-04, C: 4.482, Time: 0.03\n",
"It: 32990, Loss: 9.950e-04, C: 4.482, Time: 0.04\n",
"It: 33000, Loss: 5.252e-04, C: 4.482, Time: 0.04\n",
"It: 33010, Loss: 9.764e-04, C: 4.482, Time: 0.04\n",
"It: 33020, Loss: 5.149e-04, C: 4.482, Time: 0.04\n",
"It: 33030, Loss: 8.058e-04, C: 4.482, Time: 0.03\n",
"It: 33040, Loss: 5.744e-04, C: 4.482, Time: 0.04\n",
"It: 33050, Loss: 7.728e-04, C: 4.482, Time: 0.03\n",
"It: 33060, Loss: 6.057e-04, C: 4.482, Time: 0.04\n",
"It: 33070, Loss: 8.623e-04, C: 4.482, Time: 0.03\n",
"It: 33080, Loss: 5.212e-04, C: 4.482, Time: 0.04\n",
"It: 33090, Loss: 5.424e-04, C: 4.482, Time: 0.03\n",
"It: 33100, Loss: 6.755e-04, C: 4.482, Time: 0.04\n",
"It: 33110, Loss: 4.503e-04, C: 4.482, Time: 0.04\n",
"It: 33120, Loss: 3.957e-04, C: 4.482, Time: 0.03\n",
"It: 33130, Loss: 6.488e-04, C: 4.482, Time: 0.03\n",
"It: 33140, Loss: 6.100e-04, C: 4.482, Time: 0.03\n",
"It: 33150, Loss: 9.556e-04, C: 4.482, Time: 0.03\n",
"It: 33160, Loss: 1.582e-03, C: 4.482, Time: 0.03\n",
"It: 33170, Loss: 4.124e-04, C: 4.482, Time: 0.03\n",
"It: 33180, Loss: 1.083e-03, C: 4.482, Time: 0.04\n",
"It: 33190, Loss: 1.347e-03, C: 4.482, Time: 0.03\n",
"It: 33200, Loss: 3.944e-04, C: 4.482, Time: 0.03\n",
"It: 33210, Loss: 5.826e-04, C: 4.482, Time: 0.04\n",
"It: 33220, Loss: 3.304e-04, C: 4.482, Time: 0.03\n",
"It: 33230, Loss: 6.472e-04, C: 4.482, Time: 0.04\n",
"It: 33240, Loss: 7.201e-04, C: 4.482, Time: 0.03\n",
"It: 33250, Loss: 4.400e-04, C: 4.482, Time: 0.03\n",
"It: 33260, Loss: 7.223e-04, C: 4.482, Time: 0.03\n",
"It: 33270, Loss: 1.073e-03, C: 4.482, Time: 0.03\n",
"It: 33280, Loss: 7.467e-04, C: 4.482, Time: 0.03\n",
"It: 33290, Loss: 4.147e-04, C: 4.482, Time: 0.04\n",
"It: 33300, Loss: 8.251e-04, C: 4.482, Time: 0.04\n",
"It: 33310, Loss: 8.335e-04, C: 4.482, Time: 0.03\n",
"It: 33320, Loss: 7.857e-04, C: 4.482, Time: 0.03\n",
"It: 33330, Loss: 4.568e-04, C: 4.482, Time: 0.04\n",
"It: 33340, Loss: 5.078e-04, C: 4.482, Time: 0.03\n",
"It: 33350, Loss: 4.468e-04, C: 4.482, Time: 0.03\n",
"It: 33360, Loss: 9.787e-04, C: 4.482, Time: 0.03\n",
"It: 33370, Loss: 1.885e-03, C: 4.482, Time: 0.03\n",
"It: 33380, Loss: 1.227e-03, C: 4.482, Time: 0.03\n",
"It: 33390, Loss: 1.442e-03, C: 4.482, Time: 0.04\n",
"It: 33400, Loss: 5.355e-04, C: 4.482, Time: 0.03\n",
"It: 33410, Loss: 4.442e-04, C: 4.482, Time: 0.03\n",
"It: 33420, Loss: 1.803e-03, C: 4.482, Time: 0.04\n",
"It: 33430, Loss: 4.506e-04, C: 4.482, Time: 0.03\n",
"It: 33440, Loss: 4.921e-04, C: 4.482, Time: 0.03\n",
"It: 33450, Loss: 6.821e-04, C: 4.482, Time: 0.03\n",
"It: 33460, Loss: 5.849e-04, C: 4.482, Time: 0.03\n",
"It: 33470, Loss: 7.791e-04, C: 4.482, Time: 0.04\n",
"It: 33480, Loss: 1.759e-03, C: 4.482, Time: 0.04\n",
"It: 33490, Loss: 8.103e-04, C: 4.482, Time: 0.03\n",
"It: 33500, Loss: 1.178e-03, C: 4.482, Time: 0.04\n",
"It: 33510, Loss: 9.407e-04, C: 4.482, Time: 0.04\n",
"It: 33520, Loss: 1.099e-03, C: 4.482, Time: 0.03\n",
"It: 33530, Loss: 1.160e-03, C: 4.482, Time: 0.04\n",
"It: 33540, Loss: 7.260e-04, C: 4.482, Time: 0.04\n",
"It: 33550, Loss: 4.074e-04, C: 4.482, Time: 0.03\n",
"It: 33560, Loss: 5.258e-04, C: 4.482, Time: 0.04\n",
"It: 33570, Loss: 6.923e-04, C: 4.482, Time: 0.04\n",
"It: 33580, Loss: 8.363e-04, C: 4.482, Time: 0.04\n",
"It: 33590, Loss: 8.807e-04, C: 4.482, Time: 0.03\n",
"It: 33600, Loss: 1.088e-03, C: 4.482, Time: 0.04\n",
"It: 33610, Loss: 1.683e-03, C: 4.482, Time: 0.03\n",
"It: 33620, Loss: 5.108e-04, C: 4.482, Time: 0.04\n",
"It: 33630, Loss: 1.284e-03, C: 4.482, Time: 0.03\n",
"It: 33640, Loss: 6.768e-04, C: 4.482, Time: 0.03\n",
"It: 33650, Loss: 6.465e-04, C: 4.482, Time: 0.03\n",
"It: 33660, Loss: 5.870e-04, C: 4.482, Time: 0.03\n",
"It: 33670, Loss: 6.164e-04, C: 4.482, Time: 0.03\n",
"It: 33680, Loss: 8.084e-04, C: 4.482, Time: 0.03\n",
"It: 33690, Loss: 1.477e-03, C: 4.482, Time: 0.03\n",
"It: 33700, Loss: 9.917e-04, C: 4.482, Time: 0.03\n",
"It: 33710, Loss: 6.840e-04, C: 4.482, Time: 0.04\n",
"It: 33720, Loss: 1.029e-03, C: 4.482, Time: 0.03\n",
"It: 33730, Loss: 1.913e-03, C: 4.482, Time: 0.03\n",
"It: 33740, Loss: 5.307e-04, C: 4.482, Time: 0.03\n",
"It: 33750, Loss: 1.153e-03, C: 4.482, Time: 0.04\n",
"It: 33760, Loss: 1.083e-03, C: 4.482, Time: 0.04\n",
"It: 33770, Loss: 5.749e-04, C: 4.482, Time: 0.03\n",
"It: 33780, Loss: 1.247e-03, C: 4.482, Time: 0.03\n",
"It: 33790, Loss: 7.829e-04, C: 4.482, Time: 0.03\n",
"It: 33800, Loss: 7.660e-04, C: 4.482, Time: 0.03\n",
"It: 33810, Loss: 3.757e-04, C: 4.482, Time: 0.03\n",
"It: 33820, Loss: 5.552e-04, C: 4.482, Time: 0.04\n",
"It: 33830, Loss: 4.109e-04, C: 4.482, Time: 0.03\n",
"It: 33840, Loss: 1.242e-03, C: 4.482, Time: 0.03\n",
"It: 33850, Loss: 5.257e-04, C: 4.482, Time: 0.03\n",
"It: 33860, Loss: 4.142e-04, C: 4.482, Time: 0.03\n",
"It: 33870, Loss: 4.179e-04, C: 4.482, Time: 0.03\n",
"It: 33880, Loss: 4.033e-04, C: 4.482, Time: 0.03\n",
"It: 33890, Loss: 3.431e-04, C: 4.482, Time: 0.04\n",
"It: 33900, Loss: 4.906e-04, C: 4.482, Time: 0.03\n",
"It: 33910, Loss: 3.058e-04, C: 4.482, Time: 0.03\n",
"It: 33920, Loss: 8.374e-04, C: 4.482, Time: 0.03\n",
"It: 33930, Loss: 7.866e-04, C: 4.482, Time: 0.03\n",
"It: 33940, Loss: 4.777e-04, C: 4.482, Time: 0.03\n",
"It: 33950, Loss: 4.672e-04, C: 4.482, Time: 0.03\n",
"It: 33960, Loss: 3.441e-04, C: 4.482, Time: 0.04\n",
"It: 33970, Loss: 6.780e-04, C: 4.482, Time: 0.03\n",
"It: 33980, Loss: 5.723e-04, C: 4.482, Time: 0.03\n",
"It: 33990, Loss: 2.356e-03, C: 4.482, Time: 0.03\n",
"It: 34000, Loss: 8.570e-04, C: 4.482, Time: 0.03\n",
"It: 34010, Loss: 5.400e-04, C: 4.482, Time: 0.03\n",
"It: 34020, Loss: 1.047e-03, C: 4.482, Time: 0.04\n",
"It: 34030, Loss: 2.898e-04, C: 4.482, Time: 0.04\n",
"It: 34040, Loss: 5.360e-04, C: 4.482, Time: 0.04\n",
"It: 34050, Loss: 6.145e-04, C: 4.482, Time: 0.04\n",
"It: 34060, Loss: 1.064e-03, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 34070, Loss: 1.995e-03, C: 4.482, Time: 0.03\n",
"It: 34080, Loss: 1.056e-03, C: 4.482, Time: 0.03\n",
"It: 34090, Loss: 6.370e-04, C: 4.482, Time: 0.03\n",
"It: 34100, Loss: 8.706e-04, C: 4.482, Time: 0.03\n",
"It: 34110, Loss: 6.494e-04, C: 4.482, Time: 0.04\n",
"It: 34120, Loss: 1.528e-03, C: 4.482, Time: 0.03\n",
"It: 34130, Loss: 9.397e-04, C: 4.482, Time: 0.03\n",
"It: 34140, Loss: 5.388e-04, C: 4.482, Time: 0.03\n",
"It: 34150, Loss: 4.968e-04, C: 4.482, Time: 0.03\n",
"It: 34160, Loss: 1.621e-03, C: 4.482, Time: 0.04\n",
"It: 34170, Loss: 6.509e-04, C: 4.482, Time: 0.03\n",
"It: 34180, Loss: 1.125e-03, C: 4.482, Time: 0.03\n",
"It: 34190, Loss: 1.446e-03, C: 4.482, Time: 0.03\n",
"It: 34200, Loss: 7.486e-04, C: 4.482, Time: 0.03\n",
"It: 34210, Loss: 1.440e-03, C: 4.482, Time: 0.03\n",
"It: 34220, Loss: 1.324e-03, C: 4.482, Time: 0.03\n",
"It: 34230, Loss: 7.157e-04, C: 4.482, Time: 0.03\n",
"It: 34240, Loss: 1.180e-03, C: 4.482, Time: 0.03\n",
"It: 34250, Loss: 6.222e-04, C: 4.482, Time: 0.03\n",
"It: 34260, Loss: 5.467e-04, C: 4.482, Time: 0.03\n",
"It: 34270, Loss: 1.543e-03, C: 4.482, Time: 0.03\n",
"It: 34280, Loss: 6.422e-04, C: 4.482, Time: 0.03\n",
"It: 34290, Loss: 5.751e-04, C: 4.482, Time: 0.03\n",
"It: 34300, Loss: 3.690e-04, C: 4.482, Time: 0.03\n",
"It: 34310, Loss: 5.682e-04, C: 4.482, Time: 0.04\n",
"It: 34320, Loss: 4.830e-04, C: 4.482, Time: 0.03\n",
"It: 34330, Loss: 3.391e-04, C: 4.482, Time: 0.03\n",
"It: 34340, Loss: 2.904e-04, C: 4.482, Time: 0.03\n",
"It: 34350, Loss: 1.244e-03, C: 4.482, Time: 0.03\n",
"It: 34360, Loss: 3.889e-04, C: 4.482, Time: 0.03\n",
"It: 34370, Loss: 4.924e-04, C: 4.482, Time: 0.04\n",
"It: 34380, Loss: 4.651e-04, C: 4.482, Time: 0.03\n",
"It: 34390, Loss: 6.914e-04, C: 4.482, Time: 0.03\n",
"It: 34400, Loss: 2.437e-04, C: 4.482, Time: 0.03\n",
"It: 34410, Loss: 6.227e-04, C: 4.482, Time: 0.03\n",
"It: 34420, Loss: 4.990e-04, C: 4.482, Time: 0.03\n",
"It: 34430, Loss: 4.558e-04, C: 4.482, Time: 0.03\n",
"It: 34440, Loss: 5.209e-04, C: 4.482, Time: 0.04\n",
"It: 34450, Loss: 4.990e-04, C: 4.482, Time: 0.03\n",
"It: 34460, Loss: 9.725e-04, C: 4.482, Time: 0.03\n",
"It: 34470, Loss: 6.945e-04, C: 4.482, Time: 0.03\n",
"It: 34480, Loss: 6.381e-04, C: 4.482, Time: 0.03\n",
"It: 34490, Loss: 4.921e-04, C: 4.482, Time: 0.03\n",
"It: 34500, Loss: 9.447e-04, C: 4.482, Time: 0.03\n",
"It: 34510, Loss: 5.107e-04, C: 4.482, Time: 0.04\n",
"It: 34520, Loss: 7.044e-04, C: 4.482, Time: 0.03\n",
"It: 34530, Loss: 1.249e-03, C: 4.482, Time: 0.03\n",
"It: 34540, Loss: 1.350e-03, C: 4.482, Time: 0.03\n",
"It: 34550, Loss: 6.689e-04, C: 4.482, Time: 0.03\n",
"It: 34560, Loss: 5.718e-04, C: 4.482, Time: 0.03\n",
"It: 34570, Loss: 1.060e-03, C: 4.482, Time: 0.03\n",
"It: 34580, Loss: 9.853e-04, C: 4.482, Time: 0.03\n",
"It: 34590, Loss: 4.059e-04, C: 4.482, Time: 0.03\n",
"It: 34600, Loss: 6.742e-04, C: 4.482, Time: 0.04\n",
"It: 34610, Loss: 6.346e-04, C: 4.482, Time: 0.03\n",
"It: 34620, Loss: 9.831e-04, C: 4.482, Time: 0.04\n",
"It: 34630, Loss: 7.285e-04, C: 4.482, Time: 0.03\n",
"It: 34640, Loss: 1.502e-03, C: 4.482, Time: 0.03\n",
"It: 34650, Loss: 1.385e-03, C: 4.482, Time: 0.03\n",
"It: 34660, Loss: 4.091e-04, C: 4.482, Time: 0.03\n",
"It: 34670, Loss: 4.963e-04, C: 4.482, Time: 0.03\n",
"It: 34680, Loss: 3.535e-04, C: 4.482, Time: 0.03\n",
"It: 34690, Loss: 4.797e-04, C: 4.482, Time: 0.03\n",
"It: 34700, Loss: 2.072e-03, C: 4.482, Time: 0.03\n",
"It: 34710, Loss: 4.824e-04, C: 4.482, Time: 0.03\n",
"It: 34720, Loss: 4.550e-04, C: 4.482, Time: 0.04\n",
"It: 34730, Loss: 5.268e-04, C: 4.482, Time: 0.03\n",
"It: 34740, Loss: 3.631e-04, C: 4.482, Time: 0.03\n",
"It: 34750, Loss: 3.146e-04, C: 4.482, Time: 0.03\n",
"It: 34760, Loss: 1.367e-03, C: 4.482, Time: 0.03\n",
"It: 34770, Loss: 5.957e-04, C: 4.482, Time: 0.03\n",
"It: 34780, Loss: 8.607e-04, C: 4.482, Time: 0.03\n",
"It: 34790, Loss: 1.524e-03, C: 4.482, Time: 0.03\n",
"It: 34800, Loss: 8.406e-04, C: 4.482, Time: 0.04\n",
"It: 34810, Loss: 6.297e-04, C: 4.482, Time: 0.03\n",
"It: 34820, Loss: 3.964e-04, C: 4.482, Time: 0.03\n",
"It: 34830, Loss: 1.680e-03, C: 4.482, Time: 0.03\n",
"It: 34840, Loss: 3.238e-04, C: 4.482, Time: 0.03\n",
"It: 34850, Loss: 9.000e-04, C: 4.482, Time: 0.03\n",
"It: 34860, Loss: 3.949e-04, C: 4.482, Time: 0.03\n",
"It: 34870, Loss: 8.310e-04, C: 4.482, Time: 0.03\n",
"It: 34880, Loss: 4.853e-04, C: 4.482, Time: 0.03\n",
"It: 34890, Loss: 9.477e-04, C: 4.482, Time: 0.03\n",
"It: 34900, Loss: 7.722e-04, C: 4.482, Time: 0.03\n",
"It: 34910, Loss: 1.605e-03, C: 4.482, Time: 0.03\n",
"It: 34920, Loss: 5.899e-04, C: 4.482, Time: 0.03\n",
"It: 34930, Loss: 4.256e-04, C: 4.482, Time: 0.03\n",
"It: 34940, Loss: 8.824e-04, C: 4.482, Time: 0.03\n",
"It: 34950, Loss: 9.205e-04, C: 4.482, Time: 0.03\n",
"It: 34960, Loss: 1.243e-03, C: 4.482, Time: 0.04\n",
"It: 34970, Loss: 7.494e-04, C: 4.482, Time: 0.03\n",
"It: 34980, Loss: 6.509e-04, C: 4.482, Time: 0.03\n",
"It: 34990, Loss: 9.741e-04, C: 4.482, Time: 0.04\n",
"It: 35000, Loss: 7.029e-04, C: 4.482, Time: 0.03\n",
"It: 35010, Loss: 9.407e-04, C: 4.482, Time: 0.03\n",
"It: 35020, Loss: 8.997e-04, C: 4.482, Time: 0.03\n",
"It: 35030, Loss: 9.274e-04, C: 4.482, Time: 0.03\n",
"It: 35040, Loss: 8.411e-04, C: 4.482, Time: 0.03\n",
"It: 35050, Loss: 9.433e-04, C: 4.482, Time: 0.03\n",
"It: 35060, Loss: 5.241e-04, C: 4.482, Time: 0.03\n",
"It: 35070, Loss: 6.989e-04, C: 4.482, Time: 0.03\n",
"It: 35080, Loss: 1.552e-03, C: 4.482, Time: 0.03\n",
"It: 35090, Loss: 4.709e-04, C: 4.482, Time: 0.03\n",
"It: 35100, Loss: 1.350e-03, C: 4.482, Time: 0.03\n",
"It: 35110, Loss: 6.390e-04, C: 4.482, Time: 0.03\n",
"It: 35120, Loss: 3.796e-04, C: 4.482, Time: 0.03\n",
"It: 35130, Loss: 1.045e-03, C: 4.482, Time: 0.03\n",
"It: 35140, Loss: 8.759e-04, C: 4.482, Time: 0.04\n",
"It: 35150, Loss: 5.107e-04, C: 4.482, Time: 0.04\n",
"It: 35160, Loss: 4.083e-04, C: 4.482, Time: 0.03\n",
"It: 35170, Loss: 3.286e-04, C: 4.482, Time: 0.03\n",
"It: 35180, Loss: 8.095e-04, C: 4.482, Time: 0.03\n",
"It: 35190, Loss: 5.019e-04, C: 4.482, Time: 0.03\n",
"It: 35200, Loss: 1.039e-03, C: 4.482, Time: 0.03\n",
"It: 35210, Loss: 7.329e-04, C: 4.482, Time: 0.04\n",
"It: 35220, Loss: 6.921e-04, C: 4.482, Time: 0.04\n",
"It: 35230, Loss: 7.340e-04, C: 4.482, Time: 0.03\n",
"It: 35240, Loss: 3.798e-04, C: 4.482, Time: 0.03\n",
"It: 35250, Loss: 8.041e-04, C: 4.482, Time: 0.03\n",
"It: 35260, Loss: 4.951e-04, C: 4.482, Time: 0.03\n",
"It: 35270, Loss: 5.534e-04, C: 4.482, Time: 0.03\n",
"It: 35280, Loss: 3.355e-04, C: 4.482, Time: 0.04\n",
"It: 35290, Loss: 3.098e-04, C: 4.482, Time: 0.03\n",
"It: 35300, Loss: 1.422e-03, C: 4.482, Time: 0.04\n",
"It: 35310, Loss: 1.237e-03, C: 4.482, Time: 0.03\n",
"It: 35320, Loss: 1.680e-03, C: 4.482, Time: 0.03\n",
"It: 35330, Loss: 7.305e-04, C: 4.482, Time: 0.03\n",
"It: 35340, Loss: 2.772e-03, C: 4.482, Time: 0.04\n",
"It: 35350, Loss: 6.183e-04, C: 4.482, Time: 0.03\n",
"It: 35360, Loss: 4.365e-04, C: 4.482, Time: 0.03\n",
"It: 35370, Loss: 5.449e-04, C: 4.482, Time: 0.03\n",
"It: 35380, Loss: 1.331e-03, C: 4.482, Time: 0.04\n",
"It: 35390, Loss: 7.024e-04, C: 4.482, Time: 0.03\n",
"It: 35400, Loss: 6.013e-04, C: 4.482, Time: 0.04\n",
"It: 35410, Loss: 5.022e-04, C: 4.482, Time: 0.03\n",
"It: 35420, Loss: 8.279e-04, C: 4.482, Time: 0.03\n",
"It: 35430, Loss: 4.340e-04, C: 4.482, Time: 0.03\n",
"It: 35440, Loss: 6.831e-04, C: 4.482, Time: 0.04\n",
"It: 35450, Loss: 9.203e-04, C: 4.482, Time: 0.03\n",
"It: 35460, Loss: 1.544e-03, C: 4.482, Time: 0.03\n",
"It: 35470, Loss: 6.046e-04, C: 4.482, Time: 0.03\n",
"It: 35480, Loss: 4.726e-04, C: 4.482, Time: 0.03\n",
"It: 35490, Loss: 7.568e-04, C: 4.482, Time: 0.03\n",
"It: 35500, Loss: 5.944e-04, C: 4.482, Time: 0.03\n",
"It: 35510, Loss: 1.032e-03, C: 4.482, Time: 0.03\n",
"It: 35520, Loss: 6.791e-04, C: 4.482, Time: 0.03\n",
"It: 35530, Loss: 1.458e-03, C: 4.482, Time: 0.03\n",
"It: 35540, Loss: 4.507e-04, C: 4.482, Time: 0.03\n",
"It: 35550, Loss: 5.543e-04, C: 4.482, Time: 0.03\n",
"It: 35560, Loss: 3.786e-04, C: 4.482, Time: 0.03\n",
"It: 35570, Loss: 4.459e-04, C: 4.482, Time: 0.04\n",
"It: 35580, Loss: 6.286e-04, C: 4.482, Time: 0.03\n",
"It: 35590, Loss: 9.046e-04, C: 4.482, Time: 0.03\n",
"It: 35600, Loss: 6.205e-04, C: 4.482, Time: 0.03\n",
"It: 35610, Loss: 6.167e-04, C: 4.482, Time: 0.03\n",
"It: 35620, Loss: 5.670e-04, C: 4.482, Time: 0.03\n",
"It: 35630, Loss: 3.801e-04, C: 4.482, Time: 0.03\n",
"It: 35640, Loss: 1.487e-03, C: 4.482, Time: 0.04\n",
"It: 35650, Loss: 5.220e-04, C: 4.482, Time: 0.04\n",
"It: 35660, Loss: 7.137e-04, C: 4.482, Time: 0.03\n",
"It: 35670, Loss: 5.965e-04, C: 4.482, Time: 0.03\n",
"It: 35680, Loss: 5.473e-04, C: 4.482, Time: 0.03\n",
"It: 35690, Loss: 5.409e-04, C: 4.482, Time: 0.03\n",
"It: 35700, Loss: 4.826e-04, C: 4.482, Time: 0.03\n",
"It: 35710, Loss: 5.994e-04, C: 4.482, Time: 0.03\n",
"It: 35720, Loss: 3.377e-04, C: 4.482, Time: 0.03\n",
"It: 35730, Loss: 5.633e-04, C: 4.482, Time: 0.03\n",
"It: 35740, Loss: 5.877e-04, C: 4.482, Time: 0.03\n",
"It: 35750, Loss: 6.855e-04, C: 4.482, Time: 0.03\n",
"It: 35760, Loss: 9.397e-04, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 35770, Loss: 6.566e-04, C: 4.482, Time: 0.03\n",
"It: 35780, Loss: 5.336e-04, C: 4.482, Time: 0.03\n",
"It: 35790, Loss: 4.924e-04, C: 4.482, Time: 0.03\n",
"It: 35800, Loss: 9.796e-04, C: 4.482, Time: 0.03\n",
"It: 35810, Loss: 9.293e-04, C: 4.482, Time: 0.04\n",
"It: 35820, Loss: 4.707e-04, C: 4.482, Time: 0.03\n",
"It: 35830, Loss: 4.828e-04, C: 4.482, Time: 0.03\n",
"It: 35840, Loss: 6.035e-04, C: 4.482, Time: 0.03\n",
"It: 35850, Loss: 7.403e-04, C: 4.482, Time: 0.03\n",
"It: 35860, Loss: 1.581e-03, C: 4.482, Time: 0.03\n",
"It: 35870, Loss: 9.395e-04, C: 4.482, Time: 0.03\n",
"It: 35880, Loss: 1.894e-03, C: 4.482, Time: 0.03\n",
"It: 35890, Loss: 6.438e-04, C: 4.482, Time: 0.04\n",
"It: 35900, Loss: 1.944e-03, C: 4.482, Time: 0.04\n",
"It: 35910, Loss: 7.775e-04, C: 4.482, Time: 0.03\n",
"It: 35920, Loss: 1.460e-03, C: 4.482, Time: 0.03\n",
"It: 35930, Loss: 4.859e-04, C: 4.482, Time: 0.03\n",
"It: 35940, Loss: 8.146e-04, C: 4.482, Time: 0.03\n",
"It: 35950, Loss: 5.879e-04, C: 4.482, Time: 0.03\n",
"It: 35960, Loss: 1.048e-03, C: 4.482, Time: 0.03\n",
"It: 35970, Loss: 8.936e-04, C: 4.482, Time: 0.04\n",
"It: 35980, Loss: 4.643e-04, C: 4.482, Time: 0.03\n",
"It: 35990, Loss: 7.267e-04, C: 4.482, Time: 0.04\n",
"It: 36000, Loss: 8.903e-04, C: 4.482, Time: 0.03\n",
"It: 36010, Loss: 6.804e-04, C: 4.482, Time: 0.03\n",
"It: 36020, Loss: 7.802e-04, C: 4.482, Time: 0.03\n",
"It: 36030, Loss: 3.990e-04, C: 4.482, Time: 0.03\n",
"It: 36040, Loss: 6.459e-04, C: 4.482, Time: 0.03\n",
"It: 36050, Loss: 1.089e-03, C: 4.482, Time: 0.03\n",
"It: 36060, Loss: 7.018e-04, C: 4.482, Time: 0.03\n",
"It: 36070, Loss: 2.270e-03, C: 4.482, Time: 0.03\n",
"It: 36080, Loss: 1.911e-03, C: 4.482, Time: 0.03\n",
"It: 36090, Loss: 8.575e-04, C: 4.482, Time: 0.03\n",
"It: 36100, Loss: 6.871e-04, C: 4.482, Time: 0.03\n",
"It: 36110, Loss: 7.125e-04, C: 4.482, Time: 0.03\n",
"It: 36120, Loss: 6.845e-04, C: 4.482, Time: 0.03\n",
"It: 36130, Loss: 8.712e-04, C: 4.482, Time: 0.03\n",
"It: 36140, Loss: 9.624e-04, C: 4.482, Time: 0.03\n",
"It: 36150, Loss: 3.567e-04, C: 4.482, Time: 0.03\n",
"It: 36160, Loss: 3.257e-04, C: 4.482, Time: 0.03\n",
"It: 36170, Loss: 5.759e-04, C: 4.482, Time: 0.03\n",
"It: 36180, Loss: 4.354e-04, C: 4.482, Time: 0.03\n",
"It: 36190, Loss: 3.998e-04, C: 4.482, Time: 0.04\n",
"It: 36200, Loss: 4.654e-04, C: 4.482, Time: 0.03\n",
"It: 36210, Loss: 1.244e-03, C: 4.482, Time: 0.03\n",
"It: 36220, Loss: 1.091e-03, C: 4.482, Time: 0.03\n",
"It: 36230, Loss: 3.562e-04, C: 4.482, Time: 0.03\n",
"It: 36240, Loss: 1.086e-03, C: 4.482, Time: 0.03\n",
"It: 36250, Loss: 9.479e-04, C: 4.482, Time: 0.03\n",
"It: 36260, Loss: 1.094e-03, C: 4.482, Time: 0.03\n",
"It: 36270, Loss: 5.620e-04, C: 4.482, Time: 0.03\n",
"It: 36280, Loss: 5.476e-04, C: 4.482, Time: 0.03\n",
"It: 36290, Loss: 1.591e-03, C: 4.482, Time: 0.03\n",
"It: 36300, Loss: 9.786e-04, C: 4.482, Time: 0.03\n",
"It: 36310, Loss: 7.710e-04, C: 4.482, Time: 0.03\n",
"It: 36320, Loss: 8.489e-04, C: 4.482, Time: 0.03\n",
"It: 36330, Loss: 3.280e-04, C: 4.482, Time: 0.03\n",
"It: 36340, Loss: 9.512e-04, C: 4.482, Time: 0.03\n",
"It: 36350, Loss: 8.561e-04, C: 4.482, Time: 0.03\n",
"It: 36360, Loss: 1.768e-03, C: 4.482, Time: 0.03\n",
"It: 36370, Loss: 9.957e-04, C: 4.482, Time: 0.03\n",
"It: 36380, Loss: 1.397e-03, C: 4.482, Time: 0.03\n",
"It: 36390, Loss: 4.582e-04, C: 4.482, Time: 0.03\n",
"It: 36400, Loss: 5.635e-04, C: 4.482, Time: 0.04\n",
"It: 36410, Loss: 6.667e-04, C: 4.482, Time: 0.03\n",
"It: 36420, Loss: 5.812e-04, C: 4.482, Time: 0.03\n",
"It: 36430, Loss: 4.082e-04, C: 4.482, Time: 0.04\n",
"It: 36440, Loss: 5.439e-04, C: 4.482, Time: 0.03\n",
"It: 36450, Loss: 4.820e-04, C: 4.482, Time: 0.03\n",
"It: 36460, Loss: 9.100e-04, C: 4.482, Time: 0.03\n",
"It: 36470, Loss: 9.222e-04, C: 4.482, Time: 0.03\n",
"It: 36480, Loss: 5.963e-04, C: 4.482, Time: 0.03\n",
"It: 36490, Loss: 1.074e-03, C: 4.482, Time: 0.03\n",
"It: 36500, Loss: 5.107e-04, C: 4.482, Time: 0.03\n",
"It: 36510, Loss: 4.413e-04, C: 4.482, Time: 0.03\n",
"It: 36520, Loss: 5.203e-04, C: 4.482, Time: 0.03\n",
"It: 36530, Loss: 5.991e-04, C: 4.482, Time: 0.03\n",
"It: 36540, Loss: 8.378e-04, C: 4.482, Time: 0.03\n",
"It: 36550, Loss: 1.127e-03, C: 4.482, Time: 0.03\n",
"It: 36560, Loss: 1.368e-03, C: 4.482, Time: 0.03\n",
"It: 36570, Loss: 8.343e-04, C: 4.482, Time: 0.03\n",
"It: 36580, Loss: 4.232e-04, C: 4.482, Time: 0.03\n",
"It: 36590, Loss: 1.353e-03, C: 4.482, Time: 0.03\n",
"It: 36600, Loss: 1.127e-03, C: 4.482, Time: 0.03\n",
"It: 36610, Loss: 9.014e-04, C: 4.482, Time: 0.04\n",
"It: 36620, Loss: 6.140e-04, C: 4.482, Time: 0.03\n",
"It: 36630, Loss: 6.332e-04, C: 4.482, Time: 0.03\n",
"It: 36640, Loss: 9.877e-04, C: 4.482, Time: 0.03\n",
"It: 36650, Loss: 6.229e-04, C: 4.482, Time: 0.04\n",
"It: 36660, Loss: 4.841e-04, C: 4.482, Time: 0.04\n",
"It: 36670, Loss: 5.912e-04, C: 4.482, Time: 0.03\n",
"It: 36680, Loss: 8.063e-04, C: 4.482, Time: 0.04\n",
"It: 36690, Loss: 6.617e-04, C: 4.482, Time: 0.03\n",
"It: 36700, Loss: 6.767e-04, C: 4.482, Time: 0.03\n",
"It: 36710, Loss: 6.298e-04, C: 4.482, Time: 0.03\n",
"It: 36720, Loss: 5.562e-04, C: 4.482, Time: 0.03\n",
"It: 36730, Loss: 1.060e-03, C: 4.482, Time: 0.04\n",
"It: 36740, Loss: 6.747e-04, C: 4.482, Time: 0.03\n",
"It: 36750, Loss: 4.124e-04, C: 4.482, Time: 0.04\n",
"It: 36760, Loss: 1.028e-03, C: 4.482, Time: 0.03\n",
"It: 36770, Loss: 8.279e-04, C: 4.482, Time: 0.03\n",
"It: 36780, Loss: 9.378e-04, C: 4.482, Time: 0.03\n",
"It: 36790, Loss: 3.933e-04, C: 4.482, Time: 0.04\n",
"It: 36800, Loss: 2.102e-03, C: 4.482, Time: 0.03\n",
"It: 36810, Loss: 3.801e-04, C: 4.482, Time: 0.03\n",
"It: 36820, Loss: 7.765e-04, C: 4.482, Time: 0.03\n",
"It: 36830, Loss: 1.120e-03, C: 4.482, Time: 0.04\n",
"It: 36840, Loss: 1.916e-03, C: 4.482, Time: 0.03\n",
"It: 36850, Loss: 4.073e-04, C: 4.482, Time: 0.03\n",
"It: 36860, Loss: 4.435e-04, C: 4.482, Time: 0.03\n",
"It: 36870, Loss: 3.532e-04, C: 4.482, Time: 0.03\n",
"It: 36880, Loss: 8.492e-04, C: 4.482, Time: 0.03\n",
"It: 36890, Loss: 1.452e-03, C: 4.482, Time: 0.03\n",
"It: 36900, Loss: 5.659e-04, C: 4.482, Time: 0.03\n",
"It: 36910, Loss: 8.358e-04, C: 4.482, Time: 0.03\n",
"It: 36920, Loss: 4.726e-04, C: 4.482, Time: 0.03\n",
"It: 36930, Loss: 1.087e-03, C: 4.482, Time: 0.03\n",
"It: 36940, Loss: 6.300e-04, C: 4.482, Time: 0.03\n",
"It: 36950, Loss: 7.816e-04, C: 4.482, Time: 0.03\n",
"It: 36960, Loss: 3.795e-04, C: 4.482, Time: 0.03\n",
"It: 36970, Loss: 4.061e-04, C: 4.482, Time: 0.04\n",
"It: 36980, Loss: 5.399e-04, C: 4.482, Time: 0.03\n",
"It: 36990, Loss: 4.115e-04, C: 4.482, Time: 0.03\n",
"It: 37000, Loss: 7.638e-04, C: 4.482, Time: 0.04\n",
"It: 37010, Loss: 1.286e-03, C: 4.482, Time: 0.03\n",
"It: 37020, Loss: 3.153e-04, C: 4.482, Time: 0.03\n",
"It: 37030, Loss: 1.172e-03, C: 4.482, Time: 0.03\n",
"It: 37040, Loss: 7.968e-04, C: 4.482, Time: 0.04\n",
"It: 37050, Loss: 4.574e-04, C: 4.482, Time: 0.04\n",
"It: 37060, Loss: 8.460e-04, C: 4.482, Time: 0.03\n",
"It: 37070, Loss: 5.155e-04, C: 4.482, Time: 0.03\n",
"It: 37080, Loss: 5.162e-04, C: 4.482, Time: 0.04\n",
"It: 37090, Loss: 7.811e-04, C: 4.482, Time: 0.03\n",
"It: 37100, Loss: 1.076e-03, C: 4.482, Time: 0.03\n",
"It: 37110, Loss: 4.743e-04, C: 4.482, Time: 0.03\n",
"It: 37120, Loss: 7.507e-04, C: 4.482, Time: 0.03\n",
"It: 37130, Loss: 1.481e-03, C: 4.482, Time: 0.03\n",
"It: 37140, Loss: 9.389e-04, C: 4.482, Time: 0.03\n",
"It: 37150, Loss: 4.324e-04, C: 4.482, Time: 0.04\n",
"It: 37160, Loss: 4.008e-04, C: 4.482, Time: 0.03\n",
"It: 37170, Loss: 3.078e-04, C: 4.482, Time: 0.03\n",
"It: 37180, Loss: 2.037e-03, C: 4.482, Time: 0.03\n",
"It: 37190, Loss: 2.257e-03, C: 4.482, Time: 0.04\n",
"It: 37200, Loss: 1.050e-03, C: 4.482, Time: 0.03\n",
"It: 37210, Loss: 4.542e-04, C: 4.482, Time: 0.03\n",
"It: 37220, Loss: 3.748e-04, C: 4.482, Time: 0.03\n",
"It: 37230, Loss: 5.554e-04, C: 4.482, Time: 0.03\n",
"It: 37240, Loss: 7.500e-04, C: 4.482, Time: 0.04\n",
"It: 37250, Loss: 4.546e-04, C: 4.482, Time: 0.03\n",
"It: 37260, Loss: 4.421e-04, C: 4.482, Time: 0.03\n",
"It: 37270, Loss: 4.057e-04, C: 4.482, Time: 0.03\n",
"It: 37280, Loss: 6.017e-04, C: 4.482, Time: 0.04\n",
"It: 37290, Loss: 7.000e-04, C: 4.482, Time: 0.03\n",
"It: 37300, Loss: 4.925e-04, C: 4.482, Time: 0.03\n",
"It: 37310, Loss: 1.200e-03, C: 4.482, Time: 0.03\n",
"It: 37320, Loss: 7.420e-04, C: 4.482, Time: 0.03\n",
"It: 37330, Loss: 6.477e-04, C: 4.482, Time: 0.04\n",
"It: 37340, Loss: 6.996e-04, C: 4.482, Time: 0.03\n",
"It: 37350, Loss: 6.172e-04, C: 4.482, Time: 0.03\n",
"It: 37360, Loss: 4.011e-04, C: 4.482, Time: 0.03\n",
"It: 37370, Loss: 1.077e-03, C: 4.482, Time: 0.03\n",
"It: 37380, Loss: 4.904e-04, C: 4.482, Time: 0.03\n",
"It: 37390, Loss: 1.143e-03, C: 4.482, Time: 0.03\n",
"It: 37400, Loss: 7.942e-04, C: 4.482, Time: 0.03\n",
"It: 37410, Loss: 6.644e-04, C: 4.482, Time: 0.03\n",
"It: 37420, Loss: 1.157e-03, C: 4.482, Time: 0.03\n",
"It: 37430, Loss: 5.797e-04, C: 4.482, Time: 0.03\n",
"It: 37440, Loss: 1.613e-03, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 37450, Loss: 5.731e-04, C: 4.482, Time: 0.04\n",
"It: 37460, Loss: 9.134e-04, C: 4.482, Time: 0.03\n",
"It: 37470, Loss: 4.300e-04, C: 4.482, Time: 0.03\n",
"It: 37480, Loss: 7.154e-04, C: 4.482, Time: 0.03\n",
"It: 37490, Loss: 9.823e-04, C: 4.482, Time: 0.03\n",
"It: 37500, Loss: 5.113e-04, C: 4.482, Time: 0.03\n",
"It: 37510, Loss: 8.904e-04, C: 4.482, Time: 0.03\n",
"It: 37520, Loss: 6.361e-04, C: 4.482, Time: 0.03\n",
"It: 37530, Loss: 3.878e-04, C: 4.482, Time: 0.03\n",
"It: 37540, Loss: 9.425e-04, C: 4.482, Time: 0.03\n",
"It: 37550, Loss: 4.643e-04, C: 4.482, Time: 0.04\n",
"It: 37560, Loss: 5.546e-04, C: 4.482, Time: 0.03\n",
"It: 37570, Loss: 2.108e-03, C: 4.482, Time: 0.03\n",
"It: 37580, Loss: 2.679e-04, C: 4.482, Time: 0.03\n",
"It: 37590, Loss: 4.199e-04, C: 4.482, Time: 0.03\n",
"It: 37600, Loss: 6.436e-04, C: 4.482, Time: 0.03\n",
"It: 37610, Loss: 1.698e-03, C: 4.482, Time: 0.03\n",
"It: 37620, Loss: 5.229e-04, C: 4.482, Time: 0.04\n",
"It: 37630, Loss: 6.754e-04, C: 4.482, Time: 0.03\n",
"It: 37640, Loss: 6.801e-04, C: 4.482, Time: 0.03\n",
"It: 37650, Loss: 9.843e-04, C: 4.482, Time: 0.03\n",
"It: 37660, Loss: 9.881e-04, C: 4.482, Time: 0.03\n",
"It: 37670, Loss: 1.260e-03, C: 4.482, Time: 0.03\n",
"It: 37680, Loss: 6.430e-04, C: 4.482, Time: 0.04\n",
"It: 37690, Loss: 1.141e-03, C: 4.482, Time: 0.03\n",
"It: 37700, Loss: 6.288e-04, C: 4.482, Time: 0.03\n",
"It: 37710, Loss: 7.811e-04, C: 4.482, Time: 0.04\n",
"It: 37720, Loss: 8.667e-04, C: 4.482, Time: 0.03\n",
"It: 37730, Loss: 1.499e-03, C: 4.482, Time: 0.03\n",
"It: 37740, Loss: 8.079e-04, C: 4.482, Time: 0.03\n",
"It: 37750, Loss: 4.330e-04, C: 4.482, Time: 0.04\n",
"It: 37760, Loss: 4.218e-04, C: 4.482, Time: 0.04\n",
"It: 37770, Loss: 7.353e-04, C: 4.482, Time: 0.03\n",
"It: 37780, Loss: 1.052e-03, C: 4.482, Time: 0.03\n",
"It: 37790, Loss: 1.019e-03, C: 4.482, Time: 0.03\n",
"It: 37800, Loss: 5.088e-04, C: 4.482, Time: 0.03\n",
"It: 37810, Loss: 4.748e-04, C: 4.482, Time: 0.03\n",
"It: 37820, Loss: 1.362e-03, C: 4.482, Time: 0.03\n",
"It: 37830, Loss: 1.924e-03, C: 4.482, Time: 0.03\n",
"It: 37840, Loss: 1.469e-03, C: 4.482, Time: 0.03\n",
"It: 37850, Loss: 1.018e-03, C: 4.482, Time: 0.03\n",
"It: 37860, Loss: 1.267e-03, C: 4.482, Time: 0.03\n",
"It: 37870, Loss: 8.633e-04, C: 4.482, Time: 0.04\n",
"It: 37880, Loss: 7.382e-04, C: 4.482, Time: 0.03\n",
"It: 37890, Loss: 1.899e-03, C: 4.482, Time: 0.03\n",
"It: 37900, Loss: 9.424e-04, C: 4.482, Time: 0.03\n",
"It: 37910, Loss: 6.564e-04, C: 4.482, Time: 0.03\n",
"It: 37920, Loss: 1.206e-03, C: 4.482, Time: 0.04\n",
"It: 37930, Loss: 3.934e-04, C: 4.482, Time: 0.04\n",
"It: 37940, Loss: 1.444e-03, C: 4.482, Time: 0.04\n",
"It: 37950, Loss: 9.177e-04, C: 4.482, Time: 0.03\n",
"It: 37960, Loss: 4.527e-04, C: 4.482, Time: 0.03\n",
"It: 37970, Loss: 4.467e-04, C: 4.482, Time: 0.03\n",
"It: 37980, Loss: 3.348e-04, C: 4.482, Time: 0.03\n",
"It: 37990, Loss: 8.497e-04, C: 4.482, Time: 0.04\n",
"It: 38000, Loss: 5.019e-04, C: 4.482, Time: 0.03\n",
"It: 38010, Loss: 5.233e-04, C: 4.482, Time: 0.04\n",
"It: 38020, Loss: 2.835e-04, C: 4.482, Time: 0.03\n",
"It: 38030, Loss: 6.602e-04, C: 4.482, Time: 0.03\n",
"It: 38040, Loss: 5.439e-04, C: 4.482, Time: 0.04\n",
"It: 38050, Loss: 7.560e-04, C: 4.482, Time: 0.04\n",
"It: 38060, Loss: 2.285e-03, C: 4.482, Time: 0.03\n",
"It: 38070, Loss: 1.142e-03, C: 4.482, Time: 0.03\n",
"It: 38080, Loss: 5.971e-04, C: 4.482, Time: 0.03\n",
"It: 38090, Loss: 5.734e-04, C: 4.482, Time: 0.03\n",
"It: 38100, Loss: 3.286e-04, C: 4.482, Time: 0.03\n",
"It: 38110, Loss: 3.751e-04, C: 4.482, Time: 0.04\n",
"It: 38120, Loss: 7.156e-04, C: 4.482, Time: 0.03\n",
"It: 38130, Loss: 1.557e-03, C: 4.482, Time: 0.03\n",
"It: 38140, Loss: 9.014e-04, C: 4.482, Time: 0.03\n",
"It: 38150, Loss: 8.113e-04, C: 4.482, Time: 0.03\n",
"It: 38160, Loss: 6.442e-04, C: 4.482, Time: 0.04\n",
"It: 38170, Loss: 4.631e-04, C: 4.482, Time: 0.03\n",
"It: 38180, Loss: 9.189e-04, C: 4.482, Time: 0.03\n",
"It: 38190, Loss: 5.299e-04, C: 4.482, Time: 0.03\n",
"It: 38200, Loss: 7.182e-04, C: 4.482, Time: 0.03\n",
"It: 38210, Loss: 1.172e-03, C: 4.482, Time: 0.04\n",
"It: 38220, Loss: 4.539e-04, C: 4.482, Time: 0.03\n",
"It: 38230, Loss: 4.860e-04, C: 4.482, Time: 0.03\n",
"It: 38240, Loss: 3.844e-04, C: 4.482, Time: 0.03\n",
"It: 38250, Loss: 3.507e-04, C: 4.482, Time: 0.03\n",
"It: 38260, Loss: 2.613e-04, C: 4.482, Time: 0.03\n",
"It: 38270, Loss: 4.855e-04, C: 4.482, Time: 0.03\n",
"It: 38280, Loss: 1.418e-03, C: 4.482, Time: 0.03\n",
"It: 38290, Loss: 1.029e-03, C: 4.482, Time: 0.04\n",
"It: 38300, Loss: 8.979e-04, C: 4.482, Time: 0.03\n",
"It: 38310, Loss: 1.209e-03, C: 4.482, Time: 0.03\n",
"It: 38320, Loss: 3.711e-04, C: 4.482, Time: 0.03\n",
"It: 38330, Loss: 4.384e-04, C: 4.482, Time: 0.03\n",
"It: 38340, Loss: 3.754e-04, C: 4.482, Time: 0.03\n",
"It: 38350, Loss: 4.720e-04, C: 4.482, Time: 0.04\n",
"It: 38360, Loss: 9.060e-04, C: 4.482, Time: 0.03\n",
"It: 38370, Loss: 5.471e-04, C: 4.482, Time: 0.04\n",
"It: 38380, Loss: 8.292e-04, C: 4.482, Time: 0.03\n",
"It: 38390, Loss: 6.126e-04, C: 4.482, Time: 0.03\n",
"It: 38400, Loss: 3.871e-04, C: 4.482, Time: 0.04\n",
"It: 38410, Loss: 9.498e-04, C: 4.482, Time: 0.04\n",
"It: 38420, Loss: 4.250e-04, C: 4.482, Time: 0.03\n",
"It: 38430, Loss: 1.785e-03, C: 4.482, Time: 0.03\n",
"It: 38440, Loss: 8.067e-04, C: 4.482, Time: 0.03\n",
"It: 38450, Loss: 7.184e-04, C: 4.482, Time: 0.03\n",
"It: 38460, Loss: 4.634e-04, C: 4.482, Time: 0.03\n",
"It: 38470, Loss: 1.850e-03, C: 4.482, Time: 0.03\n",
"It: 38480, Loss: 9.282e-04, C: 4.482, Time: 0.03\n",
"It: 38490, Loss: 5.908e-04, C: 4.482, Time: 0.04\n",
"It: 38500, Loss: 5.468e-04, C: 4.482, Time: 0.03\n",
"It: 38510, Loss: 5.867e-04, C: 4.482, Time: 0.03\n",
"It: 38520, Loss: 3.558e-04, C: 4.482, Time: 0.03\n",
"It: 38530, Loss: 9.052e-04, C: 4.482, Time: 0.03\n",
"It: 38540, Loss: 1.072e-03, C: 4.482, Time: 0.03\n",
"It: 38550, Loss: 1.149e-03, C: 4.482, Time: 0.03\n",
"It: 38560, Loss: 5.550e-04, C: 4.482, Time: 0.03\n",
"It: 38570, Loss: 1.030e-03, C: 4.482, Time: 0.03\n",
"It: 38580, Loss: 5.296e-04, C: 4.482, Time: 0.03\n",
"It: 38590, Loss: 5.547e-04, C: 4.482, Time: 0.03\n",
"It: 38600, Loss: 8.034e-04, C: 4.482, Time: 0.04\n",
"It: 38610, Loss: 7.639e-04, C: 4.482, Time: 0.03\n",
"It: 38620, Loss: 6.651e-04, C: 4.482, Time: 0.03\n",
"It: 38630, Loss: 4.952e-04, C: 4.482, Time: 0.03\n",
"It: 38640, Loss: 9.402e-04, C: 4.482, Time: 0.03\n",
"It: 38650, Loss: 5.699e-04, C: 4.482, Time: 0.04\n",
"It: 38660, Loss: 4.008e-04, C: 4.482, Time: 0.03\n",
"It: 38670, Loss: 3.565e-04, C: 4.482, Time: 0.03\n",
"It: 38680, Loss: 5.223e-04, C: 4.482, Time: 0.03\n",
"It: 38690, Loss: 4.875e-04, C: 4.482, Time: 0.03\n",
"It: 38700, Loss: 6.433e-04, C: 4.482, Time: 0.03\n",
"It: 38710, Loss: 7.180e-04, C: 4.482, Time: 0.04\n",
"It: 38720, Loss: 5.514e-04, C: 4.482, Time: 0.03\n",
"It: 38730, Loss: 1.374e-03, C: 4.482, Time: 0.03\n",
"It: 38740, Loss: 4.553e-04, C: 4.482, Time: 0.03\n",
"It: 38750, Loss: 6.006e-04, C: 4.482, Time: 0.03\n",
"It: 38760, Loss: 3.756e-04, C: 4.482, Time: 0.03\n",
"It: 38770, Loss: 3.177e-04, C: 4.482, Time: 0.03\n",
"It: 38780, Loss: 3.145e-04, C: 4.482, Time: 0.03\n",
"It: 38790, Loss: 8.380e-04, C: 4.482, Time: 0.04\n",
"It: 38800, Loss: 1.042e-03, C: 4.482, Time: 0.03\n",
"It: 38810, Loss: 6.160e-04, C: 4.482, Time: 0.03\n",
"It: 38820, Loss: 4.912e-04, C: 4.482, Time: 0.03\n",
"It: 38830, Loss: 5.612e-04, C: 4.482, Time: 0.03\n",
"It: 38840, Loss: 7.662e-04, C: 4.482, Time: 0.03\n",
"It: 38850, Loss: 5.925e-04, C: 4.482, Time: 0.03\n",
"It: 38860, Loss: 9.752e-04, C: 4.482, Time: 0.03\n",
"It: 38870, Loss: 1.485e-03, C: 4.482, Time: 0.03\n",
"It: 38880, Loss: 7.325e-04, C: 4.482, Time: 0.03\n",
"It: 38890, Loss: 4.712e-04, C: 4.482, Time: 0.03\n",
"It: 38900, Loss: 4.241e-04, C: 4.482, Time: 0.03\n",
"It: 38910, Loss: 2.138e-03, C: 4.482, Time: 0.03\n",
"It: 38920, Loss: 4.800e-04, C: 4.482, Time: 0.03\n",
"It: 38930, Loss: 3.372e-04, C: 4.482, Time: 0.03\n",
"It: 38940, Loss: 3.172e-04, C: 4.482, Time: 0.04\n",
"It: 38950, Loss: 1.467e-03, C: 4.482, Time: 0.03\n",
"It: 38960, Loss: 9.984e-04, C: 4.482, Time: 0.03\n",
"It: 38970, Loss: 1.577e-03, C: 4.482, Time: 0.03\n",
"It: 38980, Loss: 5.172e-04, C: 4.482, Time: 0.03\n",
"It: 38990, Loss: 1.600e-03, C: 4.482, Time: 0.03\n",
"It: 39000, Loss: 6.483e-04, C: 4.482, Time: 0.03\n",
"It: 39010, Loss: 6.246e-04, C: 4.482, Time: 0.03\n",
"It: 39020, Loss: 7.178e-04, C: 4.482, Time: 0.03\n",
"It: 39030, Loss: 3.900e-04, C: 4.482, Time: 0.03\n",
"It: 39040, Loss: 7.435e-04, C: 4.482, Time: 0.03\n",
"It: 39050, Loss: 7.773e-04, C: 4.482, Time: 0.03\n",
"It: 39060, Loss: 1.113e-03, C: 4.482, Time: 0.03\n",
"It: 39070, Loss: 6.477e-04, C: 4.482, Time: 0.03\n",
"It: 39080, Loss: 8.979e-04, C: 4.482, Time: 0.03\n",
"It: 39090, Loss: 1.019e-03, C: 4.482, Time: 0.03\n",
"It: 39100, Loss: 5.376e-04, C: 4.482, Time: 0.03\n",
"It: 39110, Loss: 6.799e-04, C: 4.482, Time: 0.03\n",
"It: 39120, Loss: 9.332e-04, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 39130, Loss: 1.524e-03, C: 4.482, Time: 0.04\n",
"It: 39140, Loss: 1.176e-03, C: 4.482, Time: 0.03\n",
"It: 39150, Loss: 1.818e-03, C: 4.482, Time: 0.03\n",
"It: 39160, Loss: 1.358e-03, C: 4.482, Time: 0.03\n",
"It: 39170, Loss: 5.479e-04, C: 4.482, Time: 0.03\n",
"It: 39180, Loss: 7.775e-04, C: 4.482, Time: 0.03\n",
"It: 39190, Loss: 6.705e-04, C: 4.482, Time: 0.04\n",
"It: 39200, Loss: 3.339e-04, C: 4.482, Time: 0.04\n",
"It: 39210, Loss: 1.638e-03, C: 4.482, Time: 0.03\n",
"It: 39220, Loss: 1.372e-03, C: 4.482, Time: 0.03\n",
"It: 39230, Loss: 6.839e-04, C: 4.482, Time: 0.03\n",
"It: 39240, Loss: 1.314e-03, C: 4.482, Time: 0.03\n",
"It: 39250, Loss: 1.487e-03, C: 4.482, Time: 0.04\n",
"It: 39260, Loss: 8.222e-04, C: 4.482, Time: 0.03\n",
"It: 39270, Loss: 3.250e-04, C: 4.482, Time: 0.03\n",
"It: 39280, Loss: 1.534e-03, C: 4.482, Time: 0.03\n",
"It: 39290, Loss: 1.015e-03, C: 4.482, Time: 0.03\n",
"It: 39300, Loss: 1.125e-03, C: 4.482, Time: 0.03\n",
"It: 39310, Loss: 1.686e-03, C: 4.482, Time: 0.03\n",
"It: 39320, Loss: 7.507e-04, C: 4.482, Time: 0.03\n",
"It: 39330, Loss: 3.980e-04, C: 4.482, Time: 0.03\n",
"It: 39340, Loss: 5.300e-04, C: 4.482, Time: 0.03\n",
"It: 39350, Loss: 3.683e-04, C: 4.482, Time: 0.03\n",
"It: 39360, Loss: 5.168e-04, C: 4.482, Time: 0.03\n",
"It: 39370, Loss: 7.036e-04, C: 4.482, Time: 0.04\n",
"It: 39380, Loss: 3.372e-04, C: 4.482, Time: 0.03\n",
"It: 39390, Loss: 5.262e-04, C: 4.482, Time: 0.03\n",
"It: 39400, Loss: 6.936e-04, C: 4.482, Time: 0.04\n",
"It: 39410, Loss: 2.190e-03, C: 4.482, Time: 0.03\n",
"It: 39420, Loss: 5.368e-04, C: 4.482, Time: 0.03\n",
"It: 39430, Loss: 3.724e-04, C: 4.482, Time: 0.03\n",
"It: 39440, Loss: 4.585e-04, C: 4.482, Time: 0.03\n",
"It: 39450, Loss: 4.929e-04, C: 4.482, Time: 0.03\n",
"It: 39460, Loss: 6.726e-04, C: 4.482, Time: 0.03\n",
"It: 39470, Loss: 1.175e-03, C: 4.482, Time: 0.03\n",
"It: 39480, Loss: 5.393e-04, C: 4.482, Time: 0.03\n",
"It: 39490, Loss: 4.977e-04, C: 4.482, Time: 0.03\n",
"It: 39500, Loss: 3.133e-04, C: 4.482, Time: 0.03\n",
"It: 39510, Loss: 3.770e-04, C: 4.482, Time: 0.03\n",
"It: 39520, Loss: 2.718e-04, C: 4.482, Time: 0.03\n",
"It: 39530, Loss: 1.059e-03, C: 4.482, Time: 0.04\n",
"It: 39540, Loss: 5.306e-04, C: 4.482, Time: 0.03\n",
"It: 39550, Loss: 1.377e-03, C: 4.482, Time: 0.04\n",
"It: 39560, Loss: 4.109e-04, C: 4.482, Time: 0.04\n",
"It: 39570, Loss: 1.393e-03, C: 4.482, Time: 0.03\n",
"It: 39580, Loss: 7.000e-04, C: 4.482, Time: 0.03\n",
"It: 39590, Loss: 7.458e-04, C: 4.482, Time: 0.03\n",
"It: 39600, Loss: 7.508e-04, C: 4.482, Time: 0.03\n",
"It: 39610, Loss: 9.072e-04, C: 4.482, Time: 0.03\n",
"It: 39620, Loss: 4.062e-04, C: 4.482, Time: 0.04\n",
"It: 39630, Loss: 4.460e-04, C: 4.482, Time: 0.03\n",
"It: 39640, Loss: 7.724e-04, C: 4.482, Time: 0.03\n",
"It: 39650, Loss: 8.429e-04, C: 4.482, Time: 0.03\n",
"It: 39660, Loss: 4.931e-04, C: 4.482, Time: 0.03\n",
"It: 39670, Loss: 1.622e-03, C: 4.482, Time: 0.03\n",
"It: 39680, Loss: 5.850e-04, C: 4.482, Time: 0.03\n",
"It: 39690, Loss: 7.922e-04, C: 4.482, Time: 0.03\n",
"It: 39700, Loss: 7.904e-04, C: 4.482, Time: 0.03\n",
"It: 39710, Loss: 9.261e-04, C: 4.482, Time: 0.03\n",
"It: 39720, Loss: 4.504e-04, C: 4.482, Time: 0.03\n",
"It: 39730, Loss: 3.650e-04, C: 4.482, Time: 0.04\n",
"It: 39740, Loss: 2.578e-04, C: 4.482, Time: 0.03\n",
"It: 39750, Loss: 1.198e-03, C: 4.482, Time: 0.03\n",
"It: 39760, Loss: 4.359e-04, C: 4.482, Time: 0.03\n",
"It: 39770, Loss: 5.016e-04, C: 4.482, Time: 0.03\n",
"It: 39780, Loss: 1.687e-03, C: 4.482, Time: 0.03\n",
"It: 39790, Loss: 6.629e-04, C: 4.482, Time: 0.03\n",
"It: 39800, Loss: 1.221e-03, C: 4.482, Time: 0.03\n",
"It: 39810, Loss: 7.965e-04, C: 4.482, Time: 0.03\n",
"It: 39820, Loss: 4.950e-04, C: 4.482, Time: 0.03\n",
"It: 39830, Loss: 6.596e-04, C: 4.482, Time: 0.03\n",
"It: 39840, Loss: 3.617e-04, C: 4.482, Time: 0.03\n",
"It: 39850, Loss: 2.963e-04, C: 4.482, Time: 0.03\n",
"It: 39860, Loss: 5.819e-04, C: 4.482, Time: 0.03\n",
"It: 39870, Loss: 3.908e-04, C: 4.482, Time: 0.03\n",
"It: 39880, Loss: 1.411e-03, C: 4.482, Time: 0.03\n",
"It: 39890, Loss: 4.324e-04, C: 4.482, Time: 0.03\n",
"It: 39900, Loss: 4.196e-04, C: 4.482, Time: 0.03\n",
"It: 39910, Loss: 3.309e-04, C: 4.482, Time: 0.03\n",
"It: 39920, Loss: 3.826e-04, C: 4.482, Time: 0.04\n",
"It: 39930, Loss: 1.072e-03, C: 4.482, Time: 0.03\n",
"It: 39940, Loss: 7.751e-04, C: 4.482, Time: 0.03\n",
"It: 39950, Loss: 2.959e-04, C: 4.482, Time: 0.03\n",
"It: 39960, Loss: 4.395e-04, C: 4.482, Time: 0.03\n",
"It: 39970, Loss: 3.594e-04, C: 4.482, Time: 0.03\n",
"It: 39980, Loss: 4.721e-04, C: 4.482, Time: 0.03\n",
"It: 39990, Loss: 1.027e-03, C: 4.482, Time: 0.03\n",
"It: 40000, Loss: 3.292e-04, C: 4.482, Time: 0.03\n",
"It: 40010, Loss: 3.413e-04, C: 4.482, Time: 0.04\n",
"It: 40020, Loss: 7.438e-04, C: 4.482, Time: 0.03\n",
"It: 40030, Loss: 9.955e-04, C: 4.482, Time: 0.04\n",
"It: 40040, Loss: 7.504e-04, C: 4.482, Time: 0.03\n",
"It: 40050, Loss: 6.668e-04, C: 4.482, Time: 0.03\n",
"It: 40060, Loss: 2.939e-04, C: 4.482, Time: 0.03\n",
"It: 40070, Loss: 3.202e-04, C: 4.482, Time: 0.03\n",
"It: 40080, Loss: 1.210e-03, C: 4.482, Time: 0.04\n",
"It: 40090, Loss: 3.316e-04, C: 4.482, Time: 0.03\n",
"It: 40100, Loss: 3.889e-04, C: 4.482, Time: 0.03\n",
"It: 40110, Loss: 2.980e-04, C: 4.482, Time: 0.03\n",
"It: 40120, Loss: 3.896e-04, C: 4.482, Time: 0.03\n",
"It: 40130, Loss: 8.708e-04, C: 4.482, Time: 0.04\n",
"It: 40140, Loss: 3.950e-04, C: 4.482, Time: 0.03\n",
"It: 40150, Loss: 3.229e-04, C: 4.482, Time: 0.03\n",
"It: 40160, Loss: 5.373e-04, C: 4.482, Time: 0.03\n",
"It: 40170, Loss: 6.056e-04, C: 4.482, Time: 0.03\n",
"It: 40180, Loss: 8.433e-04, C: 4.482, Time: 0.03\n",
"It: 40190, Loss: 3.873e-04, C: 4.482, Time: 0.04\n",
"It: 40200, Loss: 1.276e-03, C: 4.482, Time: 0.03\n",
"It: 40210, Loss: 6.721e-04, C: 4.482, Time: 0.03\n",
"It: 40220, Loss: 5.458e-04, C: 4.482, Time: 0.03\n",
"It: 40230, Loss: 5.602e-04, C: 4.482, Time: 0.03\n",
"It: 40240, Loss: 1.164e-03, C: 4.482, Time: 0.03\n",
"It: 40250, Loss: 7.735e-04, C: 4.482, Time: 0.04\n",
"It: 40260, Loss: 5.239e-04, C: 4.482, Time: 0.03\n",
"It: 40270, Loss: 6.306e-04, C: 4.482, Time: 0.04\n",
"It: 40280, Loss: 1.329e-03, C: 4.482, Time: 0.03\n",
"It: 40290, Loss: 5.487e-04, C: 4.482, Time: 0.03\n",
"It: 40300, Loss: 3.924e-04, C: 4.482, Time: 0.04\n",
"It: 40310, Loss: 1.538e-03, C: 4.482, Time: 0.03\n",
"It: 40320, Loss: 1.151e-03, C: 4.482, Time: 0.03\n",
"It: 40330, Loss: 1.531e-03, C: 4.482, Time: 0.04\n",
"It: 40340, Loss: 2.367e-03, C: 4.482, Time: 0.03\n",
"It: 40350, Loss: 9.274e-04, C: 4.482, Time: 0.03\n",
"It: 40360, Loss: 9.759e-04, C: 4.482, Time: 0.03\n",
"It: 40370, Loss: 7.030e-04, C: 4.482, Time: 0.03\n",
"It: 40380, Loss: 6.276e-04, C: 4.482, Time: 0.04\n",
"It: 40390, Loss: 3.081e-04, C: 4.482, Time: 0.03\n",
"It: 40400, Loss: 4.039e-04, C: 4.482, Time: 0.03\n",
"It: 40410, Loss: 2.786e-04, C: 4.482, Time: 0.03\n",
"It: 40420, Loss: 9.130e-04, C: 4.482, Time: 0.03\n",
"It: 40430, Loss: 3.667e-04, C: 4.482, Time: 0.04\n",
"It: 40440, Loss: 7.279e-04, C: 4.482, Time: 0.03\n",
"It: 40450, Loss: 5.861e-04, C: 4.482, Time: 0.04\n",
"It: 40460, Loss: 5.155e-04, C: 4.482, Time: 0.03\n",
"It: 40470, Loss: 5.406e-04, C: 4.482, Time: 0.03\n",
"It: 40480, Loss: 4.278e-04, C: 4.482, Time: 0.03\n",
"It: 40490, Loss: 1.014e-03, C: 4.482, Time: 0.03\n",
"It: 40500, Loss: 7.634e-04, C: 4.482, Time: 0.03\n",
"It: 40510, Loss: 9.694e-04, C: 4.482, Time: 0.03\n",
"It: 40520, Loss: 5.663e-04, C: 4.482, Time: 0.03\n",
"It: 40530, Loss: 1.160e-03, C: 4.482, Time: 0.03\n",
"It: 40540, Loss: 1.991e-03, C: 4.482, Time: 0.03\n",
"It: 40550, Loss: 1.550e-03, C: 4.482, Time: 0.03\n",
"It: 40560, Loss: 1.258e-03, C: 4.482, Time: 0.04\n",
"It: 40570, Loss: 4.853e-04, C: 4.482, Time: 0.03\n",
"It: 40580, Loss: 5.725e-04, C: 4.482, Time: 0.03\n",
"It: 40590, Loss: 5.027e-04, C: 4.482, Time: 0.03\n",
"It: 40600, Loss: 4.718e-04, C: 4.482, Time: 0.03\n",
"It: 40610, Loss: 4.740e-04, C: 4.482, Time: 0.03\n",
"It: 40620, Loss: 5.858e-04, C: 4.482, Time: 0.03\n",
"It: 40630, Loss: 5.186e-04, C: 4.482, Time: 0.03\n",
"It: 40640, Loss: 5.969e-04, C: 4.482, Time: 0.04\n",
"It: 40650, Loss: 1.277e-03, C: 4.482, Time: 0.03\n",
"It: 40660, Loss: 7.784e-04, C: 4.482, Time: 0.03\n",
"It: 40670, Loss: 8.139e-04, C: 4.482, Time: 0.04\n",
"It: 40680, Loss: 6.889e-04, C: 4.482, Time: 0.04\n",
"It: 40690, Loss: 4.814e-04, C: 4.482, Time: 0.03\n",
"It: 40700, Loss: 5.969e-04, C: 4.482, Time: 0.03\n",
"It: 40710, Loss: 5.945e-04, C: 4.482, Time: 0.03\n",
"It: 40720, Loss: 2.841e-04, C: 4.482, Time: 0.03\n",
"It: 40730, Loss: 1.187e-03, C: 4.482, Time: 0.03\n",
"It: 40740, Loss: 4.900e-04, C: 4.482, Time: 0.03\n",
"It: 40750, Loss: 1.184e-03, C: 4.482, Time: 0.03\n",
"It: 40760, Loss: 7.701e-04, C: 4.482, Time: 0.03\n",
"It: 40770, Loss: 9.977e-04, C: 4.482, Time: 0.03\n",
"It: 40780, Loss: 4.061e-04, C: 4.482, Time: 0.03\n",
"It: 40790, Loss: 4.554e-04, C: 4.482, Time: 0.03\n",
"It: 40800, Loss: 4.560e-04, C: 4.482, Time: 0.03\n",
"It: 40810, Loss: 5.230e-04, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 40820, Loss: 7.220e-04, C: 4.482, Time: 0.04\n",
"It: 40830, Loss: 6.162e-04, C: 4.482, Time: 0.04\n",
"It: 40840, Loss: 6.780e-04, C: 4.482, Time: 0.03\n",
"It: 40850, Loss: 4.419e-04, C: 4.482, Time: 0.03\n",
"It: 40860, Loss: 7.532e-04, C: 4.482, Time: 0.03\n",
"It: 40870, Loss: 1.250e-03, C: 4.482, Time: 0.03\n",
"It: 40880, Loss: 2.554e-03, C: 4.482, Time: 0.03\n",
"It: 40890, Loss: 1.025e-03, C: 4.482, Time: 0.03\n",
"It: 40900, Loss: 5.618e-04, C: 4.482, Time: 0.03\n",
"It: 40910, Loss: 3.148e-04, C: 4.482, Time: 0.03\n",
"It: 40920, Loss: 5.975e-04, C: 4.482, Time: 0.03\n",
"It: 40930, Loss: 3.508e-04, C: 4.482, Time: 0.04\n",
"It: 40940, Loss: 3.894e-04, C: 4.482, Time: 0.04\n",
"It: 40950, Loss: 3.308e-04, C: 4.482, Time: 0.04\n",
"It: 40960, Loss: 6.235e-04, C: 4.482, Time: 0.04\n",
"It: 40970, Loss: 5.611e-04, C: 4.482, Time: 0.03\n",
"It: 40980, Loss: 4.969e-04, C: 4.482, Time: 0.04\n",
"It: 40990, Loss: 7.471e-04, C: 4.482, Time: 0.03\n",
"It: 41000, Loss: 3.115e-04, C: 4.482, Time: 0.04\n",
"It: 41010, Loss: 5.578e-04, C: 4.482, Time: 0.04\n",
"It: 41020, Loss: 1.518e-03, C: 4.482, Time: 0.03\n",
"It: 41030, Loss: 6.410e-04, C: 4.482, Time: 0.04\n",
"It: 41040, Loss: 3.333e-04, C: 4.482, Time: 0.03\n",
"It: 41050, Loss: 9.382e-04, C: 4.482, Time: 0.03\n",
"It: 41060, Loss: 3.779e-04, C: 4.482, Time: 0.03\n",
"It: 41070, Loss: 1.878e-03, C: 4.482, Time: 0.03\n",
"It: 41080, Loss: 5.959e-04, C: 4.482, Time: 0.03\n",
"It: 41090, Loss: 9.062e-04, C: 4.482, Time: 0.03\n",
"It: 41100, Loss: 4.393e-04, C: 4.482, Time: 0.03\n",
"It: 41110, Loss: 6.413e-04, C: 4.482, Time: 0.04\n",
"It: 41120, Loss: 4.361e-04, C: 4.482, Time: 0.04\n",
"It: 41130, Loss: 1.024e-03, C: 4.482, Time: 0.03\n",
"It: 41140, Loss: 5.842e-04, C: 4.482, Time: 0.03\n",
"It: 41150, Loss: 4.381e-04, C: 4.482, Time: 0.03\n",
"It: 41160, Loss: 4.194e-04, C: 4.482, Time: 0.03\n",
"It: 41170, Loss: 3.022e-04, C: 4.482, Time: 0.03\n",
"It: 41180, Loss: 4.223e-04, C: 4.482, Time: 0.03\n",
"It: 41190, Loss: 6.791e-04, C: 4.482, Time: 0.03\n",
"It: 41200, Loss: 1.047e-03, C: 4.482, Time: 0.03\n",
"It: 41210, Loss: 5.875e-04, C: 4.482, Time: 0.03\n",
"It: 41220, Loss: 5.313e-04, C: 4.482, Time: 0.04\n",
"It: 41230, Loss: 4.298e-04, C: 4.482, Time: 0.04\n",
"It: 41240, Loss: 2.989e-04, C: 4.482, Time: 0.03\n",
"It: 41250, Loss: 4.848e-04, C: 4.482, Time: 0.03\n",
"It: 41260, Loss: 7.210e-04, C: 4.482, Time: 0.03\n",
"It: 41270, Loss: 2.062e-03, C: 4.482, Time: 0.04\n",
"It: 41280, Loss: 8.116e-04, C: 4.482, Time: 0.03\n",
"It: 41290, Loss: 4.397e-04, C: 4.482, Time: 0.04\n",
"It: 41300, Loss: 4.178e-04, C: 4.482, Time: 0.03\n",
"It: 41310, Loss: 1.086e-03, C: 4.482, Time: 0.03\n",
"It: 41320, Loss: 3.500e-04, C: 4.482, Time: 0.04\n",
"It: 41330, Loss: 7.908e-04, C: 4.482, Time: 0.03\n",
"It: 41340, Loss: 1.518e-03, C: 4.482, Time: 0.03\n",
"It: 41350, Loss: 8.222e-04, C: 4.482, Time: 0.03\n",
"It: 41360, Loss: 7.903e-04, C: 4.482, Time: 0.03\n",
"It: 41370, Loss: 1.085e-03, C: 4.482, Time: 0.04\n",
"It: 41380, Loss: 9.380e-04, C: 4.482, Time: 0.03\n",
"It: 41390, Loss: 1.771e-03, C: 4.482, Time: 0.03\n",
"It: 41400, Loss: 5.663e-04, C: 4.482, Time: 0.03\n",
"It: 41410, Loss: 4.189e-04, C: 4.482, Time: 0.03\n",
"It: 41420, Loss: 1.546e-03, C: 4.482, Time: 0.03\n",
"It: 41430, Loss: 5.107e-04, C: 4.482, Time: 0.03\n",
"It: 41440, Loss: 7.322e-04, C: 4.482, Time: 0.03\n",
"It: 41450, Loss: 1.000e-03, C: 4.482, Time: 0.03\n",
"It: 41460, Loss: 4.263e-04, C: 4.482, Time: 0.03\n",
"It: 41470, Loss: 6.322e-04, C: 4.482, Time: 0.04\n",
"It: 41480, Loss: 4.074e-04, C: 4.482, Time: 0.03\n",
"It: 41490, Loss: 8.668e-04, C: 4.482, Time: 0.03\n",
"It: 41500, Loss: 9.603e-04, C: 4.482, Time: 0.03\n",
"It: 41510, Loss: 4.093e-04, C: 4.482, Time: 0.03\n",
"It: 41520, Loss: 5.559e-04, C: 4.482, Time: 0.04\n",
"It: 41530, Loss: 6.364e-04, C: 4.482, Time: 0.03\n",
"It: 41540, Loss: 3.223e-04, C: 4.482, Time: 0.03\n",
"It: 41550, Loss: 1.037e-03, C: 4.482, Time: 0.03\n",
"It: 41560, Loss: 2.998e-04, C: 4.482, Time: 0.03\n",
"It: 41570, Loss: 2.567e-04, C: 4.482, Time: 0.03\n",
"It: 41580, Loss: 8.426e-04, C: 4.482, Time: 0.03\n",
"It: 41590, Loss: 8.006e-04, C: 4.482, Time: 0.03\n",
"It: 41600, Loss: 3.638e-04, C: 4.482, Time: 0.04\n",
"It: 41610, Loss: 4.882e-04, C: 4.482, Time: 0.03\n",
"It: 41620, Loss: 4.883e-04, C: 4.482, Time: 0.03\n",
"It: 41630, Loss: 2.349e-04, C: 4.482, Time: 0.03\n",
"It: 41640, Loss: 3.463e-04, C: 4.482, Time: 0.03\n",
"It: 41650, Loss: 1.352e-03, C: 4.482, Time: 0.04\n",
"It: 41660, Loss: 5.882e-04, C: 4.482, Time: 0.03\n",
"It: 41670, Loss: 5.685e-04, C: 4.482, Time: 0.03\n",
"It: 41680, Loss: 9.062e-04, C: 4.482, Time: 0.04\n",
"It: 41690, Loss: 9.931e-04, C: 4.482, Time: 0.03\n",
"It: 41700, Loss: 5.562e-04, C: 4.482, Time: 0.03\n",
"It: 41710, Loss: 6.813e-04, C: 4.482, Time: 0.03\n",
"It: 41720, Loss: 1.206e-03, C: 4.482, Time: 0.03\n",
"It: 41730, Loss: 8.279e-04, C: 4.482, Time: 0.03\n",
"It: 41740, Loss: 1.330e-03, C: 4.482, Time: 0.03\n",
"It: 41750, Loss: 5.421e-04, C: 4.482, Time: 0.04\n",
"It: 41760, Loss: 1.782e-03, C: 4.482, Time: 0.03\n",
"It: 41770, Loss: 8.113e-04, C: 4.482, Time: 0.03\n",
"It: 41780, Loss: 2.726e-04, C: 4.482, Time: 0.03\n",
"It: 41790, Loss: 5.548e-04, C: 4.482, Time: 0.03\n",
"It: 41800, Loss: 4.993e-04, C: 4.482, Time: 0.03\n",
"It: 41810, Loss: 4.034e-04, C: 4.482, Time: 0.03\n",
"It: 41820, Loss: 5.704e-04, C: 4.482, Time: 0.03\n",
"It: 41830, Loss: 5.257e-04, C: 4.482, Time: 0.03\n",
"It: 41840, Loss: 1.251e-03, C: 4.482, Time: 0.03\n",
"It: 41850, Loss: 1.142e-03, C: 4.482, Time: 0.03\n",
"It: 41860, Loss: 6.636e-04, C: 4.482, Time: 0.03\n",
"It: 41870, Loss: 4.886e-04, C: 4.482, Time: 0.03\n",
"It: 41880, Loss: 3.442e-04, C: 4.482, Time: 0.03\n",
"It: 41890, Loss: 3.356e-04, C: 4.482, Time: 0.03\n",
"It: 41900, Loss: 5.683e-04, C: 4.482, Time: 0.03\n",
"It: 41910, Loss: 2.882e-04, C: 4.482, Time: 0.04\n",
"It: 41920, Loss: 2.219e-04, C: 4.482, Time: 0.04\n",
"It: 41930, Loss: 4.191e-04, C: 4.482, Time: 0.03\n",
"It: 41940, Loss: 3.589e-04, C: 4.482, Time: 0.03\n",
"It: 41950, Loss: 4.314e-04, C: 4.482, Time: 0.04\n",
"It: 41960, Loss: 4.280e-04, C: 4.482, Time: 0.03\n",
"It: 41970, Loss: 3.323e-04, C: 4.482, Time: 0.03\n",
"It: 41980, Loss: 3.018e-04, C: 4.482, Time: 0.03\n",
"It: 41990, Loss: 1.336e-03, C: 4.482, Time: 0.03\n",
"It: 42000, Loss: 3.109e-04, C: 4.482, Time: 0.03\n",
"It: 42010, Loss: 2.355e-04, C: 4.482, Time: 0.03\n",
"It: 42020, Loss: 3.218e-04, C: 4.482, Time: 0.03\n",
"It: 42030, Loss: 6.389e-04, C: 4.482, Time: 0.04\n",
"It: 42040, Loss: 8.361e-04, C: 4.482, Time: 0.03\n",
"It: 42050, Loss: 4.526e-04, C: 4.482, Time: 0.03\n",
"It: 42060, Loss: 7.854e-04, C: 4.482, Time: 0.03\n",
"It: 42070, Loss: 1.807e-03, C: 4.482, Time: 0.03\n",
"It: 42080, Loss: 3.890e-04, C: 4.482, Time: 0.03\n",
"It: 42090, Loss: 4.822e-04, C: 4.482, Time: 0.03\n",
"It: 42100, Loss: 4.966e-04, C: 4.482, Time: 0.03\n",
"It: 42110, Loss: 3.941e-04, C: 4.482, Time: 0.03\n",
"It: 42120, Loss: 4.062e-04, C: 4.482, Time: 0.03\n",
"It: 42130, Loss: 3.886e-04, C: 4.482, Time: 0.03\n",
"It: 42140, Loss: 6.437e-04, C: 4.482, Time: 0.04\n",
"It: 42150, Loss: 4.687e-04, C: 4.482, Time: 0.03\n",
"It: 42160, Loss: 3.555e-04, C: 4.482, Time: 0.03\n",
"It: 42170, Loss: 3.147e-04, C: 4.482, Time: 0.03\n",
"It: 42180, Loss: 1.124e-03, C: 4.482, Time: 0.04\n",
"It: 42190, Loss: 7.858e-04, C: 4.482, Time: 0.03\n",
"It: 42200, Loss: 8.814e-04, C: 4.482, Time: 0.03\n",
"It: 42210, Loss: 4.949e-04, C: 4.482, Time: 0.03\n",
"It: 42220, Loss: 3.598e-04, C: 4.482, Time: 0.04\n",
"It: 42230, Loss: 6.589e-04, C: 4.482, Time: 0.03\n",
"It: 42240, Loss: 3.520e-04, C: 4.482, Time: 0.03\n",
"It: 42250, Loss: 3.368e-04, C: 4.482, Time: 0.03\n",
"It: 42260, Loss: 8.422e-04, C: 4.482, Time: 0.03\n",
"It: 42270, Loss: 8.473e-04, C: 4.482, Time: 0.03\n",
"It: 42280, Loss: 4.005e-04, C: 4.482, Time: 0.03\n",
"It: 42290, Loss: 4.174e-04, C: 4.482, Time: 0.03\n",
"It: 42300, Loss: 4.471e-04, C: 4.482, Time: 0.04\n",
"It: 42310, Loss: 1.129e-03, C: 4.482, Time: 0.04\n",
"It: 42320, Loss: 5.309e-04, C: 4.482, Time: 0.04\n",
"It: 42330, Loss: 6.685e-04, C: 4.482, Time: 0.03\n",
"It: 42340, Loss: 4.440e-04, C: 4.482, Time: 0.03\n",
"It: 42350, Loss: 1.262e-03, C: 4.482, Time: 0.03\n",
"It: 42360, Loss: 4.310e-04, C: 4.482, Time: 0.03\n",
"It: 42370, Loss: 1.107e-03, C: 4.482, Time: 0.03\n",
"It: 42380, Loss: 1.417e-03, C: 4.482, Time: 0.03\n",
"It: 42390, Loss: 5.407e-04, C: 4.482, Time: 0.03\n",
"It: 42400, Loss: 1.344e-03, C: 4.482, Time: 0.03\n",
"It: 42410, Loss: 3.298e-04, C: 4.482, Time: 0.03\n",
"It: 42420, Loss: 4.283e-04, C: 4.482, Time: 0.03\n",
"It: 42430, Loss: 6.204e-04, C: 4.482, Time: 0.04\n",
"It: 42440, Loss: 3.759e-04, C: 4.482, Time: 0.04\n",
"It: 42450, Loss: 2.574e-04, C: 4.482, Time: 0.04\n",
"It: 42460, Loss: 1.156e-03, C: 4.482, Time: 0.04\n",
"It: 42470, Loss: 6.590e-04, C: 4.482, Time: 0.03\n",
"It: 42480, Loss: 9.625e-04, C: 4.482, Time: 0.03\n",
"It: 42490, Loss: 3.587e-04, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 42500, Loss: 8.603e-04, C: 4.482, Time: 0.03\n",
"It: 42510, Loss: 5.193e-04, C: 4.482, Time: 0.03\n",
"It: 42520, Loss: 1.169e-03, C: 4.482, Time: 0.03\n",
"It: 42530, Loss: 3.853e-04, C: 4.482, Time: 0.04\n",
"It: 42540, Loss: 9.139e-04, C: 4.482, Time: 0.03\n",
"It: 42550, Loss: 4.500e-04, C: 4.482, Time: 0.03\n",
"It: 42560, Loss: 4.567e-04, C: 4.482, Time: 0.03\n",
"It: 42570, Loss: 4.891e-04, C: 4.482, Time: 0.03\n",
"It: 42580, Loss: 3.110e-04, C: 4.482, Time: 0.03\n",
"It: 42590, Loss: 4.143e-04, C: 4.482, Time: 0.04\n",
"It: 42600, Loss: 3.820e-04, C: 4.482, Time: 0.03\n",
"It: 42610, Loss: 2.709e-04, C: 4.482, Time: 0.04\n",
"It: 42620, Loss: 6.286e-04, C: 4.482, Time: 0.03\n",
"It: 42630, Loss: 6.462e-04, C: 4.482, Time: 0.04\n",
"It: 42640, Loss: 8.398e-04, C: 4.482, Time: 0.03\n",
"It: 42650, Loss: 9.722e-04, C: 4.482, Time: 0.03\n",
"It: 42660, Loss: 5.196e-04, C: 4.482, Time: 0.03\n",
"It: 42670, Loss: 9.308e-04, C: 4.482, Time: 0.03\n",
"It: 42680, Loss: 6.690e-04, C: 4.482, Time: 0.03\n",
"It: 42690, Loss: 1.046e-03, C: 4.482, Time: 0.03\n",
"It: 42700, Loss: 5.504e-04, C: 4.482, Time: 0.03\n",
"It: 42710, Loss: 1.071e-03, C: 4.482, Time: 0.03\n",
"It: 42720, Loss: 4.930e-04, C: 4.482, Time: 0.03\n",
"It: 42730, Loss: 6.614e-04, C: 4.482, Time: 0.03\n",
"It: 42740, Loss: 6.141e-04, C: 4.482, Time: 0.04\n",
"It: 42750, Loss: 2.486e-04, C: 4.482, Time: 0.04\n",
"It: 42760, Loss: 5.670e-04, C: 4.482, Time: 0.03\n",
"It: 42770, Loss: 5.196e-04, C: 4.482, Time: 0.03\n",
"It: 42780, Loss: 6.319e-04, C: 4.482, Time: 0.03\n",
"It: 42790, Loss: 3.083e-04, C: 4.482, Time: 0.03\n",
"It: 42800, Loss: 3.039e-04, C: 4.482, Time: 0.03\n",
"It: 42810, Loss: 3.114e-04, C: 4.482, Time: 0.03\n",
"It: 42820, Loss: 5.327e-04, C: 4.482, Time: 0.03\n",
"It: 42830, Loss: 4.019e-04, C: 4.482, Time: 0.03\n",
"It: 42840, Loss: 2.695e-04, C: 4.482, Time: 0.03\n",
"It: 42850, Loss: 5.336e-04, C: 4.482, Time: 0.03\n",
"It: 42860, Loss: 4.259e-04, C: 4.482, Time: 0.03\n",
"It: 42870, Loss: 9.577e-04, C: 4.482, Time: 0.04\n",
"It: 42880, Loss: 7.124e-04, C: 4.482, Time: 0.03\n",
"It: 42890, Loss: 8.209e-04, C: 4.482, Time: 0.03\n",
"It: 42900, Loss: 2.685e-04, C: 4.482, Time: 0.03\n",
"It: 42910, Loss: 4.981e-04, C: 4.482, Time: 0.03\n",
"It: 42920, Loss: 5.884e-04, C: 4.482, Time: 0.03\n",
"It: 42930, Loss: 7.065e-04, C: 4.482, Time: 0.03\n",
"It: 42940, Loss: 4.632e-04, C: 4.482, Time: 0.03\n",
"It: 42950, Loss: 4.713e-04, C: 4.482, Time: 0.03\n",
"It: 42960, Loss: 7.481e-04, C: 4.482, Time: 0.04\n",
"It: 42970, Loss: 3.412e-04, C: 4.482, Time: 0.03\n",
"It: 42980, Loss: 5.950e-04, C: 4.482, Time: 0.03\n",
"It: 42990, Loss: 7.304e-04, C: 4.482, Time: 0.03\n",
"It: 43000, Loss: 1.408e-03, C: 4.482, Time: 0.03\n",
"It: 43010, Loss: 8.462e-04, C: 4.482, Time: 0.03\n",
"It: 43020, Loss: 1.961e-03, C: 4.482, Time: 0.03\n",
"It: 43030, Loss: 4.536e-04, C: 4.482, Time: 0.04\n",
"It: 43040, Loss: 8.305e-04, C: 4.482, Time: 0.03\n",
"It: 43050, Loss: 4.253e-04, C: 4.482, Time: 0.03\n",
"It: 43060, Loss: 4.561e-04, C: 4.482, Time: 0.03\n",
"It: 43070, Loss: 5.107e-04, C: 4.482, Time: 0.03\n",
"It: 43080, Loss: 7.015e-04, C: 4.482, Time: 0.03\n",
"It: 43090, Loss: 4.156e-04, C: 4.482, Time: 0.03\n",
"It: 43100, Loss: 5.350e-04, C: 4.482, Time: 0.03\n",
"It: 43110, Loss: 2.846e-04, C: 4.482, Time: 0.03\n",
"It: 43120, Loss: 3.760e-04, C: 4.482, Time: 0.03\n",
"It: 43130, Loss: 4.286e-04, C: 4.482, Time: 0.03\n",
"It: 43140, Loss: 5.091e-04, C: 4.482, Time: 0.03\n",
"It: 43150, Loss: 3.288e-04, C: 4.482, Time: 0.04\n",
"It: 43160, Loss: 1.217e-03, C: 4.482, Time: 0.03\n",
"It: 43170, Loss: 6.282e-04, C: 4.482, Time: 0.03\n",
"It: 43180, Loss: 4.928e-04, C: 4.482, Time: 0.04\n",
"It: 43190, Loss: 1.328e-03, C: 4.482, Time: 0.03\n",
"It: 43200, Loss: 5.304e-04, C: 4.482, Time: 0.03\n",
"It: 43210, Loss: 5.355e-04, C: 4.482, Time: 0.03\n",
"It: 43220, Loss: 7.806e-04, C: 4.482, Time: 0.03\n",
"It: 43230, Loss: 2.369e-03, C: 4.482, Time: 0.03\n",
"It: 43240, Loss: 6.283e-04, C: 4.482, Time: 0.03\n",
"It: 43250, Loss: 3.650e-04, C: 4.482, Time: 0.03\n",
"It: 43260, Loss: 1.185e-03, C: 4.482, Time: 0.03\n",
"It: 43270, Loss: 9.252e-04, C: 4.482, Time: 0.03\n",
"It: 43280, Loss: 7.575e-04, C: 4.482, Time: 0.04\n",
"It: 43290, Loss: 8.619e-04, C: 4.482, Time: 0.03\n",
"It: 43300, Loss: 9.499e-04, C: 4.482, Time: 0.04\n",
"It: 43310, Loss: 2.718e-04, C: 4.482, Time: 0.04\n",
"It: 43320, Loss: 2.347e-04, C: 4.482, Time: 0.03\n",
"It: 43330, Loss: 1.048e-03, C: 4.482, Time: 0.03\n",
"It: 43340, Loss: 3.430e-04, C: 4.482, Time: 0.04\n",
"It: 43350, Loss: 2.894e-04, C: 4.482, Time: 0.04\n",
"It: 43360, Loss: 2.679e-04, C: 4.482, Time: 0.03\n",
"It: 43370, Loss: 3.882e-04, C: 4.482, Time: 0.04\n",
"It: 43380, Loss: 1.189e-03, C: 4.482, Time: 0.03\n",
"It: 43390, Loss: 1.045e-03, C: 4.482, Time: 0.03\n",
"It: 43400, Loss: 1.275e-03, C: 4.482, Time: 0.03\n",
"It: 43410, Loss: 5.316e-04, C: 4.482, Time: 0.04\n",
"It: 43420, Loss: 4.667e-04, C: 4.482, Time: 0.03\n",
"It: 43430, Loss: 9.744e-04, C: 4.482, Time: 0.03\n",
"It: 43440, Loss: 3.739e-04, C: 4.482, Time: 0.03\n",
"It: 43450, Loss: 6.963e-04, C: 4.482, Time: 0.03\n",
"It: 43460, Loss: 1.375e-03, C: 4.482, Time: 0.04\n",
"It: 43470, Loss: 7.780e-04, C: 4.482, Time: 0.03\n",
"It: 43480, Loss: 2.869e-04, C: 4.482, Time: 0.03\n",
"It: 43490, Loss: 4.058e-04, C: 4.482, Time: 0.03\n",
"It: 43500, Loss: 9.431e-04, C: 4.482, Time: 0.03\n",
"It: 43510, Loss: 5.725e-04, C: 4.482, Time: 0.03\n",
"It: 43520, Loss: 5.558e-04, C: 4.482, Time: 0.03\n",
"It: 43530, Loss: 3.565e-04, C: 4.482, Time: 0.03\n",
"It: 43540, Loss: 1.728e-03, C: 4.482, Time: 0.04\n",
"It: 43550, Loss: 1.110e-03, C: 4.482, Time: 0.03\n",
"It: 43560, Loss: 3.217e-04, C: 4.482, Time: 0.04\n",
"It: 43570, Loss: 3.227e-04, C: 4.482, Time: 0.03\n",
"It: 43580, Loss: 3.209e-04, C: 4.482, Time: 0.03\n",
"It: 43590, Loss: 9.948e-04, C: 4.482, Time: 0.03\n",
"It: 43600, Loss: 1.363e-03, C: 4.482, Time: 0.04\n",
"It: 43610, Loss: 8.309e-04, C: 4.482, Time: 0.03\n",
"It: 43620, Loss: 2.269e-04, C: 4.482, Time: 0.03\n",
"It: 43630, Loss: 8.236e-04, C: 4.482, Time: 0.03\n",
"It: 43640, Loss: 3.148e-04, C: 4.482, Time: 0.04\n",
"It: 43650, Loss: 2.447e-04, C: 4.482, Time: 0.03\n",
"It: 43660, Loss: 2.590e-04, C: 4.482, Time: 0.03\n",
"It: 43670, Loss: 5.740e-04, C: 4.482, Time: 0.03\n",
"It: 43680, Loss: 6.102e-04, C: 4.482, Time: 0.03\n",
"It: 43690, Loss: 3.325e-04, C: 4.482, Time: 0.03\n",
"It: 43700, Loss: 1.142e-03, C: 4.482, Time: 0.04\n",
"It: 43710, Loss: 3.795e-04, C: 4.482, Time: 0.03\n",
"It: 43720, Loss: 6.848e-04, C: 4.482, Time: 0.03\n",
"It: 43730, Loss: 6.518e-04, C: 4.482, Time: 0.03\n",
"It: 43740, Loss: 1.025e-03, C: 4.482, Time: 0.04\n",
"It: 43750, Loss: 7.115e-04, C: 4.482, Time: 0.03\n",
"It: 43760, Loss: 3.798e-04, C: 4.482, Time: 0.04\n",
"It: 43770, Loss: 1.007e-03, C: 4.482, Time: 0.04\n",
"It: 43780, Loss: 4.059e-04, C: 4.482, Time: 0.03\n",
"It: 43790, Loss: 4.433e-04, C: 4.482, Time: 0.03\n",
"It: 43800, Loss: 5.658e-04, C: 4.482, Time: 0.04\n",
"It: 43810, Loss: 3.195e-04, C: 4.482, Time: 0.03\n",
"It: 43820, Loss: 2.958e-04, C: 4.482, Time: 0.03\n",
"It: 43830, Loss: 7.975e-04, C: 4.482, Time: 0.04\n",
"It: 43840, Loss: 6.891e-04, C: 4.482, Time: 0.04\n",
"It: 43850, Loss: 4.255e-04, C: 4.482, Time: 0.03\n",
"It: 43860, Loss: 5.883e-04, C: 4.482, Time: 0.03\n",
"It: 43870, Loss: 3.869e-04, C: 4.482, Time: 0.03\n",
"It: 43880, Loss: 3.108e-04, C: 4.482, Time: 0.03\n",
"It: 43890, Loss: 4.005e-04, C: 4.482, Time: 0.03\n",
"It: 43900, Loss: 4.104e-04, C: 4.482, Time: 0.04\n",
"It: 43910, Loss: 6.746e-04, C: 4.482, Time: 0.03\n",
"It: 43920, Loss: 1.044e-03, C: 4.482, Time: 0.03\n",
"It: 43930, Loss: 6.004e-04, C: 4.482, Time: 0.03\n",
"It: 43940, Loss: 4.699e-04, C: 4.482, Time: 0.03\n",
"It: 43950, Loss: 1.048e-03, C: 4.482, Time: 0.03\n",
"It: 43960, Loss: 3.171e-04, C: 4.482, Time: 0.03\n",
"It: 43970, Loss: 5.157e-04, C: 4.482, Time: 0.03\n",
"It: 43980, Loss: 6.869e-04, C: 4.482, Time: 0.03\n",
"It: 43990, Loss: 4.481e-04, C: 4.482, Time: 0.03\n",
"It: 44000, Loss: 3.203e-04, C: 4.482, Time: 0.03\n",
"It: 44010, Loss: 7.604e-04, C: 4.482, Time: 0.03\n",
"It: 44020, Loss: 5.723e-04, C: 4.482, Time: 0.04\n",
"It: 44030, Loss: 1.160e-03, C: 4.482, Time: 0.03\n",
"It: 44040, Loss: 4.051e-04, C: 4.482, Time: 0.03\n",
"It: 44050, Loss: 3.334e-04, C: 4.482, Time: 0.03\n",
"It: 44060, Loss: 4.956e-04, C: 4.482, Time: 0.04\n",
"It: 44070, Loss: 1.128e-03, C: 4.482, Time: 0.04\n",
"It: 44080, Loss: 4.485e-04, C: 4.482, Time: 0.03\n",
"It: 44090, Loss: 7.836e-04, C: 4.482, Time: 0.03\n",
"It: 44100, Loss: 1.369e-03, C: 4.482, Time: 0.04\n",
"It: 44110, Loss: 1.213e-03, C: 4.482, Time: 0.04\n",
"It: 44120, Loss: 5.993e-04, C: 4.482, Time: 0.03\n",
"It: 44130, Loss: 5.376e-04, C: 4.482, Time: 0.03\n",
"It: 44140, Loss: 9.925e-04, C: 4.482, Time: 0.03\n",
"It: 44150, Loss: 5.879e-04, C: 4.482, Time: 0.03\n",
"It: 44160, Loss: 1.021e-03, C: 4.482, Time: 0.04\n",
"It: 44170, Loss: 1.001e-03, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 44180, Loss: 4.804e-04, C: 4.482, Time: 0.03\n",
"It: 44190, Loss: 8.604e-04, C: 4.482, Time: 0.03\n",
"It: 44200, Loss: 2.536e-04, C: 4.482, Time: 0.03\n",
"It: 44210, Loss: 2.348e-04, C: 4.482, Time: 0.03\n",
"It: 44220, Loss: 5.304e-04, C: 4.482, Time: 0.03\n",
"It: 44230, Loss: 7.148e-04, C: 4.482, Time: 0.03\n",
"It: 44240, Loss: 4.262e-04, C: 4.482, Time: 0.04\n",
"It: 44250, Loss: 4.080e-04, C: 4.482, Time: 0.03\n",
"It: 44260, Loss: 3.724e-04, C: 4.482, Time: 0.03\n",
"It: 44270, Loss: 7.757e-04, C: 4.482, Time: 0.03\n",
"It: 44280, Loss: 5.270e-04, C: 4.482, Time: 0.04\n",
"It: 44290, Loss: 7.154e-04, C: 4.482, Time: 0.03\n",
"It: 44300, Loss: 5.223e-04, C: 4.482, Time: 0.04\n",
"It: 44310, Loss: 7.805e-04, C: 4.482, Time: 0.03\n",
"It: 44320, Loss: 1.330e-03, C: 4.482, Time: 0.03\n",
"It: 44330, Loss: 2.572e-04, C: 4.482, Time: 0.03\n",
"It: 44340, Loss: 6.645e-04, C: 4.482, Time: 0.03\n",
"It: 44350, Loss: 1.232e-03, C: 4.482, Time: 0.03\n",
"It: 44360, Loss: 3.235e-04, C: 4.482, Time: 0.04\n",
"It: 44370, Loss: 5.668e-04, C: 4.482, Time: 0.04\n",
"It: 44380, Loss: 1.157e-03, C: 4.482, Time: 0.04\n",
"It: 44390, Loss: 5.499e-04, C: 4.482, Time: 0.03\n",
"It: 44400, Loss: 7.615e-04, C: 4.482, Time: 0.03\n",
"It: 44410, Loss: 4.587e-04, C: 4.482, Time: 0.03\n",
"It: 44420, Loss: 4.424e-04, C: 4.482, Time: 0.03\n",
"It: 44430, Loss: 3.195e-04, C: 4.482, Time: 0.04\n",
"It: 44440, Loss: 3.479e-04, C: 4.482, Time: 0.03\n",
"It: 44450, Loss: 5.153e-04, C: 4.482, Time: 0.03\n",
"It: 44460, Loss: 1.520e-03, C: 4.482, Time: 0.03\n",
"It: 44470, Loss: 8.154e-04, C: 4.482, Time: 0.04\n",
"It: 44480, Loss: 3.792e-04, C: 4.482, Time: 0.03\n",
"It: 44490, Loss: 5.679e-04, C: 4.482, Time: 0.03\n",
"It: 44500, Loss: 6.992e-04, C: 4.482, Time: 0.03\n",
"It: 44510, Loss: 2.396e-04, C: 4.482, Time: 0.03\n",
"It: 44520, Loss: 2.719e-04, C: 4.482, Time: 0.04\n",
"It: 44530, Loss: 4.041e-04, C: 4.482, Time: 0.03\n",
"It: 44540, Loss: 3.811e-04, C: 4.482, Time: 0.04\n",
"It: 44550, Loss: 2.778e-04, C: 4.482, Time: 0.03\n",
"It: 44560, Loss: 4.208e-04, C: 4.482, Time: 0.03\n",
"It: 44570, Loss: 3.887e-04, C: 4.482, Time: 0.03\n",
"It: 44580, Loss: 3.674e-04, C: 4.482, Time: 0.03\n",
"It: 44590, Loss: 2.362e-03, C: 4.482, Time: 0.04\n",
"It: 44600, Loss: 7.105e-04, C: 4.482, Time: 0.03\n",
"It: 44610, Loss: 6.232e-04, C: 4.482, Time: 0.03\n",
"It: 44620, Loss: 9.729e-04, C: 4.482, Time: 0.03\n",
"It: 44630, Loss: 8.122e-04, C: 4.482, Time: 0.04\n",
"It: 44640, Loss: 1.242e-03, C: 4.482, Time: 0.03\n",
"It: 44650, Loss: 4.271e-04, C: 4.482, Time: 0.03\n",
"It: 44660, Loss: 3.786e-04, C: 4.482, Time: 0.03\n",
"It: 44670, Loss: 2.261e-04, C: 4.482, Time: 0.03\n",
"It: 44680, Loss: 1.292e-03, C: 4.482, Time: 0.03\n",
"It: 44690, Loss: 6.949e-04, C: 4.482, Time: 0.03\n",
"It: 44700, Loss: 1.102e-03, C: 4.482, Time: 0.03\n",
"It: 44710, Loss: 3.486e-04, C: 4.482, Time: 0.03\n",
"It: 44720, Loss: 4.111e-04, C: 4.482, Time: 0.04\n",
"It: 44730, Loss: 1.505e-03, C: 4.482, Time: 0.03\n",
"It: 44740, Loss: 8.214e-04, C: 4.482, Time: 0.03\n",
"It: 44750, Loss: 9.120e-04, C: 4.482, Time: 0.04\n",
"It: 44760, Loss: 3.370e-04, C: 4.482, Time: 0.03\n",
"It: 44770, Loss: 6.033e-04, C: 4.482, Time: 0.04\n",
"It: 44780, Loss: 5.207e-04, C: 4.482, Time: 0.03\n",
"It: 44790, Loss: 4.125e-04, C: 4.482, Time: 0.04\n",
"It: 44800, Loss: 5.489e-04, C: 4.482, Time: 0.03\n",
"It: 44810, Loss: 2.628e-04, C: 4.482, Time: 0.03\n",
"It: 44820, Loss: 1.069e-03, C: 4.482, Time: 0.03\n",
"It: 44830, Loss: 2.405e-04, C: 4.482, Time: 0.03\n",
"It: 44840, Loss: 2.585e-04, C: 4.482, Time: 0.03\n",
"It: 44850, Loss: 2.985e-04, C: 4.482, Time: 0.04\n",
"It: 44860, Loss: 2.353e-04, C: 4.482, Time: 0.03\n",
"It: 44870, Loss: 3.727e-04, C: 4.482, Time: 0.03\n",
"It: 44880, Loss: 2.192e-04, C: 4.482, Time: 0.03\n",
"It: 44890, Loss: 7.694e-04, C: 4.482, Time: 0.03\n",
"It: 44900, Loss: 3.590e-04, C: 4.482, Time: 0.03\n",
"It: 44910, Loss: 4.876e-04, C: 4.482, Time: 0.03\n",
"It: 44920, Loss: 7.582e-04, C: 4.482, Time: 0.03\n",
"It: 44930, Loss: 3.754e-04, C: 4.482, Time: 0.04\n",
"It: 44940, Loss: 3.975e-04, C: 4.482, Time: 0.03\n",
"It: 44950, Loss: 3.984e-04, C: 4.482, Time: 0.04\n",
"It: 44960, Loss: 3.382e-04, C: 4.482, Time: 0.04\n",
"It: 44970, Loss: 3.122e-04, C: 4.482, Time: 0.03\n",
"It: 44980, Loss: 2.908e-04, C: 4.482, Time: 0.03\n",
"It: 44990, Loss: 7.722e-04, C: 4.482, Time: 0.03\n",
"It: 45000, Loss: 6.766e-04, C: 4.482, Time: 0.03\n",
"It: 45010, Loss: 2.498e-04, C: 4.482, Time: 0.03\n",
"It: 45020, Loss: 2.006e-04, C: 4.482, Time: 0.04\n",
"It: 45030, Loss: 2.450e-04, C: 4.482, Time: 0.03\n",
"It: 45040, Loss: 7.750e-04, C: 4.482, Time: 0.03\n",
"It: 45050, Loss: 1.482e-03, C: 4.482, Time: 0.03\n",
"It: 45060, Loss: 1.019e-03, C: 4.482, Time: 0.03\n",
"It: 45070, Loss: 9.758e-04, C: 4.482, Time: 0.03\n",
"It: 45080, Loss: 7.323e-04, C: 4.482, Time: 0.03\n",
"It: 45090, Loss: 6.750e-04, C: 4.482, Time: 0.04\n",
"It: 45100, Loss: 4.340e-04, C: 4.482, Time: 0.03\n",
"It: 45110, Loss: 3.475e-04, C: 4.482, Time: 0.03\n",
"It: 45120, Loss: 6.207e-04, C: 4.482, Time: 0.03\n",
"It: 45130, Loss: 3.147e-04, C: 4.482, Time: 0.03\n",
"It: 45140, Loss: 6.813e-04, C: 4.482, Time: 0.04\n",
"It: 45150, Loss: 4.899e-04, C: 4.482, Time: 0.03\n",
"It: 45160, Loss: 3.068e-04, C: 4.482, Time: 0.03\n",
"It: 45170, Loss: 4.169e-04, C: 4.482, Time: 0.04\n",
"It: 45180, Loss: 7.528e-04, C: 4.482, Time: 0.04\n",
"It: 45190, Loss: 3.413e-04, C: 4.482, Time: 0.03\n",
"It: 45200, Loss: 8.884e-04, C: 4.482, Time: 0.04\n",
"It: 45210, Loss: 3.629e-04, C: 4.482, Time: 0.03\n",
"It: 45220, Loss: 4.073e-04, C: 4.482, Time: 0.03\n",
"It: 45230, Loss: 8.976e-04, C: 4.482, Time: 0.04\n",
"It: 45240, Loss: 1.805e-03, C: 4.482, Time: 0.03\n",
"It: 45250, Loss: 7.335e-04, C: 4.482, Time: 0.03\n",
"It: 45260, Loss: 4.217e-04, C: 4.482, Time: 0.03\n",
"It: 45270, Loss: 3.331e-04, C: 4.482, Time: 0.03\n",
"It: 45280, Loss: 2.337e-04, C: 4.482, Time: 0.03\n",
"It: 45290, Loss: 3.616e-04, C: 4.482, Time: 0.03\n",
"It: 45300, Loss: 2.564e-04, C: 4.482, Time: 0.03\n",
"It: 45310, Loss: 4.624e-04, C: 4.482, Time: 0.04\n",
"It: 45320, Loss: 2.178e-04, C: 4.482, Time: 0.03\n",
"It: 45330, Loss: 5.192e-04, C: 4.482, Time: 0.03\n",
"It: 45340, Loss: 4.183e-04, C: 4.482, Time: 0.04\n",
"It: 45350, Loss: 4.623e-04, C: 4.482, Time: 0.03\n",
"It: 45360, Loss: 2.237e-04, C: 4.482, Time: 0.03\n",
"It: 45370, Loss: 8.509e-04, C: 4.482, Time: 0.03\n",
"It: 45380, Loss: 2.891e-04, C: 4.482, Time: 0.03\n",
"It: 45390, Loss: 2.813e-04, C: 4.482, Time: 0.04\n",
"It: 45400, Loss: 3.084e-04, C: 4.482, Time: 0.04\n",
"It: 45410, Loss: 3.335e-04, C: 4.482, Time: 0.03\n",
"It: 45420, Loss: 3.464e-04, C: 4.482, Time: 0.03\n",
"It: 45430, Loss: 2.551e-04, C: 4.482, Time: 0.03\n",
"It: 45440, Loss: 2.573e-04, C: 4.482, Time: 0.03\n",
"It: 45450, Loss: 3.065e-04, C: 4.482, Time: 0.03\n",
"It: 45460, Loss: 1.760e-03, C: 4.482, Time: 0.04\n",
"It: 45470, Loss: 3.135e-04, C: 4.482, Time: 0.03\n",
"It: 45480, Loss: 5.455e-04, C: 4.482, Time: 0.03\n",
"It: 45490, Loss: 3.641e-04, C: 4.482, Time: 0.04\n",
"It: 45500, Loss: 6.095e-04, C: 4.482, Time: 0.04\n",
"It: 45510, Loss: 3.212e-04, C: 4.482, Time: 0.03\n",
"It: 45520, Loss: 1.499e-03, C: 4.482, Time: 0.04\n",
"It: 45530, Loss: 7.548e-04, C: 4.482, Time: 0.03\n",
"It: 45540, Loss: 4.554e-04, C: 4.482, Time: 0.03\n",
"It: 45550, Loss: 5.306e-04, C: 4.482, Time: 0.03\n",
"It: 45560, Loss: 3.013e-04, C: 4.482, Time: 0.03\n",
"It: 45570, Loss: 5.715e-04, C: 4.482, Time: 0.03\n",
"It: 45580, Loss: 1.470e-03, C: 4.482, Time: 0.03\n",
"It: 45590, Loss: 5.545e-04, C: 4.482, Time: 0.03\n",
"It: 45600, Loss: 4.530e-04, C: 4.482, Time: 0.03\n",
"It: 45610, Loss: 4.110e-04, C: 4.482, Time: 0.03\n",
"It: 45620, Loss: 4.186e-04, C: 4.482, Time: 0.03\n",
"It: 45630, Loss: 2.456e-04, C: 4.482, Time: 0.03\n",
"It: 45640, Loss: 2.351e-04, C: 4.482, Time: 0.04\n",
"It: 45650, Loss: 1.739e-04, C: 4.482, Time: 0.04\n",
"It: 45660, Loss: 4.407e-04, C: 4.482, Time: 0.03\n",
"It: 45670, Loss: 5.791e-04, C: 4.482, Time: 0.03\n",
"It: 45680, Loss: 4.173e-04, C: 4.482, Time: 0.03\n",
"It: 45690, Loss: 3.114e-04, C: 4.482, Time: 0.04\n",
"It: 45700, Loss: 5.130e-04, C: 4.482, Time: 0.03\n",
"It: 45710, Loss: 8.343e-04, C: 4.482, Time: 0.03\n",
"It: 45720, Loss: 1.021e-03, C: 4.482, Time: 0.03\n",
"It: 45730, Loss: 8.841e-04, C: 4.482, Time: 0.03\n",
"It: 45740, Loss: 8.049e-04, C: 4.482, Time: 0.04\n",
"It: 45750, Loss: 4.672e-04, C: 4.482, Time: 0.04\n",
"It: 45760, Loss: 9.593e-04, C: 4.482, Time: 0.03\n",
"It: 45770, Loss: 8.617e-04, C: 4.482, Time: 0.03\n",
"It: 45780, Loss: 9.570e-04, C: 4.482, Time: 0.03\n",
"It: 45790, Loss: 1.105e-03, C: 4.482, Time: 0.03\n",
"It: 45800, Loss: 9.343e-04, C: 4.482, Time: 0.04\n",
"It: 45810, Loss: 1.143e-03, C: 4.482, Time: 0.03\n",
"It: 45820, Loss: 3.217e-04, C: 4.482, Time: 0.03\n",
"It: 45830, Loss: 2.788e-04, C: 4.482, Time: 0.03\n",
"It: 45840, Loss: 1.926e-04, C: 4.482, Time: 0.03\n",
"It: 45850, Loss: 4.726e-04, C: 4.482, Time: 0.04\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 45860, Loss: 3.446e-04, C: 4.482, Time: 0.04\n",
"It: 45870, Loss: 2.447e-04, C: 4.482, Time: 0.04\n",
"It: 45880, Loss: 5.039e-04, C: 4.482, Time: 0.03\n",
"It: 45890, Loss: 3.351e-04, C: 4.482, Time: 0.03\n",
"It: 45900, Loss: 1.167e-03, C: 4.482, Time: 0.03\n",
"It: 45910, Loss: 5.817e-04, C: 4.482, Time: 0.03\n",
"It: 45920, Loss: 1.208e-03, C: 4.482, Time: 0.03\n",
"It: 45930, Loss: 4.860e-04, C: 4.482, Time: 0.04\n",
"It: 45940, Loss: 9.325e-04, C: 4.482, Time: 0.03\n",
"It: 45950, Loss: 5.570e-04, C: 4.482, Time: 0.03\n",
"It: 45960, Loss: 6.951e-04, C: 4.482, Time: 0.04\n",
"It: 45970, Loss: 5.410e-04, C: 4.482, Time: 0.03\n",
"It: 45980, Loss: 4.612e-04, C: 4.482, Time: 0.03\n",
"It: 45990, Loss: 3.486e-04, C: 4.482, Time: 0.03\n",
"It: 46000, Loss: 6.890e-04, C: 4.482, Time: 0.03\n",
"It: 46010, Loss: 8.183e-04, C: 4.482, Time: 0.04\n",
"It: 46020, Loss: 5.919e-04, C: 4.482, Time: 0.03\n",
"It: 46030, Loss: 3.861e-04, C: 4.482, Time: 0.03\n",
"It: 46040, Loss: 4.250e-04, C: 4.482, Time: 0.03\n",
"It: 46050, Loss: 3.702e-04, C: 4.482, Time: 0.03\n",
"It: 46060, Loss: 3.186e-04, C: 4.482, Time: 0.03\n",
"It: 46070, Loss: 1.751e-03, C: 4.482, Time: 0.03\n",
"It: 46080, Loss: 1.310e-03, C: 4.482, Time: 0.03\n",
"It: 46090, Loss: 4.287e-04, C: 4.482, Time: 0.03\n",
"It: 46100, Loss: 4.491e-04, C: 4.482, Time: 0.03\n",
"It: 46110, Loss: 3.118e-04, C: 4.482, Time: 0.03\n",
"It: 46120, Loss: 3.668e-04, C: 4.482, Time: 0.04\n",
"It: 46130, Loss: 4.734e-04, C: 4.482, Time: 0.03\n",
"It: 46140, Loss: 2.376e-04, C: 4.482, Time: 0.04\n",
"It: 46150, Loss: 5.221e-04, C: 4.482, Time: 0.03\n",
"It: 46160, Loss: 2.877e-04, C: 4.482, Time: 0.03\n",
"It: 46170, Loss: 9.452e-04, C: 4.482, Time: 0.04\n",
"It: 46180, Loss: 4.520e-04, C: 4.482, Time: 0.03\n",
"It: 46190, Loss: 4.030e-04, C: 4.482, Time: 0.03\n",
"It: 46200, Loss: 3.777e-04, C: 4.482, Time: 0.03\n",
"It: 46210, Loss: 6.608e-04, C: 4.482, Time: 0.03\n",
"It: 46220, Loss: 4.138e-04, C: 4.482, Time: 0.03\n",
"It: 46230, Loss: 1.899e-04, C: 4.482, Time: 0.03\n",
"It: 46240, Loss: 5.802e-04, C: 4.482, Time: 0.03\n",
"It: 46250, Loss: 6.507e-04, C: 4.482, Time: 0.03\n",
"It: 46260, Loss: 4.075e-04, C: 4.482, Time: 0.03\n",
"It: 46270, Loss: 1.451e-03, C: 4.482, Time: 0.04\n",
"It: 46280, Loss: 2.557e-04, C: 4.482, Time: 0.03\n",
"It: 46290, Loss: 5.229e-04, C: 4.482, Time: 0.04\n",
"It: 46300, Loss: 3.103e-04, C: 4.482, Time: 0.03\n",
"It: 46310, Loss: 4.237e-04, C: 4.482, Time: 0.03\n",
"It: 46320, Loss: 5.269e-04, C: 4.482, Time: 0.03\n",
"It: 46330, Loss: 3.588e-04, C: 4.482, Time: 0.04\n",
"It: 46340, Loss: 4.864e-04, C: 4.482, Time: 0.04\n",
"It: 46350, Loss: 1.932e-04, C: 4.482, Time: 0.03\n",
"It: 46360, Loss: 2.716e-04, C: 4.482, Time: 0.03\n",
"It: 46370, Loss: 9.030e-04, C: 4.482, Time: 0.03\n",
"It: 46380, Loss: 3.391e-04, C: 4.482, Time: 0.03\n",
"It: 46390, Loss: 2.910e-04, C: 4.482, Time: 0.03\n",
"It: 46400, Loss: 2.218e-04, C: 4.482, Time: 0.04\n",
"It: 46410, Loss: 5.307e-04, C: 4.482, Time: 0.04\n",
"It: 46420, Loss: 1.196e-03, C: 4.482, Time: 0.03\n",
"It: 46430, Loss: 4.790e-04, C: 4.482, Time: 0.03\n",
"It: 46440, Loss: 9.188e-04, C: 4.482, Time: 0.04\n",
"It: 46450, Loss: 4.397e-04, C: 4.482, Time: 0.03\n",
"It: 46460, Loss: 4.527e-04, C: 4.482, Time: 0.04\n",
"It: 46470, Loss: 1.084e-03, C: 4.482, Time: 0.03\n",
"It: 46480, Loss: 7.407e-04, C: 4.482, Time: 0.04\n",
"It: 46490, Loss: 3.677e-04, C: 4.482, Time: 0.04\n",
"It: 46500, Loss: 4.144e-04, C: 4.482, Time: 0.04\n",
"It: 46510, Loss: 6.020e-04, C: 4.482, Time: 0.03\n",
"It: 46520, Loss: 5.369e-04, C: 4.482, Time: 0.03\n",
"It: 46530, Loss: 3.027e-04, C: 4.482, Time: 0.03\n",
"It: 46540, Loss: 2.723e-04, C: 4.482, Time: 0.04\n",
"It: 46550, Loss: 1.001e-03, C: 4.482, Time: 0.03\n",
"It: 46560, Loss: 2.648e-04, C: 4.482, Time: 0.03\n",
"It: 46570, Loss: 2.948e-04, C: 4.482, Time: 0.03\n",
"It: 46580, Loss: 2.521e-04, C: 4.482, Time: 0.03\n",
"It: 46590, Loss: 1.090e-03, C: 4.482, Time: 0.03\n",
"It: 46600, Loss: 3.655e-04, C: 4.482, Time: 0.04\n",
"It: 46610, Loss: 3.618e-04, C: 4.482, Time: 0.04\n",
"It: 46620, Loss: 1.861e-03, C: 4.482, Time: 0.03\n",
"It: 46630, Loss: 1.240e-03, C: 4.482, Time: 0.03\n",
"It: 46640, Loss: 6.938e-04, C: 4.482, Time: 0.03\n",
"It: 46650, Loss: 3.706e-04, C: 4.482, Time: 0.03\n",
"It: 46660, Loss: 2.427e-04, C: 4.482, Time: 0.03\n",
"It: 46670, Loss: 5.428e-04, C: 4.482, Time: 0.03\n",
"It: 46680, Loss: 3.437e-04, C: 4.482, Time: 0.03\n",
"It: 46690, Loss: 4.334e-04, C: 4.482, Time: 0.04\n",
"It: 46700, Loss: 2.632e-04, C: 4.482, Time: 0.03\n",
"It: 46710, Loss: 3.743e-04, C: 4.482, Time: 0.04\n",
"It: 46720, Loss: 2.750e-04, C: 4.482, Time: 0.04\n",
"It: 46730, Loss: 2.427e-04, C: 4.482, Time: 0.03\n",
"It: 46740, Loss: 6.074e-04, C: 4.482, Time: 0.03\n",
"It: 46750, Loss: 1.119e-03, C: 4.482, Time: 0.03\n",
"It: 46760, Loss: 8.054e-04, C: 4.482, Time: 0.03\n",
"It: 46770, Loss: 5.269e-04, C: 4.482, Time: 0.04\n",
"It: 46780, Loss: 7.888e-04, C: 4.482, Time: 0.04\n",
"It: 46790, Loss: 3.055e-04, C: 4.482, Time: 0.03\n",
"It: 46800, Loss: 3.784e-04, C: 4.482, Time: 0.03\n",
"It: 46810, Loss: 3.019e-04, C: 4.482, Time: 0.03\n",
"It: 46820, Loss: 1.139e-03, C: 4.482, Time: 0.03\n",
"It: 46830, Loss: 1.478e-03, C: 4.482, Time: 0.03\n",
"It: 46840, Loss: 2.989e-04, C: 4.482, Time: 0.03\n",
"It: 46850, Loss: 2.561e-04, C: 4.482, Time: 0.03\n",
"It: 46860, Loss: 5.263e-04, C: 4.482, Time: 0.03\n",
"It: 46870, Loss: 2.704e-04, C: 4.482, Time: 0.04\n",
"It: 46880, Loss: 2.363e-04, C: 4.482, Time: 0.03\n",
"It: 46890, Loss: 1.815e-04, C: 4.482, Time: 0.04\n",
"It: 46900, Loss: 2.643e-04, C: 4.482, Time: 0.03\n",
"It: 46910, Loss: 7.688e-04, C: 4.482, Time: 0.03\n",
"It: 46920, Loss: 2.656e-04, C: 4.482, Time: 0.04\n",
"It: 46930, Loss: 3.351e-04, C: 4.482, Time: 0.03\n",
"It: 46940, Loss: 4.981e-04, C: 4.482, Time: 0.03\n",
"It: 46950, Loss: 2.161e-04, C: 4.482, Time: 0.03\n",
"It: 46960, Loss: 3.172e-04, C: 4.482, Time: 0.03\n",
"It: 46970, Loss: 3.525e-04, C: 4.482, Time: 0.03\n",
"It: 46980, Loss: 2.918e-04, C: 4.482, Time: 0.03\n",
"It: 46990, Loss: 3.584e-04, C: 4.482, Time: 0.04\n",
"It: 47000, Loss: 3.482e-04, C: 4.482, Time: 0.03\n",
"It: 47010, Loss: 2.588e-04, C: 4.482, Time: 0.03\n",
"It: 47020, Loss: 7.457e-04, C: 4.482, Time: 0.03\n",
"It: 47030, Loss: 1.231e-03, C: 4.482, Time: 0.03\n",
"It: 47040, Loss: 3.160e-04, C: 4.482, Time: 0.03\n",
"It: 47050, Loss: 3.700e-04, C: 4.482, Time: 0.03\n",
"It: 47060, Loss: 3.555e-04, C: 4.482, Time: 0.04\n",
"It: 47070, Loss: 2.355e-04, C: 4.482, Time: 0.04\n",
"It: 47080, Loss: 9.274e-04, C: 4.482, Time: 0.03\n",
"It: 47090, Loss: 2.565e-04, C: 4.482, Time: 0.03\n",
"It: 47100, Loss: 9.898e-04, C: 4.482, Time: 0.03\n",
"It: 47110, Loss: 6.587e-04, C: 4.482, Time: 0.03\n",
"It: 47120, Loss: 6.476e-04, C: 4.482, Time: 0.03\n",
"It: 47130, Loss: 3.403e-04, C: 4.482, Time: 0.03\n",
"It: 47140, Loss: 1.779e-04, C: 4.482, Time: 0.03\n",
"It: 47150, Loss: 1.618e-03, C: 4.482, Time: 0.03\n",
"It: 47160, Loss: 1.585e-03, C: 4.482, Time: 0.03\n",
"It: 47170, Loss: 1.437e-03, C: 4.482, Time: 0.03\n",
"It: 47180, Loss: 4.748e-04, C: 4.482, Time: 0.04\n",
"It: 47190, Loss: 1.661e-03, C: 4.482, Time: 0.03\n",
"It: 47200, Loss: 1.401e-03, C: 4.482, Time: 0.04\n",
"It: 47210, Loss: 1.229e-03, C: 4.482, Time: 0.03\n",
"It: 47220, Loss: 1.480e-03, C: 4.482, Time: 0.03\n",
"It: 47230, Loss: 4.626e-04, C: 4.482, Time: 0.03\n",
"It: 47240, Loss: 1.893e-03, C: 4.482, Time: 0.03\n",
"It: 47250, Loss: 3.385e-04, C: 4.482, Time: 0.03\n",
"It: 47260, Loss: 1.467e-03, C: 4.482, Time: 0.03\n",
"It: 47270, Loss: 3.827e-04, C: 4.482, Time: 0.03\n",
"It: 47280, Loss: 3.725e-04, C: 4.482, Time: 0.04\n",
"It: 47290, Loss: 1.002e-03, C: 4.482, Time: 0.03\n",
"It: 47300, Loss: 9.765e-04, C: 4.482, Time: 0.04\n",
"It: 47310, Loss: 8.291e-04, C: 4.482, Time: 0.03\n",
"It: 47320, Loss: 8.275e-04, C: 4.482, Time: 0.03\n",
"It: 47330, Loss: 8.897e-04, C: 4.482, Time: 0.04\n",
"It: 47340, Loss: 3.120e-04, C: 4.482, Time: 0.04\n",
"It: 47350, Loss: 4.278e-04, C: 4.482, Time: 0.03\n",
"It: 47360, Loss: 1.360e-03, C: 4.482, Time: 0.03\n",
"It: 47370, Loss: 3.737e-04, C: 4.482, Time: 0.03\n",
"It: 47380, Loss: 8.364e-04, C: 4.482, Time: 0.03\n",
"It: 47390, Loss: 3.644e-04, C: 4.482, Time: 0.03\n",
"It: 47400, Loss: 5.661e-04, C: 4.482, Time: 0.04\n",
"It: 47410, Loss: 3.390e-04, C: 4.482, Time: 0.04\n",
"It: 47420, Loss: 1.277e-03, C: 4.482, Time: 0.03\n",
"It: 47430, Loss: 5.198e-04, C: 4.482, Time: 0.03\n",
"It: 47440, Loss: 2.275e-04, C: 4.482, Time: 0.04\n",
"It: 47450, Loss: 4.904e-04, C: 4.482, Time: 0.03\n",
"It: 47460, Loss: 8.453e-04, C: 4.482, Time: 0.03\n",
"It: 47470, Loss: 7.195e-04, C: 4.482, Time: 0.03\n",
"It: 47480, Loss: 6.549e-04, C: 4.482, Time: 0.03\n",
"It: 47490, Loss: 1.063e-03, C: 4.482, Time: 0.03\n",
"It: 47500, Loss: 1.448e-03, C: 4.482, Time: 0.04\n",
"It: 47510, Loss: 2.823e-04, C: 4.482, Time: 0.03\n",
"It: 47520, Loss: 3.764e-04, C: 4.482, Time: 0.03\n",
"It: 47530, Loss: 9.400e-04, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 47540, Loss: 6.810e-04, C: 4.482, Time: 0.04\n",
"It: 47550, Loss: 1.455e-03, C: 4.482, Time: 0.03\n",
"It: 47560, Loss: 8.186e-04, C: 4.482, Time: 0.03\n",
"It: 47570, Loss: 1.243e-03, C: 4.482, Time: 0.03\n",
"It: 47580, Loss: 8.192e-04, C: 4.482, Time: 0.03\n",
"It: 47590, Loss: 1.168e-03, C: 4.482, Time: 0.03\n",
"It: 47600, Loss: 4.840e-04, C: 4.482, Time: 0.03\n",
"It: 47610, Loss: 1.538e-03, C: 4.482, Time: 0.03\n",
"It: 47620, Loss: 8.976e-04, C: 4.482, Time: 0.03\n",
"It: 47630, Loss: 5.613e-04, C: 4.482, Time: 0.04\n",
"It: 47640, Loss: 3.964e-04, C: 4.482, Time: 0.03\n",
"It: 47650, Loss: 3.488e-04, C: 4.482, Time: 0.03\n",
"It: 47660, Loss: 3.607e-04, C: 4.482, Time: 0.03\n",
"It: 47670, Loss: 3.247e-03, C: 4.482, Time: 0.03\n",
"It: 47680, Loss: 4.707e-04, C: 4.482, Time: 0.04\n",
"It: 47690, Loss: 1.003e-03, C: 4.482, Time: 0.04\n",
"It: 47700, Loss: 8.826e-04, C: 4.482, Time: 0.03\n",
"It: 47710, Loss: 9.863e-04, C: 4.482, Time: 0.03\n",
"It: 47720, Loss: 4.883e-04, C: 4.482, Time: 0.03\n",
"It: 47730, Loss: 1.007e-03, C: 4.482, Time: 0.04\n",
"It: 47740, Loss: 6.534e-04, C: 4.482, Time: 0.03\n",
"It: 47750, Loss: 5.725e-04, C: 4.482, Time: 0.03\n",
"It: 47760, Loss: 9.244e-04, C: 4.482, Time: 0.03\n",
"It: 47770, Loss: 5.138e-04, C: 4.482, Time: 0.04\n",
"It: 47780, Loss: 1.995e-03, C: 4.482, Time: 0.03\n",
"It: 47790, Loss: 4.326e-04, C: 4.482, Time: 0.03\n",
"It: 47800, Loss: 5.486e-04, C: 4.482, Time: 0.03\n",
"It: 47810, Loss: 1.644e-03, C: 4.482, Time: 0.03\n",
"It: 47820, Loss: 6.077e-04, C: 4.482, Time: 0.03\n",
"It: 47830, Loss: 4.483e-04, C: 4.482, Time: 0.03\n",
"It: 47840, Loss: 1.576e-03, C: 4.482, Time: 0.03\n",
"It: 47850, Loss: 1.038e-03, C: 4.482, Time: 0.03\n",
"It: 47860, Loss: 6.533e-04, C: 4.482, Time: 0.03\n",
"It: 47870, Loss: 4.319e-04, C: 4.482, Time: 0.03\n",
"It: 47880, Loss: 1.016e-03, C: 4.482, Time: 0.03\n",
"It: 47890, Loss: 4.676e-04, C: 4.482, Time: 0.03\n",
"It: 47900, Loss: 3.220e-04, C: 4.482, Time: 0.04\n",
"It: 47910, Loss: 1.197e-03, C: 4.482, Time: 0.03\n",
"It: 47920, Loss: 6.955e-04, C: 4.482, Time: 0.03\n",
"It: 47930, Loss: 9.550e-04, C: 4.482, Time: 0.03\n",
"It: 47940, Loss: 2.826e-04, C: 4.482, Time: 0.03\n",
"It: 47950, Loss: 4.065e-04, C: 4.482, Time: 0.03\n",
"It: 47960, Loss: 9.089e-04, C: 4.482, Time: 0.03\n",
"It: 47970, Loss: 7.758e-04, C: 4.482, Time: 0.04\n",
"It: 47980, Loss: 8.342e-04, C: 4.482, Time: 0.03\n",
"It: 47990, Loss: 3.284e-04, C: 4.482, Time: 0.04\n",
"It: 48000, Loss: 8.713e-04, C: 4.482, Time: 0.03\n",
"It: 48010, Loss: 9.891e-04, C: 4.482, Time: 0.03\n",
"It: 48020, Loss: 5.640e-04, C: 4.482, Time: 0.03\n",
"It: 48030, Loss: 1.190e-03, C: 4.482, Time: 0.03\n",
"It: 48040, Loss: 4.106e-04, C: 4.482, Time: 0.03\n",
"It: 48050, Loss: 6.175e-04, C: 4.482, Time: 0.03\n",
"It: 48060, Loss: 5.759e-04, C: 4.482, Time: 0.03\n",
"It: 48070, Loss: 6.663e-04, C: 4.482, Time: 0.03\n",
"It: 48080, Loss: 3.403e-04, C: 4.482, Time: 0.04\n",
"It: 48090, Loss: 7.440e-04, C: 4.482, Time: 0.03\n",
"It: 48100, Loss: 6.991e-04, C: 4.482, Time: 0.04\n",
"It: 48110, Loss: 4.307e-04, C: 4.482, Time: 0.04\n",
"It: 48120, Loss: 5.415e-04, C: 4.482, Time: 0.03\n",
"It: 48130, Loss: 1.222e-03, C: 4.482, Time: 0.03\n",
"It: 48140, Loss: 4.095e-04, C: 4.482, Time: 0.04\n",
"It: 48150, Loss: 8.095e-04, C: 4.482, Time: 0.04\n",
"It: 48160, Loss: 1.110e-03, C: 4.482, Time: 0.03\n",
"It: 48170, Loss: 4.531e-04, C: 4.482, Time: 0.04\n",
"It: 48180, Loss: 4.639e-04, C: 4.482, Time: 0.03\n",
"It: 48190, Loss: 5.008e-04, C: 4.482, Time: 0.03\n",
"It: 48200, Loss: 1.265e-03, C: 4.482, Time: 0.04\n",
"It: 48210, Loss: 1.143e-03, C: 4.482, Time: 0.03\n",
"It: 48220, Loss: 3.849e-04, C: 4.482, Time: 0.04\n",
"It: 48230, Loss: 7.322e-04, C: 4.482, Time: 0.03\n",
"It: 48240, Loss: 9.416e-04, C: 4.482, Time: 0.03\n",
"It: 48250, Loss: 9.101e-04, C: 4.482, Time: 0.03\n",
"It: 48260, Loss: 2.954e-04, C: 4.482, Time: 0.04\n",
"It: 48270, Loss: 4.438e-04, C: 4.482, Time: 0.03\n",
"It: 48280, Loss: 5.096e-04, C: 4.482, Time: 0.03\n",
"It: 48290, Loss: 4.172e-04, C: 4.482, Time: 0.03\n",
"It: 48300, Loss: 4.511e-04, C: 4.482, Time: 0.03\n",
"It: 48310, Loss: 6.489e-04, C: 4.482, Time: 0.03\n",
"It: 48320, Loss: 5.357e-04, C: 4.482, Time: 0.03\n",
"It: 48330, Loss: 5.683e-04, C: 4.482, Time: 0.03\n",
"It: 48340, Loss: 3.530e-04, C: 4.482, Time: 0.04\n",
"It: 48350, Loss: 2.438e-04, C: 4.482, Time: 0.04\n",
"It: 48360, Loss: 2.773e-04, C: 4.482, Time: 0.03\n",
"It: 48370, Loss: 3.773e-04, C: 4.482, Time: 0.03\n",
"It: 48380, Loss: 4.900e-04, C: 4.482, Time: 0.04\n",
"It: 48390, Loss: 6.169e-04, C: 4.482, Time: 0.04\n",
"It: 48400, Loss: 4.204e-04, C: 4.482, Time: 0.04\n",
"It: 48410, Loss: 4.035e-04, C: 4.482, Time: 0.03\n",
"It: 48420, Loss: 2.861e-04, C: 4.482, Time: 0.03\n",
"It: 48430, Loss: 2.621e-04, C: 4.482, Time: 0.03\n",
"It: 48440, Loss: 7.784e-04, C: 4.482, Time: 0.03\n",
"It: 48450, Loss: 9.226e-04, C: 4.482, Time: 0.03\n",
"It: 48460, Loss: 2.472e-04, C: 4.482, Time: 0.03\n",
"It: 48470, Loss: 8.556e-04, C: 4.482, Time: 0.03\n",
"It: 48480, Loss: 1.080e-03, C: 4.482, Time: 0.03\n",
"It: 48490, Loss: 8.535e-04, C: 4.482, Time: 0.03\n",
"It: 48500, Loss: 1.073e-03, C: 4.482, Time: 0.04\n",
"It: 48510, Loss: 1.244e-03, C: 4.482, Time: 0.04\n",
"It: 48520, Loss: 1.172e-03, C: 4.482, Time: 0.03\n",
"It: 48530, Loss: 3.355e-04, C: 4.482, Time: 0.03\n",
"It: 48540, Loss: 2.566e-04, C: 4.482, Time: 0.03\n",
"It: 48550, Loss: 4.046e-04, C: 4.482, Time: 0.03\n",
"It: 48560, Loss: 2.984e-04, C: 4.482, Time: 0.04\n",
"It: 48570, Loss: 8.349e-04, C: 4.482, Time: 0.03\n",
"It: 48580, Loss: 4.176e-04, C: 4.482, Time: 0.03\n",
"It: 48590, Loss: 2.594e-04, C: 4.482, Time: 0.03\n",
"It: 48600, Loss: 6.593e-04, C: 4.482, Time: 0.04\n",
"It: 48610, Loss: 6.266e-04, C: 4.482, Time: 0.03\n",
"It: 48620, Loss: 2.638e-04, C: 4.482, Time: 0.04\n",
"It: 48630, Loss: 2.646e-04, C: 4.482, Time: 0.04\n",
"It: 48640, Loss: 8.409e-04, C: 4.482, Time: 0.03\n",
"It: 48650, Loss: 2.500e-04, C: 4.482, Time: 0.03\n",
"It: 48660, Loss: 4.076e-04, C: 4.482, Time: 0.04\n",
"It: 48670, Loss: 3.765e-04, C: 4.482, Time: 0.03\n",
"It: 48680, Loss: 5.000e-04, C: 4.482, Time: 0.04\n",
"It: 48690, Loss: 5.167e-04, C: 4.482, Time: 0.03\n",
"It: 48700, Loss: 1.118e-03, C: 4.482, Time: 0.03\n",
"It: 48710, Loss: 3.656e-04, C: 4.482, Time: 0.03\n",
"It: 48720, Loss: 4.709e-04, C: 4.482, Time: 0.03\n",
"It: 48730, Loss: 3.768e-04, C: 4.482, Time: 0.03\n",
"It: 48740, Loss: 2.728e-04, C: 4.482, Time: 0.03\n",
"It: 48750, Loss: 4.740e-04, C: 4.482, Time: 0.03\n",
"It: 48760, Loss: 4.130e-04, C: 4.482, Time: 0.03\n",
"It: 48770, Loss: 7.535e-04, C: 4.482, Time: 0.04\n",
"It: 48780, Loss: 8.948e-04, C: 4.482, Time: 0.03\n",
"It: 48790, Loss: 7.026e-04, C: 4.482, Time: 0.03\n",
"It: 48800, Loss: 3.899e-04, C: 4.482, Time: 0.03\n",
"It: 48810, Loss: 1.812e-03, C: 4.482, Time: 0.03\n",
"It: 48820, Loss: 9.150e-04, C: 4.482, Time: 0.03\n",
"It: 48830, Loss: 4.749e-04, C: 4.482, Time: 0.03\n",
"It: 48840, Loss: 6.750e-04, C: 4.482, Time: 0.03\n",
"It: 48850, Loss: 2.637e-04, C: 4.482, Time: 0.03\n",
"It: 48860, Loss: 2.351e-04, C: 4.482, Time: 0.04\n",
"It: 48870, Loss: 2.345e-04, C: 4.482, Time: 0.03\n",
"It: 48880, Loss: 5.983e-04, C: 4.482, Time: 0.03\n",
"It: 48890, Loss: 4.310e-04, C: 4.482, Time: 0.03\n",
"It: 48900, Loss: 2.174e-04, C: 4.482, Time: 0.03\n",
"It: 48910, Loss: 5.908e-04, C: 4.482, Time: 0.04\n",
"It: 48920, Loss: 2.620e-04, C: 4.482, Time: 0.03\n",
"It: 48930, Loss: 1.189e-03, C: 4.482, Time: 0.03\n",
"It: 48940, Loss: 1.128e-03, C: 4.482, Time: 0.03\n",
"It: 48950, Loss: 1.100e-03, C: 4.482, Time: 0.04\n",
"It: 48960, Loss: 6.009e-04, C: 4.482, Time: 0.03\n",
"It: 48970, Loss: 3.884e-04, C: 4.482, Time: 0.03\n",
"It: 48980, Loss: 5.192e-04, C: 4.482, Time: 0.03\n",
"It: 48990, Loss: 4.754e-04, C: 4.482, Time: 0.04\n",
"It: 49000, Loss: 1.978e-04, C: 4.482, Time: 0.04\n",
"It: 49010, Loss: 3.122e-04, C: 4.482, Time: 0.04\n",
"It: 49020, Loss: 4.853e-04, C: 4.482, Time: 0.03\n",
"It: 49030, Loss: 2.348e-04, C: 4.482, Time: 0.04\n",
"It: 49040, Loss: 2.229e-04, C: 4.482, Time: 0.03\n",
"It: 49050, Loss: 5.046e-04, C: 4.482, Time: 0.03\n",
"It: 49060, Loss: 4.381e-04, C: 4.482, Time: 0.03\n",
"It: 49070, Loss: 1.042e-03, C: 4.482, Time: 0.03\n",
"It: 49080, Loss: 4.513e-04, C: 4.482, Time: 0.04\n",
"It: 49090, Loss: 5.223e-04, C: 4.482, Time: 0.04\n",
"It: 49100, Loss: 1.922e-03, C: 4.482, Time: 0.03\n",
"It: 49110, Loss: 4.996e-04, C: 4.482, Time: 0.04\n",
"It: 49120, Loss: 3.065e-04, C: 4.482, Time: 0.03\n",
"It: 49130, Loss: 9.542e-04, C: 4.482, Time: 0.03\n",
"It: 49140, Loss: 3.241e-04, C: 4.482, Time: 0.03\n",
"It: 49150, Loss: 3.977e-04, C: 4.482, Time: 0.03\n",
"It: 49160, Loss: 1.613e-03, C: 4.482, Time: 0.03\n",
"It: 49170, Loss: 4.801e-04, C: 4.482, Time: 0.04\n",
"It: 49180, Loss: 3.695e-04, C: 4.482, Time: 0.03\n",
"It: 49190, Loss: 8.181e-04, C: 4.482, Time: 0.03\n",
"It: 49200, Loss: 8.325e-04, C: 4.482, Time: 0.04\n",
"It: 49210, Loss: 6.826e-04, C: 4.482, Time: 0.03\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"It: 49220, Loss: 1.580e-03, C: 4.482, Time: 0.03\n",
"It: 49230, Loss: 6.790e-04, C: 4.482, Time: 0.04\n",
"It: 49240, Loss: 1.603e-03, C: 4.482, Time: 0.03\n",
"It: 49250, Loss: 1.018e-03, C: 4.482, Time: 0.03\n",
"It: 49260, Loss: 8.269e-04, C: 4.482, Time: 0.03\n",
"It: 49270, Loss: 4.003e-04, C: 4.482, Time: 0.04\n",
"It: 49280, Loss: 3.439e-04, C: 4.482, Time: 0.04\n",
"It: 49290, Loss: 3.892e-04, C: 4.482, Time: 0.03\n",
"It: 49300, Loss: 3.421e-04, C: 4.482, Time: 0.03\n",
"It: 49310, Loss: 9.714e-04, C: 4.482, Time: 0.03\n",
"It: 49320, Loss: 6.964e-04, C: 4.482, Time: 0.03\n",
"It: 49330, Loss: 4.772e-04, C: 4.482, Time: 0.03\n",
"It: 49340, Loss: 4.142e-04, C: 4.482, Time: 0.03\n",
"It: 49350, Loss: 1.671e-03, C: 4.482, Time: 0.03\n",
"It: 49360, Loss: 1.402e-03, C: 4.482, Time: 0.03\n",
"It: 49370, Loss: 5.658e-04, C: 4.482, Time: 0.03\n",
"It: 49380, Loss: 8.161e-04, C: 4.482, Time: 0.03\n",
"It: 49390, Loss: 4.294e-04, C: 4.482, Time: 0.03\n",
"It: 49400, Loss: 7.535e-04, C: 4.482, Time: 0.04\n",
"It: 49410, Loss: 4.752e-04, C: 4.482, Time: 0.03\n",
"It: 49420, Loss: 3.487e-04, C: 4.482, Time: 0.03\n",
"It: 49430, Loss: 3.179e-04, C: 4.482, Time: 0.03\n",
"It: 49440, Loss: 9.233e-04, C: 4.482, Time: 0.03\n",
"It: 49450, Loss: 5.104e-04, C: 4.482, Time: 0.03\n",
"It: 49460, Loss: 9.772e-04, C: 4.482, Time: 0.04\n",
"It: 49470, Loss: 4.031e-04, C: 4.482, Time: 0.03\n",
"It: 49480, Loss: 1.120e-03, C: 4.482, Time: 0.04\n",
"It: 49490, Loss: 8.560e-04, C: 4.482, Time: 0.03\n",
"It: 49500, Loss: 5.431e-04, C: 4.482, Time: 0.03\n",
"It: 49510, Loss: 6.103e-04, C: 4.482, Time: 0.04\n",
"It: 49520, Loss: 3.508e-04, C: 4.482, Time: 0.03\n",
"It: 49530, Loss: 3.672e-04, C: 4.482, Time: 0.04\n",
"It: 49540, Loss: 5.085e-04, C: 4.482, Time: 0.03\n",
"It: 49550, Loss: 2.962e-04, C: 4.482, Time: 0.03\n",
"It: 49560, Loss: 1.306e-03, C: 4.482, Time: 0.04\n",
"It: 49570, Loss: 5.014e-04, C: 4.482, Time: 0.03\n",
"It: 49580, Loss: 5.258e-04, C: 4.482, Time: 0.03\n",
"It: 49590, Loss: 7.492e-04, C: 4.482, Time: 0.04\n",
"It: 49600, Loss: 7.409e-04, C: 4.482, Time: 0.04\n",
"It: 49610, Loss: 4.883e-04, C: 4.482, Time: 0.03\n",
"It: 49620, Loss: 6.245e-04, C: 4.482, Time: 0.03\n",
"It: 49630, Loss: 5.049e-04, C: 4.482, Time: 0.03\n",
"It: 49640, Loss: 3.821e-04, C: 4.482, Time: 0.04\n",
"It: 49650, Loss: 4.327e-04, C: 4.482, Time: 0.03\n",
"It: 49660, Loss: 3.732e-04, C: 4.482, Time: 0.03\n",
"It: 49670, Loss: 3.604e-04, C: 4.482, Time: 0.03\n",
"It: 49680, Loss: 3.646e-04, C: 4.482, Time: 0.03\n",
"It: 49690, Loss: 2.874e-04, C: 4.482, Time: 0.03\n",
"It: 49700, Loss: 6.452e-04, C: 4.482, Time: 0.04\n",
"It: 49710, Loss: 3.278e-04, C: 4.482, Time: 0.03\n",
"It: 49720, Loss: 6.597e-04, C: 4.482, Time: 0.03\n",
"It: 49730, Loss: 4.955e-04, C: 4.482, Time: 0.03\n",
"It: 49740, Loss: 9.487e-04, C: 4.482, Time: 0.03\n",
"It: 49750, Loss: 8.502e-04, C: 4.482, Time: 0.03\n",
"It: 49760, Loss: 8.560e-04, C: 4.482, Time: 0.03\n",
"It: 49770, Loss: 4.244e-04, C: 4.482, Time: 0.03\n",
"It: 49780, Loss: 6.522e-04, C: 4.482, Time: 0.03\n",
"It: 49790, Loss: 5.452e-04, C: 4.482, Time: 0.04\n",
"It: 49800, Loss: 3.873e-04, C: 4.482, Time: 0.03\n",
"It: 49810, Loss: 2.062e-04, C: 4.482, Time: 0.03\n",
"It: 49820, Loss: 5.275e-04, C: 4.482, Time: 0.03\n",
"It: 49830, Loss: 2.679e-04, C: 4.482, Time: 0.03\n",
"It: 49840, Loss: 5.635e-04, C: 4.482, Time: 0.03\n",
"It: 49850, Loss: 4.835e-04, C: 4.482, Time: 0.03\n",
"It: 49860, Loss: 3.430e-04, C: 4.482, Time: 0.03\n",
"It: 49870, Loss: 1.473e-03, C: 4.482, Time: 0.03\n",
"It: 49880, Loss: 3.618e-04, C: 4.482, Time: 0.03\n",
"It: 49890, Loss: 3.464e-04, C: 4.482, Time: 0.03\n",
"It: 49900, Loss: 4.065e-04, C: 4.482, Time: 0.03\n",
"It: 49910, Loss: 2.223e-04, C: 4.482, Time: 0.03\n",
"It: 49920, Loss: 1.836e-04, C: 4.482, Time: 0.03\n",
"It: 49930, Loss: 2.982e-04, C: 4.482, Time: 0.03\n",
"It: 49940, Loss: 8.421e-04, C: 4.482, Time: 0.04\n",
"It: 49950, Loss: 1.003e-03, C: 4.482, Time: 0.03\n",
"It: 49960, Loss: 5.120e-04, C: 4.482, Time: 0.03\n",
"It: 49970, Loss: 3.825e-04, C: 4.482, Time: 0.03\n",
"It: 49980, Loss: 5.884e-04, C: 4.482, Time: 0.03\n",
"It: 49990, Loss: 6.587e-04, C: 4.482, Time: 0.04\n",
"Loss: 6.30376e-04\n",
"Loss: 2.77913e+02\n",
"Loss: 1.86334e+01\n",
"Loss: 5.50459e-04\n",
"Loss: 4.97607e-04\n",
"Loss: 4.95048e-04\n",
"Loss: 4.74199e-04\n",
"Loss: 4.45490e-04\n",
"Loss: 3.98105e-04\n",
"Loss: 3.44561e-04\n",
"Loss: 3.41376e-04\n",
"Loss: 3.39851e-04\n",
"Loss: 3.37623e-04\n",
"Loss: 3.30243e-04\n",
"Loss: 3.21817e-04\n",
"Loss: 3.12322e-04\n",
"Loss: 3.00562e-04\n",
"Loss: 2.93565e-04\n",
"Loss: 2.92528e-04\n",
"Loss: 2.91690e-04\n",
"Loss: 2.86494e-04\n",
"Loss: 2.81124e-04\n",
"Loss: 2.77221e-04\n",
"Loss: 2.73537e-04\n",
"Loss: 2.66713e-04\n",
"Loss: 2.63066e-04\n",
"Loss: 2.59308e-04\n",
"Loss: 2.54512e-04\n",
"Loss: 2.51691e-04\n",
"Loss: 2.48805e-04\n",
"Loss: 2.46587e-04\n",
"Loss: 2.44860e-04\n",
"Loss: 2.40123e-04\n",
"Loss: 2.34019e-04\n",
"Loss: 2.31047e-04\n",
"Loss: 2.29627e-04\n",
"Loss: 2.27967e-04\n",
"Loss: 2.25928e-04\n",
"Loss: 2.24985e-04\n",
"Loss: 2.23671e-04\n",
"Loss: 2.22821e-04\n",
"Loss: 2.22375e-04\n",
"Loss: 2.21945e-04\n",
"Loss: 2.21533e-04\n",
"Loss: 2.19525e-04\n",
"Loss: 2.14845e-04\n",
"Loss: 2.13455e-04\n",
"Loss: 2.12635e-04\n",
"Loss: 2.11583e-04\n",
"Loss: 2.09452e-04\n",
"Loss: 2.08112e-04\n",
"Loss: 2.07241e-04\n",
"Loss: 2.06578e-04\n",
"Loss: 2.05326e-04\n",
"Loss: 2.03346e-04\n",
"Loss: 2.00249e-04\n",
"Loss: 2.11794e-04\n",
"Loss: 1.99496e-04\n",
"Loss: 1.97775e-04\n",
"Loss: 1.97103e-04\n",
"Loss: 1.96208e-04\n",
"Loss: 1.94654e-04\n",
"Loss: 1.91671e-04\n",
"Loss: 1.90105e-04\n",
"Loss: 1.89501e-04\n",
"Loss: 1.91865e-04\n",
"Loss: 1.89194e-04\n",
"Loss: 1.88887e-04\n",
"Loss: 1.87683e-04\n",
"Loss: 1.86391e-04\n",
"Loss: 1.88509e-04\n",
"Loss: 1.86135e-04\n",
"Loss: 1.85213e-04\n",
"Loss: 1.84321e-04\n",
"Loss: 1.82656e-04\n",
"Loss: 1.81193e-04\n",
"Loss: 1.80098e-04\n",
"Loss: 1.79331e-04\n",
"Loss: 1.78617e-04\n",
"Loss: 1.77877e-04\n",
"Loss: 1.75610e-04\n",
"Loss: 1.73476e-04\n",
"Loss: 1.71543e-04\n",
"Loss: 1.70677e-04\n",
"Loss: 1.69694e-04\n",
"Loss: 1.68379e-04\n",
"Loss: 1.67064e-04\n",
"Loss: 1.65965e-04\n",
"Loss: 1.65156e-04\n",
"Loss: 1.64535e-04\n",
"Loss: 1.63983e-04\n",
"Loss: 1.63110e-04\n",
"Loss: 1.62293e-04\n",
"Loss: 1.61119e-04\n",
"Loss: 1.60353e-04\n",
"Loss: 1.59719e-04\n",
"Loss: 1.59907e-04\n",
"Loss: 1.59641e-04\n",
"Loss: 1.59517e-04\n",
"Loss: 1.59368e-04\n",
"Loss: 1.58928e-04\n",
"Loss: 1.58544e-04\n",
"Loss: 1.61510e-04\n",
"Loss: 1.58210e-04\n",
"Loss: 1.57863e-04\n",
"Loss: 1.57578e-04\n",
"Loss: 1.57403e-04\n",
"Loss: 1.56245e-04\n",
"Loss: 1.55867e-04\n",
"Loss: 1.54781e-04\n",
"Loss: 1.54354e-04\n",
"Loss: 1.54198e-04\n",
"Loss: 1.54083e-04\n",
"Loss: 1.54692e-04\n",
"Loss: 1.53987e-04\n",
"Loss: 1.53718e-04\n",
"Loss: 1.53285e-04\n",
"Loss: 1.52952e-04\n",
"Loss: 1.52894e-04\n",
"Loss: 1.52760e-04\n",
"Loss: 1.52728e-04\n",
"Loss: 1.52639e-04\n",
"Loss: 1.52455e-04\n",
"Loss: 1.52061e-04\n",
"Loss: 1.68981e-04\n",
"Loss: 1.51831e-04\n",
"Loss: 1.51187e-04\n",
"Loss: 1.50525e-04\n",
"Loss: 1.50252e-04\n",
"Loss: 1.50074e-04\n",
"Loss: 1.49982e-04\n",
"Loss: 1.49923e-04\n",
"Loss: 1.49887e-04\n",
"Loss: 1.49744e-04\n",
"Loss: 1.48970e-04\n",
"Loss: 1.48560e-04\n",
"Loss: 1.47377e-04\n",
"Loss: 1.46640e-04\n",
"Loss: 1.46026e-04\n",
"Loss: 1.49389e-04\n",
"Loss: 1.45796e-04\n",
"Loss: 1.45465e-04\n",
"Loss: 1.44927e-04\n",
"Loss: 1.44523e-04\n",
"Loss: 1.43791e-04\n",
"Loss: 1.43161e-04\n",
"Loss: 1.42557e-04\n",
"Loss: 1.42170e-04\n",
"Loss: 1.41814e-04\n",
"Loss: 1.42871e-04\n",
"Loss: 1.41429e-04\n",
"Loss: 1.40354e-04\n",
"Loss: 1.39684e-04\n",
"Loss: 1.39434e-04\n",
"Loss: 1.39178e-04\n",
"Loss: 1.39132e-04\n",
"Loss: 1.38983e-04\n",
"Loss: 1.38818e-04\n",
"Loss: 1.38441e-04\n",
"Loss: 1.38154e-04\n",
"Loss: 1.37847e-04\n",
"Loss: 1.37276e-04\n",
"Loss: 1.40000e-04\n",
"Loss: 1.36784e-04\n",
"Loss: 1.36201e-04\n",
"Loss: 1.35888e-04\n",
"Loss: 1.35451e-04\n",
"Loss: 1.34868e-04\n",
"Loss: 1.34749e-04\n",
"Loss: 1.34602e-04\n",
"Loss: 1.34500e-04\n",
"Loss: 1.34339e-04\n",
"Loss: 1.33559e-04\n",
"Loss: 1.33304e-04\n",
"Loss: 1.33131e-04\n",
"Loss: 1.32920e-04\n",
"Loss: 1.32728e-04\n",
"Loss: 1.32498e-04\n",
"Loss: 1.32243e-04\n",
"Loss: 1.32079e-04\n",
"Loss: 1.31792e-04\n",
"Loss: 1.31476e-04\n",
"Loss: 1.32185e-04\n",
"Loss: 1.31019e-04\n",
"Loss: 1.31658e-04\n",
"Loss: 1.30653e-04\n",
"Loss: 1.30321e-04\n",
"Loss: 1.29924e-04\n",
"Loss: 1.29613e-04\n",
"Loss: 1.29403e-04\n",
"Loss: 1.29254e-04\n",
"Loss: 1.28812e-04\n",
"Loss: 1.28392e-04\n",
"Loss: 1.27395e-04\n",
"Loss: 1.26935e-04\n",
"Loss: 1.26417e-04\n",
"Loss: 1.25434e-04\n",
"Loss: 1.24326e-04\n",
"Loss: 1.27095e-04\n",
"Loss: 1.24087e-04\n",
"Loss: 1.23442e-04\n",
"Loss: 1.23623e-04\n",
"Loss: 1.23191e-04\n",
"Loss: 1.22811e-04\n",
"Loss: 1.22259e-04\n",
"Loss: 1.21119e-04\n",
"Loss: 1.20484e-04\n",
"Loss: 1.20022e-04\n",
"Loss: 1.20278e-04\n",
"Loss: 1.19880e-04\n",
"Loss: 1.19572e-04\n",
"Loss: 1.19169e-04\n",
"Loss: 1.27786e-04\n",
"Loss: 1.19121e-04\n",
"Loss: 1.18969e-04\n",
"Loss: 1.18873e-04\n",
"Loss: 1.18800e-04\n",
"Loss: 1.18687e-04\n",
"Loss: 1.18495e-04\n",
"Loss: 1.18068e-04\n",
"Loss: 1.17392e-04\n",
"Loss: 1.16225e-04\n",
"Loss: 1.15649e-04\n",
"Loss: 1.14561e-04\n",
"Loss: 1.14252e-04\n",
"Loss: 1.13978e-04\n",
"Loss: 1.13879e-04\n",
"Loss: 1.13573e-04\n",
"Loss: 1.13417e-04\n",
"Loss: 1.13345e-04\n",
"Loss: 1.13187e-04\n",
"Loss: 1.12978e-04\n",
"Loss: 1.15747e-04\n",
"Loss: 1.12803e-04\n",
"Loss: 1.12486e-04\n",
"Loss: 1.11792e-04\n",
"Loss: 1.11573e-04\n",
"Loss: 1.11046e-04\n",
"Loss: 1.10715e-04\n",
"Loss: 1.10514e-04\n",
"Loss: 1.12550e-04\n",
"Loss: 1.10451e-04\n",
"Loss: 1.10284e-04\n",
"Loss: 1.10168e-04\n",
"Loss: 1.09959e-04\n",
"Loss: 1.09789e-04\n",
"Loss: 1.09628e-04\n",
"Loss: 1.09480e-04\n",
"Loss: 1.09300e-04\n",
"Loss: 1.09070e-04\n",
"Loss: 1.15026e-04\n",
"Loss: 1.09034e-04\n",
"Loss: 1.08911e-04\n",
"Loss: 1.08939e-04\n",
"Loss: 1.08794e-04\n",
"Loss: 1.08697e-04\n",
"Loss: 1.08622e-04\n",
"Loss: 1.08536e-04\n",
"Loss: 1.08397e-04\n",
"Loss: 1.08174e-04\n",
"Loss: 1.07779e-04\n",
"Loss: 1.07337e-04\n",
"Loss: 1.06929e-04\n",
"Loss: 1.06676e-04\n",
"Loss: 1.06575e-04\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Loss: 1.06478e-04\n",
"Loss: 1.06398e-04\n",
"Loss: 1.06247e-04\n",
"Loss: 1.06039e-04\n",
"Loss: 1.13528e-04\n",
"Loss: 1.06004e-04\n",
"Loss: 1.05881e-04\n",
"Loss: 1.05765e-04\n",
"Loss: 1.05677e-04\n",
"Loss: 1.05481e-04\n",
"Loss: 1.05375e-04\n",
"Loss: 1.05084e-04\n",
"Loss: 1.04566e-04\n",
"Loss: 1.07558e-04\n",
"Loss: 1.04462e-04\n",
"Loss: 1.04245e-04\n",
"Loss: 1.04168e-04\n",
"Loss: 1.04092e-04\n",
"Loss: 1.06347e-04\n",
"Loss: 1.04055e-04\n",
"Loss: 1.03920e-04\n",
"Loss: 1.03826e-04\n",
"Loss: 1.03759e-04\n",
"Loss: 1.03734e-04\n",
"Loss: 1.03628e-04\n",
"Loss: 1.03551e-04\n",
"Loss: 1.03456e-04\n",
"Loss: 1.03313e-04\n",
"Loss: 1.03106e-04\n",
"Loss: 1.02895e-04\n",
"Loss: 1.02627e-04\n",
"Loss: 1.02222e-04\n",
"Loss: 1.01880e-04\n",
"Loss: 1.04675e-04\n",
"Loss: 1.01810e-04\n",
"Loss: 1.01522e-04\n",
"Loss: 1.01282e-04\n",
"Loss: 1.00991e-04\n",
"Loss: 1.00853e-04\n",
"Loss: 1.00634e-04\n",
"Loss: 1.00389e-04\n",
"Loss: 1.02395e-04\n",
"Loss: 1.00305e-04\n",
"Loss: 1.00635e-04\n",
"Loss: 1.00204e-04\n",
"Loss: 1.00046e-04\n",
"Loss: 9.97451e-05\n",
"Loss: 9.95663e-05\n",
"Loss: 9.94638e-05\n",
"Loss: 9.92111e-05\n",
"Loss: 9.90874e-05\n",
"Loss: 9.89081e-05\n",
"Loss: 9.87196e-05\n",
"Loss: 9.83815e-05\n",
"Loss: 9.80039e-05\n",
"Loss: 9.76517e-05\n",
"Loss: 9.73627e-05\n",
"Loss: 9.71526e-05\n",
"Loss: 9.69533e-05\n",
"Loss: 9.67990e-05\n",
"Loss: 9.65057e-05\n",
"Loss: 9.63623e-05\n",
"Loss: 9.67034e-05\n",
"Loss: 9.62838e-05\n",
"Loss: 9.61960e-05\n",
"Loss: 9.61369e-05\n",
"Loss: 9.60362e-05\n",
"Loss: 9.59005e-05\n",
"Loss: 9.56975e-05\n",
"Loss: 9.52093e-05\n",
"Loss: 1.04701e-04\n",
"Loss: 9.51296e-05\n",
"Loss: 9.47408e-05\n",
"Loss: 9.45140e-05\n",
"Loss: 9.43156e-05\n",
"Loss: 9.41891e-05\n",
"Loss: 9.40677e-05\n",
"Loss: 9.46504e-05\n",
"Loss: 9.39869e-05\n",
"Loss: 9.38575e-05\n",
"Loss: 9.36153e-05\n",
"Loss: 9.35503e-05\n",
"Loss: 9.34778e-05\n",
"Loss: 9.34270e-05\n",
"Loss: 9.33598e-05\n",
"Loss: 9.32293e-05\n",
"Loss: 9.30296e-05\n",
"Loss: 9.34831e-05\n",
"Loss: 9.28803e-05\n",
"Loss: 9.25822e-05\n",
"Loss: 9.23299e-05\n",
"Loss: 9.21705e-05\n",
"Loss: 9.19390e-05\n",
"Loss: 9.40232e-05\n",
"Loss: 9.18900e-05\n",
"Loss: 9.17666e-05\n",
"Loss: 9.15723e-05\n",
"Loss: 9.15316e-05\n",
"Loss: 9.14340e-05\n",
"Loss: 9.13968e-05\n",
"Loss: 9.13383e-05\n",
"Loss: 9.12743e-05\n",
"Loss: 9.12969e-05\n",
"Loss: 9.12318e-05\n",
"Loss: 9.11689e-05\n",
"Loss: 9.11091e-05\n",
"Loss: 9.10447e-05\n",
"Loss: 9.09105e-05\n",
"Loss: 9.08104e-05\n",
"Loss: 9.06676e-05\n",
"Loss: 9.06186e-05\n",
"Loss: 9.05728e-05\n",
"Loss: 9.05459e-05\n",
"Loss: 9.04786e-05\n",
"Loss: 9.03786e-05\n",
"Loss: 9.01105e-05\n",
"Loss: 8.98036e-05\n",
"Loss: 9.04602e-05\n",
"Loss: 8.96573e-05\n",
"Loss: 8.93444e-05\n",
"Loss: 8.91113e-05\n",
"Loss: 8.89454e-05\n",
"Loss: 8.88938e-05\n",
"Loss: 8.87507e-05\n",
"Loss: 8.86442e-05\n",
"Loss: 8.85700e-05\n",
"Loss: 8.84437e-05\n",
"Loss: 8.81566e-05\n",
"Loss: 8.79428e-05\n",
"Loss: 8.77855e-05\n",
"Loss: 8.83956e-05\n",
"Loss: 8.77647e-05\n",
"Loss: 8.76979e-05\n",
"Loss: 8.75959e-05\n",
"Loss: 8.75089e-05\n",
"Loss: 8.74202e-05\n",
"Loss: 8.75372e-05\n",
"Loss: 8.73759e-05\n",
"Loss: 8.73233e-05\n",
"Loss: 8.72016e-05\n",
"Loss: 8.69824e-05\n",
"Loss: 8.67741e-05\n",
"Loss: 8.66101e-05\n",
"Loss: 8.64954e-05\n",
"Loss: 8.63176e-05\n",
"Loss: 8.61448e-05\n",
"Loss: 8.58538e-05\n",
"Loss: 8.68075e-05\n",
"Loss: 8.56818e-05\n",
"Loss: 8.55098e-05\n",
"Loss: 8.53484e-05\n",
"Loss: 8.53359e-05\n",
"Loss: 8.52624e-05\n",
"Loss: 8.52119e-05\n",
"Loss: 8.51466e-05\n",
"Loss: 8.51625e-05\n",
"Loss: 8.51161e-05\n",
"Loss: 8.50444e-05\n",
"Loss: 8.49600e-05\n",
"Loss: 8.48759e-05\n",
"Loss: 8.48026e-05\n",
"Loss: 8.47585e-05\n",
"Loss: 8.46792e-05\n",
"Loss: 8.46045e-05\n",
"Loss: 8.45225e-05\n",
"Loss: 8.44416e-05\n",
"Loss: 8.42651e-05\n",
"Loss: 8.41927e-05\n",
"Loss: 8.39999e-05\n",
"Loss: 8.39415e-05\n",
"Loss: 8.38895e-05\n",
"Loss: 8.38103e-05\n",
"Loss: 8.36817e-05\n",
"Loss: 8.34832e-05\n",
"Loss: 8.42360e-05\n",
"Loss: 8.34248e-05\n",
"Loss: 8.33127e-05\n",
"Loss: 8.31155e-05\n",
"Loss: 8.29569e-05\n",
"Loss: 8.27901e-05\n",
"Loss: 8.27070e-05\n",
"Loss: 8.26589e-05\n",
"Loss: 8.26364e-05\n",
"Loss: 8.26158e-05\n",
"Loss: 8.25602e-05\n",
"Loss: 8.24717e-05\n",
"Loss: 8.24025e-05\n",
"Loss: 8.22994e-05\n",
"Loss: 8.22187e-05\n",
"Loss: 8.21796e-05\n",
"Loss: 8.21430e-05\n",
"Loss: 8.23467e-05\n",
"Loss: 8.21074e-05\n",
"Loss: 8.19986e-05\n",
"Loss: 8.18736e-05\n",
"Loss: 8.17430e-05\n",
"Loss: 8.16354e-05\n",
"Loss: 8.14683e-05\n",
"Loss: 8.15977e-05\n",
"Loss: 8.13841e-05\n",
"Loss: 8.11981e-05\n",
"Loss: 8.09746e-05\n",
"Loss: 8.07830e-05\n",
"Loss: 8.06129e-05\n",
"Loss: 8.04509e-05\n",
"Loss: 8.03545e-05\n",
"Loss: 8.01091e-05\n",
"Loss: 8.00427e-05\n",
"Loss: 7.98711e-05\n",
"Loss: 7.97876e-05\n",
"Loss: 7.99038e-05\n",
"Loss: 7.97559e-05\n",
"Loss: 7.96708e-05\n",
"Loss: 7.96029e-05\n",
"Loss: 7.95436e-05\n",
"Loss: 7.94992e-05\n",
"Loss: 7.94215e-05\n",
"Loss: 7.93305e-05\n",
"Loss: 7.91914e-05\n",
"Loss: 7.91446e-05\n",
"Loss: 7.90825e-05\n",
"Loss: 7.89861e-05\n",
"Loss: 7.88297e-05\n",
"Loss: 7.89825e-05\n",
"Loss: 7.87164e-05\n",
"Loss: 7.85543e-05\n",
"Loss: 7.84077e-05\n",
"Loss: 7.82929e-05\n",
"Loss: 7.81887e-05\n",
"Loss: 7.80943e-05\n",
"Loss: 7.80121e-05\n",
"Loss: 7.79321e-05\n",
"Loss: 7.78825e-05\n",
"Loss: 7.78063e-05\n",
"Loss: 7.77526e-05\n",
"Loss: 7.77089e-05\n",
"Loss: 7.76589e-05\n",
"Loss: 7.76294e-05\n",
"Loss: 7.75618e-05\n",
"Loss: 7.74434e-05\n",
"Loss: 7.73204e-05\n",
"Loss: 7.72573e-05\n",
"Loss: 7.71829e-05\n",
"Loss: 7.72982e-05\n",
"Loss: 7.71517e-05\n",
"Loss: 7.71157e-05\n",
"Loss: 7.70737e-05\n",
"Loss: 7.70594e-05\n",
"Loss: 7.70186e-05\n",
"Loss: 7.69625e-05\n",
"Loss: 7.68808e-05\n",
"Loss: 7.67975e-05\n",
"Loss: 7.67119e-05\n",
"Loss: 7.65581e-05\n",
"Loss: 7.64311e-05\n",
"Loss: 7.63182e-05\n",
"Loss: 7.62112e-05\n",
"Loss: 7.60833e-05\n",
"Loss: 7.59436e-05\n",
"Loss: 7.58431e-05\n",
"Loss: 7.57510e-05\n",
"Loss: 7.56376e-05\n",
"Loss: 7.55290e-05\n",
"Loss: 7.53669e-05\n",
"Loss: 7.52601e-05\n",
"Loss: 1.03027e-04\n",
"Loss: 7.52486e-05\n",
"Loss: 7.51371e-05\n",
"Loss: 7.49709e-05\n",
"Loss: 7.49046e-05\n",
"Loss: 7.48586e-05\n",
"Loss: 7.47925e-05\n",
"Loss: 7.47387e-05\n",
"Loss: 7.45718e-05\n",
"Loss: 7.43558e-05\n",
"Loss: 7.41075e-05\n",
"Loss: 7.39682e-05\n",
"Loss: 7.38862e-05\n",
"Loss: 7.38056e-05\n",
"Loss: 7.37059e-05\n",
"Loss: 7.36065e-05\n",
"Loss: 7.34429e-05\n",
"Loss: 7.33401e-05\n",
"Loss: 7.32923e-05\n",
"Loss: 7.34215e-05\n",
"Loss: 7.32025e-05\n",
"Loss: 7.31119e-05\n",
"Loss: 7.30535e-05\n",
"Loss: 7.29161e-05\n",
"Loss: 7.28598e-05\n",
"Loss: 7.27509e-05\n",
"Loss: 7.28075e-05\n",
"Loss: 7.26920e-05\n",
"Loss: 7.26213e-05\n",
"Loss: 7.25589e-05\n",
"Loss: 7.25403e-05\n",
"Loss: 7.24703e-05\n",
"Loss: 7.24187e-05\n",
"Loss: 7.26548e-05\n",
"Loss: 7.24033e-05\n",
"Loss: 7.23413e-05\n",
"Loss: 7.22869e-05\n",
"Loss: 7.22445e-05\n",
"Loss: 7.21890e-05\n",
"Loss: 7.21265e-05\n",
"Loss: 7.21413e-05\n",
"Loss: 7.20775e-05\n",
"Loss: 7.20180e-05\n",
"Loss: 7.19470e-05\n",
"Loss: 7.19184e-05\n",
"Loss: 7.18286e-05\n",
"Loss: 7.17239e-05\n",
"Loss: 7.16176e-05\n",
"Loss: 7.15148e-05\n",
"Loss: 7.14322e-05\n",
"Loss: 7.13377e-05\n",
"Loss: 7.12469e-05\n",
"Loss: 7.12396e-05\n",
"Loss: 7.11981e-05\n",
"Loss: 7.11398e-05\n",
"Loss: 7.10431e-05\n",
"Loss: 7.09474e-05\n",
"Loss: 7.08361e-05\n",
"Loss: 7.07815e-05\n",
"Loss: 7.07074e-05\n",
"Loss: 7.06783e-05\n",
"Loss: 7.06186e-05\n",
"Loss: 7.05940e-05\n",
"Loss: 7.09626e-05\n",
"Loss: 7.04569e-05\n",
"Loss: 7.03518e-05\n",
"Loss: 7.02819e-05\n",
"Loss: 7.02131e-05\n",
"Loss: 7.01560e-05\n",
"Loss: 7.00976e-05\n",
"Loss: 7.00141e-05\n",
"Loss: 6.99497e-05\n",
"Loss: 6.98892e-05\n",
"Loss: 6.98229e-05\n",
"Loss: 6.97265e-05\n",
"Loss: 6.96518e-05\n",
"Loss: 6.95455e-05\n",
"Loss: 6.94768e-05\n",
"Loss: 6.93802e-05\n",
"Loss: 6.93263e-05\n",
"Loss: 6.92918e-05\n",
"Loss: 6.92910e-05\n",
"Loss: 6.92569e-05\n",
"Loss: 6.92974e-05\n",
"Loss: 6.92440e-05\n",
"Loss: 6.92217e-05\n",
"Loss: 6.91601e-05\n",
"Loss: 6.91553e-05\n",
"Loss: 6.91011e-05\n",
"Loss: 6.89967e-05\n",
"Loss: 6.87917e-05\n",
"Loss: 6.86280e-05\n",
"Loss: 6.95166e-05\n",
"Loss: 6.86100e-05\n",
"Loss: 6.84993e-05\n",
"Loss: 6.84171e-05\n",
"Loss: 6.83095e-05\n",
"Loss: 6.81557e-05\n",
"Loss: 6.85768e-05\n",
"Loss: 6.81196e-05\n",
"Loss: 6.80075e-05\n",
"Loss: 6.79346e-05\n",
"Loss: 6.79603e-05\n",
"Loss: 6.78738e-05\n",
"Loss: 6.77607e-05\n",
"Loss: 6.76637e-05\n",
"Loss: 6.75592e-05\n",
"Loss: 6.75162e-05\n",
"Loss: 6.73424e-05\n",
"Loss: 6.72434e-05\n",
"Loss: 6.71186e-05\n",
"Loss: 6.69092e-05\n",
"Loss: 6.71112e-05\n",
"Loss: 6.67609e-05\n",
"Loss: 6.65625e-05\n",
"Loss: 6.64435e-05\n",
"Loss: 6.63598e-05\n",
"Loss: 6.63136e-05\n",
"Loss: 6.62713e-05\n",
"Loss: 6.62303e-05\n",
"Loss: 6.62000e-05\n",
"Loss: 6.60880e-05\n",
"Loss: 6.60085e-05\n",
"Loss: 6.59316e-05\n",
"Loss: 6.58786e-05\n",
"Loss: 6.57845e-05\n",
"Loss: 6.57218e-05\n",
"Loss: 6.59967e-05\n",
"Loss: 6.55504e-05\n",
"Loss: 6.54386e-05\n",
"Loss: 6.52412e-05\n",
"Loss: 6.51021e-05\n",
"Loss: 6.49368e-05\n",
"Loss: 6.48472e-05\n",
"Loss: 6.47904e-05\n",
"Loss: 6.46737e-05\n",
"Loss: 6.45030e-05\n",
"Loss: 6.45762e-05\n",
"Loss: 6.44556e-05\n",
"Loss: 6.43582e-05\n",
"Loss: 6.41848e-05\n",
"Loss: 6.40709e-05\n",
"Loss: 6.44950e-05\n",
"Loss: 6.40309e-05\n",
"Loss: 6.39361e-05\n",
"Loss: 6.38488e-05\n",
"Loss: 6.38057e-05\n",
"Loss: 6.37521e-05\n",
"Loss: 6.36473e-05\n",
"Loss: 6.34288e-05\n",
"Loss: 6.33856e-05\n",
"Loss: 6.32161e-05\n",
"Loss: 6.31734e-05\n",
"Loss: 6.31190e-05\n",
"Loss: 6.30741e-05\n",
"Loss: 6.39333e-05\n",
"Loss: 6.30609e-05\n",
"Loss: 6.30063e-05\n",
"Loss: 6.29168e-05\n",
"Loss: 6.27324e-05\n",
"Loss: 6.32073e-05\n",
"Loss: 6.26433e-05\n",
"Loss: 6.24998e-05\n",
"Loss: 6.23488e-05\n",
"Loss: 6.22462e-05\n",
"Loss: 6.20345e-05\n",
"Loss: 6.30866e-05\n",
"Loss: 6.19465e-05\n",
"Loss: 6.18454e-05\n",
"Loss: 6.16408e-05\n",
"Loss: 6.15741e-05\n",
"Loss: 6.14983e-05\n",
"Loss: 6.14169e-05\n",
"Loss: 6.13222e-05\n",
"Loss: 6.11920e-05\n",
"Loss: 6.12831e-05\n",
"Loss: 6.11257e-05\n",
"Loss: 6.10299e-05\n",
"Loss: 6.09661e-05\n",
"Loss: 6.09261e-05\n",
"Loss: 6.08487e-05\n",
"Loss: 6.07411e-05\n",
"Loss: 6.08348e-05\n",
"Loss: 6.06727e-05\n",
"Loss: 6.05357e-05\n",
"Loss: 6.04565e-05\n",
"Loss: 6.03301e-05\n",
"Loss: 6.02026e-05\n",
"Loss: 6.14938e-05\n",
"Loss: 6.01578e-05\n",
"Loss: 5.99724e-05\n",
"Loss: 5.98652e-05\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Loss: 5.96565e-05\n",
"Loss: 5.95097e-05\n",
"Loss: 5.93962e-05\n",
"Loss: 5.93322e-05\n",
"Loss: 5.92737e-05\n",
"Loss: 5.92399e-05\n",
"Loss: 5.91013e-05\n",
"Loss: 5.90183e-05\n",
"Loss: 5.89600e-05\n",
"Loss: 5.89474e-05\n",
"Loss: 5.88809e-05\n",
"Loss: 5.88445e-05\n",
"Loss: 5.87751e-05\n",
"Loss: 5.87169e-05\n",
"Loss: 5.86992e-05\n",
"Loss: 5.85973e-05\n",
"Loss: 5.87796e-05\n",
"Loss: 5.85902e-05\n",
"Loss: 5.85517e-05\n",
"Loss: 5.84647e-05\n",
"Loss: 5.82940e-05\n",
"Loss: 5.87135e-05\n",
"Loss: 5.82482e-05\n",
"Loss: 5.80953e-05\n",
"Loss: 5.78311e-05\n",
"Loss: 5.77668e-05\n",
"Loss: 5.76511e-05\n",
"Loss: 5.77133e-05\n",
"Loss: 5.75817e-05\n",
"Loss: 5.74752e-05\n",
"Loss: 5.73581e-05\n",
"Loss: 5.72567e-05\n",
"Loss: 5.71726e-05\n",
"Loss: 5.70163e-05\n",
"Loss: 5.69039e-05\n",
"Loss: 5.67593e-05\n",
"Loss: 5.66509e-05\n",
"Loss: 5.65375e-05\n",
"Loss: 5.69847e-05\n",
"Loss: 5.64701e-05\n",
"Loss: 5.64106e-05\n",
"Loss: 5.63487e-05\n",
"Loss: 5.63744e-05\n",
"Loss: 5.63305e-05\n",
"Loss: 5.62831e-05\n",
"Loss: 5.61144e-05\n",
"Loss: 5.60136e-05\n",
"Loss: 5.59296e-05\n",
"Loss: 5.58807e-05\n",
"Loss: 5.58328e-05\n",
"Loss: 5.58104e-05\n",
"Loss: 5.57698e-05\n",
"Loss: 5.57224e-05\n",
"Loss: 5.56552e-05\n",
"Loss: 5.62193e-05\n",
"Loss: 5.56164e-05\n",
"Loss: 5.54412e-05\n",
"Loss: 5.53919e-05\n",
"Loss: 5.51873e-05\n",
"Loss: 5.51099e-05\n",
"Loss: 5.50629e-05\n",
"Loss: 5.50162e-05\n",
"Loss: 5.49468e-05\n",
"Loss: 5.48383e-05\n",
"Loss: 6.02577e-05\n",
"Loss: 5.48211e-05\n",
"Loss: 5.47228e-05\n",
"Loss: 5.46529e-05\n",
"Loss: 5.45181e-05\n",
"Loss: 5.80076e-05\n",
"Loss: 5.44642e-05\n",
"Loss: 5.43170e-05\n",
"Loss: 5.41951e-05\n",
"Loss: 5.40691e-05\n",
"Loss: 5.39567e-05\n",
"Loss: 5.41590e-05\n",
"Loss: 5.39445e-05\n",
"Loss: 5.38784e-05\n",
"Loss: 5.37731e-05\n",
"Loss: 5.37219e-05\n",
"Loss: 5.36647e-05\n",
"Loss: 5.36331e-05\n",
"Loss: 5.36051e-05\n",
"Loss: 5.35031e-05\n",
"Loss: 5.34184e-05\n",
"Loss: 5.33216e-05\n",
"Loss: 5.33699e-05\n",
"Loss: 5.32607e-05\n",
"Loss: 5.31620e-05\n",
"Loss: 5.30838e-05\n",
"Loss: 5.29539e-05\n",
"Loss: 5.27079e-05\n",
"Loss: 5.24028e-05\n",
"Loss: 5.22543e-05\n",
"Loss: 5.21672e-05\n",
"Loss: 5.21123e-05\n",
"Loss: 5.19936e-05\n",
"Loss: 5.22176e-05\n",
"Loss: 5.19373e-05\n",
"Loss: 5.18368e-05\n",
"Loss: 5.18038e-05\n",
"Loss: 5.17485e-05\n",
"Loss: 5.17068e-05\n",
"Loss: 5.16415e-05\n",
"Loss: 5.15780e-05\n",
"Loss: 5.14824e-05\n",
"Loss: 5.14805e-05\n",
"Loss: 5.14279e-05\n",
"Loss: 5.13753e-05\n",
"Loss: 5.13129e-05\n",
"Loss: 5.12535e-05\n",
"Loss: 5.11860e-05\n",
"Loss: 5.11240e-05\n",
"Loss: 5.10958e-05\n",
"Loss: 5.10588e-05\n",
"Loss: 5.10077e-05\n",
"Loss: 5.09157e-05\n",
"Loss: 5.08626e-05\n",
"Loss: 5.08228e-05\n",
"Loss: 5.07623e-05\n",
"Loss: 5.07178e-05\n",
"Loss: 5.06622e-05\n",
"Loss: 5.06025e-05\n",
"Loss: 5.20056e-05\n",
"Loss: 5.05901e-05\n",
"Loss: 5.05495e-05\n",
"Loss: 5.05097e-05\n",
"Loss: 5.04707e-05\n",
"Loss: 5.04015e-05\n",
"Loss: 5.03105e-05\n",
"Loss: 5.02874e-05\n",
"Loss: 5.02712e-05\n",
"Loss: 5.02531e-05\n",
"Loss: 5.03484e-05\n",
"Loss: 5.02427e-05\n",
"Loss: 5.02196e-05\n",
"Loss: 5.01860e-05\n",
"Loss: 5.01682e-05\n",
"Loss: 5.02148e-05\n",
"Loss: 5.01590e-05\n",
"Loss: 5.01362e-05\n",
"Loss: 5.00994e-05\n",
"Loss: 5.00746e-05\n",
"Loss: 5.00378e-05\n",
"Loss: 5.00454e-05\n",
"Loss: 5.00114e-05\n",
"Loss: 4.99368e-05\n",
"Loss: 4.98699e-05\n",
"Loss: 4.97971e-05\n",
"Loss: 4.97498e-05\n",
"Loss: 4.96867e-05\n",
"Loss: 4.96411e-05\n",
"Loss: 4.95894e-05\n",
"Loss: 4.94801e-05\n",
"Loss: 4.94349e-05\n",
"Loss: 4.94983e-05\n",
"Loss: 4.94168e-05\n",
"Loss: 4.93946e-05\n",
"Loss: 4.93723e-05\n",
"Loss: 4.93412e-05\n",
"Loss: 4.92879e-05\n",
"Loss: 4.96364e-05\n",
"Loss: 4.92578e-05\n",
"Loss: 4.92071e-05\n",
"Loss: 4.90885e-05\n",
"Loss: 4.90420e-05\n",
"Loss: 4.89573e-05\n",
"Loss: 4.89149e-05\n",
"Loss: 4.88492e-05\n",
"Loss: 4.87872e-05\n",
"Loss: 4.87425e-05\n",
"Loss: 4.87003e-05\n",
"Loss: 4.86785e-05\n",
"Loss: 4.86599e-05\n",
"Loss: 4.86257e-05\n",
"Loss: 4.85835e-05\n",
"Loss: 4.90158e-05\n",
"Loss: 4.85612e-05\n",
"Loss: 4.84913e-05\n",
"Loss: 4.85227e-05\n",
"Loss: 4.84269e-05\n",
"Loss: 4.83541e-05\n",
"Loss: 4.82815e-05\n",
"Loss: 4.82546e-05\n",
"Loss: 4.82983e-05\n",
"Loss: 4.82352e-05\n",
"Loss: 4.82480e-05\n",
"Loss: 4.81736e-05\n",
"Loss: 4.81374e-05\n",
"Loss: 4.83849e-05\n",
"Loss: 4.81131e-05\n",
"Loss: 4.80688e-05\n",
"Loss: 4.80521e-05\n",
"Loss: 4.79753e-05\n",
"Loss: 4.79260e-05\n",
"Loss: 4.78919e-05\n",
"Loss: 4.77972e-05\n",
"Loss: 4.77665e-05\n",
"Loss: 4.77485e-05\n",
"Loss: 4.91256e-05\n",
"Loss: 4.77448e-05\n",
"Loss: 4.77397e-05\n",
"Loss: 4.77178e-05\n",
"Loss: 4.77087e-05\n",
"Loss: 4.76910e-05\n",
"Loss: 4.76624e-05\n",
"Loss: 4.75984e-05\n",
"Loss: 4.75526e-05\n",
"Loss: 4.74868e-05\n",
"Loss: 4.74497e-05\n",
"Loss: 4.73709e-05\n",
"Loss: 4.72646e-05\n",
"Loss: 4.71948e-05\n",
"Loss: 4.71371e-05\n",
"Loss: 4.70724e-05\n",
"Loss: 4.72082e-05\n",
"Loss: 4.70598e-05\n",
"Loss: 4.70322e-05\n",
"Loss: 4.70192e-05\n",
"Loss: 4.69844e-05\n",
"Loss: 4.69403e-05\n",
"Loss: 4.71091e-05\n",
"Loss: 4.69239e-05\n",
"Loss: 4.68587e-05\n",
"Loss: 4.68207e-05\n",
"Loss: 4.67910e-05\n",
"Loss: 4.67561e-05\n",
"Loss: 4.71161e-05\n",
"Loss: 4.67369e-05\n",
"Loss: 4.66758e-05\n",
"Loss: 4.66355e-05\n",
"Loss: 4.66623e-05\n",
"Loss: 4.66194e-05\n",
"Loss: 4.65942e-05\n",
"Loss: 4.65749e-05\n",
"Loss: 4.65508e-05\n",
"Loss: 4.65171e-05\n",
"Loss: 4.64930e-05\n",
"Loss: 4.64049e-05\n",
"Loss: 4.63753e-05\n",
"Loss: 4.63452e-05\n",
"Loss: 4.63112e-05\n",
"Loss: 4.64001e-05\n",
"Loss: 4.62860e-05\n",
"Loss: 4.62935e-05\n",
"Loss: 4.62476e-05\n",
"Loss: 4.62296e-05\n",
"Loss: 4.61908e-05\n",
"Loss: 4.61782e-05\n",
"Loss: 4.61590e-05\n",
"Loss: 4.61388e-05\n",
"Loss: 4.61120e-05\n",
"Loss: 4.60849e-05\n",
"Loss: 4.60576e-05\n",
"Loss: 5.32268e-05\n",
"Loss: 4.60422e-05\n",
"Loss: 4.60288e-05\n",
"Loss: 4.60096e-05\n",
"Loss: 4.59980e-05\n",
"Loss: 4.70385e-05\n",
"Loss: 4.59959e-05\n",
"Loss: 4.59776e-05\n",
"Loss: 4.59572e-05\n",
"Loss: 4.59284e-05\n",
"Loss: 4.59087e-05\n",
"Loss: 4.58806e-05\n",
"Loss: 4.63948e-05\n",
"Loss: 4.58675e-05\n",
"Loss: 4.58207e-05\n",
"Loss: 4.57925e-05\n",
"Loss: 4.57610e-05\n",
"Loss: 4.57097e-05\n",
"Loss: 4.56840e-05\n",
"Loss: 4.56550e-05\n",
"Loss: 4.56125e-05\n",
"Loss: 4.55807e-05\n",
"Loss: 4.55586e-05\n",
"Loss: 4.55504e-05\n",
"Loss: 4.55008e-05\n",
"Loss: 4.54333e-05\n",
"Loss: 4.54685e-05\n",
"Loss: 4.53845e-05\n",
"Loss: 4.52948e-05\n",
"Loss: 4.52537e-05\n",
"Loss: 4.52287e-05\n",
"Loss: 4.52100e-05\n",
"Loss: 4.51677e-05\n",
"Loss: 4.51460e-05\n",
"Loss: 4.51229e-05\n",
"Loss: 4.50895e-05\n",
"Loss: 4.51461e-05\n",
"Loss: 4.50702e-05\n",
"Loss: 4.50245e-05\n",
"Loss: 4.49718e-05\n",
"Loss: 4.49468e-05\n",
"Loss: 4.49070e-05\n",
"Loss: 4.48312e-05\n",
"Loss: 4.49560e-05\n",
"Loss: 4.48149e-05\n",
"Loss: 4.47764e-05\n",
"Loss: 4.47451e-05\n",
"Loss: 4.46966e-05\n",
"Loss: 4.46507e-05\n",
"Loss: 4.46049e-05\n",
"Loss: 4.46398e-05\n",
"Loss: 4.45904e-05\n",
"Loss: 4.45527e-05\n",
"Loss: 4.45105e-05\n",
"Loss: 4.44639e-05\n",
"Loss: 4.73903e-05\n",
"Loss: 4.44527e-05\n",
"Loss: 4.44221e-05\n",
"Loss: 4.44052e-05\n",
"Loss: 4.43839e-05\n",
"Loss: 4.43583e-05\n",
"Loss: 4.43290e-05\n",
"Loss: 4.42991e-05\n",
"Loss: 4.57364e-05\n",
"Loss: 4.42917e-05\n",
"Loss: 4.42749e-05\n",
"Loss: 4.42465e-05\n",
"Loss: 4.42268e-05\n",
"Loss: 4.41874e-05\n",
"Loss: 4.41448e-05\n",
"Loss: 4.41125e-05\n",
"Loss: 4.40881e-05\n",
"Loss: 4.40665e-05\n",
"Loss: 4.40463e-05\n",
"Loss: 4.40090e-05\n",
"Loss: 4.40196e-05\n",
"Loss: 4.39947e-05\n",
"Loss: 4.40070e-05\n",
"Loss: 4.39698e-05\n",
"Loss: 4.39583e-05\n",
"Loss: 4.39380e-05\n",
"Loss: 4.39215e-05\n",
"Loss: 4.38770e-05\n",
"Loss: 4.38285e-05\n",
"Loss: 4.37873e-05\n",
"Loss: 4.39424e-05\n",
"Loss: 4.37739e-05\n",
"Loss: 4.37466e-05\n",
"Loss: 4.37627e-05\n",
"Loss: 4.37340e-05\n",
"Loss: 4.37161e-05\n",
"Loss: 4.36990e-05\n",
"Loss: 4.36794e-05\n",
"Loss: 4.36601e-05\n",
"Loss: 4.36262e-05\n",
"Loss: 4.35851e-05\n",
"Loss: 4.35421e-05\n",
"Loss: 4.73394e-05\n",
"Loss: 4.35345e-05\n",
"Loss: 4.34980e-05\n",
"Loss: 4.34607e-05\n",
"Loss: 4.33921e-05\n",
"Loss: 4.33339e-05\n",
"Loss: 4.32947e-05\n",
"Loss: 4.33118e-05\n",
"Loss: 4.32566e-05\n",
"Loss: 4.32299e-05\n",
"Loss: 4.31892e-05\n",
"Loss: 4.31495e-05\n",
"Loss: 4.30996e-05\n",
"Loss: 4.30425e-05\n",
"Loss: 4.30521e-05\n",
"Loss: 4.30150e-05\n",
"Loss: 4.29853e-05\n",
"Loss: 4.29492e-05\n",
"Loss: 4.29124e-05\n",
"Loss: 4.28582e-05\n",
"Loss: 4.28312e-05\n",
"Loss: 4.27688e-05\n",
"Loss: 4.27219e-05\n",
"Loss: 4.26817e-05\n",
"Loss: 4.26493e-05\n",
"Loss: 4.26928e-05\n",
"Loss: 4.26382e-05\n",
"Loss: 4.26051e-05\n",
"Loss: 4.25889e-05\n",
"Loss: 4.25620e-05\n",
"Loss: 4.25404e-05\n",
"Loss: 4.24973e-05\n",
"Loss: 4.24542e-05\n",
"Loss: 4.24196e-05\n",
"Loss: 4.23981e-05\n",
"Loss: 4.23792e-05\n",
"Loss: 4.23620e-05\n",
"Loss: 4.23172e-05\n",
"Loss: 4.24040e-05\n",
"Loss: 4.22987e-05\n",
"Loss: 4.22471e-05\n",
"Loss: 4.22002e-05\n",
"Loss: 4.21765e-05\n",
"Loss: 4.21526e-05\n",
"Loss: 4.21165e-05\n",
"Loss: 4.20739e-05\n",
"Loss: 4.20008e-05\n",
"Loss: 4.19677e-05\n",
"Loss: 4.19380e-05\n",
"Loss: 4.19217e-05\n",
"Loss: 4.19072e-05\n",
"Loss: 4.19361e-05\n",
"Loss: 4.18841e-05\n",
"Loss: 4.18694e-05\n",
"Loss: 4.18420e-05\n",
"Loss: 4.18067e-05\n",
"Loss: 4.17529e-05\n",
"Loss: 4.17716e-05\n",
"Loss: 4.17379e-05\n",
"Loss: 4.17003e-05\n",
"Loss: 4.16738e-05\n",
"Loss: 4.16462e-05\n",
"Loss: 4.15868e-05\n",
"Loss: 4.15413e-05\n",
"Loss: 4.16398e-05\n",
"Loss: 4.15127e-05\n",
"Loss: 4.14761e-05\n",
"Loss: 4.14406e-05\n",
"Loss: 4.14070e-05\n",
"Loss: 4.13505e-05\n",
"Loss: 4.12549e-05\n",
"Loss: 4.12860e-05\n",
"Loss: 4.12231e-05\n",
"Loss: 4.11676e-05\n",
"Loss: 4.11503e-05\n",
"Loss: 4.11352e-05\n",
"Loss: 4.11143e-05\n",
"Loss: 4.10798e-05\n",
"Loss: 4.10709e-05\n",
"Loss: 4.10385e-05\n",
"Loss: 4.10313e-05\n",
"Loss: 4.10078e-05\n",
"Loss: 4.11693e-05\n",
"Loss: 4.10022e-05\n",
"Loss: 4.09831e-05\n",
"Loss: 4.09548e-05\n",
"Loss: 4.09300e-05\n",
"Loss: 4.08981e-05\n",
"Loss: 4.08147e-05\n",
"Loss: 4.07529e-05\n",
"Loss: 4.07103e-05\n",
"Loss: 4.16731e-05\n",
"Loss: 4.07009e-05\n",
"Loss: 4.06948e-05\n",
"Loss: 4.06650e-05\n",
"Loss: 4.06403e-05\n",
"Loss: 4.05907e-05\n",
"Loss: 4.05346e-05\n",
"Loss: 4.05309e-05\n",
"Loss: 4.04848e-05\n",
"Loss: 4.04094e-05\n",
"Loss: 4.03576e-05\n",
"Loss: 4.02864e-05\n",
"Loss: 4.02241e-05\n",
"Loss: 4.02201e-05\n",
"Loss: 4.01991e-05\n",
"Loss: 4.01728e-05\n",
"Loss: 4.01468e-05\n",
"Loss: 4.01607e-05\n",
"Loss: 4.01275e-05\n",
"Loss: 4.00977e-05\n",
"Loss: 4.00616e-05\n",
"Loss: 4.00338e-05\n",
"Loss: 4.00115e-05\n",
"Loss: 3.99881e-05\n",
"Loss: 3.99626e-05\n",
"Loss: 3.99211e-05\n",
"Loss: 4.04306e-05\n",
"Loss: 3.99049e-05\n",
"Loss: 3.98547e-05\n",
"Loss: 3.98310e-05\n",
"Loss: 3.98027e-05\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Loss: 3.97535e-05\n",
"Loss: 3.96652e-05\n",
"Loss: 3.97073e-05\n",
"Loss: 3.96302e-05\n",
"Loss: 3.95414e-05\n",
"Loss: 3.95022e-05\n",
"Loss: 3.94740e-05\n",
"Loss: 3.94475e-05\n",
"Loss: 3.93866e-05\n",
"Loss: 3.93422e-05\n",
"Loss: 4.05904e-05\n",
"Loss: 3.93277e-05\n",
"Loss: 3.93017e-05\n",
"Loss: 3.92849e-05\n",
"Loss: 3.92753e-05\n",
"Loss: 3.92497e-05\n",
"Loss: 3.92826e-05\n",
"Loss: 3.92371e-05\n",
"Loss: 3.92063e-05\n",
"Loss: 3.91755e-05\n",
"Loss: 3.91651e-05\n",
"Loss: 3.91568e-05\n",
"Loss: 3.91505e-05\n",
"Loss: 3.91426e-05\n",
"Loss: 3.91247e-05\n",
"Loss: 3.91118e-05\n",
"Loss: 3.90689e-05\n",
"Loss: 3.90447e-05\n",
"Loss: 3.90478e-05\n",
"Loss: 3.90341e-05\n",
"Loss: 3.90159e-05\n",
"Loss: 3.89828e-05\n",
"Loss: 3.89754e-05\n",
"Loss: 3.89436e-05\n",
"Loss: 3.89221e-05\n",
"Loss: 3.88698e-05\n",
"Loss: 3.88264e-05\n",
"Loss: 3.87885e-05\n",
"Loss: 3.87672e-05\n",
"Loss: 3.87448e-05\n",
"Loss: 3.87250e-05\n",
"Loss: 3.87030e-05\n",
"Loss: 3.86839e-05\n",
"Loss: 3.86701e-05\n",
"Loss: 3.96607e-05\n",
"Loss: 3.86656e-05\n",
"Loss: 3.86467e-05\n",
"Loss: 3.98402e-05\n",
"Loss: 3.86414e-05\n",
"Loss: 3.86242e-05\n",
"Loss: 3.85821e-05\n",
"Loss: 3.85358e-05\n",
"Loss: 3.84926e-05\n",
"Loss: 3.84245e-05\n",
"Loss: 3.85463e-05\n",
"Loss: 3.84027e-05\n",
"Loss: 3.83578e-05\n",
"Loss: 3.82940e-05\n",
"Loss: 3.82666e-05\n",
"Loss: 3.82392e-05\n",
"Loss: 3.81984e-05\n",
"Loss: 3.81480e-05\n",
"Loss: 3.80748e-05\n",
"Loss: 3.80252e-05\n",
"Loss: 3.79639e-05\n",
"Loss: 3.79261e-05\n",
"Loss: 3.78709e-05\n",
"Loss: 3.78497e-05\n",
"Loss: 3.78270e-05\n",
"Loss: 3.78103e-05\n",
"Loss: 3.77994e-05\n",
"Loss: 3.77855e-05\n",
"Loss: 3.77666e-05\n",
"Loss: 3.77731e-05\n",
"Loss: 3.77595e-05\n",
"Loss: 3.77504e-05\n",
"Loss: 3.77443e-05\n",
"Loss: 3.77404e-05\n",
"Loss: 3.77309e-05\n",
"Loss: 3.76926e-05\n",
"Loss: 3.97181e-05\n",
"Loss: 3.76738e-05\n",
"Loss: 3.76367e-05\n",
"Loss: 3.76205e-05\n",
"Loss: 3.76111e-05\n",
"Loss: 3.76485e-05\n",
"Loss: 3.76049e-05\n",
"Loss: 3.75889e-05\n",
"Loss: 3.75462e-05\n",
"Loss: 3.75262e-05\n",
"Loss: 3.75100e-05\n",
"Loss: 3.74968e-05\n",
"Loss: 3.74698e-05\n",
"Loss: 3.74144e-05\n",
"Loss: 3.73610e-05\n",
"Loss: 3.73253e-05\n",
"Loss: 3.77394e-05\n",
"Loss: 3.73161e-05\n",
"Loss: 3.72914e-05\n",
"Loss: 3.72556e-05\n",
"Loss: 3.72306e-05\n",
"Loss: 3.72010e-05\n",
"Loss: 3.71627e-05\n",
"Loss: 3.71478e-05\n",
"Loss: 3.74840e-05\n",
"Loss: 3.71373e-05\n",
"Loss: 3.71241e-05\n",
"Loss: 3.71076e-05\n",
"Loss: 3.70912e-05\n",
"Loss: 3.70595e-05\n",
"Loss: 3.70354e-05\n",
"Loss: 3.70107e-05\n",
"Loss: 3.69936e-05\n",
"Loss: 3.69793e-05\n",
"Loss: 3.69628e-05\n",
"Loss: 3.69397e-05\n",
"Loss: 3.69121e-05\n",
"Loss: 3.68832e-05\n",
"Loss: 3.68396e-05\n",
"Loss: 3.68141e-05\n",
"Loss: 3.67877e-05\n",
"Loss: 3.67526e-05\n",
"Loss: 3.67403e-05\n",
"Loss: 3.67253e-05\n",
"Loss: 3.73350e-05\n",
"Loss: 3.67216e-05\n",
"Loss: 3.67062e-05\n",
"Loss: 3.66885e-05\n",
"Loss: 3.66739e-05\n",
"Loss: 3.66479e-05\n",
"Loss: 3.77446e-05\n",
"Loss: 3.66443e-05\n",
"Loss: 3.66183e-05\n",
"Loss: 3.66154e-05\n",
"Loss: 3.65895e-05\n",
"Loss: 3.65787e-05\n",
"Loss: 3.65678e-05\n",
"Loss: 3.65487e-05\n",
"Loss: 3.65367e-05\n",
"Loss: 3.65212e-05\n",
"Loss: 3.64952e-05\n",
"Loss: 3.64855e-05\n",
"Loss: 3.64729e-05\n",
"Loss: 3.64639e-05\n",
"Loss: 3.64529e-05\n",
"Loss: 3.64450e-05\n",
"Loss: 3.64320e-05\n",
"Loss: 3.64234e-05\n",
"Loss: 3.63997e-05\n",
"Loss: 3.63711e-05\n",
"Loss: 3.63450e-05\n",
"Loss: 3.63231e-05\n",
"Loss: 3.64291e-05\n",
"Loss: 3.63158e-05\n",
"Loss: 3.62979e-05\n",
"Loss: 3.62701e-05\n",
"Loss: 3.62584e-05\n",
"Loss: 3.62339e-05\n",
"Loss: 3.62245e-05\n",
"Loss: 3.62139e-05\n",
"Loss: 3.61967e-05\n",
"Loss: 3.61743e-05\n",
"Loss: 3.61303e-05\n",
"Loss: 3.60991e-05\n",
"Loss: 3.60647e-05\n",
"Loss: 3.60394e-05\n",
"Loss: 3.60122e-05\n",
"Loss: 3.61833e-05\n",
"Loss: 3.60068e-05\n",
"Loss: 3.59950e-05\n",
"Loss: 3.59705e-05\n",
"Loss: 3.59573e-05\n",
"Loss: 3.59448e-05\n",
"Loss: 3.59157e-05\n",
"Loss: 3.58865e-05\n",
"Loss: 3.58614e-05\n",
"Loss: 3.58417e-05\n",
"Loss: 3.58331e-05\n",
"Loss: 3.58181e-05\n",
"Loss: 3.57956e-05\n",
"Loss: 3.57654e-05\n",
"Loss: 3.57392e-05\n",
"Loss: 3.57242e-05\n",
"Loss: 3.57000e-05\n",
"Loss: 3.57418e-05\n",
"Loss: 3.56973e-05\n",
"Loss: 3.56843e-05\n",
"Loss: 3.56546e-05\n",
"Loss: 3.56343e-05\n",
"Loss: 3.56099e-05\n",
"Loss: 3.56097e-05\n",
"Loss: 3.56008e-05\n",
"Loss: 3.55834e-05\n",
"Loss: 3.55675e-05\n",
"Loss: 3.55501e-05\n",
"Loss: 3.55338e-05\n",
"Loss: 3.55108e-05\n",
"Loss: 3.54738e-05\n",
"Loss: 3.55932e-05\n",
"Loss: 3.54640e-05\n",
"Loss: 3.54457e-05\n",
"Loss: 3.54706e-05\n",
"Loss: 3.54383e-05\n",
"Loss: 3.54290e-05\n",
"Loss: 3.54116e-05\n",
"Loss: 3.53900e-05\n",
"Loss: 3.53575e-05\n",
"Loss: 3.53248e-05\n",
"Loss: 3.53020e-05\n",
"Loss: 3.52762e-05\n",
"Loss: 3.52468e-05\n",
"Loss: 3.52267e-05\n",
"Loss: 3.51970e-05\n",
"Loss: 3.52717e-05\n",
"Loss: 3.51841e-05\n",
"Loss: 3.51513e-05\n",
"Loss: 3.51408e-05\n",
"Loss: 3.51244e-05\n",
"Loss: 3.51069e-05\n",
"Loss: 3.50905e-05\n",
"Loss: 3.50674e-05\n",
"Loss: 3.50500e-05\n",
"Loss: 3.50391e-05\n",
"Loss: 3.50235e-05\n",
"Loss: 3.50063e-05\n",
"Loss: 3.56901e-05\n",
"Loss: 3.50036e-05\n",
"Loss: 3.49875e-05\n",
"Loss: 3.49642e-05\n",
"Loss: 3.49364e-05\n",
"Loss: 3.49336e-05\n",
"Loss: 3.49219e-05\n",
"Loss: 3.48962e-05\n",
"Loss: 3.48634e-05\n",
"Loss: 3.57510e-05\n",
"Loss: 3.48586e-05\n",
"Loss: 3.48480e-05\n",
"Loss: 3.48293e-05\n",
"Loss: 3.48024e-05\n",
"Loss: 3.47791e-05\n",
"Loss: 3.47604e-05\n",
"Loss: 3.47405e-05\n",
"Loss: 3.47207e-05\n",
"Loss: 3.46955e-05\n",
"Loss: 3.46799e-05\n",
"Loss: 3.46632e-05\n",
"Loss: 3.46364e-05\n",
"Loss: 3.46026e-05\n",
"Loss: 3.51976e-05\n",
"Loss: 3.45933e-05\n",
"Loss: 3.45651e-05\n",
"Loss: 3.45528e-05\n",
"Loss: 3.45449e-05\n",
"Loss: 3.45355e-05\n",
"Loss: 3.45272e-05\n",
"Loss: 3.45077e-05\n",
"Loss: 3.45233e-05\n",
"Loss: 3.44978e-05\n",
"Loss: 3.44682e-05\n",
"Loss: 3.44482e-05\n",
"Loss: 3.44301e-05\n",
"Loss: 3.44199e-05\n",
"Loss: 3.44115e-05\n",
"Loss: 3.44080e-05\n",
"Loss: 3.44032e-05\n",
"Loss: 3.43959e-05\n",
"Loss: 3.43739e-05\n",
"Loss: 3.43909e-05\n",
"Loss: 3.43634e-05\n",
"Loss: 3.43478e-05\n",
"Loss: 3.43205e-05\n",
"Loss: 3.42755e-05\n",
"Loss: 3.42290e-05\n",
"Loss: 3.41990e-05\n",
"Loss: 3.41899e-05\n",
"Loss: 3.41831e-05\n",
"Loss: 3.41787e-05\n",
"Loss: 3.41679e-05\n",
"Loss: 3.41544e-05\n",
"Loss: 3.41297e-05\n",
"Loss: 3.41227e-05\n",
"Loss: 3.40888e-05\n",
"Loss: 3.40758e-05\n",
"Loss: 3.40525e-05\n",
"Loss: 3.40264e-05\n",
"Loss: 3.39904e-05\n",
"Loss: 3.39589e-05\n",
"Loss: 3.39325e-05\n",
"Loss: 3.39143e-05\n",
"Loss: 3.38828e-05\n",
"Loss: 3.39537e-05\n",
"Loss: 3.38781e-05\n",
"Loss: 3.38601e-05\n",
"Loss: 3.38385e-05\n",
"Loss: 3.38201e-05\n",
"Loss: 3.38332e-05\n",
"Loss: 3.38139e-05\n",
"Loss: 3.38015e-05\n",
"Loss: 3.37950e-05\n",
"Loss: 3.37781e-05\n",
"Loss: 3.37630e-05\n",
"Loss: 3.37515e-05\n",
"Loss: 3.37333e-05\n",
"Loss: 3.37225e-05\n",
"Loss: 3.37139e-05\n",
"Loss: 3.37699e-05\n",
"Loss: 3.37076e-05\n",
"Loss: 3.36948e-05\n",
"Loss: 3.36842e-05\n",
"Loss: 3.36716e-05\n",
"Loss: 3.36611e-05\n",
"Loss: 3.36275e-05\n",
"Loss: 3.41207e-05\n",
"Loss: 3.36249e-05\n",
"Loss: 3.36127e-05\n",
"Loss: 3.36088e-05\n",
"Loss: 3.35941e-05\n",
"Loss: 3.35899e-05\n",
"Loss: 3.42863e-05\n",
"Loss: 3.35875e-05\n",
"Loss: 3.35809e-05\n",
"Loss: 3.35770e-05\n",
"Loss: 3.35689e-05\n",
"Loss: 3.35608e-05\n",
"Loss: 3.35475e-05\n",
"Loss: 3.35194e-05\n",
"Loss: 3.35178e-05\n",
"Loss: 3.35030e-05\n",
"Loss: 3.34772e-05\n",
"Loss: 3.34673e-05\n",
"Loss: 3.34494e-05\n",
"Loss: 3.34343e-05\n",
"Loss: 3.34138e-05\n",
"Loss: 3.34154e-05\n",
"Loss: 3.34038e-05\n",
"Loss: 3.33825e-05\n",
"Loss: 3.33659e-05\n",
"Loss: 3.33384e-05\n",
"Loss: 3.33388e-05\n",
"Loss: 3.33231e-05\n",
"Loss: 3.33022e-05\n",
"Loss: 3.32751e-05\n",
"Loss: 3.32777e-05\n",
"Loss: 3.32614e-05\n",
"Loss: 3.32445e-05\n",
"Loss: 3.32224e-05\n",
"Loss: 3.32087e-05\n",
"Loss: 3.32015e-05\n",
"Loss: 3.31878e-05\n",
"Loss: 3.31825e-05\n",
"Loss: 3.31714e-05\n",
"Loss: 3.31651e-05\n",
"Loss: 3.31494e-05\n",
"Loss: 3.31375e-05\n",
"Loss: 3.31284e-05\n",
"Loss: 3.31223e-05\n",
"Loss: 3.31114e-05\n",
"Loss: 3.31038e-05\n",
"Loss: 3.30879e-05\n",
"Loss: 3.30821e-05\n",
"Loss: 3.32294e-05\n",
"Loss: 3.30790e-05\n",
"Loss: 3.30734e-05\n",
"Loss: 3.30611e-05\n",
"Loss: 3.30497e-05\n",
"Loss: 3.30606e-05\n",
"Loss: 3.30468e-05\n",
"Loss: 3.30397e-05\n",
"Loss: 3.30358e-05\n",
"Loss: 3.30306e-05\n",
"Loss: 3.30193e-05\n",
"Loss: 3.29971e-05\n",
"Loss: 3.30413e-05\n",
"Loss: 3.29906e-05\n",
"Loss: 3.29762e-05\n",
"Loss: 3.29733e-05\n",
"Loss: 3.29643e-05\n",
"Loss: 3.29591e-05\n",
"Loss: 3.29486e-05\n",
"Loss: 3.29486e-05\n",
"Loss: 3.29438e-05\n",
"Loss: 3.29342e-05\n",
"Loss: 3.29292e-05\n",
"Loss: 3.29206e-05\n",
"Loss: 3.29105e-05\n",
"Loss: 3.28822e-05\n",
"Loss: 3.32475e-05\n",
"Loss: 3.28781e-05\n",
"Loss: 3.28566e-05\n",
"Loss: 3.28371e-05\n",
"Loss: 3.28314e-05\n",
"Loss: 3.28065e-05\n",
"Loss: 3.27965e-05\n",
"Loss: 3.27865e-05\n",
"Loss: 3.27832e-05\n",
"Loss: 3.27734e-05\n",
"Loss: 3.27674e-05\n",
"Loss: 3.27623e-05\n",
"Loss: 3.27573e-05\n",
"Loss: 3.27442e-05\n",
"Loss: 3.28234e-05\n",
"Loss: 3.27394e-05\n",
"Loss: 3.27201e-05\n",
"Loss: 3.27042e-05\n",
"Loss: 3.26892e-05\n",
"Loss: 3.27468e-05\n",
"Loss: 3.26863e-05\n",
"Loss: 3.26788e-05\n",
"Loss: 3.26697e-05\n",
"Loss: 3.26616e-05\n",
"Loss: 3.26555e-05\n",
"Loss: 3.27092e-05\n",
"Loss: 3.26529e-05\n",
"Loss: 3.26474e-05\n",
"Loss: 3.26404e-05\n",
"Loss: 3.26335e-05\n",
"Loss: 3.26252e-05\n",
"Loss: 3.26268e-05\n",
"Loss: 3.26212e-05\n",
"Loss: 3.26132e-05\n",
"Loss: 3.26072e-05\n",
"Loss: 3.26012e-05\n",
"Loss: 3.25937e-05\n",
"Loss: 3.26266e-05\n",
"Loss: 3.25859e-05\n",
"Loss: 3.25803e-05\n",
"Loss: 3.25750e-05\n",
"Loss: 3.25724e-05\n",
"Loss: 3.25624e-05\n",
"Loss: 3.25523e-05\n",
"Loss: 3.25455e-05\n",
"Loss: 3.25372e-05\n",
"Loss: 3.25311e-05\n",
"Loss: 3.25284e-05\n",
"Loss: 3.25201e-05\n",
"Loss: 3.25165e-05\n",
"Loss: 3.25093e-05\n",
"Loss: 3.24975e-05\n",
"Loss: 3.24895e-05\n",
"Loss: 3.24715e-05\n",
"Loss: 3.24642e-05\n",
"Loss: 3.24568e-05\n",
"Loss: 3.24502e-05\n",
"Loss: 3.24469e-05\n",
"Loss: 3.24438e-05\n",
"Loss: 3.24367e-05\n",
"Loss: 3.24214e-05\n",
"Loss: 3.25513e-05\n",
"Loss: 3.24180e-05\n",
"Loss: 3.24070e-05\n",
"Loss: 3.24007e-05\n",
"Loss: 3.23962e-05\n",
"Loss: 3.23880e-05\n",
"Loss: 3.23769e-05\n",
"Loss: 3.24343e-05\n",
"Loss: 3.23725e-05\n",
"Loss: 3.23615e-05\n",
"Loss: 3.23533e-05\n",
"Loss: 3.23442e-05\n",
"Loss: 3.23285e-05\n",
"Loss: 3.23015e-05\n",
"Loss: 3.22787e-05\n",
"Loss: 3.22881e-05\n",
"Loss: 3.22729e-05\n",
"Loss: 3.22562e-05\n",
"Loss: 3.22494e-05\n",
"Loss: 3.22363e-05\n",
"Loss: 3.22273e-05\n",
"Loss: 3.21982e-05\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Loss: 3.25350e-05\n",
"Loss: 3.21938e-05\n",
"Loss: 3.21684e-05\n",
"Loss: 3.21516e-05\n",
"Loss: 3.21372e-05\n",
"Loss: 3.21261e-05\n",
"Loss: 3.22433e-05\n",
"Loss: 3.21215e-05\n",
"Loss: 3.21069e-05\n",
"Loss: 3.20868e-05\n",
"Loss: 3.20604e-05\n",
"Loss: 3.20649e-05\n",
"Loss: 3.20508e-05\n",
"Loss: 3.20338e-05\n",
"Loss: 3.20241e-05\n",
"Loss: 3.20048e-05\n",
"Loss: 3.20153e-05\n",
"Loss: 3.19918e-05\n",
"Loss: 3.19803e-05\n",
"Loss: 3.19693e-05\n",
"Loss: 3.19550e-05\n",
"Loss: 3.19383e-05\n",
"Loss: 3.19399e-05\n",
"Loss: 3.19274e-05\n",
"Loss: 3.19099e-05\n",
"Loss: 3.18966e-05\n",
"Loss: 3.18897e-05\n",
"Loss: 3.18806e-05\n",
"Loss: 3.18670e-05\n",
"Loss: 3.18890e-05\n",
"Loss: 3.18594e-05\n",
"Loss: 3.18470e-05\n",
"Loss: 3.18283e-05\n",
"Loss: 3.17839e-05\n",
"Loss: 3.18265e-05\n",
"Loss: 3.17710e-05\n",
"Loss: 3.17408e-05\n",
"Loss: 3.17231e-05\n",
"Loss: 3.17040e-05\n",
"Loss: 3.17010e-05\n",
"Loss: 3.16782e-05\n",
"Loss: 3.16695e-05\n",
"Loss: 3.16628e-05\n",
"Loss: 3.16488e-05\n",
"Loss: 3.16388e-05\n",
"Loss: 3.16149e-05\n",
"Loss: 3.16447e-05\n",
"Loss: 3.16051e-05\n",
"Loss: 3.15946e-05\n",
"Loss: 3.15868e-05\n",
"Loss: 3.15758e-05\n",
"Loss: 3.15969e-05\n",
"Loss: 3.15674e-05\n",
"Loss: 3.15501e-05\n",
"Loss: 3.15358e-05\n",
"Loss: 3.15373e-05\n",
"Loss: 3.15283e-05\n",
"Loss: 3.15148e-05\n",
"Loss: 3.15063e-05\n",
"Loss: 3.14992e-05\n",
"Loss: 3.14862e-05\n",
"Loss: 3.15345e-05\n",
"Loss: 3.14819e-05\n",
"Loss: 3.14680e-05\n",
"Loss: 3.14563e-05\n",
"Loss: 3.14376e-05\n",
"Loss: 3.14216e-05\n",
"Loss: 3.14072e-05\n",
"Loss: 3.27648e-05\n",
"Loss: 3.14021e-05\n",
"Loss: 3.13857e-05\n",
"Loss: 3.13686e-05\n",
"Loss: 3.13582e-05\n",
"Loss: 3.13525e-05\n",
"Loss: 3.13622e-05\n",
"Loss: 3.13483e-05\n",
"Loss: 3.13410e-05\n",
"Loss: 3.13303e-05\n",
"Loss: 3.13250e-05\n",
"Loss: 3.13188e-05\n",
"Loss: 3.13164e-05\n",
"Loss: 3.13144e-05\n",
"Loss: 3.13119e-05\n",
"Loss: 3.13281e-05\n",
"Loss: 3.13101e-05\n",
"Loss: 3.13033e-05\n",
"Loss: 3.12986e-05\n",
"Loss: 3.12932e-05\n",
"Loss: 3.12873e-05\n",
"Loss: 3.12716e-05\n",
"Loss: 3.20894e-05\n",
"Loss: 3.12694e-05\n",
"Loss: 3.12604e-05\n",
"Loss: 3.12537e-05\n",
"Loss: 3.12484e-05\n",
"Loss: 3.12423e-05\n",
"Loss: 3.12324e-05\n",
"Loss: 3.12202e-05\n",
"Loss: 3.12064e-05\n",
"Loss: 3.12120e-05\n",
"Loss: 3.12007e-05\n",
"Loss: 3.11857e-05\n",
"Loss: 3.11674e-05\n",
"Loss: 3.11519e-05\n",
"Loss: 3.11445e-05\n",
"Loss: 3.11341e-05\n",
"Loss: 3.11264e-05\n",
"Loss: 3.11162e-05\n",
"Loss: 3.11129e-05\n",
"Loss: 3.11068e-05\n",
"Loss: 3.10953e-05\n",
"Loss: 3.20411e-05\n",
"Loss: 3.10929e-05\n",
"Loss: 3.10832e-05\n",
"Loss: 3.11866e-05\n",
"Loss: 3.10820e-05\n",
"Loss: 3.10744e-05\n",
"Loss: 3.10706e-05\n",
"Loss: 3.10653e-05\n",
"Loss: 3.10577e-05\n",
"Loss: 3.10437e-05\n",
"Loss: 3.10325e-05\n",
"Loss: 3.10133e-05\n",
"Loss: 3.10023e-05\n",
"Loss: 3.09809e-05\n",
"Loss: 3.09658e-05\n",
"Loss: 3.09546e-05\n",
"Loss: 3.10138e-05\n",
"Loss: 3.09469e-05\n",
"Loss: 3.09320e-05\n",
"Loss: 3.09200e-05\n",
"Loss: 3.09033e-05\n",
"Loss: 3.08900e-05\n",
"Loss: 3.08737e-05\n",
"Loss: 3.09062e-05\n",
"Loss: 3.08650e-05\n",
"Loss: 3.08540e-05\n",
"Loss: 3.08460e-05\n",
"Loss: 3.08295e-05\n",
"Loss: 3.08187e-05\n",
"Loss: 3.08096e-05\n",
"Loss: 3.07788e-05\n",
"Loss: 3.07744e-05\n",
"Loss: 3.07575e-05\n",
"Loss: 3.07453e-05\n",
"Loss: 3.07297e-05\n",
"Loss: 3.07111e-05\n",
"Loss: 3.12089e-05\n",
"Loss: 3.07094e-05\n",
"Loss: 3.06957e-05\n",
"Loss: 3.06896e-05\n",
"Loss: 3.06871e-05\n",
"Loss: 3.06814e-05\n",
"Loss: 3.06736e-05\n",
"Loss: 3.06598e-05\n",
"Loss: 3.06457e-05\n",
"Loss: 3.07979e-05\n",
"Loss: 3.06427e-05\n",
"Loss: 3.06246e-05\n",
"Loss: 3.06189e-05\n",
"Loss: 3.06138e-05\n",
"Loss: 3.06128e-05\n",
"Loss: 3.05988e-05\n",
"Loss: 3.05871e-05\n",
"Loss: 3.05688e-05\n",
"Loss: 3.05548e-05\n",
"Loss: 3.05455e-05\n",
"Loss: 3.05389e-05\n",
"Loss: 3.05341e-05\n",
"Loss: 3.05299e-05\n",
"Loss: 3.05429e-05\n",
"Loss: 3.05292e-05\n",
"Loss: 3.05276e-05\n",
"Loss: 3.05243e-05\n",
"Loss: 3.05191e-05\n",
"Loss: 3.05138e-05\n",
"Loss: 3.05074e-05\n",
"Loss: 3.04972e-05\n",
"Loss: 3.05042e-05\n",
"Loss: 3.04915e-05\n",
"Loss: 3.04825e-05\n",
"Loss: 3.04665e-05\n",
"Loss: 3.04629e-05\n",
"Loss: 3.04553e-05\n",
"Loss: 3.04447e-05\n",
"Loss: 3.06547e-05\n",
"Loss: 3.04408e-05\n",
"Loss: 3.04267e-05\n",
"Loss: 3.04208e-05\n",
"Loss: 3.04004e-05\n",
"Loss: 3.04210e-05\n",
"Loss: 3.03907e-05\n",
"Loss: 3.03821e-05\n",
"Loss: 3.03799e-05\n",
"Loss: 3.03750e-05\n",
"Loss: 3.03719e-05\n",
"Loss: 3.03627e-05\n",
"Loss: 3.03581e-05\n",
"Loss: 3.03575e-05\n",
"Loss: 3.03531e-05\n",
"Loss: 3.03474e-05\n",
"Loss: 3.03361e-05\n",
"Loss: 3.03305e-05\n",
"Loss: 3.03216e-05\n",
"Loss: 3.03177e-05\n",
"Loss: 3.03105e-05\n",
"Loss: 3.03051e-05\n",
"Loss: 3.02932e-05\n",
"Loss: 3.03725e-05\n",
"Loss: 3.02883e-05\n",
"Loss: 3.02837e-05\n",
"Loss: 3.02755e-05\n",
"Loss: 3.02708e-05\n",
"Loss: 3.02522e-05\n",
"Loss: 3.02268e-05\n",
"Loss: 3.02016e-05\n",
"Loss: 3.01843e-05\n",
"Loss: 3.01703e-05\n",
"Loss: 3.01520e-05\n",
"Loss: 3.01438e-05\n",
"Loss: 3.01379e-05\n",
"Loss: 3.01321e-05\n",
"Loss: 3.01234e-05\n",
"Loss: 3.01164e-05\n",
"Loss: 3.01110e-05\n",
"Loss: 3.01082e-05\n",
"Loss: 3.01043e-05\n",
"Loss: 3.00989e-05\n",
"Loss: 3.00920e-05\n",
"Loss: 3.00876e-05\n",
"Loss: 3.00837e-05\n",
"Loss: 3.00766e-05\n",
"Loss: 3.00635e-05\n",
"Loss: 3.00442e-05\n",
"Loss: 3.00351e-05\n",
"Loss: 3.00296e-05\n",
"Loss: 3.00152e-05\n",
"Loss: 3.00113e-05\n",
"Loss: 3.01039e-05\n",
"Loss: 2.99965e-05\n",
"Loss: 2.99867e-05\n",
"Loss: 2.99751e-05\n",
"Loss: 2.99671e-05\n",
"Loss: 2.99519e-05\n",
"Loss: 2.99445e-05\n",
"Loss: 2.99378e-05\n",
"Loss: 2.99332e-05\n",
"Loss: 2.99282e-05\n",
"Loss: 2.99409e-05\n",
"Loss: 2.99260e-05\n",
"Loss: 2.99188e-05\n",
"Loss: 2.99094e-05\n",
"Loss: 2.98960e-05\n",
"Loss: 2.98838e-05\n",
"Loss: 2.98777e-05\n",
"Loss: 2.98647e-05\n",
"Loss: 2.98612e-05\n",
"Loss: 2.98501e-05\n",
"Loss: 2.98451e-05\n",
"Loss: 2.98607e-05\n",
"Loss: 2.98381e-05\n",
"Loss: 2.98172e-05\n",
"Loss: 2.98074e-05\n",
"Loss: 2.97827e-05\n",
"Loss: 2.97696e-05\n",
"Loss: 2.97577e-05\n",
"Loss: 2.98097e-05\n",
"Loss: 2.97471e-05\n",
"Loss: 2.97266e-05\n",
"Loss: 2.97232e-05\n",
"Loss: 2.97107e-05\n",
"Loss: 2.97070e-05\n",
"Loss: 2.97024e-05\n",
"Loss: 2.96983e-05\n",
"Loss: 2.96922e-05\n",
"Loss: 2.99269e-05\n",
"Loss: 2.96898e-05\n",
"Loss: 2.96831e-05\n",
"Loss: 2.96752e-05\n",
"Loss: 2.96683e-05\n",
"Loss: 2.98201e-05\n",
"Loss: 2.96672e-05\n",
"Loss: 2.96618e-05\n",
"Loss: 2.96471e-05\n",
"Loss: 2.96351e-05\n",
"Loss: 2.96263e-05\n",
"Loss: 6.38888e-05\n",
"Loss: 2.96228e-05\n",
"Loss: 2.96177e-05\n",
"Loss: 2.96080e-05\n",
"Loss: 2.95944e-05\n",
"Loss: 2.96161e-05\n",
"Loss: 2.95886e-05\n",
"Loss: 2.95782e-05\n",
"Loss: 2.95734e-05\n",
"Loss: 2.95700e-05\n",
"Loss: 2.95658e-05\n",
"Loss: 2.95568e-05\n",
"Loss: 2.95458e-05\n",
"Loss: 2.95668e-05\n",
"Loss: 2.95391e-05\n",
"Loss: 2.95311e-05\n",
"Loss: 2.95172e-05\n",
"Loss: 2.95121e-05\n",
"Loss: 2.95083e-05\n",
"Loss: 2.95036e-05\n",
"Loss: 2.95063e-05\n",
"Loss: 2.95007e-05\n",
"Loss: 2.94983e-05\n",
"Loss: 2.94945e-05\n",
"Loss: 2.94882e-05\n",
"Loss: 2.94796e-05\n",
"Loss: 2.94805e-05\n",
"Loss: 2.94745e-05\n",
"Loss: 2.94943e-05\n",
"Loss: 2.94701e-05\n",
"Loss: 2.95115e-05\n",
"Loss: 2.94693e-05\n",
"Loss: 2.94657e-05\n",
"Loss: 2.94596e-05\n",
"Loss: 2.94502e-05\n",
"Loss: 2.94409e-05\n",
"Loss: 2.94276e-05\n",
"Loss: 2.94217e-05\n",
"Loss: 2.94151e-05\n",
"Loss: 2.94161e-05\n",
"Loss: 2.94042e-05\n",
"Loss: 2.93945e-05\n",
"Loss: 2.93719e-05\n",
"Loss: 2.93541e-05\n",
"Loss: 2.93381e-05\n",
"Loss: 2.93323e-05\n",
"Loss: 2.93234e-05\n",
"Loss: 2.93141e-05\n",
"Loss: 2.93080e-05\n",
"Loss: 2.93020e-05\n",
"Loss: 2.92964e-05\n",
"Loss: 2.92879e-05\n",
"Loss: 2.92807e-05\n",
"Loss: 2.92749e-05\n",
"Loss: 2.92972e-05\n",
"Loss: 2.92737e-05\n",
"Loss: 2.92692e-05\n",
"Loss: 2.92647e-05\n",
"Loss: 2.92613e-05\n",
"Loss: 2.92576e-05\n",
"Loss: 2.92520e-05\n",
"Loss: 2.92862e-05\n",
"Loss: 2.92510e-05\n",
"Loss: 2.92456e-05\n",
"Loss: 2.92357e-05\n",
"Loss: 2.92244e-05\n",
"Loss: 2.92147e-05\n",
"Loss: 2.92056e-05\n",
"Loss: 2.91973e-05\n",
"Loss: 2.91858e-05\n",
"Loss: 2.91745e-05\n",
"Loss: 2.91615e-05\n",
"Loss: 2.91715e-05\n",
"Loss: 2.91595e-05\n",
"Loss: 2.91539e-05\n",
"Loss: 2.91482e-05\n",
"Loss: 2.91436e-05\n",
"Loss: 2.91331e-05\n",
"Loss: 2.91275e-05\n",
"Loss: 2.91141e-05\n",
"Loss: 2.91977e-05\n",
"Loss: 2.91121e-05\n",
"Loss: 2.91023e-05\n",
"Loss: 2.90955e-05\n",
"Loss: 2.90806e-05\n",
"Loss: 2.90676e-05\n",
"Loss: 2.90484e-05\n",
"Loss: 2.90367e-05\n",
"Loss: 2.90305e-05\n",
"Loss: 2.90696e-05\n",
"Loss: 2.90283e-05\n",
"Loss: 2.90242e-05\n",
"Loss: 2.90151e-05\n",
"Loss: 2.90095e-05\n",
"Loss: 2.90020e-05\n",
"Loss: 2.89922e-05\n",
"Loss: 2.90262e-05\n",
"Loss: 2.89877e-05\n",
"Loss: 2.89757e-05\n",
"Loss: 2.89581e-05\n",
"Loss: 2.89384e-05\n",
"Loss: 2.89262e-05\n",
"Loss: 2.92304e-05\n",
"Loss: 2.89250e-05\n",
"Loss: 2.89134e-05\n",
"Loss: 2.89066e-05\n",
"Loss: 2.88973e-05\n",
"Loss: 2.88893e-05\n",
"Loss: 2.88801e-05\n",
"Loss: 2.88724e-05\n",
"Loss: 2.88649e-05\n",
"Loss: 2.88572e-05\n",
"Loss: 2.88476e-05\n",
"Loss: 2.88499e-05\n",
"Loss: 2.88410e-05\n",
"Loss: 2.88375e-05\n",
"Loss: 2.88332e-05\n",
"Loss: 2.88297e-05\n",
"Loss: 2.88321e-05\n",
"Loss: 2.88238e-05\n",
"Loss: 2.88332e-05\n",
"Loss: 2.88192e-05\n",
"Loss: 2.88117e-05\n",
"Loss: 2.87975e-05\n",
"Loss: 2.87910e-05\n",
"Loss: 2.87736e-05\n",
"Loss: 2.87621e-05\n",
"Loss: 2.87429e-05\n",
"Loss: 2.87273e-05\n",
"Loss: 2.87039e-05\n",
"Loss: 2.86827e-05\n",
"Loss: 2.86870e-05\n",
"Loss: 2.86758e-05\n",
"Loss: 2.86651e-05\n",
"Loss: 2.86619e-05\n",
"Loss: 2.86526e-05\n",
"Loss: 2.86458e-05\n",
"Loss: 2.86379e-05\n",
"Loss: 2.86320e-05\n",
"Loss: 2.86263e-05\n",
"Loss: 2.86176e-05\n",
"Loss: 2.86124e-05\n",
"Loss: 2.89153e-05\n",
"Loss: 2.85941e-05\n",
"Loss: 2.85713e-05\n",
"Loss: 2.85374e-05\n",
"Loss: 2.85121e-05\n",
"Loss: 2.84992e-05\n",
"Loss: 2.84669e-05\n",
"Loss: 2.84870e-05\n",
"Loss: 2.84570e-05\n",
"Loss: 2.84412e-05\n",
"Loss: 2.84281e-05\n",
"Loss: 2.84147e-05\n",
"Loss: 2.83980e-05\n",
"Loss: 2.83848e-05\n",
"Loss: 2.83710e-05\n",
"Loss: 2.83641e-05\n",
"Loss: 2.83489e-05\n",
"Loss: 2.84290e-05\n",
"Loss: 2.83426e-05\n",
"Loss: 2.84565e-05\n",
"Loss: 2.83416e-05\n",
"Loss: 2.83338e-05\n",
"Loss: 2.83271e-05\n",
"Loss: 2.83179e-05\n",
"Loss: 2.84953e-05\n",
"Loss: 2.83144e-05\n",
"Loss: 2.83034e-05\n",
"Loss: 2.82789e-05\n",
"Loss: 2.83725e-05\n",
"Loss: 2.82721e-05\n",
"Loss: 2.82654e-05\n",
"Loss: 2.82554e-05\n",
"Loss: 2.82621e-05\n",
"Loss: 2.82459e-05\n",
"Loss: 2.82312e-05\n",
"Loss: 2.82177e-05\n",
"Loss: 2.82075e-05\n",
"Loss: 2.82037e-05\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Loss: 2.82741e-05\n",
"Loss: 2.82020e-05\n",
"Loss: 2.81940e-05\n",
"Loss: 2.81881e-05\n",
"Loss: 2.81814e-05\n",
"Loss: 2.81714e-05\n",
"Loss: 2.82760e-05\n",
"Loss: 2.81635e-05\n",
"Loss: 2.81396e-05\n",
"Loss: 2.81150e-05\n",
"Loss: 2.80886e-05\n",
"Loss: 2.80708e-05\n",
"Loss: 2.80583e-05\n",
"Loss: 2.80645e-05\n",
"Loss: 2.80484e-05\n",
"Loss: 2.80418e-05\n",
"Loss: 2.80263e-05\n",
"Loss: 2.81721e-05\n",
"Loss: 2.80232e-05\n",
"Loss: 2.80082e-05\n",
"Loss: 2.79899e-05\n",
"Loss: 2.79685e-05\n",
"Loss: 2.79499e-05\n",
"Loss: 2.79362e-05\n",
"Loss: 2.79307e-05\n",
"Loss: 2.79234e-05\n",
"Loss: 2.79060e-05\n",
"Loss: 2.78848e-05\n",
"Loss: 2.78675e-05\n",
"Loss: 2.78530e-05\n",
"Loss: 2.78454e-05\n",
"Loss: 2.78160e-05\n",
"Loss: 2.78014e-05\n",
"Loss: 2.77788e-05\n",
"Loss: 2.77769e-05\n",
"Loss: 2.77663e-05\n",
"Loss: 2.77625e-05\n",
"Loss: 2.77528e-05\n",
"Loss: 2.80275e-05\n",
"Loss: 2.77453e-05\n",
"Loss: 2.77350e-05\n",
"Loss: 2.77281e-05\n",
"Loss: 2.77254e-05\n",
"Loss: 2.77203e-05\n",
"Loss: 2.77108e-05\n",
"Loss: 2.77540e-05\n",
"Loss: 2.77095e-05\n",
"Loss: 2.77058e-05\n",
"Loss: 2.76988e-05\n",
"Loss: 2.76965e-05\n",
"Loss: 2.76920e-05\n",
"Loss: 2.76893e-05\n",
"Loss: 2.76872e-05\n",
"Loss: 2.76811e-05\n",
"Loss: 2.76798e-05\n",
"Loss: 2.76765e-05\n",
"Loss: 2.76719e-05\n",
"Loss: 2.76628e-05\n",
"Loss: 2.78725e-05\n",
"Loss: 2.76608e-05\n",
"Loss: 2.76515e-05\n",
"Loss: 2.76449e-05\n",
"Loss: 2.76351e-05\n",
"Loss: 2.76228e-05\n",
"Loss: 2.76075e-05\n",
"Loss: 2.75976e-05\n",
"Loss: 2.75884e-05\n",
"Loss: 2.75837e-05\n",
"Loss: 2.75749e-05\n",
"Loss: 2.75720e-05\n",
"Loss: 2.75699e-05\n",
"Loss: 2.75666e-05\n",
"Loss: 2.75599e-05\n",
"Loss: 2.75555e-05\n",
"Loss: 2.75493e-05\n",
"Loss: 2.75424e-05\n",
"Loss: 2.75367e-05\n",
"Loss: 2.75338e-05\n",
"Loss: 2.75245e-05\n",
"Loss: 2.76882e-05\n",
"Loss: 2.75199e-05\n",
"Loss: 2.75129e-05\n",
"Loss: 2.75029e-05\n",
"Loss: 2.74922e-05\n",
"Loss: 2.74794e-05\n",
"Loss: 2.74603e-05\n",
"Loss: 2.74576e-05\n",
"Loss: 2.74403e-05\n",
"Loss: 2.74359e-05\n",
"Loss: 2.74307e-05\n",
"Loss: 2.74256e-05\n",
"Loss: 2.74245e-05\n",
"Loss: 2.74175e-05\n",
"Loss: 2.74133e-05\n",
"Loss: 2.74080e-05\n",
"Loss: 2.73986e-05\n",
"Loss: 2.80172e-05\n",
"Loss: 2.73970e-05\n",
"Loss: 2.73844e-05\n",
"Loss: 2.73753e-05\n",
"Loss: 2.73703e-05\n",
"Loss: 2.73664e-05\n",
"Loss: 2.73631e-05\n",
"Loss: 2.73582e-05\n",
"Loss: 2.73541e-05\n",
"Loss: 2.75353e-05\n",
"Loss: 2.73519e-05\n",
"Loss: 2.73467e-05\n",
"Loss: 2.73763e-05\n",
"Loss: 2.73439e-05\n",
"Loss: 2.73366e-05\n",
"Loss: 2.73253e-05\n",
"Loss: 2.73053e-05\n",
"Loss: 2.72875e-05\n",
"Loss: 2.72768e-05\n",
"Loss: 2.72814e-05\n",
"Loss: 2.72726e-05\n",
"Loss: 2.72637e-05\n",
"Loss: 2.72574e-05\n",
"Loss: 2.72524e-05\n",
"Loss: 2.72475e-05\n",
"Loss: 2.72385e-05\n",
"Loss: 2.72254e-05\n",
"Loss: 2.74239e-05\n",
"Loss: 2.72225e-05\n",
"Loss: 2.72125e-05\n",
"Loss: 2.72075e-05\n",
"Loss: 2.72036e-05\n",
"Loss: 2.71995e-05\n",
"Loss: 2.71924e-05\n",
"Loss: 2.71838e-05\n",
"Loss: 2.71799e-05\n",
"Loss: 2.71756e-05\n",
"Loss: 2.77017e-05\n",
"Loss: 2.71748e-05\n",
"Loss: 2.71720e-05\n",
"Loss: 2.71688e-05\n",
"Loss: 2.71633e-05\n",
"Loss: 2.71556e-05\n",
"Loss: 2.71669e-05\n",
"Loss: 2.71485e-05\n",
"Loss: 2.71369e-05\n",
"Loss: 2.71111e-05\n",
"Loss: 2.70999e-05\n",
"Loss: 2.70695e-05\n",
"Loss: 2.71748e-05\n",
"Loss: 2.70598e-05\n",
"Loss: 2.70391e-05\n",
"Loss: 2.70250e-05\n",
"Loss: 2.70103e-05\n",
"Loss: 2.70035e-05\n",
"Loss: 2.69966e-05\n",
"Loss: 2.69927e-05\n",
"Loss: 2.69868e-05\n",
"Loss: 2.69796e-05\n",
"Loss: 2.69701e-05\n",
"Loss: 2.69679e-05\n",
"Loss: 2.69667e-05\n",
"Loss: 2.69650e-05\n",
"Loss: 2.69616e-05\n",
"Loss: 2.69569e-05\n",
"Loss: 2.69504e-05\n",
"Loss: 2.69436e-05\n",
"Loss: 2.69399e-05\n",
"Loss: 2.69344e-05\n",
"Loss: 2.69311e-05\n",
"Loss: 2.69269e-05\n",
"Loss: 2.69226e-05\n",
"Loss: 2.69174e-05\n",
"Loss: 2.69101e-05\n",
"Loss: 2.69281e-05\n",
"Loss: 2.69036e-05\n",
"Loss: 2.68971e-05\n",
"Loss: 2.68879e-05\n",
"Loss: 2.68788e-05\n",
"Loss: 2.68742e-05\n",
"Loss: 2.68730e-05\n",
"Loss: 2.68698e-05\n",
"Loss: 2.68629e-05\n",
"Loss: 2.68594e-05\n",
"Loss: 2.68495e-05\n",
"Loss: 2.69092e-05\n",
"Loss: 2.68471e-05\n",
"Loss: 2.68404e-05\n",
"Loss: 2.68471e-05\n",
"Loss: 2.68336e-05\n",
"Loss: 2.68270e-05\n",
"Loss: 2.68283e-05\n",
"Loss: 2.68238e-05\n",
"Loss: 2.68195e-05\n",
"Loss: 2.68131e-05\n",
"Loss: 2.71434e-05\n",
"Loss: 2.68121e-05\n",
"Loss: 2.68042e-05\n",
"Loss: 2.68152e-05\n",
"Loss: 2.67986e-05\n",
"Loss: 2.67859e-05\n",
"Loss: 2.67769e-05\n",
"Loss: 2.67595e-05\n",
"Loss: 2.67516e-05\n",
"Loss: 2.67296e-05\n",
"Loss: 2.67225e-05\n",
"Loss: 2.67093e-05\n",
"Loss: 2.66968e-05\n",
"Loss: 2.68370e-05\n",
"Loss: 2.66957e-05\n",
"Loss: 2.66904e-05\n",
"Loss: 2.66867e-05\n",
"Loss: 2.66753e-05\n",
"Loss: 2.66689e-05\n",
"Loss: 2.66589e-05\n",
"Loss: 2.66466e-05\n",
"Loss: 2.66806e-05\n",
"Loss: 2.66421e-05\n",
"Loss: 2.66296e-05\n",
"Loss: 2.66173e-05\n",
"Loss: 2.66040e-05\n",
"Loss: 2.73032e-05\n",
"Loss: 2.65960e-05\n",
"Loss: 2.65838e-05\n",
"Loss: 2.65525e-05\n",
"Loss: 2.65388e-05\n",
"Loss: 2.66901e-05\n",
"Loss: 2.65367e-05\n",
"Loss: 2.65253e-05\n",
"Loss: 2.65212e-05\n",
"Loss: 2.65162e-05\n",
"Loss: 2.65076e-05\n",
"Loss: 2.65142e-05\n",
"Loss: 2.65015e-05\n",
"Loss: 2.64845e-05\n",
"Loss: 2.64672e-05\n",
"Loss: 2.64533e-05\n",
"Loss: 2.64404e-05\n",
"Loss: 2.64238e-05\n",
"Loss: 2.68438e-05\n",
"Loss: 2.64203e-05\n",
"Loss: 2.64078e-05\n",
"Loss: 2.63824e-05\n",
"Loss: 2.63640e-05\n",
"Loss: 2.64146e-05\n",
"Loss: 2.63557e-05\n",
"Loss: 2.63440e-05\n",
"Loss: 2.63378e-05\n",
"Loss: 2.63317e-05\n",
"Loss: 2.63271e-05\n",
"Loss: 2.63165e-05\n",
"Loss: 2.63097e-05\n",
"Loss: 2.63006e-05\n",
"Loss: 2.64089e-05\n",
"Loss: 2.62986e-05\n",
"Loss: 2.62885e-05\n",
"Loss: 2.62834e-05\n",
"Loss: 2.62783e-05\n",
"Loss: 2.62999e-05\n",
"Loss: 2.62725e-05\n",
"Loss: 2.62630e-05\n",
"Loss: 2.62482e-05\n",
"Loss: 2.62415e-05\n",
"Loss: 2.62331e-05\n",
"Loss: 2.62142e-05\n",
"Loss: 2.61911e-05\n",
"Loss: 2.61709e-05\n",
"Loss: 2.61601e-05\n",
"Loss: 2.61469e-05\n",
"Loss: 2.61636e-05\n",
"Loss: 2.61435e-05\n",
"Loss: 2.61362e-05\n",
"Loss: 2.61282e-05\n",
"Loss: 2.61170e-05\n",
"Loss: 2.61032e-05\n",
"Loss: 2.60944e-05\n",
"Loss: 2.60887e-05\n",
"Loss: 2.60820e-05\n",
"Loss: 2.60738e-05\n",
"Loss: 2.60917e-05\n",
"Loss: 2.60657e-05\n",
"Loss: 2.61359e-05\n",
"Loss: 2.60622e-05\n",
"Loss: 2.60523e-05\n",
"Loss: 2.60431e-05\n",
"Loss: 2.60382e-05\n",
"Loss: 2.60302e-05\n",
"Loss: 2.60230e-05\n",
"Loss: 2.60134e-05\n",
"Loss: 2.64318e-05\n",
"Loss: 2.60094e-05\n",
"Loss: 2.59977e-05\n",
"Loss: 2.59768e-05\n",
"Loss: 2.59711e-05\n",
"Loss: 2.63342e-05\n",
"Loss: 2.59687e-05\n",
"Loss: 2.59655e-05\n",
"Loss: 2.59601e-05\n",
"Loss: 2.59565e-05\n",
"Loss: 2.59515e-05\n",
"Loss: 2.59445e-05\n",
"Loss: 2.59385e-05\n",
"Loss: 2.59322e-05\n",
"Loss: 2.59292e-05\n",
"Loss: 2.59250e-05\n",
"Loss: 2.59230e-05\n",
"Loss: 2.59190e-05\n",
"Loss: 2.59150e-05\n",
"Loss: 2.59087e-05\n",
"Loss: 2.59002e-05\n",
"Loss: 2.58920e-05\n",
"Loss: 2.58914e-05\n",
"Loss: 2.58829e-05\n",
"Loss: 2.58802e-05\n",
"Loss: 2.58675e-05\n",
"Loss: 2.60211e-05\n",
"Loss: 2.58647e-05\n",
"Loss: 2.58524e-05\n",
"Loss: 2.58440e-05\n",
"Loss: 2.58360e-05\n",
"Loss: 2.58280e-05\n",
"Loss: 2.58254e-05\n",
"Loss: 2.58219e-05\n",
"Loss: 2.58192e-05\n",
"Loss: 2.58149e-05\n",
"Loss: 2.58098e-05\n",
"Loss: 2.59552e-05\n",
"Loss: 2.58088e-05\n",
"Loss: 2.58038e-05\n",
"Loss: 2.58000e-05\n",
"Loss: 2.57923e-05\n",
"Loss: 2.57888e-05\n",
"Loss: 2.57856e-05\n",
"Loss: 2.57832e-05\n",
"Loss: 2.57802e-05\n",
"Loss: 2.57772e-05\n",
"Loss: 2.57740e-05\n",
"Loss: 2.57724e-05\n",
"Loss: 2.57700e-05\n",
"Loss: 2.57685e-05\n",
"Loss: 2.57663e-05\n",
"Loss: 2.57645e-05\n",
"Loss: 2.57623e-05\n",
"Loss: 2.57605e-05\n",
"Loss: 2.57580e-05\n",
"Loss: 2.57531e-05\n",
"Loss: 2.57547e-05\n",
"Loss: 2.57496e-05\n",
"Loss: 2.57422e-05\n",
"Loss: 2.57554e-05\n",
"Loss: 2.57409e-05\n",
"Loss: 2.57355e-05\n",
"Loss: 2.57329e-05\n",
"Loss: 2.57300e-05\n",
"Loss: 2.57391e-05\n",
"Loss: 2.57284e-05\n",
"Loss: 2.57236e-05\n",
"Loss: 2.57161e-05\n",
"Loss: 2.57109e-05\n",
"Loss: 2.57108e-05\n",
"Loss: 2.57093e-05\n",
"Loss: 2.57075e-05\n",
"Loss: 2.57064e-05\n",
"Loss: 2.57022e-05\n",
"Loss: 2.56998e-05\n",
"Loss: 2.56937e-05\n",
"Loss: 2.56859e-05\n",
"Loss: 2.56758e-05\n",
"Loss: 2.56696e-05\n",
"Loss: 2.58816e-05\n",
"Loss: 2.56681e-05\n",
"Loss: 2.56642e-05\n",
"Loss: 2.56603e-05\n",
"Loss: 2.56543e-05\n",
"Loss: 2.56487e-05\n",
"Loss: 2.56414e-05\n",
"Loss: 2.56855e-05\n",
"Loss: 2.56399e-05\n",
"Loss: 2.56353e-05\n",
"Loss: 2.56327e-05\n",
"Loss: 2.56289e-05\n",
"Loss: 2.56263e-05\n",
"Loss: 2.56225e-05\n",
"Loss: 2.56154e-05\n",
"Loss: 2.56691e-05\n",
"Loss: 2.56146e-05\n",
"Loss: 2.56073e-05\n",
"Loss: 2.56006e-05\n",
"Loss: 2.55956e-05\n",
"Loss: 2.55925e-05\n",
"Loss: 2.55889e-05\n",
"Loss: 2.55849e-05\n",
"Loss: 2.55822e-05\n",
"Loss: 2.55771e-05\n",
"Loss: 2.59604e-05\n",
"Loss: 2.55762e-05\n",
"Loss: 2.55731e-05\n",
"Loss: 2.55681e-05\n",
"Loss: 2.55775e-05\n",
"Loss: 2.55609e-05\n",
"Loss: 2.55555e-05\n",
"Loss: 2.55514e-05\n",
"Loss: 2.55479e-05\n",
"Loss: 2.55414e-05\n",
"Loss: 2.55362e-05\n",
"Loss: 2.55263e-05\n",
"Loss: 2.55209e-05\n",
"Loss: 2.55134e-05\n",
"Loss: 2.55059e-05\n",
"Loss: 2.55650e-05\n",
"Loss: 2.55027e-05\n",
"Loss: 2.54969e-05\n",
"Loss: 2.54811e-05\n",
"Loss: 2.54710e-05\n",
"Loss: 2.54629e-05\n",
"Loss: 2.54600e-05\n",
"Loss: 2.54540e-05\n",
"Loss: 2.54441e-05\n",
"Loss: 2.78423e-05\n",
"Loss: 2.54424e-05\n",
"Loss: 2.54252e-05\n",
"Loss: 2.54210e-05\n",
"Loss: 2.54172e-05\n",
"Loss: 2.54170e-05\n",
"Loss: 2.54153e-05\n",
"Loss: 2.54091e-05\n",
"Loss: 2.54025e-05\n",
"Loss: 2.53992e-05\n",
"Loss: 2.53973e-05\n",
"Loss: 2.54012e-05\n",
"Loss: 2.53961e-05\n",
"Loss: 2.53945e-05\n",
"Loss: 2.53910e-05\n",
"Loss: 2.53882e-05\n",
"Loss: 2.53836e-05\n",
"Loss: 2.53802e-05\n",
"Loss: 2.53786e-05\n",
"Loss: 2.53762e-05\n",
"Loss: 2.53681e-05\n",
"Loss: 2.53796e-05\n",
"Loss: 2.53643e-05\n",
"Loss: 2.53872e-05\n",
"Loss: 2.53559e-05\n",
"Loss: 2.53507e-05\n",
"Loss: 2.53454e-05\n",
"Loss: 2.53425e-05\n",
"Loss: 2.53366e-05\n",
"Loss: 2.53328e-05\n",
"Loss: 2.53312e-05\n",
"Loss: 2.53243e-05\n",
"Loss: 2.53201e-05\n",
"Loss: 2.53116e-05\n",
"Loss: 2.53051e-05\n",
"Loss: 2.53160e-05\n",
"Loss: 2.53041e-05\n",
"Loss: 2.53019e-05\n",
"Loss: 2.53010e-05\n",
"Loss: 2.52999e-05\n",
"Loss: 2.52963e-05\n",
"Loss: 2.52889e-05\n",
"Loss: 2.53024e-05\n",
"Loss: 2.52852e-05\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Loss: 2.52824e-05\n",
"Loss: 2.52728e-05\n",
"Loss: 2.52663e-05\n",
"Loss: 2.52580e-05\n",
"Loss: 2.52498e-05\n",
"Loss: 2.52403e-05\n",
"Loss: 2.52397e-05\n",
"Loss: 2.52326e-05\n",
"Loss: 2.52300e-05\n",
"Loss: 2.52264e-05\n",
"Loss: 2.53168e-05\n",
"Loss: 2.52251e-05\n",
"Loss: 2.52196e-05\n",
"Loss: 2.52156e-05\n",
"Loss: 2.52132e-05\n",
"Loss: 2.52109e-05\n",
"Loss: 2.52151e-05\n",
"Loss: 2.52097e-05\n",
"Loss: 2.52071e-05\n",
"Loss: 2.52054e-05\n",
"Loss: 2.52006e-05\n",
"Loss: 2.51937e-05\n",
"Loss: 2.51867e-05\n",
"Loss: 2.51787e-05\n",
"Loss: 2.51759e-05\n",
"Loss: 2.51705e-05\n",
"Loss: 2.52153e-05\n",
"Loss: 2.51695e-05\n",
"Loss: 2.51616e-05\n",
"Loss: 2.51534e-05\n",
"Loss: 2.51481e-05\n",
"Loss: 2.51572e-05\n",
"Loss: 2.51461e-05\n",
"Loss: 2.51437e-05\n",
"Loss: 2.51402e-05\n",
"Loss: 2.51366e-05\n",
"Loss: 2.51319e-05\n",
"Loss: 2.51252e-05\n",
"Loss: 2.51287e-05\n",
"Loss: 2.51210e-05\n",
"Loss: 2.51154e-05\n",
"Loss: 2.51100e-05\n",
"Loss: 2.51064e-05\n",
"Loss: 2.51036e-05\n",
"Loss: 2.50995e-05\n",
"Loss: 2.50971e-05\n",
"Loss: 2.50941e-05\n",
"Loss: 2.51469e-05\n",
"Loss: 2.50935e-05\n",
"Loss: 2.50907e-05\n",
"Loss: 2.50884e-05\n",
"Loss: 2.50864e-05\n",
"Loss: 2.50841e-05\n",
"Loss: 2.50806e-05\n",
"Loss: 2.50762e-05\n",
"Loss: 2.50735e-05\n",
"Loss: 2.50676e-05\n",
"Loss: 2.50710e-05\n",
"Loss: 2.50647e-05\n",
"Loss: 2.50598e-05\n",
"Loss: 2.50512e-05\n",
"Loss: 2.50447e-05\n",
"Loss: 2.50407e-05\n",
"Loss: 2.50378e-05\n",
"Loss: 2.50357e-05\n",
"Loss: 2.50334e-05\n",
"Loss: 2.50324e-05\n",
"Loss: 2.50304e-05\n",
"Loss: 2.50279e-05\n",
"Loss: 2.50259e-05\n",
"Loss: 2.50213e-05\n",
"Loss: 2.50191e-05\n",
"Loss: 2.50161e-05\n",
"Loss: 2.50528e-05\n",
"Loss: 2.50158e-05\n",
"Loss: 2.50131e-05\n",
"Loss: 2.50077e-05\n",
"Loss: 2.50033e-05\n",
"Loss: 2.49984e-05\n",
"Loss: 2.49938e-05\n",
"Loss: 2.49908e-05\n",
"Loss: 2.49850e-05\n",
"Loss: 2.49819e-05\n",
"Loss: 2.49802e-05\n",
"Loss: 2.49789e-05\n",
"Loss: 2.49797e-05\n",
"Loss: 2.49767e-05\n",
"Loss: 2.49864e-05\n",
"Loss: 2.49722e-05\n",
"Loss: 2.49793e-05\n",
"Loss: 2.49711e-05\n",
"Loss: 2.49677e-05\n",
"Loss: 2.49651e-05\n",
"Loss: 2.49631e-05\n",
"Loss: 2.49593e-05\n",
"Loss: 2.50397e-05\n",
"Loss: 2.49565e-05\n",
"Loss: 2.49521e-05\n",
"Loss: 2.50290e-05\n",
"Loss: 2.49490e-05\n",
"Loss: 2.49413e-05\n",
"Loss: 2.49426e-05\n",
"Loss: 2.49375e-05\n",
"Loss: 2.49311e-05\n",
"Loss: 2.49225e-05\n",
"Loss: 2.49144e-05\n",
"Loss: 2.49054e-05\n",
"Loss: 2.48979e-05\n",
"Loss: 2.48888e-05\n",
"Loss: 2.48954e-05\n",
"Loss: 2.48843e-05\n",
"Loss: 2.48757e-05\n",
"Loss: 2.48717e-05\n",
"Loss: 2.48676e-05\n",
"Loss: 2.48647e-05\n",
"Loss: 2.48609e-05\n",
"Loss: 2.48553e-05\n",
"Loss: 2.48499e-05\n",
"Loss: 2.48370e-05\n",
"Loss: 2.48303e-05\n",
"Loss: 2.48139e-05\n",
"Loss: 2.48034e-05\n",
"Loss: 2.47973e-05\n",
"Loss: 2.47909e-05\n",
"Loss: 2.47846e-05\n",
"Loss: 2.47794e-05\n",
"Loss: 2.47772e-05\n",
"Loss: 2.47689e-05\n",
"Loss: 2.47645e-05\n",
"Loss: 2.49184e-05\n",
"Loss: 2.47633e-05\n",
"Loss: 2.47567e-05\n",
"Loss: 2.47533e-05\n",
"Loss: 2.47489e-05\n",
"Loss: 2.47449e-05\n",
"Loss: 2.47354e-05\n",
"Loss: 2.47355e-05\n",
"Loss: 2.47310e-05\n",
"Loss: 2.47208e-05\n",
"Loss: 2.47106e-05\n",
"Loss: 2.47077e-05\n",
"Loss: 2.47042e-05\n",
"Loss: 2.47580e-05\n",
"Loss: 2.47037e-05\n",
"Loss: 2.47004e-05\n",
"Loss: 2.46961e-05\n",
"Loss: 2.46918e-05\n",
"Loss: 2.46873e-05\n",
"Loss: 2.46817e-05\n",
"Loss: 2.47394e-05\n",
"Loss: 2.46805e-05\n",
"Loss: 2.46755e-05\n",
"Loss: 2.46703e-05\n",
"Loss: 2.46643e-05\n",
"Loss: 2.46718e-05\n",
"Loss: 2.46617e-05\n",
"Loss: 2.46563e-05\n",
"Loss: 2.46466e-05\n",
"Loss: 2.49130e-05\n",
"Loss: 2.46456e-05\n",
"Loss: 2.46418e-05\n",
"Loss: 2.46386e-05\n",
"Loss: 2.46376e-05\n",
"Loss: 2.46353e-05\n",
"Loss: 2.46335e-05\n",
"Loss: 2.46315e-05\n",
"Loss: 2.46292e-05\n",
"Loss: 2.46319e-05\n",
"Loss: 2.46274e-05\n",
"Loss: 2.46469e-05\n",
"Loss: 2.46232e-05\n",
"Loss: 2.46203e-05\n",
"Loss: 2.47261e-05\n",
"Loss: 2.46174e-05\n",
"Loss: 2.46108e-05\n",
"Loss: 2.46080e-05\n",
"Loss: 2.46028e-05\n",
"Loss: 2.45984e-05\n",
"Loss: 2.45896e-05\n",
"Loss: 2.45807e-05\n",
"Loss: 2.45737e-05\n",
"Loss: 2.45643e-05\n",
"Loss: 2.45579e-05\n",
"Loss: 2.45530e-05\n",
"Loss: 2.45482e-05\n",
"Loss: 2.45440e-05\n",
"Loss: 2.45328e-05\n",
"Loss: 2.51274e-05\n",
"Loss: 2.45296e-05\n",
"Loss: 2.45208e-05\n",
"Loss: 2.45151e-05\n",
"Loss: 2.45118e-05\n",
"Loss: 2.45083e-05\n",
"Loss: 2.45045e-05\n",
"Loss: 2.45069e-05\n",
"Loss: 2.45013e-05\n",
"Loss: 2.44989e-05\n",
"Loss: 2.44950e-05\n",
"Loss: 2.44922e-05\n",
"Loss: 2.44860e-05\n",
"Loss: 2.44858e-05\n",
"Loss: 2.44809e-05\n",
"Loss: 2.44742e-05\n",
"Loss: 2.44708e-05\n",
"Loss: 2.46318e-05\n",
"Loss: 2.44700e-05\n",
"Loss: 2.44652e-05\n",
"Loss: 2.44583e-05\n",
"Loss: 2.44481e-05\n",
"Loss: 2.44428e-05\n",
"Loss: 2.44371e-05\n",
"Loss: 2.44341e-05\n",
"Loss: 2.44352e-05\n",
"Loss: 2.44330e-05\n",
"Loss: 2.44307e-05\n",
"Loss: 2.44255e-05\n",
"Loss: 2.44218e-05\n",
"Loss: 2.44139e-05\n",
"Loss: 2.44071e-05\n",
"Loss: 2.43987e-05\n",
"Loss: 2.43931e-05\n",
"Loss: 2.43902e-05\n",
"Loss: 2.43860e-05\n",
"Loss: 2.43923e-05\n",
"Loss: 2.43794e-05\n",
"Loss: 2.43736e-05\n",
"Loss: 2.43636e-05\n",
"Loss: 2.43588e-05\n",
"Loss: 2.43552e-05\n",
"Loss: 2.43506e-05\n",
"Loss: 2.43573e-05\n",
"Loss: 2.43496e-05\n",
"Loss: 2.43455e-05\n",
"Loss: 2.43420e-05\n",
"Loss: 2.43354e-05\n",
"Loss: 2.43309e-05\n",
"Loss: 2.43325e-05\n",
"Loss: 2.43224e-05\n",
"Loss: 2.43144e-05\n",
"Loss: 2.43057e-05\n",
"Loss: 2.42990e-05\n",
"Loss: 2.42895e-05\n",
"Loss: 2.42968e-05\n",
"Loss: 2.42863e-05\n",
"Loss: 2.42796e-05\n",
"Loss: 2.42749e-05\n",
"Loss: 2.42672e-05\n",
"Loss: 2.42574e-05\n",
"Loss: 2.42728e-05\n",
"Loss: 2.42530e-05\n",
"Loss: 2.42403e-05\n",
"Loss: 2.42314e-05\n",
"Loss: 2.42316e-05\n",
"Loss: 2.42242e-05\n",
"Loss: 2.42155e-05\n",
"Loss: 2.42099e-05\n",
"Loss: 2.42064e-05\n",
"Loss: 2.42027e-05\n",
"Loss: 2.43504e-05\n",
"Loss: 2.42012e-05\n",
"Loss: 2.41968e-05\n",
"Loss: 2.41895e-05\n",
"Loss: 2.41864e-05\n",
"Loss: 2.41985e-05\n",
"Loss: 2.41842e-05\n",
"Loss: 2.41819e-05\n",
"Loss: 2.41807e-05\n",
"Loss: 2.41764e-05\n",
"Loss: 2.41723e-05\n",
"Loss: 2.41690e-05\n",
"Loss: 2.41635e-05\n",
"Loss: 2.41599e-05\n",
"Loss: 2.41545e-05\n",
"Loss: 2.41505e-05\n",
"Loss: 2.41428e-05\n",
"Loss: 2.41331e-05\n",
"Loss: 2.41271e-05\n",
"Loss: 2.41246e-05\n",
"Loss: 2.41211e-05\n",
"Loss: 2.41179e-05\n",
"Loss: 2.41309e-05\n",
"Loss: 2.41153e-05\n",
"Loss: 2.41130e-05\n",
"Loss: 2.41105e-05\n",
"Loss: 2.41095e-05\n",
"Loss: 2.41069e-05\n",
"Loss: 2.41006e-05\n",
"Loss: 2.40994e-05\n",
"Loss: 2.40944e-05\n",
"Loss: 2.40920e-05\n",
"Loss: 2.40901e-05\n",
"Loss: 2.40835e-05\n",
"Loss: 2.40772e-05\n",
"Loss: 2.40634e-05\n",
"Loss: 2.40496e-05\n",
"Loss: 2.40470e-05\n",
"Loss: 2.40278e-05\n",
"Loss: 2.40200e-05\n",
"Loss: 2.40094e-05\n",
"Loss: 2.39979e-05\n",
"Loss: 2.40691e-05\n",
"Loss: 2.39915e-05\n",
"Loss: 2.39856e-05\n",
"Loss: 2.39795e-05\n",
"Loss: 2.39740e-05\n",
"Loss: 2.40744e-05\n",
"Loss: 2.39729e-05\n",
"Loss: 2.39655e-05\n",
"Loss: 2.39575e-05\n",
"Loss: 2.39534e-05\n",
"Loss: 2.43061e-05\n",
"Loss: 2.39526e-05\n",
"Loss: 2.39496e-05\n",
"Loss: 2.39506e-05\n",
"Loss: 2.39462e-05\n",
"Loss: 2.39368e-05\n",
"Loss: 2.39329e-05\n",
"Loss: 2.39191e-05\n",
"Loss: 2.39130e-05\n",
"Loss: 2.39072e-05\n",
"Loss: 2.39211e-05\n",
"Loss: 2.39038e-05\n",
"Loss: 2.39027e-05\n",
"Loss: 2.38968e-05\n",
"Loss: 2.38947e-05\n",
"Loss: 2.38894e-05\n",
"Loss: 2.38858e-05\n",
"Loss: 2.39800e-05\n",
"Loss: 2.38835e-05\n",
"Loss: 2.38780e-05\n",
"Loss: 2.38746e-05\n",
"Loss: 2.38720e-05\n",
"Loss: 2.38670e-05\n",
"Loss: 2.38791e-05\n",
"Loss: 2.38633e-05\n",
"Loss: 2.38568e-05\n",
"Loss: 2.38521e-05\n",
"Loss: 2.38492e-05\n",
"Loss: 2.38450e-05\n",
"Loss: 2.38350e-05\n",
"Loss: 2.38432e-05\n",
"Loss: 2.38318e-05\n",
"Loss: 2.38240e-05\n",
"Loss: 2.38206e-05\n",
"Loss: 2.38179e-05\n",
"Loss: 2.38132e-05\n",
"Loss: 2.38067e-05\n",
"Loss: 2.37945e-05\n",
"Loss: 2.38634e-05\n",
"Loss: 2.37906e-05\n",
"Loss: 2.37772e-05\n",
"Loss: 2.37687e-05\n",
"Loss: 2.37626e-05\n",
"Loss: 2.37542e-05\n",
"Loss: 2.37361e-05\n",
"Loss: 2.37241e-05\n",
"Loss: 2.37122e-05\n",
"Loss: 2.37056e-05\n",
"Loss: 2.36952e-05\n",
"Loss: 2.36801e-05\n",
"Loss: 2.36727e-05\n",
"Loss: 2.36563e-05\n",
"Loss: 2.36471e-05\n",
"Loss: 2.36404e-05\n",
"Loss: 2.36355e-05\n",
"Loss: 2.38260e-05\n",
"Loss: 2.36350e-05\n",
"Loss: 2.36310e-05\n",
"Loss: 2.36242e-05\n",
"Loss: 2.36211e-05\n",
"Loss: 2.36175e-05\n",
"Loss: 2.36161e-05\n",
"Loss: 2.36136e-05\n",
"Loss: 2.36115e-05\n",
"Loss: 2.36177e-05\n",
"Loss: 2.36097e-05\n",
"Loss: 2.36054e-05\n",
"Loss: 2.36010e-05\n",
"Loss: 2.35963e-05\n",
"Loss: 2.35932e-05\n",
"Loss: 2.35882e-05\n",
"Loss: 2.37108e-05\n",
"Loss: 2.35863e-05\n",
"Loss: 2.35803e-05\n",
"Loss: 2.35743e-05\n",
"Loss: 2.35705e-05\n",
"Loss: 2.35667e-05\n",
"Loss: 2.35609e-05\n",
"Loss: 2.35562e-05\n",
"Loss: 2.35516e-05\n",
"Loss: 2.35474e-05\n",
"Loss: 2.36229e-05\n",
"Loss: 2.35456e-05\n",
"Loss: 2.35422e-05\n",
"Loss: 2.35373e-05\n",
"Loss: 2.35281e-05\n",
"Loss: 2.35197e-05\n",
"Loss: 2.35161e-05\n",
"Loss: 2.35138e-05\n",
"Loss: 2.35085e-05\n",
"Loss: 2.35047e-05\n",
"Loss: 2.34984e-05\n",
"Loss: 2.34935e-05\n",
"Loss: 2.34862e-05\n",
"Loss: 2.35430e-05\n",
"Loss: 2.34849e-05\n",
"Loss: 2.34809e-05\n",
"Loss: 2.34759e-05\n",
"Loss: 2.34691e-05\n",
"Loss: 2.34564e-05\n",
"Loss: 2.34395e-05\n",
"Loss: 2.34175e-05\n",
"Loss: 2.34043e-05\n",
"Loss: 2.33933e-05\n",
"Loss: 2.33814e-05\n",
"Loss: 2.33743e-05\n",
"Loss: 2.33668e-05\n",
"Loss: 2.33615e-05\n",
"Loss: 2.33505e-05\n",
"Loss: 2.33455e-05\n",
"Loss: 2.33365e-05\n",
"Loss: 2.33334e-05\n",
"Loss: 2.33431e-05\n",
"Loss: 2.33320e-05\n",
"Loss: 2.33293e-05\n",
"Loss: 2.33259e-05\n",
"Loss: 2.33379e-05\n",
"Loss: 2.33246e-05\n",
"Loss: 2.33203e-05\n",
"Loss: 2.33157e-05\n",
"Loss: 2.33107e-05\n",
"Loss: 2.33068e-05\n",
"Loss: 2.33124e-05\n",
"Loss: 2.33039e-05\n",
"Loss: 2.32994e-05\n",
"Loss: 2.32949e-05\n",
"Loss: 2.32886e-05\n",
"Loss: 2.32822e-05\n",
"Loss: 2.32691e-05\n",
"Loss: 2.32660e-05\n",
"Loss: 2.32581e-05\n",
"Loss: 2.32550e-05\n",
"Loss: 2.32484e-05\n",
"Loss: 2.32421e-05\n",
"Loss: 2.32366e-05\n",
"Loss: 2.32311e-05\n",
"Loss: 2.34619e-05\n",
"Loss: 2.32297e-05\n",
"Loss: 2.32242e-05\n",
"Loss: 2.32439e-05\n",
"Loss: 2.32221e-05\n",
"Loss: 2.32195e-05\n",
"Loss: 2.32120e-05\n",
"Loss: 2.32070e-05\n",
"Loss: 2.32050e-05\n",
"Loss: 2.31995e-05\n",
"Loss: 2.31975e-05\n",
"Loss: 2.31958e-05\n",
"Loss: 2.31913e-05\n",
"Loss: 2.31841e-05\n",
"Loss: 2.31762e-05\n",
"Loss: 2.34899e-05\n",
"Loss: 2.31754e-05\n",
"Loss: 2.31675e-05\n",
"Loss: 2.31646e-05\n",
"Loss: 2.31547e-05\n",
"Loss: 2.31496e-05\n",
"Loss: 2.31444e-05\n",
"Loss: 2.31395e-05\n",
"Loss: 2.36401e-05\n",
"Loss: 2.31381e-05\n",
"Loss: 2.31349e-05\n",
"Loss: 2.31249e-05\n",
"Loss: 2.31178e-05\n",
"Loss: 2.31132e-05\n",
"Loss: 2.31069e-05\n",
"Loss: 2.31000e-05\n",
"Loss: 2.30890e-05\n",
"Loss: 2.30786e-05\n",
"Loss: 2.30644e-05\n",
"Loss: 2.30514e-05\n",
"Loss: 2.30969e-05\n",
"Loss: 2.30468e-05\n",
"Loss: 2.30356e-05\n",
"Loss: 2.30288e-05\n",
"Loss: 2.30214e-05\n",
"Loss: 2.30314e-05\n",
"Loss: 2.30183e-05\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Loss: 2.30146e-05\n",
"Loss: 2.30080e-05\n",
"Loss: 2.30056e-05\n",
"Loss: 2.30016e-05\n",
"Loss: 2.30093e-05\n",
"Loss: 2.29997e-05\n",
"Loss: 2.29957e-05\n",
"Loss: 2.29898e-05\n",
"Loss: 2.29864e-05\n",
"Loss: 2.30116e-05\n",
"Loss: 2.29830e-05\n",
"Loss: 2.29794e-05\n",
"Loss: 2.29745e-05\n",
"Loss: 2.29713e-05\n",
"Loss: 2.29636e-05\n",
"Loss: 2.29520e-05\n",
"Loss: 2.29439e-05\n",
"Loss: 2.29334e-05\n",
"Loss: 2.29401e-05\n",
"Loss: 2.29318e-05\n",
"Loss: 2.29297e-05\n",
"Loss: 2.29281e-05\n",
"Loss: 2.29259e-05\n",
"Loss: 2.29230e-05\n",
"Loss: 2.29171e-05\n",
"Loss: 2.29102e-05\n",
"Loss: 2.29055e-05\n",
"Loss: 2.29030e-05\n",
"Loss: 2.29011e-05\n",
"Loss: 2.28990e-05\n",
"Loss: 2.28937e-05\n",
"Loss: 2.28870e-05\n",
"Loss: 2.29249e-05\n",
"Loss: 2.28854e-05\n",
"Loss: 2.28805e-05\n",
"Loss: 2.28750e-05\n",
"Loss: 2.28699e-05\n",
"Loss: 2.28623e-05\n",
"Loss: 2.28495e-05\n",
"Loss: 2.28583e-05\n",
"Loss: 2.28455e-05\n",
"Loss: 2.28347e-05\n",
"Loss: 2.28292e-05\n",
"Loss: 2.28220e-05\n",
"Loss: 2.28143e-05\n",
"Loss: 2.28065e-05\n",
"Loss: 2.27979e-05\n",
"Loss: 2.27924e-05\n",
"Loss: 2.27878e-05\n",
"Loss: 2.27835e-05\n",
"Loss: 2.27755e-05\n",
"Loss: 2.27686e-05\n",
"Loss: 2.28998e-05\n",
"Loss: 2.27667e-05\n",
"Loss: 2.27633e-05\n",
"Loss: 2.27598e-05\n",
"Loss: 2.27595e-05\n",
"Loss: 2.27570e-05\n",
"Loss: 2.27498e-05\n",
"Loss: 2.27461e-05\n",
"Loss: 2.27412e-05\n",
"Loss: 2.27362e-05\n",
"Loss: 2.27366e-05\n",
"Loss: 2.27345e-05\n",
"Loss: 2.27321e-05\n",
"Loss: 2.27295e-05\n",
"Loss: 2.27366e-05\n",
"Loss: 2.27275e-05\n",
"Loss: 2.27231e-05\n",
"Loss: 2.27198e-05\n",
"Loss: 2.27168e-05\n",
"Loss: 2.27136e-05\n",
"Loss: 2.27097e-05\n",
"Loss: 2.27043e-05\n",
"Loss: 2.27009e-05\n",
"Loss: 2.26943e-05\n",
"Loss: 2.27076e-05\n",
"Loss: 2.26931e-05\n",
"Loss: 2.26895e-05\n",
"Loss: 2.26869e-05\n",
"Loss: 2.26850e-05\n",
"Loss: 2.26797e-05\n",
"Loss: 2.26764e-05\n",
"Loss: 2.26797e-05\n",
"Loss: 2.26715e-05\n",
"Loss: 2.26665e-05\n",
"Loss: 2.26625e-05\n",
"Loss: 2.26535e-05\n",
"Loss: 2.26571e-05\n",
"Loss: 2.26502e-05\n",
"Loss: 2.26433e-05\n",
"Loss: 2.26289e-05\n",
"Loss: 2.26162e-05\n",
"Loss: 2.30770e-05\n",
"Loss: 2.26158e-05\n",
"Loss: 2.26069e-05\n",
"Loss: 2.26029e-05\n",
"Loss: 2.25996e-05\n",
"Loss: 2.25960e-05\n",
"Loss: 2.25875e-05\n",
"Loss: 2.27868e-05\n",
"Loss: 2.25854e-05\n",
"Loss: 2.25773e-05\n",
"Loss: 2.25736e-05\n",
"Loss: 2.25698e-05\n",
"Loss: 2.25610e-05\n",
"Loss: 2.25557e-05\n",
"Loss: 2.25480e-05\n",
"Loss: 2.25427e-05\n",
"Loss: 2.25376e-05\n",
"Loss: 2.25316e-05\n",
"Loss: 2.25254e-05\n",
"Loss: 2.25191e-05\n",
"Loss: 2.25464e-05\n",
"Loss: 2.25179e-05\n",
"Loss: 2.25138e-05\n",
"Loss: 2.25119e-05\n",
"Loss: 2.25097e-05\n",
"Loss: 2.25079e-05\n",
"Loss: 2.25060e-05\n",
"Loss: 2.25039e-05\n",
"Loss: 2.24986e-05\n",
"Loss: 2.24973e-05\n",
"Loss: 2.24887e-05\n",
"Loss: 2.24813e-05\n",
"Loss: 2.24744e-05\n",
"Loss: 2.24685e-05\n",
"Loss: 2.24661e-05\n",
"Loss: 2.24673e-05\n",
"Loss: 2.24618e-05\n",
"Loss: 2.24601e-05\n",
"Loss: 2.24574e-05\n",
"Loss: 2.24464e-05\n",
"Loss: 2.24414e-05\n",
"Loss: 2.24368e-05\n",
"Loss: 2.30390e-05\n",
"Loss: 2.24361e-05\n",
"Loss: 2.24331e-05\n",
"Loss: 2.24271e-05\n",
"Loss: 2.24252e-05\n",
"Loss: 2.24143e-05\n",
"Loss: 2.24093e-05\n",
"Loss: 2.24121e-05\n",
"Loss: 2.24059e-05\n",
"Loss: 2.23992e-05\n",
"Loss: 2.23878e-05\n",
"Loss: 2.23763e-05\n",
"Loss: 2.23647e-05\n",
"Loss: 2.23554e-05\n",
"Loss: 2.23454e-05\n",
"Loss: 2.23380e-05\n",
"Loss: 2.23311e-05\n",
"Loss: 2.23799e-05\n",
"Loss: 2.23296e-05\n",
"Loss: 2.23238e-05\n",
"Loss: 2.23155e-05\n",
"Loss: 2.23076e-05\n",
"Loss: 2.23011e-05\n",
"Loss: 2.22976e-05\n",
"Loss: 2.22912e-05\n",
"Loss: 2.22841e-05\n",
"Loss: 2.22793e-05\n",
"Loss: 2.22754e-05\n",
"Loss: 2.22960e-05\n",
"Loss: 2.22731e-05\n",
"Loss: 2.22705e-05\n",
"Loss: 2.22650e-05\n",
"Loss: 2.22620e-05\n",
"Loss: 2.22540e-05\n",
"Loss: 2.22437e-05\n",
"Loss: 2.22383e-05\n",
"Loss: 2.22380e-05\n",
"Loss: 2.22357e-05\n",
"Loss: 2.22322e-05\n",
"Loss: 2.22304e-05\n",
"Loss: 2.22284e-05\n",
"Loss: 2.22268e-05\n",
"Loss: 2.22581e-05\n",
"Loss: 2.22263e-05\n",
"Loss: 2.22248e-05\n",
"Loss: 2.22212e-05\n",
"Loss: 2.22166e-05\n",
"Loss: 2.22123e-05\n",
"Loss: 2.23845e-05\n",
"Loss: 2.22114e-05\n",
"Loss: 2.22070e-05\n",
"Loss: 2.22031e-05\n",
"Loss: 2.21980e-05\n",
"Loss: 2.21930e-05\n",
"Loss: 2.22003e-05\n",
"Loss: 2.21902e-05\n",
"Loss: 2.21854e-05\n",
"Loss: 2.21815e-05\n",
"Loss: 2.21796e-05\n",
"Loss: 2.21762e-05\n",
"Loss: 2.21880e-05\n",
"Loss: 2.21744e-05\n",
"Loss: 2.21710e-05\n",
"Loss: 2.21632e-05\n",
"Loss: 2.21452e-05\n",
"Loss: 2.21432e-05\n",
"Loss: 2.21324e-05\n",
"Loss: 2.21206e-05\n",
"Loss: 2.21107e-05\n",
"Loss: 2.21022e-05\n",
"Loss: 2.20944e-05\n",
"Loss: 2.20831e-05\n",
"Loss: 2.20710e-05\n",
"Loss: 2.20620e-05\n",
"Loss: 2.20530e-05\n",
"Loss: 2.20465e-05\n",
"Loss: 2.20410e-05\n",
"Loss: 2.20420e-05\n",
"Loss: 2.20341e-05\n",
"Loss: 2.20175e-05\n",
"Loss: 2.20029e-05\n",
"Loss: 2.19916e-05\n",
"Loss: 2.19824e-05\n",
"Loss: 2.19617e-05\n",
"Loss: 2.19561e-05\n",
"Loss: 2.19445e-05\n",
"Loss: 2.19390e-05\n",
"Loss: 2.19319e-05\n",
"Loss: 2.19247e-05\n",
"Loss: 2.19127e-05\n",
"Loss: 2.19014e-05\n",
"Loss: 2.18948e-05\n",
"Loss: 2.19089e-05\n",
"Loss: 2.18895e-05\n",
"Loss: 2.18811e-05\n",
"Loss: 2.18729e-05\n",
"Loss: 2.18812e-05\n",
"Loss: 2.18683e-05\n",
"Loss: 2.18607e-05\n",
"Loss: 2.18555e-05\n",
"Loss: 2.18512e-05\n",
"Loss: 2.18561e-05\n",
"Loss: 2.18496e-05\n",
"Loss: 2.18445e-05\n",
"Loss: 2.18382e-05\n",
"Loss: 2.18300e-05\n",
"Loss: 2.18221e-05\n",
"Loss: 2.18446e-05\n",
"Loss: 2.18159e-05\n",
"Loss: 2.18004e-05\n",
"Loss: 2.17888e-05\n",
"Loss: 2.17735e-05\n",
"Loss: 2.17728e-05\n",
"Loss: 2.17653e-05\n",
"Loss: 2.17633e-05\n",
"Loss: 2.17617e-05\n",
"Loss: 2.17535e-05\n",
"Loss: 2.17462e-05\n",
"Loss: 2.17494e-05\n",
"Loss: 2.17436e-05\n",
"Loss: 2.17390e-05\n",
"Loss: 2.17287e-05\n",
"Loss: 2.17174e-05\n",
"Loss: 2.17011e-05\n",
"Loss: 2.16846e-05\n",
"Loss: 2.16767e-05\n",
"Loss: 2.16689e-05\n",
"Loss: 2.16577e-05\n",
"Loss: 2.16514e-05\n",
"Loss: 2.16420e-05\n",
"Loss: 2.16354e-05\n",
"Loss: 2.16517e-05\n",
"Loss: 2.16314e-05\n",
"Loss: 2.16238e-05\n",
"Loss: 2.16171e-05\n",
"Loss: 2.16070e-05\n",
"Loss: 2.16198e-05\n",
"Loss: 2.16047e-05\n",
"Loss: 2.16015e-05\n",
"Loss: 2.15983e-05\n",
"Loss: 2.15910e-05\n",
"Loss: 2.15827e-05\n",
"Loss: 2.15687e-05\n",
"Loss: 2.15543e-05\n",
"Loss: 2.15442e-05\n",
"Loss: 2.15385e-05\n",
"Loss: 2.15271e-05\n",
"Loss: 2.15210e-05\n",
"Loss: 2.15145e-05\n",
"Loss: 2.15082e-05\n",
"Loss: 2.14966e-05\n",
"Loss: 2.14962e-05\n",
"Loss: 2.14910e-05\n",
"Loss: 2.14795e-05\n",
"Loss: 2.14738e-05\n",
"Loss: 2.14696e-05\n",
"Loss: 2.14777e-05\n",
"Loss: 2.14688e-05\n",
"Loss: 2.14662e-05\n",
"Loss: 2.14610e-05\n",
"Loss: 2.14553e-05\n",
"Loss: 2.14502e-05\n",
"Loss: 2.14346e-05\n",
"Loss: 2.14271e-05\n",
"Loss: 2.14183e-05\n",
"Loss: 2.14673e-05\n",
"Loss: 2.14168e-05\n",
"Loss: 2.14107e-05\n",
"Loss: 2.14103e-05\n",
"Loss: 2.14002e-05\n",
"Loss: 2.13916e-05\n",
"Loss: 2.13855e-05\n",
"Loss: 2.15563e-05\n",
"Loss: 2.13841e-05\n",
"Loss: 2.13822e-05\n",
"Loss: 2.13790e-05\n",
"Loss: 2.13756e-05\n",
"Loss: 2.13728e-05\n",
"Loss: 2.13701e-05\n",
"Loss: 2.13652e-05\n",
"Loss: 2.13616e-05\n",
"Loss: 2.13578e-05\n",
"Loss: 2.13523e-05\n",
"Loss: 2.13460e-05\n",
"Loss: 2.13418e-05\n",
"Loss: 2.13389e-05\n",
"Loss: 2.13358e-05\n",
"Loss: 2.13294e-05\n",
"Loss: 2.13386e-05\n",
"Loss: 2.13241e-05\n",
"Loss: 2.13180e-05\n",
"Loss: 2.13113e-05\n",
"Loss: 2.13043e-05\n",
"Loss: 2.12975e-05\n",
"Loss: 2.15478e-05\n",
"Loss: 2.12968e-05\n",
"Loss: 2.12932e-05\n",
"Loss: 2.12906e-05\n",
"Loss: 2.12872e-05\n",
"Loss: 2.12843e-05\n",
"Loss: 2.12794e-05\n",
"Loss: 2.12745e-05\n",
"Loss: 2.12672e-05\n",
"Loss: 2.12557e-05\n",
"Loss: 2.12449e-05\n",
"Loss: 2.12910e-05\n",
"Loss: 2.12400e-05\n",
"Loss: 2.12310e-05\n",
"Loss: 2.12254e-05\n",
"Loss: 2.12211e-05\n",
"Loss: 2.12131e-05\n",
"Loss: 2.12045e-05\n",
"Loss: 2.13241e-05\n",
"Loss: 2.12016e-05\n",
"Loss: 2.11967e-05\n",
"Loss: 2.11933e-05\n",
"Loss: 2.11900e-05\n",
"Loss: 2.11866e-05\n",
"Loss: 2.11804e-05\n",
"Loss: 2.11742e-05\n",
"Loss: 2.11693e-05\n",
"Loss: 2.11612e-05\n",
"Loss: 2.11572e-05\n",
"Loss: 2.11540e-05\n",
"Loss: 2.11517e-05\n",
"Loss: 2.11462e-05\n",
"Loss: 2.11405e-05\n",
"Loss: 2.11372e-05\n",
"Loss: 2.11346e-05\n",
"Loss: 2.11329e-05\n",
"Loss: 2.11302e-05\n",
"Loss: 2.11266e-05\n",
"Loss: 2.11221e-05\n",
"Loss: 2.11485e-05\n",
"Loss: 2.11202e-05\n",
"Loss: 2.11142e-05\n",
"Loss: 2.11101e-05\n",
"Loss: 2.11059e-05\n",
"Loss: 2.10991e-05\n",
"Loss: 2.10926e-05\n",
"Loss: 2.10901e-05\n",
"Loss: 2.10856e-05\n",
"Loss: 2.10809e-05\n",
"Loss: 2.10713e-05\n",
"Loss: 2.10628e-05\n",
"Loss: 2.10570e-05\n",
"Loss: 2.10496e-05\n",
"Loss: 2.10462e-05\n",
"Loss: 2.10328e-05\n",
"Loss: 2.10466e-05\n",
"Loss: 2.10312e-05\n",
"Loss: 2.10246e-05\n",
"Loss: 2.10182e-05\n",
"Loss: 2.10166e-05\n",
"Loss: 2.10065e-05\n",
"Loss: 2.09974e-05\n",
"Loss: 2.09896e-05\n",
"Loss: 2.09831e-05\n",
"Loss: 2.09836e-05\n",
"Loss: 2.09798e-05\n",
"Loss: 2.09715e-05\n",
"Loss: 2.09643e-05\n",
"Loss: 2.09554e-05\n",
"Loss: 2.09527e-05\n",
"Loss: 2.09431e-05\n",
"Loss: 2.09352e-05\n",
"Loss: 2.09266e-05\n",
"Loss: 2.10015e-05\n",
"Loss: 2.09244e-05\n",
"Loss: 2.09174e-05\n",
"Loss: 2.09053e-05\n",
"Loss: 2.10106e-05\n",
"Loss: 2.09004e-05\n",
"Loss: 2.08848e-05\n",
"Loss: 2.08771e-05\n",
"Loss: 2.08714e-05\n",
"Loss: 2.08638e-05\n",
"Loss: 2.08583e-05\n",
"Loss: 2.08509e-05\n",
"Loss: 2.08433e-05\n",
"Loss: 2.08378e-05\n",
"Loss: 2.08524e-05\n",
"Loss: 2.08356e-05\n",
"Loss: 2.08328e-05\n",
"Loss: 2.08313e-05\n",
"Loss: 2.08273e-05\n",
"Loss: 2.08266e-05\n",
"Loss: 2.08253e-05\n",
"Loss: 2.08335e-05\n",
"Loss: 2.08242e-05\n",
"Loss: 2.08206e-05\n",
"Loss: 2.08178e-05\n",
"Loss: 2.08190e-05\n",
"Loss: 2.08152e-05\n",
"Loss: 2.08126e-05\n",
"Loss: 2.08052e-05\n",
"Loss: 2.08035e-05\n",
"Loss: 2.08093e-05\n",
"Loss: 2.08024e-05\n",
"Loss: 2.08005e-05\n",
"Loss: 2.07990e-05\n",
"Loss: 2.07955e-05\n",
"Loss: 2.07896e-05\n",
"Loss: 2.08013e-05\n",
"Loss: 2.07862e-05\n",
"Loss: 2.07775e-05\n",
"Loss: 2.07747e-05\n",
"Loss: 2.07718e-05\n",
"Loss: 2.07685e-05\n",
"Loss: 2.07599e-05\n",
"Loss: 2.07780e-05\n",
"Loss: 2.07568e-05\n",
"Loss: 2.07505e-05\n",
"Loss: 2.07410e-05\n",
"Loss: 2.07391e-05\n",
"Loss: 2.07319e-05\n",
"Loss: 2.07249e-05\n",
"Loss: 2.07160e-05\n",
"Loss: 2.07083e-05\n",
"Loss: 2.07017e-05\n",
"Loss: 2.06930e-05\n",
"Loss: 2.07342e-05\n",
"Loss: 2.06911e-05\n",
"Loss: 2.06878e-05\n",
"Loss: 2.06769e-05\n",
"Loss: 2.06681e-05\n",
"Loss: 2.06607e-05\n",
"Loss: 2.06526e-05\n",
"Loss: 2.06377e-05\n",
"Loss: 2.06293e-05\n",
"Loss: 2.06257e-05\n",
"Loss: 2.06220e-05\n",
"Loss: 2.06205e-05\n",
"Loss: 2.06183e-05\n",
"Loss: 2.06163e-05\n",
"Loss: 2.06125e-05\n",
"Loss: 2.06105e-05\n",
"Loss: 2.06059e-05\n",
"Loss: 2.06010e-05\n",
"Loss: 2.05963e-05\n",
"Loss: 2.11399e-05\n",
"Loss: 2.05956e-05\n",
"Loss: 2.05912e-05\n",
"Loss: 2.05869e-05\n",
"Loss: 2.05836e-05\n",
"Loss: 2.05812e-05\n",
"Loss: 2.05777e-05\n",
"Loss: 2.05732e-05\n",
"Loss: 2.05705e-05\n",
"Loss: 2.46484e-05\n",
"Loss: 2.05699e-05\n",
"Loss: 2.05687e-05\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Loss: 2.05667e-05\n",
"Loss: 2.05630e-05\n",
"Loss: 2.05595e-05\n",
"Loss: 2.05550e-05\n",
"Loss: 2.06817e-05\n",
"Loss: 2.05540e-05\n",
"Loss: 2.05461e-05\n",
"Loss: 2.05403e-05\n",
"Loss: 2.05352e-05\n",
"Loss: 2.05285e-05\n",
"Loss: 2.06442e-05\n",
"Loss: 2.05268e-05\n",
"Loss: 2.05195e-05\n",
"Loss: 2.05115e-05\n",
"Loss: 2.05055e-05\n",
"Loss: 2.05002e-05\n",
"Loss: 2.11768e-05\n",
"Loss: 2.04989e-05\n",
"Loss: 2.04941e-05\n",
"Loss: 2.04904e-05\n",
"Loss: 2.04856e-05\n",
"Loss: 2.04822e-05\n",
"Loss: 2.04773e-05\n",
"Loss: 2.04727e-05\n",
"Loss: 2.04682e-05\n",
"Loss: 2.04651e-05\n",
"Loss: 2.04624e-05\n",
"Loss: 2.04659e-05\n",
"Loss: 2.04602e-05\n",
"Loss: 2.04589e-05\n",
"Loss: 2.04512e-05\n",
"Loss: 2.04427e-05\n",
"Loss: 2.04324e-05\n",
"Loss: 2.04232e-05\n",
"Loss: 2.04179e-05\n",
"Loss: 2.04133e-05\n",
"Loss: 2.04110e-05\n",
"Loss: 2.04083e-05\n",
"Loss: 2.04009e-05\n",
"Loss: 2.04087e-05\n",
"Loss: 2.03975e-05\n",
"Loss: 2.03897e-05\n",
"Loss: 2.03792e-05\n",
"Loss: 2.03830e-05\n",
"Loss: 2.03761e-05\n",
"Loss: 2.03652e-05\n",
"Loss: 2.03585e-05\n",
"Loss: 2.03502e-05\n",
"Loss: 2.04169e-05\n",
"Loss: 2.03498e-05\n",
"Loss: 2.03467e-05\n",
"Loss: 2.03426e-05\n",
"Loss: 2.03430e-05\n",
"Loss: 2.03404e-05\n",
"Loss: 2.03299e-05\n",
"Loss: 2.03591e-05\n",
"Loss: 2.03249e-05\n",
"Loss: 2.03136e-05\n",
"Loss: 2.03115e-05\n",
"Loss: 2.03049e-05\n",
"Loss: 2.03024e-05\n",
"Loss: 2.02972e-05\n",
"Loss: 2.02914e-05\n",
"Loss: 2.02849e-05\n",
"Loss: 2.02808e-05\n",
"Loss: 2.02804e-05\n",
"Loss: 2.02771e-05\n",
"Loss: 2.02746e-05\n",
"Loss: 2.02669e-05\n",
"Loss: 2.02623e-05\n",
"Loss: 2.02590e-05\n",
"Loss: 2.02569e-05\n",
"Loss: 2.02896e-05\n",
"Loss: 2.02527e-05\n",
"Loss: 2.02472e-05\n",
"Loss: 2.02433e-05\n",
"Loss: 2.02396e-05\n",
"Loss: 2.02363e-05\n",
"Loss: 2.02321e-05\n",
"Loss: 2.02262e-05\n",
"Loss: 2.02212e-05\n",
"Loss: 2.02162e-05\n",
"Loss: 2.02150e-05\n",
"Loss: 2.02110e-05\n",
"Loss: 2.02057e-05\n",
"Loss: 2.02206e-05\n",
"Loss: 2.02042e-05\n",
"Loss: 2.02062e-05\n",
"Loss: 2.02023e-05\n",
"Loss: 2.01986e-05\n",
"Loss: 2.01965e-05\n",
"Loss: 2.01942e-05\n",
"Loss: 2.01917e-05\n",
"Loss: 2.01861e-05\n",
"Loss: 2.01824e-05\n",
"Loss: 2.01780e-05\n",
"Loss: 2.01749e-05\n",
"Loss: 2.03699e-05\n",
"Loss: 2.01742e-05\n",
"Loss: 2.01685e-05\n",
"Loss: 2.02297e-05\n",
"Loss: 2.01643e-05\n",
"Loss: 2.01573e-05\n",
"Loss: 2.01501e-05\n",
"Loss: 2.01450e-05\n",
"Loss: 2.01408e-05\n",
"Loss: 2.01389e-05\n",
"Loss: 2.01362e-05\n",
"Loss: 2.01341e-05\n",
"Loss: 2.03400e-05\n",
"Loss: 2.01332e-05\n",
"Loss: 2.01319e-05\n",
"Loss: 2.01285e-05\n",
"Loss: 2.01260e-05\n",
"Loss: 2.01221e-05\n",
"Loss: 2.01183e-05\n",
"Loss: 2.01180e-05\n",
"Loss: 2.01154e-05\n",
"Loss: 2.01145e-05\n",
"Loss: 2.01108e-05\n",
"Loss: 2.01095e-05\n",
"Loss: 2.01070e-05\n",
"Loss: 2.01048e-05\n",
"Loss: 2.00992e-05\n",
"Loss: 2.00929e-05\n",
"Loss: 2.00884e-05\n",
"Loss: 2.00850e-05\n",
"Loss: 2.00798e-05\n",
"Loss: 2.00762e-05\n",
"Loss: 2.04828e-05\n",
"Loss: 2.00735e-05\n",
"Loss: 2.00709e-05\n",
"Loss: 2.00639e-05\n",
"Loss: 2.00580e-05\n",
"Loss: 2.00542e-05\n",
"Loss: 2.00536e-05\n",
"Loss: 2.00497e-05\n",
"Loss: 2.00482e-05\n",
"Loss: 2.00454e-05\n",
"Loss: 2.00429e-05\n",
"Loss: 2.00439e-05\n",
"Loss: 2.00393e-05\n",
"Loss: 2.02119e-05\n",
"Loss: 2.00324e-05\n",
"Loss: 2.00276e-05\n",
"Loss: 2.00247e-05\n",
"Loss: 2.00242e-05\n",
"Loss: 2.00224e-05\n",
"Loss: 2.00211e-05\n",
"Loss: 2.00189e-05\n",
"Loss: 2.00706e-05\n",
"Loss: 2.00180e-05\n",
"Loss: 2.00157e-05\n",
"Loss: 2.00130e-05\n",
"Loss: 2.00104e-05\n",
"Loss: 2.00068e-05\n",
"Loss: 2.00038e-05\n",
"Loss: 2.00167e-05\n",
"Loss: 2.00004e-05\n",
"Loss: 1.99961e-05\n",
"Loss: 1.99912e-05\n",
"Loss: 1.99890e-05\n",
"Loss: 1.99837e-05\n",
"Loss: 1.99798e-05\n",
"Loss: 1.99745e-05\n",
"Loss: 1.99733e-05\n",
"Loss: 1.99687e-05\n",
"Loss: 1.99664e-05\n",
"Loss: 1.99651e-05\n",
"Loss: 1.99647e-05\n",
"Loss: 1.99630e-05\n",
"Loss: 1.99610e-05\n",
"Loss: 1.99559e-05\n",
"Loss: 1.99533e-05\n",
"Loss: 1.99518e-05\n",
"Loss: 1.99497e-05\n",
"Loss: 1.99466e-05\n",
"Loss: 1.99441e-05\n",
"Loss: 1.99417e-05\n",
"Loss: 1.99384e-05\n",
"Loss: 1.99342e-05\n",
"Loss: 1.99317e-05\n",
"Loss: 1.99284e-05\n",
"Loss: 1.99247e-05\n",
"Loss: 1.99354e-05\n",
"Loss: 1.99226e-05\n",
"Loss: 1.99164e-05\n",
"Loss: 1.99132e-05\n",
"Loss: 1.99115e-05\n",
"Loss: 1.99104e-05\n",
"Loss: 1.99095e-05\n",
"Loss: 1.99070e-05\n",
"Loss: 1.99045e-05\n",
"Loss: 1.99010e-05\n",
"Loss: 1.98982e-05\n",
"Loss: 1.99011e-05\n",
"Loss: 1.98966e-05\n",
"Loss: 1.98947e-05\n",
"Loss: 1.98930e-05\n",
"Loss: 1.98916e-05\n",
"Loss: 1.98875e-05\n",
"Loss: 1.98842e-05\n",
"Loss: 1.98851e-05\n",
"Loss: 1.98817e-05\n",
"Loss: 1.98790e-05\n",
"Loss: 1.98764e-05\n",
"Loss: 1.98741e-05\n",
"Loss: 1.98683e-05\n",
"Loss: 1.98854e-05\n",
"Loss: 1.98662e-05\n",
"Loss: 1.98623e-05\n",
"Loss: 1.98579e-05\n",
"Loss: 1.98551e-05\n",
"Loss: 1.98510e-05\n",
"Loss: 1.98592e-05\n",
"Loss: 1.98492e-05\n",
"Loss: 1.98554e-05\n",
"Loss: 1.98458e-05\n",
"Loss: 1.98423e-05\n",
"Loss: 1.98409e-05\n",
"Loss: 1.98374e-05\n",
"Loss: 1.98344e-05\n",
"Loss: 1.98311e-05\n",
"Loss: 1.98289e-05\n",
"Loss: 1.98264e-05\n",
"Loss: 1.98225e-05\n",
"Loss: 1.98495e-05\n",
"Loss: 1.98202e-05\n",
"Loss: 1.98256e-05\n",
"Loss: 1.98186e-05\n",
"Loss: 1.98122e-05\n",
"Loss: 1.98088e-05\n",
"Loss: 1.98066e-05\n",
"Loss: 1.98043e-05\n",
"Loss: 1.98015e-05\n",
"Loss: 1.97980e-05\n",
"Loss: 1.98355e-05\n",
"Loss: 1.97966e-05\n",
"Loss: 1.97944e-05\n",
"Loss: 1.97904e-05\n",
"Loss: 1.97864e-05\n",
"Loss: 1.97907e-05\n",
"Loss: 1.97844e-05\n",
"Loss: 1.97816e-05\n",
"Loss: 1.97788e-05\n",
"Loss: 1.97758e-05\n",
"Loss: 1.97707e-05\n",
"Loss: 1.97910e-05\n",
"Loss: 1.97690e-05\n",
"Loss: 1.97638e-05\n",
"Loss: 1.97600e-05\n",
"Loss: 1.97568e-05\n",
"Loss: 1.97771e-05\n",
"Loss: 1.97532e-05\n",
"Loss: 1.97489e-05\n",
"Loss: 1.97430e-05\n",
"Loss: 1.97367e-05\n",
"Loss: 1.97327e-05\n",
"Loss: 1.97309e-05\n",
"Loss: 1.97283e-05\n",
"Loss: 1.97255e-05\n",
"Loss: 1.97170e-05\n",
"Loss: 1.97036e-05\n",
"Loss: 1.96866e-05\n",
"Loss: 1.97914e-05\n",
"Loss: 1.96808e-05\n",
"Loss: 1.96680e-05\n",
"Loss: 1.96574e-05\n",
"Loss: 1.96522e-05\n",
"Loss: 1.96487e-05\n",
"Loss: 1.96442e-05\n",
"Loss: 1.96398e-05\n",
"Loss: 1.96357e-05\n",
"Loss: 1.96323e-05\n",
"Loss: 1.96279e-05\n",
"Loss: 1.96233e-05\n",
"Loss: 1.96216e-05\n",
"Loss: 1.96190e-05\n",
"Loss: 1.96138e-05\n",
"Loss: 1.96090e-05\n",
"Loss: 1.96050e-05\n",
"Loss: 1.97113e-05\n",
"Loss: 1.96040e-05\n",
"Loss: 1.96008e-05\n",
"Loss: 1.95975e-05\n",
"Loss: 1.95926e-05\n",
"Loss: 1.95863e-05\n",
"Loss: 1.95806e-05\n",
"Loss: 1.96056e-05\n",
"Loss: 1.95796e-05\n",
"Loss: 1.95758e-05\n",
"Loss: 1.95732e-05\n",
"Loss: 1.95703e-05\n",
"Loss: 1.95643e-05\n",
"Loss: 1.95567e-05\n",
"Loss: 1.95507e-05\n",
"Loss: 1.95444e-05\n",
"Loss: 1.95407e-05\n",
"Loss: 1.95384e-05\n",
"Loss: 1.95314e-05\n",
"Loss: 1.95278e-05\n",
"Loss: 1.95240e-05\n",
"Loss: 1.95202e-05\n",
"Loss: 1.95084e-05\n",
"Loss: 1.95184e-05\n",
"Loss: 1.95027e-05\n",
"Loss: 1.94964e-05\n",
"Loss: 1.94916e-05\n",
"Loss: 1.94899e-05\n",
"Loss: 1.94887e-05\n",
"Loss: 1.94876e-05\n",
"Loss: 1.94868e-05\n",
"Loss: 1.94845e-05\n",
"Loss: 1.94825e-05\n",
"Loss: 1.94809e-05\n",
"Loss: 1.94785e-05\n",
"Loss: 1.94775e-05\n",
"Loss: 1.95137e-05\n",
"Loss: 1.94708e-05\n",
"Loss: 1.94618e-05\n",
"Loss: 1.94544e-05\n",
"Loss: 1.94489e-05\n",
"Loss: 1.94455e-05\n",
"Loss: 1.94390e-05\n",
"Loss: 1.94325e-05\n",
"Loss: 1.94265e-05\n",
"Loss: 1.94203e-05\n",
"Loss: 1.94401e-05\n",
"Loss: 1.94152e-05\n",
"Loss: 1.94078e-05\n",
"Loss: 1.94035e-05\n",
"Loss: 1.93999e-05\n",
"Loss: 1.93972e-05\n",
"Loss: 1.93928e-05\n",
"Loss: 1.93884e-05\n",
"Loss: 1.93890e-05\n",
"Loss: 1.93862e-05\n",
"Loss: 1.93817e-05\n",
"Loss: 1.93797e-05\n",
"Loss: 1.93770e-05\n",
"Loss: 1.93898e-05\n",
"Loss: 1.93762e-05\n",
"Loss: 1.93735e-05\n",
"Loss: 1.93675e-05\n",
"Loss: 1.93625e-05\n",
"Loss: 1.93579e-05\n",
"Loss: 1.93529e-05\n",
"Loss: 1.93433e-05\n",
"Loss: 1.94511e-05\n",
"Loss: 1.93418e-05\n",
"Loss: 1.93336e-05\n",
"Loss: 1.93283e-05\n",
"Loss: 1.93217e-05\n",
"Loss: 1.93174e-05\n",
"Loss: 1.93152e-05\n",
"Loss: 1.93122e-05\n",
"Loss: 1.93037e-05\n",
"Loss: 1.93016e-05\n",
"Loss: 1.92953e-05\n",
"Loss: 1.92906e-05\n",
"Loss: 1.92890e-05\n",
"Loss: 1.92874e-05\n",
"Loss: 1.92837e-05\n",
"Loss: 1.93364e-05\n",
"Loss: 1.92817e-05\n",
"Loss: 1.92768e-05\n",
"Loss: 1.92723e-05\n",
"Loss: 1.92661e-05\n",
"Loss: 1.92836e-05\n",
"Loss: 1.92632e-05\n",
"Loss: 1.92579e-05\n",
"Loss: 1.94477e-05\n",
"Loss: 1.92571e-05\n",
"Loss: 1.92539e-05\n",
"Loss: 1.92481e-05\n",
"Loss: 1.92391e-05\n",
"Loss: 1.92327e-05\n",
"Loss: 1.92280e-05\n",
"Loss: 1.92249e-05\n",
"Loss: 1.92265e-05\n",
"Loss: 1.92218e-05\n",
"Loss: 1.92151e-05\n",
"Loss: 1.92087e-05\n",
"Loss: 1.92040e-05\n",
"Loss: 1.91981e-05\n",
"Loss: 1.91891e-05\n",
"Loss: 1.91933e-05\n",
"Loss: 1.91869e-05\n",
"Loss: 1.91830e-05\n",
"Loss: 1.91924e-05\n",
"Loss: 1.91804e-05\n",
"Loss: 1.91783e-05\n",
"Loss: 1.91736e-05\n",
"Loss: 1.91647e-05\n",
"Loss: 1.91578e-05\n",
"Loss: 1.93257e-05\n",
"Loss: 1.91555e-05\n",
"Loss: 1.91525e-05\n",
"Loss: 1.91522e-05\n",
"Loss: 1.91503e-05\n",
"Loss: 1.91483e-05\n",
"Loss: 1.91465e-05\n",
"Loss: 1.91487e-05\n",
"Loss: 1.91453e-05\n",
"Loss: 1.91433e-05\n",
"Loss: 1.91411e-05\n",
"Loss: 1.91389e-05\n",
"Loss: 1.91366e-05\n",
"Loss: 1.91337e-05\n",
"Loss: 1.91314e-05\n",
"Loss: 1.91607e-05\n",
"Loss: 1.91273e-05\n",
"Loss: 1.91205e-05\n",
"Loss: 1.91123e-05\n",
"Loss: 1.91118e-05\n",
"Loss: 1.91096e-05\n",
"Loss: 1.91078e-05\n",
"Loss: 1.91068e-05\n",
"Loss: 1.90958e-05\n",
"Loss: 1.90920e-05\n",
"Loss: 1.90887e-05\n",
"Loss: 1.90850e-05\n",
"Loss: 1.90825e-05\n",
"Loss: 1.90808e-05\n",
"Loss: 1.90783e-05\n",
"Loss: 1.90741e-05\n",
"Loss: 1.90696e-05\n",
"Loss: 2.19220e-05\n",
"Loss: 1.90677e-05\n",
"Loss: 1.90654e-05\n",
"Loss: 1.90626e-05\n",
"Loss: 1.90613e-05\n",
"Loss: 1.90572e-05\n",
"Loss: 1.90850e-05\n",
"Loss: 1.90523e-05\n",
"Loss: 1.90467e-05\n",
"Loss: 1.90388e-05\n",
"Loss: 1.90342e-05\n",
"Loss: 1.90245e-05\n",
"Loss: 1.90174e-05\n",
"Loss: 1.90124e-05\n",
"Loss: 1.90067e-05\n",
"Loss: 1.90031e-05\n",
"Loss: 1.90000e-05\n",
"Loss: 1.89963e-05\n",
"Loss: 1.89920e-05\n",
"Loss: 1.89891e-05\n",
"Loss: 1.89864e-05\n",
"Loss: 1.89830e-05\n",
"Loss: 1.89795e-05\n",
"Loss: 1.89762e-05\n",
"Loss: 1.89647e-05\n",
"Loss: 1.89560e-05\n",
"Loss: 1.89465e-05\n",
"Loss: 1.89414e-05\n",
"Loss: 1.89361e-05\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Loss: 1.90136e-05\n",
"Loss: 1.89344e-05\n",
"Loss: 1.89307e-05\n",
"Loss: 1.89236e-05\n",
"Loss: 1.89208e-05\n",
"Loss: 1.89188e-05\n",
"Loss: 1.89119e-05\n",
"Loss: 1.89073e-05\n",
"Loss: 1.89038e-05\n",
"Loss: 1.89019e-05\n",
"Loss: 1.88997e-05\n",
"Loss: 1.88952e-05\n",
"Loss: 1.88915e-05\n",
"Loss: 1.88921e-05\n",
"Loss: 1.88894e-05\n",
"Loss: 1.88868e-05\n",
"Loss: 1.88831e-05\n",
"Loss: 1.88809e-05\n",
"Loss: 1.88795e-05\n",
"Loss: 1.88739e-05\n",
"Loss: 1.88663e-05\n",
"Loss: 1.88878e-05\n",
"Loss: 1.88641e-05\n",
"Loss: 1.88571e-05\n",
"Loss: 1.88479e-05\n",
"Loss: 1.88425e-05\n",
"Loss: 1.88340e-05\n",
"Loss: 1.88471e-05\n",
"Loss: 1.88274e-05\n",
"Loss: 1.88152e-05\n",
"Loss: 1.88140e-05\n",
"Loss: 1.88018e-05\n",
"Loss: 1.87952e-05\n",
"Loss: 1.87863e-05\n",
"Loss: 1.87779e-05\n",
"Loss: 1.87713e-05\n",
"Loss: 1.87615e-05\n",
"Loss: 1.87516e-05\n",
"Loss: 1.87453e-05\n",
"Loss: 1.87387e-05\n",
"Loss: 1.87219e-05\n",
"Loss: 1.87822e-05\n",
"Loss: 1.87120e-05\n",
"Loss: 1.87019e-05\n",
"Loss: 1.86949e-05\n",
"Loss: 1.86908e-05\n",
"Loss: 1.86799e-05\n",
"Loss: 1.86914e-05\n",
"Loss: 1.86713e-05\n",
"Loss: 1.86670e-05\n",
"Loss: 1.86602e-05\n",
"Loss: 1.86563e-05\n",
"Loss: 1.86520e-05\n",
"Loss: 1.86474e-05\n",
"Loss: 1.86411e-05\n",
"Loss: 1.86357e-05\n",
"Loss: 1.86298e-05\n",
"Loss: 1.86263e-05\n",
"Loss: 1.86232e-05\n",
"Loss: 1.86203e-05\n",
"Loss: 1.86291e-05\n",
"Loss: 1.86195e-05\n",
"Loss: 1.86172e-05\n",
"Loss: 1.86145e-05\n",
"Loss: 1.86092e-05\n",
"Loss: 1.86027e-05\n",
"Loss: 1.85960e-05\n",
"Loss: 1.85890e-05\n",
"Loss: 1.85776e-05\n",
"Loss: 1.85679e-05\n",
"Loss: 1.85602e-05\n",
"Loss: 1.87029e-05\n",
"Loss: 1.85575e-05\n",
"Loss: 1.85530e-05\n",
"Loss: 1.85441e-05\n",
"Loss: 1.85376e-05\n",
"Loss: 1.86436e-05\n",
"Loss: 1.85358e-05\n",
"Loss: 1.85294e-05\n",
"Loss: 1.85217e-05\n",
"Loss: 1.87500e-05\n",
"Loss: 1.85202e-05\n",
"Loss: 1.85151e-05\n",
"Loss: 1.85079e-05\n",
"Loss: 1.88809e-05\n",
"Loss: 1.85019e-05\n",
"Loss: 1.84942e-05\n",
"Loss: 1.84877e-05\n",
"Loss: 1.84803e-05\n",
"Loss: 1.84699e-05\n",
"Loss: 1.84616e-05\n",
"Loss: 1.84541e-05\n",
"Loss: 1.84459e-05\n",
"Loss: 1.84385e-05\n",
"Loss: 1.84466e-05\n",
"Loss: 1.84362e-05\n",
"Loss: 1.84314e-05\n",
"Loss: 1.84248e-05\n",
"Loss: 1.84187e-05\n",
"Loss: 1.84077e-05\n",
"Loss: 1.84058e-05\n",
"Loss: 1.84365e-05\n",
"Loss: 1.83951e-05\n",
"Loss: 1.83895e-05\n",
"Loss: 1.83846e-05\n",
"Loss: 1.83802e-05\n",
"Loss: 1.83775e-05\n",
"Loss: 1.83732e-05\n",
"Loss: 1.83705e-05\n",
"Loss: 1.83800e-05\n",
"Loss: 1.83675e-05\n",
"Loss: 1.83752e-05\n",
"Loss: 1.83627e-05\n",
"Loss: 1.83574e-05\n",
"Loss: 1.83483e-05\n",
"Loss: 1.83495e-05\n",
"Loss: 1.83460e-05\n",
"Loss: 1.83441e-05\n",
"Loss: 1.83411e-05\n",
"Loss: 1.83375e-05\n",
"Loss: 1.83328e-05\n",
"Loss: 1.83220e-05\n",
"Loss: 1.83226e-05\n",
"Loss: 1.83187e-05\n",
"Loss: 1.83155e-05\n",
"Loss: 1.83084e-05\n",
"Loss: 1.83037e-05\n",
"Loss: 1.82986e-05\n",
"Loss: 1.82903e-05\n",
"Loss: 1.82858e-05\n",
"Loss: 1.82785e-05\n",
"Loss: 1.82728e-05\n",
"Loss: 1.82657e-05\n",
"Loss: 1.82625e-05\n",
"Loss: 1.82601e-05\n",
"Loss: 1.82577e-05\n",
"Loss: 1.82552e-05\n",
"Loss: 1.82497e-05\n",
"Loss: 1.82412e-05\n",
"Loss: 1.82322e-05\n",
"Loss: 1.82255e-05\n",
"Loss: 1.82186e-05\n",
"Loss: 1.82156e-05\n",
"Loss: 1.82116e-05\n",
"Loss: 1.82073e-05\n",
"Loss: 1.81981e-05\n",
"Loss: 1.81987e-05\n",
"Loss: 1.81910e-05\n",
"Loss: 1.81795e-05\n",
"Loss: 1.81732e-05\n",
"Loss: 1.81681e-05\n",
"Loss: 1.81625e-05\n",
"Loss: 1.81571e-05\n",
"Loss: 1.81768e-05\n",
"Loss: 1.81557e-05\n",
"Loss: 1.81525e-05\n",
"Loss: 1.81484e-05\n",
"Loss: 1.81459e-05\n",
"Loss: 1.81441e-05\n",
"Loss: 1.81387e-05\n",
"Loss: 1.81872e-05\n",
"Loss: 1.81336e-05\n",
"Loss: 1.81244e-05\n",
"Loss: 1.81168e-05\n",
"Loss: 1.81096e-05\n",
"Loss: 1.81304e-05\n",
"Loss: 1.81070e-05\n",
"Loss: 1.80989e-05\n",
"Loss: 1.80917e-05\n",
"Loss: 1.80817e-05\n",
"Loss: 1.80737e-05\n",
"Loss: 1.80636e-05\n",
"Loss: 1.80518e-05\n",
"Loss: 1.80579e-05\n",
"Loss: 1.80480e-05\n",
"Loss: 1.80421e-05\n",
"Loss: 1.80380e-05\n",
"Loss: 1.80344e-05\n",
"Loss: 1.80307e-05\n",
"Loss: 1.80291e-05\n",
"Loss: 1.80281e-05\n",
"Loss: 1.80237e-05\n",
"Loss: 1.80204e-05\n",
"Loss: 1.80176e-05\n",
"Loss: 1.80156e-05\n",
"Loss: 1.80211e-05\n",
"Loss: 1.80136e-05\n",
"Loss: 1.80110e-05\n",
"Loss: 1.80076e-05\n",
"Loss: 1.80031e-05\n",
"Loss: 1.80126e-05\n",
"Loss: 1.79995e-05\n",
"Loss: 1.79934e-05\n",
"Loss: 1.79883e-05\n",
"Loss: 1.79863e-05\n",
"Loss: 1.79825e-05\n",
"Loss: 1.79796e-05\n",
"Loss: 1.79762e-05\n",
"Loss: 1.79711e-05\n",
"Loss: 1.80613e-05\n",
"Loss: 1.79698e-05\n",
"Loss: 1.79648e-05\n",
"Loss: 1.79621e-05\n",
"Loss: 1.79572e-05\n",
"Loss: 1.79532e-05\n",
"Loss: 1.79459e-05\n",
"Loss: 1.79471e-05\n",
"Loss: 1.79415e-05\n",
"Loss: 1.79347e-05\n",
"Loss: 1.79304e-05\n",
"Loss: 1.79212e-05\n",
"Loss: 1.79135e-05\n",
"Loss: 1.79028e-05\n",
"Loss: 1.78996e-05\n",
"Loss: 1.78974e-05\n",
"Loss: 1.78936e-05\n",
"Loss: 1.78893e-05\n",
"Loss: 1.78845e-05\n",
"Loss: 1.78807e-05\n",
"Loss: 1.78689e-05\n",
"Loss: 1.78614e-05\n",
"Loss: 1.78544e-05\n",
"Loss: 1.78484e-05\n",
"Loss: 1.78430e-05\n",
"Loss: 1.78383e-05\n",
"Loss: 1.78360e-05\n",
"Loss: 1.78336e-05\n",
"Loss: 1.78266e-05\n",
"Loss: 1.78190e-05\n",
"Loss: 1.78233e-05\n",
"Loss: 1.78130e-05\n",
"Loss: 1.78208e-05\n",
"Loss: 1.78060e-05\n",
"Loss: 1.77972e-05\n",
"Loss: 1.78004e-05\n",
"Loss: 1.77952e-05\n",
"Loss: 1.77921e-05\n",
"Loss: 1.77897e-05\n",
"Loss: 1.77946e-05\n",
"Loss: 1.77869e-05\n",
"Loss: 1.77838e-05\n",
"Loss: 1.77800e-05\n",
"Loss: 1.77775e-05\n",
"Loss: 1.79016e-05\n",
"Loss: 1.77752e-05\n",
"Loss: 1.77729e-05\n",
"Loss: 1.77662e-05\n",
"Loss: 1.77634e-05\n",
"Loss: 1.77597e-05\n",
"Loss: 1.77542e-05\n",
"Loss: 1.77518e-05\n",
"Loss: 1.77483e-05\n",
"Loss: 1.77503e-05\n",
"Loss: 1.77474e-05\n",
"Loss: 1.77462e-05\n",
"Loss: 1.77439e-05\n",
"Loss: 1.77395e-05\n",
"Loss: 1.77389e-05\n",
"Loss: 1.77333e-05\n",
"Loss: 1.77308e-05\n",
"Loss: 1.77279e-05\n",
"Loss: 1.77269e-05\n",
"Loss: 1.77250e-05\n",
"Loss: 1.77212e-05\n",
"Loss: 1.77162e-05\n",
"Loss: 1.77132e-05\n",
"Loss: 1.77118e-05\n",
"Loss: 1.77075e-05\n",
"Loss: 1.77030e-05\n",
"Loss: 1.76987e-05\n",
"Loss: 1.77600e-05\n",
"Loss: 1.76975e-05\n",
"Loss: 1.76948e-05\n",
"Loss: 1.76912e-05\n",
"Loss: 1.76896e-05\n",
"Loss: 1.76862e-05\n",
"Loss: 1.76811e-05\n",
"Loss: 1.76765e-05\n",
"Loss: 1.76713e-05\n",
"Loss: 1.76662e-05\n",
"Loss: 1.76601e-05\n",
"Loss: 1.76537e-05\n",
"Loss: 1.77112e-05\n",
"Loss: 1.76526e-05\n",
"Loss: 1.76484e-05\n",
"Loss: 1.76427e-05\n",
"Loss: 1.76386e-05\n",
"Loss: 1.76331e-05\n",
"Loss: 1.76285e-05\n",
"Loss: 1.76253e-05\n",
"Loss: 1.76304e-05\n",
"Loss: 1.76230e-05\n",
"Loss: 1.76211e-05\n",
"Loss: 1.76179e-05\n",
"Loss: 1.76130e-05\n",
"Loss: 1.76095e-05\n",
"Loss: 1.89686e-05\n",
"Loss: 1.76091e-05\n",
"Loss: 1.76060e-05\n",
"Loss: 1.76010e-05\n",
"Loss: 1.75962e-05\n",
"Loss: 1.75933e-05\n",
"Loss: 1.75889e-05\n",
"Loss: 1.75850e-05\n",
"Loss: 1.75934e-05\n",
"Loss: 1.75831e-05\n",
"Loss: 1.75795e-05\n",
"Loss: 1.75725e-05\n",
"Loss: 1.75829e-05\n",
"Loss: 1.75696e-05\n",
"Loss: 1.75668e-05\n",
"Loss: 1.75645e-05\n",
"Loss: 1.75620e-05\n",
"Loss: 1.75597e-05\n",
"Loss: 1.75544e-05\n",
"Loss: 1.75572e-05\n",
"Loss: 1.75515e-05\n",
"Loss: 1.75458e-05\n",
"Loss: 1.75407e-05\n",
"Loss: 1.75352e-05\n",
"Loss: 1.75741e-05\n",
"Loss: 1.75337e-05\n",
"Loss: 1.75263e-05\n",
"Loss: 1.75158e-05\n",
"Loss: 1.75038e-05\n",
"Loss: 1.74957e-05\n",
"Loss: 1.74896e-05\n",
"Loss: 1.74856e-05\n",
"Loss: 1.74825e-05\n",
"Loss: 1.74803e-05\n",
"Loss: 1.74788e-05\n",
"Loss: 1.74770e-05\n",
"Loss: 1.74758e-05\n",
"Loss: 1.74748e-05\n",
"Loss: 1.74815e-05\n",
"Loss: 1.74732e-05\n",
"Loss: 1.74692e-05\n",
"Loss: 1.74863e-05\n",
"Loss: 1.74670e-05\n",
"Loss: 1.74645e-05\n",
"Loss: 1.74625e-05\n",
"Loss: 1.74607e-05\n",
"Loss: 1.74667e-05\n",
"Loss: 1.74585e-05\n",
"Loss: 1.74533e-05\n",
"Loss: 1.74511e-05\n",
"Loss: 1.74501e-05\n",
"Loss: 1.74486e-05\n",
"Loss: 1.74457e-05\n",
"Loss: 1.74410e-05\n",
"Loss: 1.74345e-05\n",
"Loss: 1.74323e-05\n",
"Loss: 1.74303e-05\n",
"Loss: 1.74286e-05\n",
"Loss: 1.74255e-05\n",
"Loss: 1.74212e-05\n",
"Loss: 1.84494e-05\n",
"Loss: 1.74209e-05\n",
"Loss: 1.74178e-05\n",
"Loss: 1.74160e-05\n",
"Loss: 1.74134e-05\n",
"Loss: 1.74107e-05\n",
"Loss: 1.74066e-05\n",
"Loss: 1.74035e-05\n",
"Loss: 1.74006e-05\n",
"Loss: 1.73946e-05\n",
"Loss: 1.73898e-05\n",
"Loss: 1.73844e-05\n",
"Loss: 1.74489e-05\n",
"Loss: 1.73816e-05\n",
"Loss: 1.73783e-05\n",
"Loss: 1.73731e-05\n",
"Loss: 1.73680e-05\n",
"Loss: 1.73614e-05\n",
"Loss: 1.73984e-05\n",
"Loss: 1.73607e-05\n",
"Loss: 1.73562e-05\n",
"Loss: 1.73621e-05\n",
"Loss: 1.73507e-05\n",
"Loss: 1.73438e-05\n",
"Loss: 1.73476e-05\n",
"Loss: 1.73396e-05\n",
"Loss: 1.73367e-05\n",
"Loss: 1.73323e-05\n",
"Loss: 1.73283e-05\n",
"Loss: 1.73238e-05\n",
"Loss: 1.73151e-05\n",
"Loss: 1.73499e-05\n",
"Loss: 1.73104e-05\n",
"Loss: 1.73041e-05\n",
"Loss: 1.73002e-05\n",
"Loss: 1.72976e-05\n",
"Loss: 1.72954e-05\n",
"Loss: 1.72927e-05\n",
"Loss: 1.72900e-05\n",
"Loss: 1.72869e-05\n",
"Loss: 1.72799e-05\n",
"Loss: 1.72716e-05\n",
"Loss: 1.72655e-05\n",
"Loss: 1.72640e-05\n",
"Loss: 1.73254e-05\n",
"Loss: 1.72626e-05\n",
"Loss: 1.72569e-05\n",
"Loss: 1.72536e-05\n",
"Loss: 1.72619e-05\n",
"Loss: 1.72519e-05\n",
"Loss: 1.72474e-05\n",
"Loss: 1.72421e-05\n",
"Loss: 1.72301e-05\n",
"Loss: 1.73370e-05\n",
"Loss: 1.72273e-05\n",
"Loss: 1.72156e-05\n",
"Loss: 1.72075e-05\n",
"Loss: 1.71920e-05\n",
"Loss: 1.71794e-05\n",
"Loss: 1.71713e-05\n",
"Loss: 1.71598e-05\n",
"Loss: 1.71585e-05\n",
"Loss: 1.71518e-05\n",
"Loss: 1.71478e-05\n",
"Loss: 1.71438e-05\n",
"Loss: 1.71377e-05\n",
"Loss: 1.71421e-05\n",
"Loss: 1.71348e-05\n",
"Loss: 1.71276e-05\n",
"Loss: 1.71229e-05\n",
"Loss: 1.71151e-05\n",
"Loss: 1.71086e-05\n",
"Loss: 1.71053e-05\n",
"Loss: 1.71033e-05\n",
"Loss: 1.71020e-05\n",
"Loss: 1.70990e-05\n",
"Loss: 1.70972e-05\n",
"Loss: 1.70961e-05\n",
"Loss: 1.70935e-05\n",
"Loss: 1.70915e-05\n",
"Loss: 1.70893e-05\n",
"Loss: 1.70872e-05\n",
"Loss: 1.70830e-05\n",
"Loss: 1.70789e-05\n",
"Loss: 1.70761e-05\n",
"Loss: 1.70734e-05\n",
"Loss: 1.70700e-05\n",
"Loss: 1.70642e-05\n",
"Loss: 1.70528e-05\n",
"Loss: 1.70458e-05\n",
"Loss: 1.70352e-05\n",
"Loss: 1.70310e-05\n",
"Loss: 1.70284e-05\n",
"Loss: 1.70234e-05\n",
"Loss: 1.70168e-05\n",
"Loss: 1.70066e-05\n",
"Loss: 1.69969e-05\n",
"Loss: 1.69885e-05\n",
"Loss: 1.69805e-05\n",
"Loss: 1.77627e-05\n",
"Loss: 1.69802e-05\n",
"Loss: 1.69762e-05\n",
"Loss: 1.69692e-05\n",
"Loss: 1.70508e-05\n",
"Loss: 1.69684e-05\n",
"Loss: 1.69652e-05\n",
"Loss: 1.69591e-05\n",
"Loss: 1.69492e-05\n",
"Loss: 1.69395e-05\n",
"Loss: 1.69310e-05\n",
"Loss: 1.69252e-05\n",
"Loss: 1.69219e-05\n",
"Loss: 1.69197e-05\n",
"Loss: 1.69173e-05\n",
"Loss: 1.69108e-05\n",
"Loss: 1.69472e-05\n",
"Loss: 1.69089e-05\n",
"Loss: 1.69039e-05\n",
"Loss: 1.69004e-05\n",
"Loss: 1.68982e-05\n",
"Loss: 1.68954e-05\n",
"Loss: 1.68908e-05\n",
"Loss: 1.68882e-05\n",
"Loss: 1.68849e-05\n",
"Loss: 1.68827e-05\n",
"Loss: 1.68769e-05\n",
"Loss: 1.68788e-05\n",
"Loss: 1.68731e-05\n",
"Loss: 1.68700e-05\n",
"Loss: 1.68683e-05\n",
"Loss: 1.68641e-05\n",
"Loss: 1.68615e-05\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Loss: 1.68651e-05\n",
"Loss: 1.68594e-05\n",
"Loss: 1.68554e-05\n",
"Loss: 1.68522e-05\n",
"Loss: 1.68542e-05\n",
"Loss: 1.68493e-05\n",
"Loss: 1.68424e-05\n",
"Loss: 1.68366e-05\n",
"Loss: 1.68998e-05\n",
"Loss: 1.68338e-05\n",
"Loss: 1.68295e-05\n",
"Loss: 1.68266e-05\n",
"Loss: 1.68202e-05\n",
"Loss: 1.68152e-05\n",
"Loss: 1.68126e-05\n",
"Loss: 1.68096e-05\n",
"Loss: 1.68041e-05\n",
"Loss: 1.67985e-05\n",
"Loss: 1.70436e-05\n",
"Loss: 1.67950e-05\n",
"Loss: 1.67891e-05\n",
"Loss: 1.67860e-05\n",
"Loss: 1.67813e-05\n",
"Loss: 1.67757e-05\n",
"Loss: 1.67707e-05\n",
"Loss: 1.67654e-05\n",
"Loss: 1.67664e-05\n",
"Loss: 1.67638e-05\n",
"Loss: 1.67609e-05\n",
"Loss: 1.67554e-05\n",
"Loss: 1.67504e-05\n",
"Loss: 1.67431e-05\n",
"Loss: 1.67694e-05\n",
"Loss: 1.67389e-05\n",
"Loss: 1.67339e-05\n",
"Loss: 1.67225e-05\n",
"Loss: 1.67184e-05\n",
"Loss: 1.67143e-05\n",
"Loss: 1.67100e-05\n",
"Loss: 1.67056e-05\n",
"Loss: 1.66940e-05\n",
"Loss: 1.66860e-05\n",
"Loss: 1.66801e-05\n",
"Loss: 1.66741e-05\n",
"Loss: 1.66713e-05\n",
"Loss: 1.66658e-05\n",
"Loss: 1.66618e-05\n",
"Loss: 1.66565e-05\n",
"Loss: 1.66521e-05\n",
"Loss: 1.66484e-05\n",
"Loss: 1.66456e-05\n",
"Loss: 1.66399e-05\n",
"Loss: 1.66352e-05\n",
"Loss: 1.66304e-05\n",
"Loss: 1.66267e-05\n",
"Loss: 1.66229e-05\n",
"Loss: 1.66161e-05\n",
"Loss: 1.66077e-05\n",
"Loss: 1.66076e-05\n",
"Loss: 1.66031e-05\n",
"Loss: 1.65985e-05\n",
"Loss: 1.65965e-05\n",
"Loss: 1.65940e-05\n",
"Loss: 1.65878e-05\n",
"Loss: 1.65835e-05\n",
"Loss: 1.65733e-05\n",
"Loss: 1.65673e-05\n",
"Loss: 1.66436e-05\n",
"Loss: 1.65646e-05\n",
"Loss: 1.65587e-05\n",
"Loss: 1.65544e-05\n",
"Loss: 1.65516e-05\n",
"Loss: 1.65519e-05\n",
"Loss: 1.65495e-05\n",
"Loss: 1.65484e-05\n",
"Loss: 1.65469e-05\n",
"Loss: 1.65464e-05\n",
"Loss: 1.65450e-05\n",
"Loss: 1.65472e-05\n",
"Loss: 1.65438e-05\n",
"Loss: 1.65414e-05\n",
"Loss: 1.65377e-05\n",
"Loss: 1.65320e-05\n",
"Loss: 1.65347e-05\n",
"Loss: 1.65316e-05\n",
"Loss: 1.65300e-05\n",
"Loss: 1.65283e-05\n",
"Loss: 1.65263e-05\n",
"Loss: 1.65212e-05\n",
"Loss: 1.65157e-05\n",
"Loss: 1.65080e-05\n",
"Loss: 1.65122e-05\n",
"Loss: 1.65030e-05\n",
"Loss: 1.64954e-05\n",
"Loss: 1.64873e-05\n",
"Loss: 1.64842e-05\n",
"Loss: 1.65426e-05\n",
"Loss: 1.64829e-05\n",
"Loss: 1.64793e-05\n",
"Loss: 1.64743e-05\n",
"Loss: 1.64697e-05\n",
"Loss: 1.64688e-05\n",
"Loss: 1.64652e-05\n",
"Loss: 1.64631e-05\n",
"Loss: 1.64583e-05\n",
"Loss: 1.64507e-05\n",
"Loss: 1.65049e-05\n",
"Loss: 1.64486e-05\n",
"Loss: 1.64386e-05\n",
"Loss: 1.64319e-05\n",
"Loss: 1.64241e-05\n",
"Loss: 1.64194e-05\n",
"Loss: 1.64409e-05\n",
"Loss: 1.64176e-05\n",
"Loss: 1.64120e-05\n",
"Loss: 1.64075e-05\n",
"Loss: 1.64036e-05\n",
"Loss: 1.63997e-05\n",
"Loss: 1.63936e-05\n",
"Loss: 1.63981e-05\n",
"Loss: 1.63914e-05\n",
"Loss: 1.63875e-05\n",
"Loss: 1.63813e-05\n",
"Loss: 1.63742e-05\n",
"Loss: 1.63654e-05\n",
"Loss: 1.63598e-05\n",
"Loss: 1.63553e-05\n",
"Loss: 1.63407e-05\n",
"Loss: 1.63295e-05\n",
"Loss: 1.64310e-05\n",
"Loss: 1.63275e-05\n",
"Loss: 1.63196e-05\n",
"Loss: 1.63163e-05\n",
"Loss: 1.63129e-05\n",
"Loss: 1.63092e-05\n",
"Loss: 1.63379e-05\n",
"Loss: 1.63083e-05\n",
"Loss: 1.63037e-05\n",
"Loss: 1.62995e-05\n",
"Loss: 1.62952e-05\n",
"Loss: 1.62919e-05\n",
"Loss: 1.62850e-05\n",
"Loss: 1.62986e-05\n",
"Loss: 1.62838e-05\n",
"Loss: 1.62798e-05\n",
"Loss: 1.62781e-05\n",
"Loss: 1.62758e-05\n",
"Loss: 1.62734e-05\n",
"Loss: 1.62660e-05\n",
"Loss: 1.62600e-05\n",
"Loss: 1.62551e-05\n",
"Loss: 1.62506e-05\n",
"Loss: 1.62546e-05\n",
"Loss: 1.62478e-05\n",
"Loss: 1.62429e-05\n",
"Loss: 1.62378e-05\n",
"Loss: 1.62317e-05\n",
"Loss: 1.62288e-05\n",
"Loss: 1.62245e-05\n",
"Loss: 1.62577e-05\n",
"Loss: 1.62201e-05\n",
"Loss: 1.62168e-05\n",
"Loss: 1.62058e-05\n",
"Loss: 1.61977e-05\n",
"Loss: 1.61854e-05\n",
"Loss: 1.61766e-05\n",
"Loss: 1.62695e-05\n",
"Loss: 1.61724e-05\n",
"Loss: 1.61665e-05\n",
"Loss: 1.61570e-05\n",
"Loss: 1.61540e-05\n",
"Loss: 1.61518e-05\n",
"Loss: 1.61497e-05\n",
"Loss: 1.61428e-05\n",
"Loss: 1.61370e-05\n",
"Loss: 1.61318e-05\n",
"Loss: 1.61302e-05\n",
"Loss: 1.61256e-05\n",
"Loss: 1.61198e-05\n",
"Loss: 1.61153e-05\n",
"Loss: 1.61123e-05\n",
"Loss: 1.61096e-05\n",
"Loss: 1.61050e-05\n",
"Loss: 1.61025e-05\n",
"Loss: 1.60986e-05\n",
"Loss: 1.60965e-05\n",
"Loss: 1.61067e-05\n",
"Loss: 1.60951e-05\n",
"Loss: 1.60917e-05\n",
"Loss: 1.60894e-05\n",
"Loss: 1.60849e-05\n",
"Loss: 1.60808e-05\n",
"Loss: 1.60782e-05\n",
"Loss: 1.61111e-05\n",
"Loss: 1.60758e-05\n",
"Loss: 1.60732e-05\n",
"Loss: 1.60691e-05\n",
"Loss: 1.60645e-05\n",
"Loss: 1.60616e-05\n",
"Loss: 1.60589e-05\n",
"Loss: 1.60561e-05\n",
"Loss: 1.60540e-05\n",
"Loss: 1.60515e-05\n",
"Loss: 1.60481e-05\n",
"Loss: 1.60813e-05\n",
"Loss: 1.60449e-05\n",
"Loss: 1.60388e-05\n",
"Loss: 1.60756e-05\n",
"Loss: 1.60350e-05\n",
"Loss: 1.60259e-05\n",
"Loss: 1.60213e-05\n",
"Loss: 1.60212e-05\n",
"Loss: 1.60166e-05\n",
"Loss: 1.60141e-05\n",
"Loss: 1.60099e-05\n",
"Loss: 1.60072e-05\n",
"Loss: 1.60066e-05\n",
"Loss: 1.60334e-05\n",
"Loss: 1.60058e-05\n",
"Loss: 1.60032e-05\n",
"Loss: 1.59995e-05\n",
"Loss: 1.59859e-05\n",
"Loss: 1.59777e-05\n",
"Loss: 1.59681e-05\n",
"Loss: 1.59787e-05\n",
"Loss: 1.59655e-05\n",
"Loss: 1.59577e-05\n",
"Loss: 1.59494e-05\n",
"Loss: 1.59450e-05\n",
"Loss: 1.59396e-05\n",
"Loss: 1.59593e-05\n",
"Loss: 1.59351e-05\n",
"Loss: 1.59322e-05\n",
"Loss: 1.59249e-05\n",
"Loss: 1.62470e-05\n",
"Loss: 1.59240e-05\n",
"Loss: 1.59185e-05\n",
"Loss: 1.59129e-05\n",
"Loss: 1.59059e-05\n",
"Loss: 1.58940e-05\n",
"Loss: 1.58773e-05\n",
"Loss: 1.58648e-05\n",
"Loss: 1.58525e-05\n",
"Loss: 1.58445e-05\n",
"Loss: 1.58408e-05\n",
"Loss: 1.58334e-05\n",
"Loss: 1.65430e-05\n",
"Loss: 1.58325e-05\n",
"Loss: 1.58268e-05\n",
"Loss: 1.58219e-05\n",
"Loss: 1.58177e-05\n",
"Loss: 1.58149e-05\n",
"Loss: 1.58178e-05\n",
"Loss: 1.58137e-05\n",
"Loss: 1.58115e-05\n",
"Loss: 1.58083e-05\n",
"Loss: 1.58057e-05\n",
"Loss: 1.58014e-05\n",
"Loss: 1.58004e-05\n",
"Loss: 1.59243e-05\n",
"Loss: 1.57984e-05\n",
"Loss: 1.57919e-05\n",
"Loss: 1.57860e-05\n",
"Loss: 1.57800e-05\n",
"Loss: 1.57970e-05\n",
"Loss: 1.57761e-05\n",
"Loss: 1.57725e-05\n",
"Loss: 1.57643e-05\n",
"Loss: 1.57566e-05\n",
"Loss: 1.57574e-05\n",
"Loss: 1.57512e-05\n",
"Loss: 1.57440e-05\n",
"Loss: 1.57383e-05\n",
"Loss: 1.57344e-05\n",
"Loss: 1.57310e-05\n",
"Loss: 1.57291e-05\n",
"Loss: 1.57277e-05\n",
"Loss: 1.57268e-05\n",
"Loss: 1.57246e-05\n",
"Loss: 1.57223e-05\n",
"Loss: 1.57205e-05\n",
"Loss: 1.57174e-05\n",
"Loss: 1.57161e-05\n",
"Loss: 1.57211e-05\n",
"Loss: 1.57154e-05\n",
"Loss: 1.57127e-05\n",
"Loss: 1.57085e-05\n",
"Loss: 1.57046e-05\n",
"Loss: 1.57532e-05\n",
"Loss: 1.57015e-05\n",
"Loss: 1.56972e-05\n",
"Loss: 1.56914e-05\n",
"Loss: 1.56891e-05\n",
"Loss: 1.56845e-05\n",
"Loss: 1.56810e-05\n",
"Loss: 1.56793e-05\n",
"Loss: 1.56739e-05\n",
"Loss: 1.56713e-05\n",
"Loss: 1.56696e-05\n",
"Loss: 1.56684e-05\n",
"Loss: 1.56657e-05\n",
"Loss: 1.56699e-05\n",
"Loss: 1.56637e-05\n",
"Loss: 1.56604e-05\n",
"Loss: 1.56553e-05\n",
"Loss: 1.56572e-05\n",
"Loss: 1.56543e-05\n",
"Loss: 1.56516e-05\n",
"Loss: 1.56499e-05\n",
"Loss: 1.56489e-05\n",
"Loss: 1.56470e-05\n",
"Loss: 1.56446e-05\n",
"Loss: 1.56506e-05\n",
"Loss: 1.56437e-05\n",
"Loss: 1.56420e-05\n",
"Loss: 1.56404e-05\n",
"Loss: 1.56385e-05\n",
"Loss: 1.56354e-05\n",
"Loss: 1.56307e-05\n",
"Loss: 1.56247e-05\n",
"Loss: 1.56231e-05\n",
"Loss: 1.56182e-05\n",
"Loss: 1.56159e-05\n",
"Loss: 1.56117e-05\n",
"Loss: 1.56033e-05\n",
"Loss: 1.55975e-05\n",
"Loss: 1.55932e-05\n",
"Loss: 1.55911e-05\n",
"Loss: 1.55869e-05\n",
"Loss: 1.55824e-05\n",
"Loss: 1.55781e-05\n",
"Loss: 1.55739e-05\n",
"Loss: 1.55689e-05\n",
"Loss: 1.55657e-05\n",
"Loss: 1.55640e-05\n",
"Loss: 1.55585e-05\n",
"Loss: 1.55569e-05\n",
"Loss: 1.55533e-05\n",
"Loss: 1.55494e-05\n",
"Loss: 1.55581e-05\n",
"Loss: 1.55481e-05\n",
"Loss: 1.55433e-05\n",
"Loss: 1.55392e-05\n",
"Loss: 1.55343e-05\n",
"Loss: 1.55310e-05\n",
"Loss: 1.55267e-05\n",
"Loss: 1.55207e-05\n",
"Loss: 1.55651e-05\n",
"Loss: 1.55170e-05\n",
"Loss: 1.55095e-05\n",
"Loss: 1.55037e-05\n",
"Loss: 1.55014e-05\n",
"Loss: 1.61746e-05\n",
"Loss: 1.55004e-05\n",
"Loss: 1.54987e-05\n",
"Loss: 1.54957e-05\n",
"Loss: 1.54924e-05\n",
"Loss: 1.54878e-05\n",
"Loss: 1.54798e-05\n",
"Loss: 1.54921e-05\n",
"Loss: 1.54760e-05\n",
"Loss: 1.54692e-05\n",
"Loss: 1.54657e-05\n",
"Loss: 1.54653e-05\n",
"Loss: 1.54639e-05\n",
"Loss: 1.54616e-05\n",
"Loss: 1.54551e-05\n",
"Loss: 1.54518e-05\n",
"Loss: 1.54476e-05\n",
"Loss: 1.54440e-05\n",
"Loss: 1.54363e-05\n",
"Loss: 1.54317e-05\n",
"Loss: 1.54300e-05\n",
"Loss: 1.54250e-05\n",
"Loss: 1.54219e-05\n",
"Loss: 1.54186e-05\n",
"Loss: 1.54138e-05\n",
"Loss: 1.54060e-05\n",
"Loss: 1.54262e-05\n",
"Loss: 1.54012e-05\n",
"Loss: 1.53858e-05\n",
"Loss: 1.53762e-05\n",
"Loss: 1.53688e-05\n",
"Loss: 1.53649e-05\n",
"Loss: 1.53563e-05\n",
"Loss: 1.53541e-05\n",
"Loss: 1.53470e-05\n",
"Loss: 1.53424e-05\n",
"Loss: 1.53383e-05\n",
"Loss: 1.53333e-05\n",
"Loss: 1.54304e-05\n",
"Loss: 1.53318e-05\n",
"Loss: 1.53255e-05\n",
"Loss: 1.53240e-05\n",
"Loss: 1.53196e-05\n",
"Loss: 1.53176e-05\n",
"Loss: 1.53147e-05\n",
"Loss: 1.53118e-05\n",
"Loss: 1.53274e-05\n",
"Loss: 1.53110e-05\n",
"Loss: 1.53088e-05\n",
"Loss: 1.53067e-05\n",
"Loss: 1.53029e-05\n",
"Loss: 1.53004e-05\n",
"Loss: 1.52999e-05\n",
"Loss: 1.52982e-05\n",
"Loss: 1.52947e-05\n",
"Loss: 1.52882e-05\n",
"Loss: 1.52856e-05\n",
"Loss: 1.52817e-05\n",
"Loss: 1.52782e-05\n",
"Loss: 1.52753e-05\n",
"Loss: 1.52727e-05\n",
"Loss: 1.52705e-05\n",
"Loss: 1.52682e-05\n",
"Loss: 1.52672e-05\n",
"Loss: 1.52648e-05\n",
"Loss: 1.52595e-05\n",
"Loss: 1.52501e-05\n",
"Loss: 1.55252e-05\n",
"Loss: 1.52466e-05\n",
"Loss: 1.52401e-05\n",
"Loss: 1.52317e-05\n",
"Loss: 1.52290e-05\n",
"Loss: 1.52246e-05\n",
"Loss: 1.52201e-05\n",
"Loss: 1.52175e-05\n",
"Loss: 1.52122e-05\n",
"Loss: 1.52090e-05\n",
"Loss: 1.52077e-05\n",
"Loss: 1.52051e-05\n",
"Loss: 1.52043e-05\n",
"Loss: 1.52037e-05\n",
"Loss: 1.52014e-05\n",
"Loss: 1.51935e-05\n",
"Loss: 1.51891e-05\n",
"Loss: 1.51808e-05\n",
"Loss: 1.51847e-05\n",
"Loss: 1.51689e-05\n",
"Loss: 1.51551e-05\n",
"Loss: 1.51440e-05\n",
"Loss: 1.51307e-05\n",
"Loss: 1.51222e-05\n",
"Loss: 1.51172e-05\n",
"Loss: 1.51130e-05\n",
"Loss: 1.51041e-05\n",
"Loss: 1.50914e-05\n",
"Loss: 1.50838e-05\n",
"Loss: 1.51891e-05\n",
"Loss: 1.50790e-05\n",
"Loss: 1.50701e-05\n",
"Loss: 1.50497e-05\n",
"Loss: 1.50407e-05\n",
"Loss: 1.50341e-05\n",
"Loss: 1.50866e-05\n",
"Loss: 1.50328e-05\n",
"Loss: 1.50282e-05\n",
"Loss: 1.50184e-05\n",
"Loss: 1.50132e-05\n",
"Loss: 1.50094e-05\n",
"Loss: 1.50079e-05\n",
"Loss: 1.49997e-05\n",
"Loss: 1.53083e-05\n",
"Loss: 1.49980e-05\n",
"Loss: 1.49919e-05\n",
"Loss: 1.49821e-05\n",
"Loss: 1.49915e-05\n",
"Loss: 1.49775e-05\n",
"Loss: 1.49738e-05\n",
"Loss: 1.49685e-05\n",
"Loss: 1.49660e-05\n",
"Loss: 1.49611e-05\n",
"Loss: 1.49571e-05\n",
"Loss: 1.49533e-05\n",
"Loss: 1.49461e-05\n",
"Loss: 1.49399e-05\n",
"Loss: 1.49318e-05\n",
"Loss: 1.50514e-05\n",
"Loss: 1.49293e-05\n",
"Loss: 1.49269e-05\n",
"Loss: 1.49234e-05\n",
"Loss: 1.49186e-05\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Loss: 1.49113e-05\n",
"Loss: 1.49147e-05\n",
"Loss: 1.49069e-05\n",
"Loss: 1.49039e-05\n",
"Loss: 1.48997e-05\n",
"Loss: 1.48968e-05\n",
"Loss: 1.48954e-05\n",
"Loss: 1.48944e-05\n",
"Loss: 1.48933e-05\n",
"Loss: 1.48929e-05\n",
"Loss: 1.48911e-05\n",
"Loss: 1.48919e-05\n",
"Loss: 1.48856e-05\n",
"Loss: 1.48794e-05\n",
"Loss: 1.48712e-05\n",
"Loss: 1.48667e-05\n",
"Loss: 1.48622e-05\n",
"Loss: 1.48574e-05\n",
"Loss: 1.48541e-05\n",
"Loss: 1.48420e-05\n",
"Loss: 1.48456e-05\n",
"Loss: 1.48380e-05\n",
"Loss: 1.48324e-05\n",
"Loss: 1.48247e-05\n",
"Loss: 1.48198e-05\n",
"Loss: 1.48107e-05\n",
"Loss: 1.48216e-05\n",
"Loss: 1.48079e-05\n",
"Loss: 1.48019e-05\n",
"Loss: 1.47969e-05\n",
"Loss: 1.47912e-05\n",
"Loss: 1.47858e-05\n",
"Loss: 1.52246e-05\n",
"Loss: 1.47844e-05\n",
"Loss: 1.47815e-05\n",
"Loss: 1.47770e-05\n",
"Loss: 1.47728e-05\n",
"Loss: 1.47841e-05\n",
"Loss: 1.47703e-05\n",
"Loss: 1.47653e-05\n",
"Loss: 1.47629e-05\n",
"Loss: 1.47599e-05\n",
"Loss: 1.47526e-05\n",
"Loss: 1.47617e-05\n",
"Loss: 1.47482e-05\n",
"Loss: 1.47466e-05\n",
"Loss: 1.47406e-05\n",
"Loss: 1.47365e-05\n",
"Loss: 1.47320e-05\n",
"Loss: 1.47301e-05\n",
"Loss: 1.47233e-05\n",
"Loss: 1.47188e-05\n",
"Loss: 1.47088e-05\n",
"Loss: 1.47043e-05\n",
"Loss: 1.46985e-05\n",
"Loss: 1.47241e-05\n",
"Loss: 1.46972e-05\n",
"Loss: 1.46922e-05\n",
"Loss: 1.46850e-05\n",
"Loss: 1.46804e-05\n",
"Loss: 1.46726e-05\n",
"Loss: 1.46687e-05\n",
"Loss: 1.46647e-05\n",
"Loss: 1.46591e-05\n",
"Loss: 1.46715e-05\n",
"Loss: 1.46570e-05\n",
"Loss: 1.46545e-05\n",
"Loss: 1.46496e-05\n",
"Loss: 1.46457e-05\n",
"Loss: 1.46416e-05\n",
"Loss: 1.46349e-05\n",
"Loss: 1.46317e-05\n",
"Loss: 1.46272e-05\n",
"Loss: 1.46222e-05\n",
"Loss: 1.46117e-05\n",
"Loss: 1.46208e-05\n",
"Loss: 1.46072e-05\n",
"Loss: 1.45976e-05\n",
"Loss: 1.45934e-05\n",
"Loss: 1.45898e-05\n",
"Loss: 1.45855e-05\n",
"Loss: 1.45805e-05\n",
"Loss: 1.45755e-05\n",
"Loss: 1.45724e-05\n",
"Loss: 1.45665e-05\n",
"Loss: 1.45655e-05\n",
"Loss: 1.45604e-05\n",
"Loss: 1.45589e-05\n",
"Loss: 1.45564e-05\n",
"Loss: 1.45556e-05\n",
"Loss: 1.45514e-05\n",
"Loss: 1.45496e-05\n",
"Loss: 1.45468e-05\n",
"Loss: 1.45444e-05\n",
"Loss: 1.45517e-05\n",
"Loss: 1.45426e-05\n",
"Loss: 1.45396e-05\n",
"Loss: 1.45360e-05\n",
"Loss: 1.45334e-05\n",
"Loss: 1.45288e-05\n",
"Loss: 1.45224e-05\n",
"Loss: 1.45168e-05\n",
"Loss: 1.45144e-05\n",
"Loss: 1.45102e-05\n",
"Loss: 1.45066e-05\n",
"Loss: 1.45003e-05\n",
"Loss: 1.45134e-05\n",
"Loss: 1.44971e-05\n",
"Loss: 1.44899e-05\n",
"Loss: 1.44810e-05\n",
"Loss: 1.44831e-05\n",
"Loss: 1.44764e-05\n",
"Loss: 1.44931e-05\n",
"Loss: 1.44750e-05\n",
"Loss: 1.44703e-05\n",
"Loss: 1.44669e-05\n",
"Loss: 1.44602e-05\n",
"Loss: 1.44553e-05\n",
"Loss: 1.44469e-05\n",
"Loss: 1.44430e-05\n",
"Loss: 1.44398e-05\n",
"Loss: 1.44382e-05\n",
"Loss: 1.44363e-05\n",
"Loss: 1.44347e-05\n",
"Loss: 1.44327e-05\n",
"Loss: 1.44303e-05\n",
"Loss: 1.44268e-05\n",
"Loss: 1.44220e-05\n",
"Loss: 1.44246e-05\n",
"Loss: 1.44190e-05\n",
"Loss: 1.44099e-05\n",
"Loss: 1.44028e-05\n",
"Loss: 1.43999e-05\n",
"Loss: 1.43982e-05\n",
"Loss: 1.43946e-05\n",
"Loss: 1.43918e-05\n",
"Loss: 1.43911e-05\n",
"Loss: 1.43882e-05\n",
"Loss: 1.43872e-05\n",
"Loss: 1.43862e-05\n",
"Loss: 1.43845e-05\n",
"Loss: 1.43807e-05\n",
"Loss: 1.45420e-05\n",
"Loss: 1.43799e-05\n",
"Loss: 1.43757e-05\n",
"Loss: 1.43719e-05\n",
"Loss: 1.43696e-05\n",
"Loss: 1.43671e-05\n",
"Loss: 1.43643e-05\n",
"Loss: 1.45318e-05\n",
"Loss: 1.43627e-05\n",
"Loss: 1.43611e-05\n",
"Loss: 1.43559e-05\n",
"Loss: 1.43528e-05\n",
"Loss: 1.43488e-05\n",
"Loss: 1.43438e-05\n",
"Loss: 1.43362e-05\n",
"Loss: 1.43558e-05\n",
"Loss: 1.43315e-05\n",
"Loss: 1.43215e-05\n",
"Loss: 1.43145e-05\n",
"Loss: 1.43055e-05\n",
"Loss: 1.43003e-05\n",
"Loss: 1.42959e-05\n",
"Loss: 1.42905e-05\n",
"Loss: 1.42884e-05\n",
"Loss: 1.42846e-05\n",
"Loss: 1.42799e-05\n",
"Loss: 1.43372e-05\n",
"Loss: 1.42793e-05\n",
"Loss: 1.42761e-05\n",
"Loss: 1.42735e-05\n",
"Loss: 1.42708e-05\n",
"Loss: 1.42681e-05\n",
"Loss: 1.42759e-05\n",
"Loss: 1.42670e-05\n",
"Loss: 1.42644e-05\n",
"Loss: 1.42627e-05\n",
"Loss: 1.42603e-05\n",
"Loss: 1.42574e-05\n",
"Loss: 1.42764e-05\n",
"Loss: 1.42561e-05\n",
"Loss: 1.42519e-05\n",
"Loss: 1.42482e-05\n",
"Loss: 1.42451e-05\n",
"Loss: 1.42412e-05\n",
"Loss: 1.42360e-05\n",
"Loss: 1.42337e-05\n",
"Loss: 1.42306e-05\n",
"Loss: 1.42263e-05\n",
"Loss: 1.42214e-05\n",
"Loss: 1.42155e-05\n",
"Loss: 1.42164e-05\n",
"Loss: 1.42114e-05\n",
"Loss: 1.42074e-05\n",
"Loss: 1.42128e-05\n",
"Loss: 1.42042e-05\n",
"Loss: 1.41983e-05\n",
"Loss: 1.41931e-05\n",
"Loss: 1.41902e-05\n",
"Loss: 1.41854e-05\n",
"Loss: 1.41701e-05\n",
"Loss: 1.41970e-05\n",
"Loss: 1.41662e-05\n",
"Loss: 1.41556e-05\n",
"Loss: 1.41380e-05\n",
"Loss: 1.41246e-05\n",
"Loss: 1.41094e-05\n",
"Loss: 1.41315e-05\n",
"Loss: 1.41063e-05\n",
"Loss: 1.40970e-05\n",
"Loss: 1.40914e-05\n",
"Loss: 1.40858e-05\n",
"Loss: 1.40830e-05\n",
"Loss: 1.40790e-05\n",
"Loss: 1.40738e-05\n",
"Loss: 1.40700e-05\n",
"Loss: 1.40601e-05\n",
"Loss: 1.40509e-05\n",
"Loss: 1.40401e-05\n",
"Loss: 1.40287e-05\n",
"Loss: 1.40154e-05\n",
"Loss: 1.40201e-05\n",
"Loss: 1.40123e-05\n",
"Loss: 1.40053e-05\n",
"Loss: 1.40002e-05\n",
"Loss: 1.39916e-05\n",
"Loss: 1.39838e-05\n",
"Loss: 1.39963e-05\n",
"Loss: 1.39811e-05\n",
"Loss: 1.39733e-05\n",
"Loss: 1.39675e-05\n",
"Loss: 1.39551e-05\n",
"Loss: 1.39539e-05\n",
"Loss: 1.39412e-05\n",
"Loss: 1.39381e-05\n",
"Loss: 1.39350e-05\n",
"Loss: 1.39255e-05\n",
"Loss: 1.39204e-05\n",
"Loss: 1.39327e-05\n",
"Loss: 1.39168e-05\n",
"Loss: 1.39109e-05\n",
"Loss: 1.39021e-05\n",
"Loss: 1.38974e-05\n",
"Loss: 1.38999e-05\n",
"Loss: 1.38931e-05\n",
"Loss: 1.38842e-05\n",
"Loss: 1.38637e-05\n",
"Loss: 1.38443e-05\n",
"Loss: 1.38307e-05\n",
"Loss: 1.38221e-05\n",
"Loss: 1.38177e-05\n",
"Loss: 1.38065e-05\n",
"Loss: 1.39876e-05\n",
"Loss: 1.38056e-05\n",
"Loss: 1.37984e-05\n",
"Loss: 1.37881e-05\n",
"Loss: 1.37802e-05\n",
"Loss: 1.37637e-05\n",
"Loss: 1.37476e-05\n",
"Loss: 1.37386e-05\n",
"Loss: 1.37186e-05\n",
"Loss: 1.37060e-05\n",
"Loss: 1.37018e-05\n",
"Loss: 1.36976e-05\n",
"Loss: 1.36928e-05\n",
"Loss: 1.41008e-05\n",
"Loss: 1.36878e-05\n",
"Loss: 1.36842e-05\n",
"Loss: 1.36863e-05\n",
"Loss: 1.36810e-05\n",
"Loss: 1.36739e-05\n",
"Loss: 1.36711e-05\n",
"Loss: 1.36649e-05\n",
"Loss: 1.36575e-05\n",
"Loss: 1.36530e-05\n",
"Loss: 1.36455e-05\n",
"Loss: 1.36423e-05\n",
"Loss: 1.36366e-05\n",
"Loss: 1.36289e-05\n",
"Loss: 1.36199e-05\n",
"Loss: 1.36169e-05\n",
"Loss: 1.36067e-05\n",
"Loss: 1.36027e-05\n",
"Loss: 1.35976e-05\n",
"Loss: 1.35943e-05\n",
"Loss: 1.36279e-05\n",
"Loss: 1.35938e-05\n",
"Loss: 1.35908e-05\n",
"Loss: 1.35883e-05\n",
"Loss: 1.35849e-05\n",
"Loss: 1.35819e-05\n",
"Loss: 1.36002e-05\n",
"Loss: 1.35810e-05\n",
"Loss: 1.35765e-05\n",
"Loss: 1.35722e-05\n",
"Loss: 1.35658e-05\n",
"Loss: 1.35621e-05\n",
"Loss: 1.35603e-05\n",
"Loss: 1.35448e-05\n",
"Loss: 1.35381e-05\n",
"Loss: 1.35293e-05\n",
"Loss: 1.35733e-05\n",
"Loss: 1.35240e-05\n",
"Loss: 1.35181e-05\n",
"Loss: 1.35233e-05\n",
"Loss: 1.35094e-05\n",
"Loss: 1.34974e-05\n",
"Loss: 1.34881e-05\n",
"Loss: 1.34801e-05\n",
"Loss: 1.34857e-05\n",
"Loss: 1.34776e-05\n",
"Loss: 1.35704e-05\n",
"Loss: 1.34769e-05\n",
"Loss: 1.34741e-05\n",
"Loss: 1.34757e-05\n",
"Loss: 1.34695e-05\n",
"Loss: 1.34641e-05\n",
"Loss: 1.34576e-05\n",
"Loss: 1.34535e-05\n",
"Loss: 1.34495e-05\n",
"Loss: 1.34465e-05\n",
"Loss: 1.34544e-05\n",
"Loss: 1.34453e-05\n",
"Loss: 1.34434e-05\n",
"Loss: 1.34391e-05\n",
"Loss: 1.34362e-05\n",
"Loss: 1.34273e-05\n",
"Loss: 1.34267e-05\n",
"Loss: 1.34231e-05\n",
"Loss: 1.34180e-05\n",
"Loss: 1.34155e-05\n",
"Loss: 1.34114e-05\n",
"Loss: 1.34056e-05\n",
"Loss: 1.33995e-05\n",
"Loss: 1.33971e-05\n",
"Loss: 1.33958e-05\n",
"Loss: 1.33920e-05\n",
"Loss: 1.33917e-05\n",
"Loss: 1.33875e-05\n",
"Loss: 1.33854e-05\n",
"Loss: 1.33808e-05\n",
"Loss: 1.33830e-05\n",
"Loss: 1.33796e-05\n",
"Loss: 1.33747e-05\n",
"Loss: 1.33696e-05\n",
"Loss: 1.33614e-05\n",
"Loss: 1.33542e-05\n",
"Loss: 1.38609e-05\n",
"Loss: 1.33533e-05\n",
"Loss: 1.33480e-05\n",
"Loss: 1.33423e-05\n",
"Loss: 1.33372e-05\n",
"Loss: 1.33315e-05\n",
"Loss: 1.33248e-05\n",
"Loss: 1.33194e-05\n",
"Loss: 1.33108e-05\n",
"Loss: 1.33063e-05\n",
"Loss: 1.32985e-05\n",
"Loss: 1.32933e-05\n",
"Loss: 1.32865e-05\n",
"Loss: 1.32818e-05\n",
"Loss: 1.32759e-05\n",
"Loss: 1.32683e-05\n",
"Loss: 1.33083e-05\n",
"Loss: 1.32619e-05\n",
"Loss: 1.32538e-05\n",
"Loss: 1.32488e-05\n",
"Loss: 1.32404e-05\n",
"Loss: 1.32232e-05\n",
"Loss: 1.32163e-05\n",
"Loss: 1.32086e-05\n",
"Loss: 1.32056e-05\n",
"Loss: 1.32034e-05\n",
"Loss: 1.32020e-05\n",
"Loss: 1.31979e-05\n",
"Loss: 1.31996e-05\n",
"Loss: 1.31960e-05\n",
"Loss: 1.31945e-05\n",
"Loss: 1.31926e-05\n",
"Loss: 1.31897e-05\n",
"Loss: 1.31875e-05\n",
"Loss: 1.31824e-05\n",
"Loss: 1.31775e-05\n",
"Loss: 1.31666e-05\n",
"Loss: 1.31649e-05\n",
"Loss: 1.31609e-05\n",
"Loss: 1.31576e-05\n",
"Loss: 1.31560e-05\n",
"Loss: 1.31503e-05\n",
"Loss: 1.31443e-05\n",
"Loss: 1.31397e-05\n",
"Loss: 1.31346e-05\n",
"Loss: 1.31291e-05\n",
"Loss: 1.31207e-05\n",
"Loss: 1.31469e-05\n",
"Loss: 1.31174e-05\n",
"Loss: 1.31094e-05\n",
"Loss: 1.31035e-05\n",
"Loss: 1.30987e-05\n",
"Loss: 1.30942e-05\n",
"Loss: 1.30940e-05\n",
"Loss: 1.30868e-05\n",
"Loss: 1.30821e-05\n",
"Loss: 1.30786e-05\n",
"Loss: 1.30754e-05\n",
"Loss: 1.30730e-05\n",
"Loss: 1.30683e-05\n",
"Loss: 1.30630e-05\n",
"Loss: 1.30563e-05\n",
"Loss: 1.31019e-05\n",
"Loss: 1.30550e-05\n",
"Loss: 1.30500e-05\n",
"Loss: 1.30423e-05\n",
"Loss: 1.30355e-05\n",
"Loss: 1.30301e-05\n",
"Loss: 1.30264e-05\n",
"Loss: 1.30228e-05\n",
"Loss: 1.30171e-05\n",
"Loss: 1.30138e-05\n",
"Loss: 1.30082e-05\n",
"Loss: 1.35434e-05\n",
"Loss: 1.30065e-05\n",
"Loss: 1.30012e-05\n",
"Loss: 1.29924e-05\n",
"Loss: 1.31917e-05\n",
"Loss: 1.29912e-05\n",
"Loss: 1.29820e-05\n",
"Loss: 1.29776e-05\n",
"Loss: 1.29725e-05\n",
"Loss: 1.29670e-05\n",
"Loss: 1.29577e-05\n",
"Loss: 1.29534e-05\n",
"Loss: 1.29443e-05\n",
"Loss: 1.29413e-05\n",
"Loss: 1.30317e-05\n",
"Loss: 1.29322e-05\n",
"Loss: 1.29235e-05\n",
"Loss: 1.29167e-05\n",
"Loss: 1.29088e-05\n",
"Loss: 1.29036e-05\n",
"Loss: 1.28988e-05\n",
"Loss: 1.29010e-05\n",
"Loss: 1.28965e-05\n",
"Loss: 1.28928e-05\n",
"Loss: 1.28890e-05\n",
"Loss: 1.28858e-05\n",
"Loss: 1.28874e-05\n",
"Loss: 1.28827e-05\n",
"Loss: 1.28790e-05\n",
"Loss: 1.28764e-05\n",
"Loss: 1.28747e-05\n",
"Loss: 1.28709e-05\n",
"Loss: 1.28627e-05\n",
"Loss: 1.28606e-05\n",
"Loss: 1.28485e-05\n",
"Loss: 1.28443e-05\n",
"Loss: 1.28387e-05\n",
"Loss: 1.28339e-05\n",
"Loss: 1.28282e-05\n",
"Loss: 1.28230e-05\n",
"Loss: 1.28200e-05\n",
"Loss: 1.28184e-05\n",
"Loss: 1.28167e-05\n",
"Loss: 1.28148e-05\n",
"Loss: 1.28112e-05\n",
"Loss: 1.28072e-05\n",
"Loss: 1.27988e-05\n",
"Loss: 1.27886e-05\n",
"Loss: 1.27801e-05\n",
"Loss: 1.27702e-05\n",
"Loss: 1.27642e-05\n",
"Loss: 1.27598e-05\n",
"Loss: 1.27517e-05\n",
"Loss: 1.27476e-05\n",
"Loss: 1.27460e-05\n",
"Loss: 1.27440e-05\n",
"Loss: 1.27428e-05\n",
"Loss: 1.27379e-05\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Loss: 1.27337e-05\n",
"Loss: 1.40262e-05\n",
"Loss: 1.27332e-05\n",
"Loss: 1.27302e-05\n",
"Loss: 1.27276e-05\n",
"Loss: 1.27252e-05\n",
"Loss: 1.27227e-05\n",
"Loss: 1.27194e-05\n",
"Loss: 1.27170e-05\n",
"Loss: 1.27143e-05\n",
"Loss: 1.27134e-05\n",
"Loss: 1.27095e-05\n",
"Loss: 1.27177e-05\n",
"Loss: 1.27085e-05\n",
"Loss: 1.27042e-05\n",
"Loss: 1.26993e-05\n",
"Loss: 1.26930e-05\n",
"Loss: 1.26869e-05\n",
"Loss: 1.26828e-05\n",
"Loss: 1.26824e-05\n",
"Loss: 1.26808e-05\n",
"Loss: 1.26795e-05\n",
"Loss: 1.26768e-05\n",
"Loss: 1.26742e-05\n",
"Loss: 1.26713e-05\n",
"Loss: 1.26668e-05\n",
"Loss: 1.26638e-05\n",
"Loss: 1.26598e-05\n",
"Loss: 1.26577e-05\n",
"Loss: 1.26497e-05\n",
"Loss: 1.26394e-05\n",
"Loss: 1.26324e-05\n",
"Loss: 1.26266e-05\n",
"Loss: 1.26219e-05\n",
"Loss: 1.26188e-05\n",
"Loss: 1.26155e-05\n",
"Loss: 1.26138e-05\n",
"Loss: 1.26098e-05\n",
"Loss: 1.26085e-05\n",
"Loss: 1.26070e-05\n",
"Loss: 1.26053e-05\n",
"Loss: 1.26008e-05\n",
"Loss: 1.26870e-05\n",
"Loss: 1.26002e-05\n",
"Loss: 1.25972e-05\n",
"Loss: 1.25956e-05\n",
"Loss: 1.25936e-05\n",
"Loss: 1.25907e-05\n",
"Loss: 1.25863e-05\n",
"Loss: 1.25959e-05\n",
"Loss: 1.25841e-05\n",
"Loss: 1.25762e-05\n",
"Loss: 1.25705e-05\n",
"Loss: 1.25671e-05\n",
"Loss: 1.25644e-05\n",
"Loss: 1.25596e-05\n",
"Loss: 1.26986e-05\n",
"Loss: 1.25588e-05\n",
"Loss: 1.25547e-05\n",
"Loss: 1.25500e-05\n",
"Loss: 1.25447e-05\n",
"Loss: 1.25477e-05\n",
"Loss: 1.25431e-05\n",
"Loss: 1.25405e-05\n",
"Loss: 1.25368e-05\n",
"Loss: 1.25352e-05\n",
"Loss: 1.25331e-05\n",
"Loss: 1.25314e-05\n",
"Loss: 1.25280e-05\n",
"Loss: 1.25243e-05\n",
"Loss: 1.25200e-05\n",
"Loss: 1.25131e-05\n",
"Loss: 1.25068e-05\n",
"Loss: 1.25007e-05\n",
"Loss: 1.24956e-05\n",
"Loss: 1.24887e-05\n",
"Loss: 1.24822e-05\n",
"Loss: 1.24807e-05\n",
"Loss: 1.24784e-05\n",
"Loss: 1.24896e-05\n",
"Loss: 1.24771e-05\n",
"Loss: 1.24970e-05\n",
"Loss: 1.24746e-05\n",
"Loss: 1.24726e-05\n",
"Loss: 1.24703e-05\n",
"Loss: 1.24688e-05\n",
"Loss: 1.24656e-05\n",
"Loss: 1.24623e-05\n",
"Loss: 1.24577e-05\n",
"Loss: 1.24500e-05\n",
"Loss: 1.25435e-05\n",
"Loss: 1.24487e-05\n",
"Loss: 1.24428e-05\n",
"Loss: 1.24371e-05\n",
"Loss: 1.24317e-05\n",
"Loss: 1.24305e-05\n",
"Loss: 1.24279e-05\n",
"Loss: 1.24264e-05\n",
"Loss: 1.24250e-05\n",
"Loss: 1.24233e-05\n",
"Loss: 1.24207e-05\n",
"Loss: 1.24167e-05\n",
"Loss: 1.24326e-05\n",
"Loss: 1.24157e-05\n",
"Loss: 1.24116e-05\n",
"Loss: 1.24065e-05\n",
"Loss: 1.24029e-05\n",
"Loss: 1.24009e-05\n",
"Loss: 1.23997e-05\n",
"Loss: 1.23959e-05\n",
"Loss: 1.23938e-05\n",
"Loss: 1.23909e-05\n",
"Loss: 1.23881e-05\n",
"Loss: 1.24762e-05\n",
"Loss: 1.23877e-05\n",
"Loss: 1.23864e-05\n",
"Loss: 1.23852e-05\n",
"Loss: 1.23841e-05\n",
"Loss: 1.23807e-05\n",
"Loss: 1.23843e-05\n",
"Loss: 1.23788e-05\n",
"Loss: 1.23732e-05\n",
"Loss: 1.23699e-05\n",
"Loss: 1.23644e-05\n",
"Loss: 1.23604e-05\n",
"Loss: 1.23519e-05\n",
"Loss: 1.23549e-05\n",
"Loss: 1.23497e-05\n",
"Loss: 1.23435e-05\n",
"Loss: 1.23342e-05\n",
"Loss: 1.23284e-05\n",
"Loss: 1.23253e-05\n",
"Loss: 1.23226e-05\n",
"Loss: 1.23205e-05\n",
"Loss: 1.23179e-05\n",
"Loss: 1.23159e-05\n",
"Loss: 1.23133e-05\n",
"Loss: 1.23087e-05\n",
"Loss: 1.23029e-05\n",
"Loss: 1.23074e-05\n",
"Loss: 1.23005e-05\n",
"Loss: 1.22962e-05\n",
"Loss: 1.22886e-05\n",
"Loss: 1.22819e-05\n",
"Loss: 1.22750e-05\n",
"Loss: 1.22694e-05\n",
"Loss: 1.22610e-05\n",
"Loss: 1.22555e-05\n",
"Loss: 1.22527e-05\n",
"Loss: 1.22474e-05\n",
"Loss: 1.22442e-05\n",
"Loss: 1.22413e-05\n",
"Loss: 1.22382e-05\n",
"Loss: 1.22372e-05\n",
"Loss: 1.22344e-05\n",
"Loss: 1.22303e-05\n",
"Loss: 1.22273e-05\n",
"Loss: 1.22267e-05\n",
"Loss: 1.22239e-05\n",
"Loss: 1.22191e-05\n",
"Loss: 1.22136e-05\n",
"Loss: 1.22090e-05\n",
"Loss: 1.22032e-05\n",
"Loss: 1.22004e-05\n",
"Loss: 1.21953e-05\n",
"Loss: 1.21909e-05\n",
"Loss: 1.21963e-05\n",
"Loss: 1.21877e-05\n",
"Loss: 1.21809e-05\n",
"Loss: 1.21717e-05\n",
"Loss: 1.21669e-05\n",
"Loss: 1.21641e-05\n",
"Loss: 1.21610e-05\n",
"Loss: 1.21588e-05\n",
"Loss: 1.21522e-05\n",
"Loss: 1.21479e-05\n",
"Loss: 1.21861e-05\n",
"Loss: 1.21467e-05\n",
"Loss: 1.21427e-05\n",
"Loss: 1.21361e-05\n",
"Loss: 1.21302e-05\n",
"Loss: 1.21275e-05\n",
"Loss: 1.21237e-05\n",
"Loss: 1.21222e-05\n",
"Loss: 1.21203e-05\n",
"Loss: 1.21170e-05\n",
"Loss: 1.21138e-05\n",
"Loss: 1.21127e-05\n",
"Loss: 1.21134e-05\n",
"Loss: 1.21101e-05\n",
"Loss: 1.21071e-05\n",
"Loss: 1.21055e-05\n",
"Loss: 1.21002e-05\n",
"Loss: 1.20950e-05\n",
"Loss: 1.20912e-05\n",
"Loss: 1.21515e-05\n",
"Loss: 1.20910e-05\n",
"Loss: 1.20895e-05\n",
"Loss: 1.20874e-05\n",
"Loss: 1.20852e-05\n",
"Loss: 1.20827e-05\n",
"Loss: 1.20779e-05\n",
"Loss: 1.20737e-05\n",
"Loss: 1.20708e-05\n",
"Loss: 1.20668e-05\n",
"Loss: 1.20629e-05\n",
"Loss: 1.20660e-05\n",
"Loss: 1.20612e-05\n",
"Loss: 1.20562e-05\n",
"Loss: 1.20510e-05\n",
"Loss: 1.20472e-05\n",
"Loss: 1.20438e-05\n",
"Loss: 1.20803e-05\n",
"Loss: 1.20432e-05\n",
"Loss: 1.20392e-05\n",
"Loss: 1.20367e-05\n",
"Loss: 1.20353e-05\n",
"Loss: 1.20343e-05\n",
"Loss: 1.20334e-05\n",
"Loss: 1.20318e-05\n",
"Loss: 1.20416e-05\n",
"Loss: 1.20297e-05\n",
"Loss: 1.20271e-05\n",
"Loss: 1.20243e-05\n",
"Loss: 1.20221e-05\n",
"Loss: 1.20195e-05\n",
"Loss: 1.20224e-05\n",
"Loss: 1.20188e-05\n",
"Loss: 1.20176e-05\n",
"Loss: 1.20150e-05\n",
"Loss: 1.20131e-05\n",
"Loss: 1.20106e-05\n",
"Loss: 1.20104e-05\n",
"Loss: 1.20090e-05\n",
"Loss: 1.20069e-05\n",
"Loss: 1.20048e-05\n",
"Loss: 1.20032e-05\n",
"Loss: 1.19999e-05\n",
"Loss: 1.19965e-05\n",
"Loss: 1.19920e-05\n",
"Loss: 1.19986e-05\n",
"Loss: 1.19890e-05\n",
"Loss: 1.19902e-05\n",
"Loss: 1.19868e-05\n",
"Loss: 1.19855e-05\n",
"Loss: 1.19839e-05\n",
"Loss: 1.19825e-05\n",
"Loss: 1.19787e-05\n",
"Loss: 1.19773e-05\n",
"Loss: 1.19865e-05\n",
"Loss: 1.19738e-05\n",
"Loss: 1.19721e-05\n",
"Loss: 1.19700e-05\n",
"Loss: 1.19661e-05\n",
"Loss: 1.19624e-05\n",
"Loss: 1.19598e-05\n",
"Loss: 1.19589e-05\n",
"Loss: 1.19576e-05\n",
"Loss: 1.19571e-05\n",
"Loss: 1.19557e-05\n",
"Loss: 1.19520e-05\n",
"Loss: 1.19527e-05\n",
"Loss: 1.19508e-05\n",
"Loss: 1.19487e-05\n",
"Loss: 1.19474e-05\n",
"Loss: 1.19460e-05\n",
"Loss: 1.19406e-05\n",
"Loss: 1.19442e-05\n",
"Loss: 1.19384e-05\n",
"Loss: 1.19356e-05\n",
"Loss: 1.19322e-05\n",
"Loss: 1.19290e-05\n",
"Loss: 1.20564e-05\n",
"Loss: 1.19287e-05\n",
"Loss: 1.19269e-05\n",
"Loss: 1.19203e-05\n",
"Loss: 1.19168e-05\n",
"Loss: 1.19428e-05\n",
"Loss: 1.19161e-05\n",
"Loss: 1.19145e-05\n",
"Loss: 1.19116e-05\n",
"Loss: 1.19046e-05\n",
"Loss: 1.18965e-05\n",
"Loss: 1.18954e-05\n",
"Loss: 1.18867e-05\n",
"Loss: 1.18833e-05\n",
"Loss: 1.18783e-05\n",
"Loss: 1.18795e-05\n",
"Loss: 1.18767e-05\n",
"Loss: 1.18740e-05\n",
"Loss: 1.18714e-05\n",
"Loss: 1.18665e-05\n",
"Loss: 1.18601e-05\n",
"Loss: 1.18655e-05\n",
"Loss: 1.18576e-05\n",
"Loss: 1.18535e-05\n",
"Loss: 1.18442e-05\n",
"Loss: 1.18401e-05\n",
"Loss: 1.18374e-05\n",
"Loss: 1.18339e-05\n",
"Loss: 1.18304e-05\n",
"Loss: 1.18283e-05\n",
"Loss: 1.18217e-05\n",
"Loss: 1.18182e-05\n",
"Loss: 1.18137e-05\n",
"Loss: 1.18468e-05\n",
"Loss: 1.18107e-05\n",
"Loss: 1.18065e-05\n",
"Loss: 1.17983e-05\n",
"Loss: 1.17942e-05\n",
"Loss: 1.17902e-05\n",
"Loss: 1.17885e-05\n",
"Loss: 1.17869e-05\n",
"Loss: 1.17839e-05\n",
"Loss: 1.17818e-05\n",
"Loss: 1.17784e-05\n",
"Loss: 1.17766e-05\n",
"Loss: 1.17742e-05\n",
"Loss: 1.17728e-05\n",
"Loss: 1.17698e-05\n",
"Loss: 1.17794e-05\n",
"Loss: 1.17689e-05\n",
"Loss: 1.17660e-05\n",
"Loss: 1.17612e-05\n",
"Loss: 1.17524e-05\n",
"Loss: 1.17466e-05\n",
"Loss: 1.17585e-05\n",
"Loss: 1.17453e-05\n",
"Loss: 1.17424e-05\n",
"Loss: 1.17395e-05\n",
"Loss: 1.17382e-05\n",
"Loss: 1.17361e-05\n",
"Loss: 1.17330e-05\n",
"Loss: 1.17341e-05\n",
"Loss: 1.17297e-05\n",
"Loss: 1.17250e-05\n",
"Loss: 1.17183e-05\n",
"Loss: 1.17559e-05\n",
"Loss: 1.17171e-05\n",
"Loss: 1.17137e-05\n",
"Loss: 1.17077e-05\n",
"Loss: 1.17029e-05\n",
"Loss: 1.16971e-05\n",
"Loss: 1.16903e-05\n",
"Loss: 1.16806e-05\n",
"Loss: 1.16884e-05\n",
"Loss: 1.16786e-05\n",
"Loss: 1.16735e-05\n",
"Loss: 1.16717e-05\n",
"Loss: 1.16698e-05\n",
"Loss: 1.16664e-05\n",
"Loss: 1.16607e-05\n",
"Loss: 1.16902e-05\n",
"Loss: 1.16590e-05\n",
"Loss: 1.16536e-05\n",
"Loss: 1.16518e-05\n",
"Loss: 1.16453e-05\n",
"Loss: 1.16416e-05\n",
"Loss: 1.16321e-05\n",
"Loss: 1.16256e-05\n",
"Loss: 1.16346e-05\n",
"Loss: 1.16235e-05\n",
"Loss: 1.16204e-05\n",
"Loss: 1.16165e-05\n",
"Loss: 1.16126e-05\n",
"Loss: 1.16098e-05\n",
"Loss: 1.16033e-05\n",
"Loss: 1.16004e-05\n",
"Loss: 1.15948e-05\n",
"Loss: 1.16105e-05\n",
"Loss: 1.15902e-05\n",
"Loss: 1.15832e-05\n",
"Loss: 1.15767e-05\n",
"Loss: 1.15723e-05\n",
"Loss: 1.15690e-05\n",
"Loss: 1.16319e-05\n",
"Loss: 1.15681e-05\n",
"Loss: 1.15656e-05\n",
"Loss: 1.15633e-05\n",
"Loss: 1.15620e-05\n",
"Loss: 1.15606e-05\n",
"Loss: 1.15685e-05\n",
"Loss: 1.15593e-05\n",
"Loss: 1.15569e-05\n",
"Loss: 1.15550e-05\n",
"Loss: 1.15538e-05\n",
"Loss: 1.15523e-05\n",
"Loss: 1.15492e-05\n",
"Loss: 1.15454e-05\n",
"Loss: 1.15426e-05\n",
"Loss: 1.15395e-05\n",
"Loss: 1.15373e-05\n",
"Loss: 1.15463e-05\n",
"Loss: 1.15360e-05\n",
"Loss: 1.15331e-05\n",
"Loss: 1.15294e-05\n",
"Loss: 1.15282e-05\n",
"Loss: 1.15245e-05\n",
"Loss: 1.15213e-05\n",
"Loss: 1.15185e-05\n",
"Loss: 1.15152e-05\n",
"Loss: 1.15103e-05\n",
"Loss: 1.15058e-05\n",
"Loss: 1.15010e-05\n",
"Loss: 1.15280e-05\n",
"Loss: 1.14982e-05\n",
"Loss: 1.14952e-05\n",
"Loss: 1.14900e-05\n",
"Loss: 1.14869e-05\n",
"Loss: 1.14818e-05\n",
"Loss: 1.14745e-05\n",
"Loss: 1.14678e-05\n",
"Loss: 1.14658e-05\n",
"Loss: 1.14616e-05\n",
"Loss: 1.14596e-05\n",
"Loss: 1.14578e-05\n",
"Loss: 1.14549e-05\n",
"Loss: 1.14782e-05\n",
"Loss: 1.14545e-05\n",
"Loss: 1.14534e-05\n",
"Loss: 1.14526e-05\n",
"Loss: 1.14514e-05\n",
"Loss: 1.14483e-05\n",
"Loss: 1.14456e-05\n",
"Loss: 1.14450e-05\n",
"Loss: 1.14403e-05\n",
"Loss: 1.14377e-05\n",
"Loss: 1.14344e-05\n",
"Loss: 1.14414e-05\n",
"Loss: 1.14327e-05\n",
"Loss: 1.14291e-05\n",
"Loss: 1.14261e-05\n",
"Loss: 1.14220e-05\n",
"Loss: 1.14533e-05\n",
"Loss: 1.14214e-05\n",
"Loss: 1.14171e-05\n",
"Loss: 1.14120e-05\n",
"Loss: 1.14058e-05\n",
"Loss: 1.14484e-05\n",
"Loss: 1.14047e-05\n",
"Loss: 1.14015e-05\n",
"Loss: 1.13976e-05\n",
"Loss: 1.13956e-05\n",
"Loss: 1.13950e-05\n",
"Loss: 1.13920e-05\n",
"Loss: 1.13908e-05\n",
"Loss: 1.13888e-05\n",
"Loss: 1.13864e-05\n",
"Loss: 1.13816e-05\n",
"Loss: 1.13767e-05\n",
"Loss: 1.13740e-05\n",
"Loss: 1.16712e-05\n",
"Loss: 1.13734e-05\n",
"Loss: 1.13725e-05\n",
"Loss: 1.13711e-05\n",
"Loss: 1.13702e-05\n",
"Loss: 1.13680e-05\n",
"Loss: 1.13644e-05\n",
"Loss: 1.13596e-05\n",
"Loss: 1.13842e-05\n",
"Loss: 1.13582e-05\n",
"Loss: 1.13546e-05\n",
"Loss: 1.13527e-05\n",
"Loss: 1.13509e-05\n",
"Loss: 1.13953e-05\n",
"Loss: 1.13504e-05\n",
"Loss: 1.13480e-05\n",
"Loss: 1.13460e-05\n",
"Loss: 1.13439e-05\n",
"Loss: 1.13414e-05\n",
"Loss: 1.13367e-05\n",
"Loss: 1.13334e-05\n",
"Loss: 1.13300e-05\n",
"Loss: 1.13275e-05\n",
"Loss: 1.13226e-05\n",
"Loss: 1.13375e-05\n",
"Loss: 1.13220e-05\n",
"Loss: 1.13195e-05\n",
"Loss: 1.13225e-05\n",
"Loss: 1.13176e-05\n",
"Loss: 1.13152e-05\n",
"Loss: 1.13119e-05\n",
"Loss: 1.13093e-05\n",
"Loss: 1.13057e-05\n",
"Loss: 1.13035e-05\n",
"Loss: 1.12988e-05\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Loss: 1.13143e-05\n",
"Loss: 1.12968e-05\n",
"Loss: 1.12926e-05\n",
"Loss: 1.12875e-05\n",
"Loss: 1.12823e-05\n",
"Loss: 1.12741e-05\n",
"Loss: 1.13543e-05\n",
"Loss: 1.12731e-05\n",
"Loss: 1.12674e-05\n",
"Loss: 1.12626e-05\n",
"Loss: 1.12571e-05\n",
"Loss: 1.12535e-05\n",
"Loss: 1.12464e-05\n",
"Loss: 1.12761e-05\n",
"Loss: 1.12455e-05\n",
"Loss: 1.12436e-05\n",
"Loss: 1.12374e-05\n",
"Loss: 1.12341e-05\n",
"Loss: 1.12291e-05\n",
"Loss: 1.12272e-05\n",
"Loss: 1.12209e-05\n",
"Loss: 1.12152e-05\n",
"Loss: 1.12124e-05\n",
"Loss: 1.12078e-05\n",
"Loss: 1.12254e-05\n",
"Loss: 1.12070e-05\n",
"Loss: 1.12040e-05\n",
"Loss: 1.12001e-05\n",
"Loss: 1.11978e-05\n",
"Loss: 1.11935e-05\n",
"Loss: 1.12593e-05\n",
"Loss: 1.11911e-05\n",
"Loss: 1.11881e-05\n",
"Loss: 1.11838e-05\n",
"Loss: 1.11879e-05\n",
"Loss: 1.11828e-05\n",
"Loss: 1.11809e-05\n",
"Loss: 1.11780e-05\n",
"Loss: 1.11733e-05\n",
"Loss: 1.15766e-05\n",
"Loss: 1.11728e-05\n",
"Loss: 1.11701e-05\n",
"Loss: 1.11671e-05\n",
"Loss: 1.11643e-05\n",
"Loss: 1.11618e-05\n",
"Loss: 1.11600e-05\n",
"Loss: 1.11594e-05\n",
"Loss: 1.11589e-05\n",
"Loss: 1.11573e-05\n",
"Loss: 1.11542e-05\n",
"Loss: 1.11608e-05\n",
"Loss: 1.11531e-05\n",
"Loss: 1.11498e-05\n",
"Loss: 1.11471e-05\n",
"Loss: 1.11917e-05\n",
"Loss: 1.11461e-05\n",
"Loss: 1.11436e-05\n",
"Loss: 1.11425e-05\n",
"Loss: 1.11410e-05\n",
"Loss: 1.11388e-05\n",
"Loss: 1.11343e-05\n",
"Loss: 1.11295e-05\n",
"Loss: 1.11263e-05\n",
"Loss: 1.11374e-05\n",
"Loss: 1.11251e-05\n",
"Loss: 1.11225e-05\n",
"Loss: 1.11205e-05\n",
"Loss: 1.11187e-05\n",
"Loss: 1.11159e-05\n",
"Loss: 1.11152e-05\n",
"Loss: 1.11123e-05\n",
"Loss: 1.11097e-05\n",
"Loss: 1.11074e-05\n",
"Loss: 1.11080e-05\n",
"Loss: 1.11063e-05\n",
"Loss: 1.11044e-05\n",
"Loss: 1.11031e-05\n",
"Loss: 1.11015e-05\n",
"Loss: 1.10978e-05\n",
"Loss: 1.11684e-05\n",
"Loss: 1.10970e-05\n",
"Loss: 1.10918e-05\n",
"Loss: 1.10882e-05\n",
"Loss: 1.10855e-05\n",
"Loss: 1.10834e-05\n",
"Loss: 1.10814e-05\n",
"Loss: 1.10784e-05\n",
"Loss: 1.10773e-05\n",
"Loss: 1.10750e-05\n",
"Loss: 1.10722e-05\n",
"Loss: 1.10692e-05\n",
"Loss: 1.10672e-05\n",
"Loss: 1.10726e-05\n",
"Loss: 1.10652e-05\n",
"Loss: 1.10628e-05\n",
"Loss: 1.10608e-05\n",
"Loss: 1.10578e-05\n",
"Loss: 1.10514e-05\n",
"Loss: 1.10557e-05\n",
"Loss: 1.10494e-05\n",
"Loss: 1.10447e-05\n",
"Loss: 1.10429e-05\n",
"Loss: 1.10392e-05\n",
"Loss: 1.10950e-05\n",
"Loss: 1.10382e-05\n",
"Loss: 1.10340e-05\n",
"Loss: 1.10315e-05\n",
"Loss: 1.10297e-05\n",
"Loss: 1.10260e-05\n",
"Loss: 1.10242e-05\n",
"Loss: 1.10222e-05\n",
"Loss: 1.10207e-05\n",
"Loss: 1.10192e-05\n",
"Loss: 1.10187e-05\n",
"Loss: 1.10455e-05\n",
"Loss: 1.10185e-05\n",
"Loss: 1.10173e-05\n",
"Loss: 1.10162e-05\n",
"Loss: 1.10145e-05\n",
"Loss: 1.10126e-05\n",
"Loss: 1.10095e-05\n",
"Loss: 1.10074e-05\n",
"Loss: 1.10057e-05\n",
"Loss: 1.10054e-05\n",
"Loss: 1.10031e-05\n",
"Loss: 1.10016e-05\n",
"Loss: 1.09994e-05\n",
"Loss: 1.09972e-05\n",
"Loss: 1.09936e-05\n",
"Loss: 1.09895e-05\n",
"Loss: 1.09865e-05\n",
"Loss: 1.09861e-05\n",
"Loss: 1.09816e-05\n",
"Loss: 1.09807e-05\n",
"Loss: 1.09790e-05\n",
"Loss: 1.09779e-05\n",
"Loss: 1.09741e-05\n",
"Loss: 1.11565e-05\n",
"Loss: 1.09735e-05\n",
"Loss: 1.09712e-05\n",
"Loss: 1.09689e-05\n",
"Loss: 1.09675e-05\n",
"Loss: 1.09651e-05\n",
"Loss: 1.09758e-05\n",
"Loss: 1.09642e-05\n",
"Loss: 1.09614e-05\n",
"Loss: 1.09593e-05\n",
"Loss: 1.09577e-05\n",
"Loss: 1.09561e-05\n",
"Loss: 1.09538e-05\n",
"Loss: 1.09501e-05\n",
"Loss: 1.09451e-05\n",
"Loss: 1.09387e-05\n",
"Loss: 1.09372e-05\n",
"Loss: 1.10153e-05\n",
"Loss: 1.09355e-05\n",
"Loss: 1.09308e-05\n",
"Loss: 1.09284e-05\n",
"Loss: 1.09237e-05\n",
"Loss: 1.11224e-05\n",
"Loss: 1.09232e-05\n",
"Loss: 1.09212e-05\n",
"Loss: 1.09198e-05\n",
"Loss: 1.09188e-05\n",
"Loss: 1.09171e-05\n",
"Loss: 1.09193e-05\n",
"Loss: 1.09164e-05\n",
"Loss: 1.09159e-05\n",
"Loss: 1.09143e-05\n",
"Loss: 1.09137e-05\n",
"Loss: 1.09133e-05\n",
"Loss: 1.09114e-05\n",
"Loss: 1.09108e-05\n",
"Loss: 1.09080e-05\n",
"Loss: 1.09049e-05\n",
"Loss: 1.09032e-05\n",
"Loss: 1.09009e-05\n",
"Loss: 1.08978e-05\n",
"Loss: 1.08933e-05\n",
"Loss: 1.11346e-05\n",
"Loss: 1.08928e-05\n",
"Loss: 1.08886e-05\n",
"Loss: 1.08882e-05\n",
"Loss: 1.08855e-05\n",
"Loss: 1.08844e-05\n",
"Loss: 1.08809e-05\n",
"Loss: 1.08749e-05\n",
"Loss: 1.08959e-05\n",
"Loss: 1.08714e-05\n",
"Loss: 1.08667e-05\n",
"Loss: 1.08626e-05\n",
"Loss: 1.08779e-05\n",
"Loss: 1.08615e-05\n",
"Loss: 1.08597e-05\n",
"Loss: 1.08544e-05\n",
"Loss: 1.08513e-05\n",
"Loss: 1.08530e-05\n",
"Loss: 1.08495e-05\n",
"Loss: 1.08459e-05\n",
"Loss: 1.08435e-05\n",
"Loss: 1.08413e-05\n",
"Loss: 1.08448e-05\n",
"Loss: 1.08404e-05\n",
"Loss: 1.08390e-05\n",
"Loss: 1.08374e-05\n",
"Loss: 1.08358e-05\n",
"Loss: 1.08343e-05\n",
"Loss: 1.08340e-05\n",
"Loss: 1.08326e-05\n",
"Loss: 1.08304e-05\n",
"Loss: 1.08268e-05\n",
"Loss: 1.08246e-05\n",
"Loss: 1.08184e-05\n",
"Loss: 1.08152e-05\n",
"Loss: 1.08088e-05\n",
"Loss: 1.08062e-05\n",
"Loss: 1.08047e-05\n",
"Loss: 1.08033e-05\n",
"Loss: 1.08011e-05\n",
"Loss: 1.08237e-05\n",
"Loss: 1.08000e-05\n",
"Loss: 1.07971e-05\n",
"Loss: 1.07955e-05\n",
"Loss: 1.07932e-05\n",
"Loss: 1.07895e-05\n",
"Loss: 1.07861e-05\n",
"Loss: 1.07845e-05\n",
"Loss: 1.07833e-05\n",
"Loss: 1.07824e-05\n",
"Loss: 1.07794e-05\n",
"Loss: 1.07780e-05\n",
"Loss: 1.07746e-05\n",
"Loss: 1.07723e-05\n",
"Loss: 1.07710e-05\n",
"Loss: 1.07687e-05\n",
"Loss: 1.07664e-05\n",
"Loss: 1.07659e-05\n",
"Loss: 1.07634e-05\n",
"Loss: 1.07618e-05\n",
"Loss: 1.07593e-05\n",
"Loss: 1.07569e-05\n",
"Loss: 1.07548e-05\n",
"Loss: 1.07527e-05\n",
"Loss: 1.07491e-05\n",
"Loss: 1.07458e-05\n",
"Loss: 1.07674e-05\n",
"Loss: 1.07443e-05\n",
"Loss: 1.07419e-05\n",
"Loss: 1.07398e-05\n",
"Loss: 1.08097e-05\n",
"Loss: 1.07395e-05\n",
"Loss: 1.07382e-05\n",
"Loss: 1.07356e-05\n",
"Loss: 1.07304e-05\n",
"Loss: 1.07293e-05\n",
"Loss: 1.07259e-05\n",
"Loss: 1.07243e-05\n",
"Loss: 1.07224e-05\n",
"Loss: 1.07203e-05\n",
"Loss: 1.07191e-05\n",
"Loss: 1.07153e-05\n",
"Loss: 1.07136e-05\n",
"Loss: 1.07127e-05\n",
"Loss: 1.07113e-05\n",
"Loss: 1.07101e-05\n",
"Loss: 1.07094e-05\n",
"Loss: 1.07080e-05\n",
"Loss: 1.07069e-05\n",
"Loss: 1.07051e-05\n",
"Loss: 1.07195e-05\n",
"Loss: 1.07047e-05\n",
"Loss: 1.07036e-05\n",
"Loss: 1.07008e-05\n",
"Loss: 1.06979e-05\n",
"Loss: 1.06959e-05\n",
"Loss: 1.06941e-05\n",
"Loss: 1.07003e-05\n",
"Loss: 1.06940e-05\n",
"Loss: 1.06932e-05\n",
"Loss: 1.06912e-05\n",
"Loss: 1.06898e-05\n",
"Loss: 1.06941e-05\n",
"Loss: 1.06894e-05\n",
"Loss: 1.06885e-05\n",
"Loss: 1.06874e-05\n",
"Loss: 1.06854e-05\n",
"Loss: 1.06829e-05\n",
"Loss: 1.06807e-05\n",
"Loss: 1.06780e-05\n",
"Loss: 1.06751e-05\n",
"Loss: 1.06718e-05\n",
"Loss: 1.06683e-05\n",
"Loss: 1.06627e-05\n",
"Loss: 1.06606e-05\n",
"Loss: 1.06587e-05\n",
"Loss: 1.06567e-05\n",
"Loss: 1.06544e-05\n",
"Loss: 1.06526e-05\n",
"Loss: 1.06506e-05\n",
"Loss: 1.06484e-05\n",
"Loss: 1.06461e-05\n",
"Loss: 1.07133e-05\n",
"Loss: 1.06456e-05\n",
"Loss: 1.06428e-05\n",
"Loss: 1.06387e-05\n",
"Loss: 1.06351e-05\n",
"Loss: 1.06343e-05\n",
"Loss: 1.06321e-05\n",
"Loss: 1.06315e-05\n",
"Loss: 1.06294e-05\n",
"Loss: 1.06304e-05\n",
"Loss: 1.06285e-05\n",
"Loss: 1.06276e-05\n",
"Loss: 1.06257e-05\n",
"Loss: 1.06241e-05\n",
"Loss: 1.06208e-05\n",
"Loss: 1.06652e-05\n",
"Loss: 1.06201e-05\n",
"Loss: 1.06189e-05\n",
"Loss: 1.06174e-05\n",
"Loss: 1.06157e-05\n",
"Loss: 1.06130e-05\n",
"Loss: 1.06092e-05\n",
"Loss: 1.06071e-05\n",
"Loss: 1.06038e-05\n",
"Loss: 1.06013e-05\n",
"Loss: 1.05984e-05\n",
"Loss: 1.09057e-05\n",
"Loss: 1.05983e-05\n",
"Loss: 1.05964e-05\n",
"Loss: 1.05940e-05\n",
"Loss: 1.05910e-05\n",
"Loss: 1.05883e-05\n",
"Loss: 1.05844e-05\n",
"Loss: 1.06039e-05\n",
"Loss: 1.05840e-05\n",
"Loss: 1.05826e-05\n",
"Loss: 1.05793e-05\n",
"Loss: 1.05763e-05\n",
"Loss: 1.09983e-05\n",
"Loss: 1.05760e-05\n",
"Loss: 1.05740e-05\n",
"Loss: 1.05721e-05\n",
"Loss: 1.05701e-05\n",
"Loss: 1.05685e-05\n",
"Loss: 1.05667e-05\n",
"Loss: 1.05628e-05\n",
"Loss: 1.05793e-05\n",
"Loss: 1.05614e-05\n",
"Loss: 1.05579e-05\n",
"Loss: 1.05548e-05\n",
"Loss: 1.05516e-05\n",
"Loss: 1.05493e-05\n",
"Loss: 1.05460e-05\n",
"Loss: 1.05445e-05\n",
"Loss: 1.05420e-05\n",
"Loss: 1.05393e-05\n",
"Loss: 1.05405e-05\n",
"Loss: 1.05374e-05\n",
"Loss: 1.05326e-05\n",
"Loss: 1.05283e-05\n",
"Loss: 1.05473e-05\n",
"Loss: 1.05271e-05\n",
"Loss: 1.05232e-05\n",
"Loss: 1.05204e-05\n",
"Loss: 1.05180e-05\n",
"Loss: 1.05160e-05\n",
"Loss: 1.05183e-05\n",
"Loss: 1.05156e-05\n",
"Loss: 1.05143e-05\n",
"Loss: 1.05123e-05\n",
"Loss: 1.05112e-05\n",
"Loss: 1.05099e-05\n",
"Loss: 1.05084e-05\n",
"Loss: 1.05065e-05\n",
"Loss: 1.05037e-05\n",
"Loss: 1.05022e-05\n",
"Loss: 1.04992e-05\n",
"Loss: 1.04978e-05\n",
"Loss: 1.04963e-05\n",
"Loss: 1.04960e-05\n",
"Loss: 1.04949e-05\n",
"Loss: 1.04940e-05\n",
"Loss: 1.04926e-05\n",
"Loss: 1.04908e-05\n",
"Loss: 1.04889e-05\n",
"Loss: 1.05509e-05\n",
"Loss: 1.04887e-05\n",
"Loss: 1.04875e-05\n",
"Loss: 1.04861e-05\n",
"Loss: 1.04838e-05\n",
"Loss: 1.04807e-05\n",
"Loss: 1.04902e-05\n",
"Loss: 1.04798e-05\n",
"Loss: 1.04766e-05\n",
"Loss: 1.04745e-05\n",
"Loss: 1.04714e-05\n",
"Loss: 1.04752e-05\n",
"Loss: 1.04707e-05\n",
"Loss: 1.04685e-05\n",
"Loss: 1.04660e-05\n",
"Loss: 1.04636e-05\n",
"Loss: 1.04614e-05\n",
"Loss: 1.04597e-05\n",
"Loss: 1.04582e-05\n",
"Loss: 1.04533e-05\n",
"Loss: 1.04529e-05\n",
"Loss: 1.04490e-05\n",
"Loss: 1.04461e-05\n",
"Loss: 1.04419e-05\n",
"Loss: 1.04362e-05\n",
"Loss: 1.04506e-05\n",
"Loss: 1.04324e-05\n",
"Loss: 1.04290e-05\n",
"Loss: 1.04269e-05\n",
"Loss: 1.04259e-05\n",
"Loss: 1.04231e-05\n",
"Loss: 1.04195e-05\n",
"Loss: 1.04159e-05\n",
"Loss: 1.04175e-05\n",
"Loss: 1.04127e-05\n",
"Loss: 1.04092e-05\n",
"Loss: 1.04066e-05\n",
"Loss: 1.04040e-05\n",
"Loss: 1.04040e-05\n",
"Loss: 1.04019e-05\n",
"Loss: 1.03971e-05\n",
"Loss: 1.03937e-05\n",
"Loss: 1.03911e-05\n",
"Loss: 1.03909e-05\n",
"Loss: 1.03898e-05\n",
"Loss: 1.03884e-05\n",
"Loss: 1.03865e-05\n",
"Loss: 1.03856e-05\n",
"Loss: 1.03828e-05\n",
"Loss: 1.03805e-05\n",
"Loss: 1.03786e-05\n",
"Loss: 1.03774e-05\n",
"Loss: 1.03762e-05\n",
"Loss: 1.03746e-05\n",
"Loss: 1.03735e-05\n",
"Loss: 1.03727e-05\n",
"Loss: 1.03714e-05\n",
"Loss: 1.04076e-05\n",
"Loss: 1.03711e-05\n",
"Loss: 1.03701e-05\n",
"Loss: 1.03749e-05\n",
"Loss: 1.03695e-05\n",
"Loss: 1.03692e-05\n",
"Loss: 1.03670e-05\n",
"Loss: 1.03638e-05\n",
"Loss: 1.03609e-05\n",
"Loss: 1.03594e-05\n",
"Loss: 1.03571e-05\n",
"Loss: 1.03556e-05\n",
"Loss: 1.03532e-05\n",
"Loss: 1.03776e-05\n",
"Loss: 1.03524e-05\n",
"Loss: 1.03486e-05\n",
"Loss: 1.03463e-05\n",
"Loss: 1.03438e-05\n",
"Loss: 1.03398e-05\n",
"Loss: 1.03384e-05\n",
"Loss: 1.03369e-05\n",
"Loss: 1.03358e-05\n",
"Loss: 1.03341e-05\n",
"Loss: 1.03315e-05\n",
"Loss: 1.03285e-05\n",
"Loss: 1.03283e-05\n",
"Loss: 1.03256e-05\n",
"Loss: 1.03248e-05\n",
"Loss: 1.03251e-05\n",
"Loss: 1.03237e-05\n",
"Loss: 1.03223e-05\n",
"Loss: 1.03198e-05\n",
"Loss: 1.03175e-05\n",
"Loss: 1.03153e-05\n",
"Loss: 1.03111e-05\n",
"Loss: 1.03096e-05\n",
"Loss: 1.03060e-05\n",
"Loss: 1.03037e-05\n",
"Loss: 1.03007e-05\n",
"Loss: 1.02969e-05\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Loss: 1.02946e-05\n",
"Loss: 1.02935e-05\n",
"Loss: 1.03057e-05\n",
"Loss: 1.02931e-05\n",
"Loss: 1.02926e-05\n",
"Loss: 1.02979e-05\n",
"Loss: 1.02919e-05\n",
"Loss: 1.02913e-05\n",
"Loss: 1.02893e-05\n",
"Loss: 1.02873e-05\n",
"Loss: 1.02843e-05\n",
"Loss: 1.02851e-05\n",
"Loss: 1.02822e-05\n",
"Loss: 1.02784e-05\n",
"Loss: 1.02746e-05\n",
"Loss: 1.02738e-05\n",
"Loss: 1.02707e-05\n",
"Loss: 1.02688e-05\n",
"Loss: 1.02660e-05\n",
"Loss: 1.02640e-05\n",
"Loss: 1.02709e-05\n",
"Loss: 1.02635e-05\n",
"Loss: 1.02618e-05\n",
"Loss: 1.02608e-05\n",
"Loss: 1.02599e-05\n",
"Loss: 1.02591e-05\n",
"Loss: 1.02595e-05\n",
"Loss: 1.02580e-05\n",
"Loss: 1.02850e-05\n",
"Loss: 1.02573e-05\n",
"Loss: 1.02562e-05\n",
"Loss: 1.02552e-05\n",
"Loss: 1.02547e-05\n",
"Loss: 1.02530e-05\n",
"Loss: 1.02581e-05\n",
"Loss: 1.02526e-05\n",
"Loss: 1.02508e-05\n",
"Loss: 1.02482e-05\n",
"Loss: 1.02465e-05\n",
"Loss: 1.02450e-05\n",
"Loss: 1.02440e-05\n",
"Loss: 1.02381e-05\n",
"Loss: 1.02332e-05\n",
"Loss: 1.02286e-05\n",
"Loss: 1.02266e-05\n",
"Loss: 1.02242e-05\n",
"Loss: 1.02219e-05\n",
"Loss: 1.02205e-05\n",
"Loss: 1.02865e-05\n",
"Loss: 1.02190e-05\n",
"Loss: 1.02152e-05\n",
"Loss: 1.02110e-05\n",
"Loss: 1.02098e-05\n",
"Loss: 1.02071e-05\n",
"Loss: 1.02036e-05\n",
"Loss: 1.01979e-05\n",
"Loss: 1.01979e-05\n",
"Loss: 1.01951e-05\n",
"Loss: 1.01919e-05\n",
"Loss: 1.01896e-05\n",
"Loss: 1.01892e-05\n",
"Loss: 1.01870e-05\n",
"Loss: 1.01857e-05\n",
"Loss: 1.01836e-05\n",
"Loss: 1.02123e-05\n",
"Loss: 1.01829e-05\n",
"Loss: 1.01813e-05\n",
"Loss: 1.01790e-05\n",
"Loss: 1.01770e-05\n",
"Loss: 1.01746e-05\n",
"Loss: 1.01699e-05\n",
"Loss: 1.01647e-05\n",
"Loss: 1.01610e-05\n",
"Loss: 1.01594e-05\n",
"Loss: 1.01575e-05\n",
"Loss: 1.02634e-05\n",
"Loss: 1.01571e-05\n",
"Loss: 1.01539e-05\n",
"Loss: 1.01509e-05\n",
"Loss: 1.01495e-05\n",
"Loss: 1.01490e-05\n",
"Loss: 1.01482e-05\n",
"Loss: 1.01476e-05\n",
"Loss: 1.01463e-05\n",
"Loss: 1.01439e-05\n",
"Loss: 1.01413e-05\n",
"Loss: 1.01403e-05\n",
"Loss: 1.01366e-05\n",
"Loss: 1.01346e-05\n",
"Loss: 1.01334e-05\n",
"Loss: 1.01327e-05\n",
"Loss: 1.01329e-05\n",
"Loss: 1.01317e-05\n",
"Loss: 1.01296e-05\n",
"Loss: 1.01286e-05\n",
"Loss: 1.01276e-05\n",
"Loss: 1.01267e-05\n",
"Loss: 1.01221e-05\n",
"Loss: 1.01166e-05\n",
"Loss: 1.01117e-05\n",
"Loss: 1.01098e-05\n",
"Loss: 1.01085e-05\n",
"Loss: 1.01077e-05\n",
"Loss: 1.01054e-05\n",
"Loss: 1.01036e-05\n",
"Loss: 1.01025e-05\n",
"Loss: 1.01012e-05\n",
"Loss: 1.00993e-05\n",
"Loss: 1.01010e-05\n",
"Loss: 1.00984e-05\n",
"Loss: 1.00961e-05\n",
"Loss: 1.00953e-05\n",
"Loss: 1.00920e-05\n",
"Loss: 1.00893e-05\n",
"Loss: 1.00865e-05\n",
"Loss: 1.00860e-05\n",
"Loss: 1.00843e-05\n",
"Loss: 1.00834e-05\n",
"Loss: 1.00806e-05\n",
"Loss: 1.00793e-05\n",
"Loss: 1.00760e-05\n",
"Loss: 1.00729e-05\n",
"Loss: 1.00704e-05\n",
"Loss: 1.00671e-05\n",
"Loss: 1.00668e-05\n",
"Loss: 1.00655e-05\n",
"Loss: 1.00750e-05\n",
"Loss: 1.00625e-05\n",
"Loss: 1.00608e-05\n",
"Loss: 1.00598e-05\n",
"Loss: 1.00563e-05\n",
"Loss: 1.00536e-05\n",
"Loss: 1.00503e-05\n",
"Loss: 1.00481e-05\n",
"Loss: 1.00460e-05\n",
"Loss: 1.00442e-05\n",
"Loss: 1.00468e-05\n",
"Loss: 1.00437e-05\n",
"Loss: 1.00422e-05\n",
"Loss: 1.00406e-05\n",
"Loss: 1.00394e-05\n",
"Loss: 1.00382e-05\n",
"Loss: 1.00368e-05\n",
"Loss: 1.00362e-05\n",
"Loss: 1.00353e-05\n",
"Loss: 1.00368e-05\n",
"Loss: 1.00341e-05\n",
"Loss: 1.00326e-05\n",
"Loss: 1.00317e-05\n",
"Loss: 1.00307e-05\n",
"Loss: 1.00291e-05\n",
"Loss: 1.00265e-05\n",
"Loss: 1.01669e-05\n",
"Loss: 1.00256e-05\n",
"Loss: 1.00239e-05\n",
"Loss: 1.00216e-05\n",
"Loss: 1.00200e-05\n",
"Loss: 1.00168e-05\n",
"Loss: 1.00154e-05\n",
"Loss: 1.00147e-05\n",
"Loss: 1.00131e-05\n",
"Loss: 1.00133e-05\n",
"Loss: 1.00127e-05\n",
"Loss: 1.00120e-05\n",
"Loss: 1.00112e-05\n",
"Loss: 1.00103e-05\n",
"Loss: 1.00093e-05\n",
"Loss: 1.00085e-05\n",
"Loss: 1.00048e-05\n",
"Loss: 1.00028e-05\n",
"Loss: 1.00010e-05\n",
"Loss: 9.99778e-06\n",
"Loss: 9.99269e-06\n",
"Loss: 9.99115e-06\n",
"Loss: 9.98826e-06\n",
"Loss: 9.98638e-06\n",
"Loss: 9.98484e-06\n",
"Loss: 9.98431e-06\n",
"Loss: 9.98369e-06\n",
"Loss: 9.98737e-06\n",
"Loss: 9.98240e-06\n",
"Loss: 9.98135e-06\n",
"Loss: 9.97855e-06\n",
"Loss: 9.97784e-06\n",
"Loss: 9.97521e-06\n",
"Loss: 9.97299e-06\n",
"Loss: 9.97581e-06\n",
"Loss: 9.97167e-06\n",
"Loss: 9.96983e-06\n",
"Loss: 9.96814e-06\n",
"Loss: 9.96458e-06\n",
"Loss: 9.96474e-06\n",
"Loss: 9.96260e-06\n",
"Loss: 9.96038e-06\n",
"Loss: 9.95830e-06\n",
"Loss: 9.95744e-06\n",
"Loss: 9.95603e-06\n",
"Loss: 9.95640e-06\n",
"Loss: 9.95513e-06\n",
"Loss: 9.95330e-06\n",
"Loss: 9.95113e-06\n",
"Loss: 9.94969e-06\n",
"Loss: 9.94742e-06\n",
"Loss: 9.94530e-06\n",
"Loss: 9.94411e-06\n",
"Loss: 9.94322e-06\n",
"Loss: 9.94133e-06\n",
"Loss: 1.00957e-05\n",
"Loss: 9.94055e-06\n",
"Loss: 9.93672e-06\n",
"Loss: 9.93277e-06\n",
"Loss: 9.93111e-06\n",
"Loss: 9.92993e-06\n",
"Loss: 9.92866e-06\n",
"Loss: 9.92636e-06\n",
"Loss: 9.92360e-06\n",
"Loss: 9.92069e-06\n",
"Loss: 9.91982e-06\n",
"Loss: 9.91846e-06\n",
"Loss: 9.91650e-06\n",
"Loss: 9.91525e-06\n",
"Loss: 9.91289e-06\n",
"Loss: 9.91118e-06\n",
"Loss: 9.90873e-06\n",
"Loss: 9.91349e-06\n",
"Loss: 9.90810e-06\n",
"Loss: 9.90609e-06\n",
"Loss: 9.90394e-06\n",
"Loss: 9.89955e-06\n",
"Loss: 9.89695e-06\n",
"Loss: 9.89796e-06\n",
"Loss: 9.89588e-06\n",
"Loss: 9.89394e-06\n",
"Loss: 9.89318e-06\n",
"Loss: 9.89090e-06\n",
"Loss: 9.88994e-06\n",
"Loss: 9.88888e-06\n",
"Loss: 9.90597e-06\n",
"Loss: 9.88872e-06\n",
"Loss: 9.88791e-06\n",
"Loss: 9.88650e-06\n",
"Loss: 9.88566e-06\n",
"Loss: 9.88427e-06\n",
"Loss: 9.88237e-06\n",
"Loss: 1.00931e-05\n",
"Loss: 9.88176e-06\n",
"Loss: 9.88054e-06\n",
"Loss: 9.87935e-06\n",
"Loss: 9.87873e-06\n",
"Loss: 9.87779e-06\n",
"Loss: 9.87655e-06\n",
"Loss: 9.87771e-06\n",
"Loss: 9.87596e-06\n",
"Loss: 9.87493e-06\n",
"Loss: 9.87393e-06\n",
"Loss: 9.87183e-06\n",
"Loss: 9.86990e-06\n",
"Loss: 9.88807e-06\n",
"Loss: 9.86972e-06\n",
"Loss: 9.86831e-06\n",
"Loss: 9.86568e-06\n",
"Loss: 9.86259e-06\n",
"Loss: 9.86085e-06\n",
"Loss: 9.85730e-06\n",
"Loss: 9.86837e-06\n",
"Loss: 9.85468e-06\n",
"Loss: 9.85262e-06\n",
"Loss: 9.85093e-06\n",
"Loss: 9.85001e-06\n",
"Loss: 9.84811e-06\n",
"Loss: 9.89842e-06\n",
"Loss: 9.84762e-06\n",
"Loss: 9.84627e-06\n",
"Loss: 9.84524e-06\n",
"Loss: 9.84320e-06\n",
"Loss: 9.84064e-06\n",
"Loss: 9.84316e-06\n",
"Loss: 9.83938e-06\n",
"Loss: 9.83815e-06\n",
"Loss: 9.83662e-06\n",
"Loss: 9.83556e-06\n",
"Loss: 9.83253e-06\n",
"Loss: 9.82947e-06\n",
"Loss: 9.82576e-06\n",
"Loss: 9.85444e-06\n",
"Loss: 9.82533e-06\n",
"Loss: 9.82237e-06\n",
"Loss: 9.81957e-06\n",
"Loss: 9.81768e-06\n",
"Loss: 9.81551e-06\n",
"Loss: 9.81178e-06\n",
"Loss: 9.86256e-06\n",
"Loss: 9.81145e-06\n",
"Loss: 9.80944e-06\n",
"Loss: 9.80827e-06\n",
"Loss: 9.80618e-06\n",
"Loss: 9.80374e-06\n",
"Loss: 9.82716e-06\n",
"Loss: 9.80329e-06\n",
"Loss: 9.80162e-06\n",
"Loss: 9.80073e-06\n",
"Loss: 9.80001e-06\n",
"Loss: 9.79960e-06\n",
"Loss: 9.79898e-06\n",
"Loss: 9.79741e-06\n",
"Loss: 9.79642e-06\n",
"Loss: 9.79466e-06\n",
"Loss: 9.79368e-06\n",
"Loss: 9.79112e-06\n",
"Loss: 9.78845e-06\n",
"Loss: 9.78535e-06\n",
"Loss: 1.06164e-05\n",
"Loss: 9.78470e-06\n",
"Loss: 9.78236e-06\n",
"Loss: 9.78126e-06\n",
"Loss: 9.77857e-06\n",
"Loss: 9.77549e-06\n",
"Loss: 9.77549e-06\n",
"Loss: 9.77435e-06\n",
"Loss: 9.77266e-06\n",
"Loss: 9.77085e-06\n",
"Loss: 9.76949e-06\n",
"Loss: 9.76778e-06\n",
"Loss: 9.76815e-06\n",
"Loss: 9.76588e-06\n",
"Loss: 9.76456e-06\n",
"Loss: 9.76297e-06\n",
"Loss: 9.76150e-06\n",
"Loss: 9.75603e-06\n",
"Loss: 9.76163e-06\n",
"Loss: 9.75498e-06\n",
"Loss: 9.75177e-06\n",
"Loss: 9.74991e-06\n",
"Loss: 9.74870e-06\n",
"Loss: 9.74784e-06\n",
"Loss: 9.74524e-06\n",
"Loss: 9.74460e-06\n",
"Loss: 9.75957e-06\n",
"Loss: 9.74415e-06\n",
"Loss: 9.74267e-06\n",
"Loss: 9.74127e-06\n",
"Loss: 9.73812e-06\n",
"Loss: 9.73645e-06\n",
"Loss: 9.73471e-06\n",
"Loss: 9.73284e-06\n",
"Loss: 9.73183e-06\n",
"Loss: 9.73126e-06\n",
"Loss: 9.73208e-06\n",
"Loss: 9.73083e-06\n",
"Loss: 9.72844e-06\n",
"Loss: 9.72664e-06\n",
"Loss: 9.72447e-06\n",
"Loss: 9.72492e-06\n",
"Loss: 9.72354e-06\n",
"Loss: 9.72193e-06\n",
"Loss: 9.71776e-06\n",
"Loss: 9.71611e-06\n",
"Loss: 9.72147e-06\n",
"Loss: 9.71535e-06\n",
"Loss: 9.71353e-06\n",
"Loss: 9.71232e-06\n",
"Loss: 9.71083e-06\n",
"Loss: 9.70965e-06\n",
"Loss: 9.70793e-06\n",
"Loss: 9.70235e-06\n",
"Loss: 9.69954e-06\n",
"Loss: 9.69762e-06\n",
"Loss: 9.69537e-06\n",
"Loss: 9.69368e-06\n",
"Loss: 9.69166e-06\n",
"Loss: 9.69400e-06\n",
"Loss: 9.69082e-06\n",
"Loss: 9.68763e-06\n",
"Loss: 9.68527e-06\n",
"Loss: 9.68264e-06\n",
"Loss: 9.68089e-06\n",
"Loss: 9.67950e-06\n",
"Loss: 9.67608e-06\n",
"Loss: 9.67181e-06\n",
"Loss: 9.68272e-06\n",
"Loss: 9.67033e-06\n",
"Loss: 9.66738e-06\n",
"Loss: 9.66478e-06\n",
"Loss: 9.66337e-06\n",
"Loss: 9.66104e-06\n",
"Loss: 9.66037e-06\n",
"Loss: 9.65840e-06\n",
"Loss: 9.65477e-06\n",
"Loss: 9.65286e-06\n",
"Loss: 9.65122e-06\n",
"Loss: 9.64916e-06\n",
"Loss: 9.64772e-06\n",
"Loss: 9.64606e-06\n",
"Loss: 9.67084e-06\n",
"Loss: 9.64597e-06\n",
"Loss: 9.64486e-06\n",
"Loss: 9.64392e-06\n",
"Loss: 9.64302e-06\n",
"Loss: 9.64219e-06\n",
"Loss: 9.64109e-06\n",
"Loss: 9.63947e-06\n",
"Loss: 9.63753e-06\n",
"Loss: 9.63602e-06\n",
"Loss: 9.64200e-06\n",
"Loss: 9.63545e-06\n",
"Loss: 9.63355e-06\n",
"Loss: 9.62959e-06\n",
"Loss: 9.62761e-06\n",
"Loss: 9.62593e-06\n",
"Loss: 9.62489e-06\n",
"Loss: 9.62228e-06\n",
"Loss: 9.62245e-06\n",
"Loss: 9.62122e-06\n",
"Loss: 9.61968e-06\n",
"Loss: 9.61765e-06\n",
"Loss: 9.61592e-06\n",
"Loss: 9.61474e-06\n",
"Loss: 9.61534e-06\n",
"Loss: 9.61380e-06\n",
"Loss: 9.61268e-06\n",
"Loss: 9.61165e-06\n",
"Loss: 9.61003e-06\n",
"Loss: 9.60876e-06\n",
"Loss: 9.60752e-06\n",
"Loss: 9.60666e-06\n",
"Loss: 9.60594e-06\n",
"Loss: 9.60496e-06\n",
"Loss: 9.60233e-06\n",
"Loss: 9.60682e-06\n",
"Loss: 9.60175e-06\n",
"Loss: 9.60011e-06\n",
"Loss: 9.59940e-06\n",
"Loss: 9.59878e-06\n",
"Loss: 9.59747e-06\n",
"Loss: 9.59573e-06\n",
"Loss: 9.59315e-06\n",
"Loss: 9.59159e-06\n",
"Loss: 9.58936e-06\n",
"Loss: 9.63894e-06\n",
"Loss: 9.58827e-06\n",
"Loss: 9.58730e-06\n",
"Loss: 9.58543e-06\n",
"Loss: 9.58385e-06\n",
"Loss: 9.58040e-06\n",
"Loss: 9.57815e-06\n",
"Loss: 9.57691e-06\n",
"Loss: 9.57626e-06\n",
"Loss: 9.57559e-06\n",
"Loss: 9.57470e-06\n",
"Loss: 9.57085e-06\n",
"Loss: 9.56730e-06\n",
"Loss: 9.56385e-06\n",
"Loss: 9.56192e-06\n",
"Loss: 9.55852e-06\n",
"Loss: 9.55824e-06\n",
"Loss: 9.55102e-06\n",
"Loss: 9.54843e-06\n",
"Loss: 9.54479e-06\n",
"Loss: 9.54039e-06\n",
"Loss: 9.53750e-06\n",
"Loss: 9.54314e-06\n",
"Loss: 9.53541e-06\n",
"Loss: 9.53453e-06\n",
"Loss: 9.53247e-06\n",
"Loss: 9.53164e-06\n",
"Loss: 9.53017e-06\n",
"Loss: 9.52832e-06\n",
"Loss: 9.56440e-06\n",
"Loss: 9.52810e-06\n",
"Loss: 9.52594e-06\n",
"Loss: 9.52348e-06\n",
"Loss: 9.52142e-06\n",
"Loss: 9.51990e-06\n",
"Loss: 9.53690e-06\n",
"Loss: 9.51965e-06\n",
"Loss: 9.51888e-06\n",
"Loss: 9.51769e-06\n",
"Loss: 9.51635e-06\n",
"Loss: 9.51322e-06\n",
"Loss: 9.51060e-06\n",
"Loss: 9.50799e-06\n",
"Loss: 9.50604e-06\n",
"Loss: 9.50462e-06\n",
"Loss: 9.50147e-06\n",
"Loss: 9.49874e-06\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Loss: 9.49661e-06\n",
"Loss: 9.49542e-06\n",
"Loss: 9.49395e-06\n",
"Loss: 9.49090e-06\n",
"Loss: 9.48908e-06\n",
"Loss: 9.48571e-06\n",
"Loss: 9.48432e-06\n",
"Loss: 9.48313e-06\n",
"Loss: 9.48092e-06\n",
"Loss: 9.47914e-06\n",
"Loss: 9.47846e-06\n",
"Loss: 9.47765e-06\n",
"Loss: 9.47688e-06\n",
"Loss: 9.47507e-06\n",
"Loss: 9.47277e-06\n",
"Loss: 9.47005e-06\n",
"Loss: 9.46751e-06\n",
"Loss: 9.47013e-06\n",
"Loss: 9.46666e-06\n",
"Loss: 9.46529e-06\n",
"Loss: 9.46435e-06\n",
"Loss: 9.46376e-06\n",
"Loss: 9.46237e-06\n",
"Loss: 9.45954e-06\n",
"Loss: 9.45657e-06\n",
"Loss: 9.45405e-06\n",
"Loss: 9.45270e-06\n",
"Loss: 9.45132e-06\n",
"Loss: 9.44832e-06\n",
"Loss: 9.44687e-06\n",
"Loss: 9.44585e-06\n",
"Loss: 9.44495e-06\n",
"Loss: 9.44483e-06\n",
"Loss: 9.44398e-06\n",
"Loss: 9.44281e-06\n",
"Loss: 9.59620e-06\n",
"Loss: 9.44244e-06\n",
"Loss: 9.44176e-06\n",
"Loss: 9.44071e-06\n",
"Loss: 9.44029e-06\n",
"Loss: 9.43895e-06\n",
"Loss: 9.43773e-06\n",
"Loss: 9.43709e-06\n",
"Loss: 9.43651e-06\n",
"Loss: 9.43602e-06\n",
"Loss: 9.43559e-06\n",
"Loss: 9.43446e-06\n",
"Loss: 9.43212e-06\n",
"Loss: 9.42978e-06\n",
"Loss: 9.43188e-06\n",
"Loss: 9.42885e-06\n",
"Loss: 9.42714e-06\n",
"Loss: 9.42672e-06\n",
"Loss: 9.42462e-06\n",
"Loss: 9.42322e-06\n",
"Loss: 9.42157e-06\n",
"Loss: 9.41975e-06\n",
"Loss: 9.42809e-06\n",
"Loss: 9.41830e-06\n",
"Loss: 9.41670e-06\n",
"Loss: 9.41016e-06\n",
"Loss: 9.52892e-06\n",
"Loss: 9.40896e-06\n",
"Loss: 9.40703e-06\n",
"Loss: 9.40504e-06\n",
"Loss: 9.40303e-06\n",
"Loss: 9.40077e-06\n",
"Loss: 9.50952e-06\n",
"Loss: 9.40084e-06\n",
"Loss: 9.40058e-06\n",
"Loss: 9.40001e-06\n",
"Loss: 9.39893e-06\n",
"Loss: 9.39623e-06\n",
"Loss: 9.39121e-06\n",
"Loss: 9.39076e-06\n",
"Loss: 9.38494e-06\n",
"Loss: 9.38240e-06\n",
"Loss: 9.39435e-06\n",
"Loss: 9.38152e-06\n",
"Loss: 9.37912e-06\n",
"Loss: 9.37641e-06\n",
"Loss: 9.37251e-06\n",
"Loss: 9.36909e-06\n",
"Loss: 9.36601e-06\n",
"Loss: 9.36198e-06\n",
"Loss: 9.35692e-06\n",
"Loss: 9.35266e-06\n",
"Loss: 9.35215e-06\n",
"Loss: 9.34807e-06\n",
"Loss: 9.34707e-06\n",
"Loss: 9.34620e-06\n",
"Loss: 9.34661e-06\n",
"Loss: 9.34489e-06\n",
"Loss: 9.34367e-06\n",
"Loss: 9.34561e-06\n",
"Loss: 9.34225e-06\n",
"Loss: 9.33994e-06\n",
"Loss: 9.33900e-06\n",
"Loss: 9.33825e-06\n",
"Loss: 9.33634e-06\n",
"Loss: 9.33710e-06\n",
"Loss: 9.33514e-06\n",
"Loss: 9.33390e-06\n",
"Loss: 9.33231e-06\n",
"Loss: 9.33059e-06\n",
"Loss: 9.33036e-06\n",
"Loss: 9.32788e-06\n",
"Loss: 9.32669e-06\n",
"Loss: 9.32502e-06\n",
"Loss: 9.32335e-06\n",
"Loss: 9.33797e-06\n",
"Loss: 9.32256e-06\n",
"Loss: 9.32074e-06\n",
"Loss: 9.31619e-06\n",
"Loss: 9.31147e-06\n",
"Loss: 9.30742e-06\n",
"Loss: 9.30438e-06\n",
"Loss: 9.29996e-06\n",
"Loss: 9.29416e-06\n",
"Loss: 9.28831e-06\n",
"Loss: 9.28855e-06\n",
"Loss: 9.28557e-06\n",
"Loss: 9.28034e-06\n",
"Loss: 9.27643e-06\n",
"Loss: 9.27109e-06\n",
"Loss: 9.26766e-06\n",
"Loss: 9.26221e-06\n",
"Loss: 9.29337e-06\n",
"Loss: 9.26057e-06\n",
"Loss: 9.25597e-06\n",
"Loss: 9.24737e-06\n",
"Loss: 9.23920e-06\n",
"Loss: 9.23476e-06\n",
"Loss: 9.23924e-06\n",
"Loss: 9.23405e-06\n",
"Loss: 9.23193e-06\n",
"Loss: 9.23031e-06\n",
"Loss: 9.22746e-06\n",
"Loss: 9.22561e-06\n",
"Loss: 9.22275e-06\n",
"Loss: 9.21967e-06\n",
"Loss: 9.21651e-06\n",
"Loss: 9.21388e-06\n",
"Loss: 9.21142e-06\n",
"Loss: 9.21541e-06\n",
"Loss: 9.21005e-06\n",
"Loss: 9.20816e-06\n",
"Loss: 9.20461e-06\n",
"Loss: 9.20493e-06\n",
"Loss: 9.20417e-06\n",
"Loss: 9.20213e-06\n",
"Loss: 9.19942e-06\n",
"Loss: 9.19580e-06\n",
"Loss: 9.19352e-06\n",
"Loss: 9.19500e-06\n",
"Loss: 9.19232e-06\n",
"Loss: 9.19152e-06\n",
"Loss: 9.19071e-06\n",
"Loss: 9.18947e-06\n",
"Loss: 9.18682e-06\n",
"Loss: 9.19280e-06\n",
"Loss: 9.18598e-06\n",
"Loss: 9.18295e-06\n",
"Loss: 9.18018e-06\n",
"Loss: 9.17566e-06\n",
"Loss: 9.17417e-06\n",
"Loss: 9.17260e-06\n",
"Loss: 9.17281e-06\n",
"Loss: 9.17083e-06\n",
"Loss: 9.16969e-06\n",
"Loss: 9.16636e-06\n",
"Loss: 9.16438e-06\n",
"Loss: 9.16200e-06\n",
"Loss: 9.16108e-06\n",
"Loss: 9.16008e-06\n",
"Loss: 9.15937e-06\n",
"Loss: 9.15790e-06\n",
"Loss: 9.15640e-06\n",
"Loss: 9.15207e-06\n",
"Loss: 9.19084e-06\n",
"Loss: 9.15110e-06\n",
"Loss: 9.14777e-06\n",
"Loss: 9.14393e-06\n",
"Loss: 9.14108e-06\n",
"Loss: 9.13783e-06\n",
"Loss: 9.13417e-06\n",
"Loss: 9.19369e-06\n",
"Loss: 9.13411e-06\n",
"Loss: 9.13231e-06\n",
"Loss: 9.13085e-06\n",
"Loss: 9.12936e-06\n",
"Loss: 9.12793e-06\n",
"Loss: 9.12587e-06\n",
"Loss: 9.13244e-06\n",
"Loss: 9.12515e-06\n",
"Loss: 9.12351e-06\n",
"Loss: 9.12242e-06\n",
"Loss: 9.12153e-06\n",
"Loss: 9.11951e-06\n",
"Loss: 9.11575e-06\n",
"Loss: 9.11842e-06\n",
"Loss: 9.11482e-06\n",
"Loss: 9.11283e-06\n",
"Loss: 9.11071e-06\n",
"Loss: 9.10874e-06\n",
"Loss: 9.15300e-06\n",
"Loss: 9.10851e-06\n",
"Loss: 9.10743e-06\n",
"Loss: 9.10592e-06\n",
"Loss: 9.10413e-06\n",
"Loss: 9.10268e-06\n",
"Loss: 9.09982e-06\n",
"Loss: 9.10051e-06\n",
"Loss: 9.09761e-06\n",
"Loss: 9.11856e-06\n",
"Loss: 9.09687e-06\n",
"Loss: 9.09449e-06\n",
"Loss: 9.09141e-06\n",
"Loss: 9.09064e-06\n",
"Loss: 9.08918e-06\n",
"Loss: 9.08809e-06\n",
"Loss: 9.08849e-06\n",
"Loss: 9.08752e-06\n",
"Loss: 9.08669e-06\n",
"Loss: 9.08479e-06\n",
"Loss: 9.08417e-06\n",
"Loss: 9.07949e-06\n",
"Loss: 9.07643e-06\n",
"Loss: 9.07309e-06\n",
"Loss: 9.13236e-06\n",
"Loss: 9.07243e-06\n",
"Loss: 9.07070e-06\n",
"Loss: 9.06950e-06\n",
"Loss: 9.06777e-06\n",
"Loss: 9.06566e-06\n",
"Loss: 9.08999e-06\n",
"Loss: 9.06513e-06\n",
"Loss: 9.06333e-06\n",
"Loss: 9.06167e-06\n",
"Loss: 9.05874e-06\n",
"Loss: 9.05517e-06\n",
"Loss: 9.05203e-06\n",
"Loss: 9.08342e-06\n",
"Loss: 9.05053e-06\n",
"Loss: 9.04857e-06\n",
"Loss: 9.04711e-06\n",
"Loss: 9.04557e-06\n",
"Loss: 9.04635e-06\n",
"Loss: 9.04488e-06\n",
"Loss: 9.04319e-06\n",
"Loss: 9.04194e-06\n",
"Loss: 9.04078e-06\n",
"Loss: 9.04017e-06\n",
"Loss: 9.03877e-06\n",
"Loss: 9.03900e-06\n",
"Loss: 9.03785e-06\n",
"Loss: 9.03719e-06\n",
"Loss: 9.03551e-06\n",
"Loss: 9.03469e-06\n",
"Loss: 9.03250e-06\n",
"Loss: 9.03195e-06\n",
"Loss: 9.02948e-06\n",
"Loss: 9.02858e-06\n",
"Loss: 9.02685e-06\n",
"Loss: 9.02465e-06\n",
"Loss: 9.02547e-06\n",
"Loss: 9.02324e-06\n",
"Loss: 9.02099e-06\n",
"Loss: 9.01851e-06\n",
"Loss: 9.01670e-06\n",
"Loss: 9.01441e-06\n",
"Loss: 9.01224e-06\n",
"Loss: 9.01140e-06\n",
"Loss: 9.01017e-06\n",
"Loss: 9.00787e-06\n",
"Loss: 9.00764e-06\n",
"Loss: 9.00663e-06\n",
"Loss: 9.00482e-06\n",
"Loss: 9.05537e-06\n",
"Loss: 9.00436e-06\n",
"Loss: 9.00595e-06\n",
"Loss: 9.00382e-06\n",
"Loss: 9.00207e-06\n",
"Loss: 9.00056e-06\n",
"Loss: 8.99995e-06\n",
"Loss: 8.99919e-06\n",
"Loss: 8.99823e-06\n",
"Loss: 8.99735e-06\n",
"Loss: 8.99658e-06\n",
"Loss: 9.00095e-06\n",
"Loss: 8.99572e-06\n",
"Loss: 8.99458e-06\n",
"Loss: 8.99198e-06\n",
"Loss: 8.99106e-06\n",
"Loss: 8.99234e-06\n",
"Loss: 8.99034e-06\n",
"Loss: 8.98910e-06\n",
"Loss: 8.98746e-06\n",
"Loss: 8.98611e-06\n",
"Loss: 8.98774e-06\n",
"Loss: 8.98492e-06\n",
"Loss: 8.98307e-06\n",
"Loss: 8.98152e-06\n",
"Loss: 8.98500e-06\n",
"Loss: 8.98089e-06\n",
"Loss: 8.97935e-06\n",
"Loss: 8.97759e-06\n",
"Loss: 8.97602e-06\n",
"Loss: 8.97443e-06\n",
"Loss: 8.97301e-06\n",
"Loss: 8.97126e-06\n",
"Loss: 8.96911e-06\n",
"Loss: 8.96504e-06\n",
"Loss: 8.96314e-06\n",
"Loss: 8.96277e-06\n",
"Loss: 8.96217e-06\n",
"Loss: 8.96145e-06\n",
"Loss: 8.96063e-06\n",
"Loss: 8.95930e-06\n",
"Loss: 8.95779e-06\n",
"Loss: 8.96142e-06\n",
"Loss: 8.95631e-06\n",
"Loss: 8.95506e-06\n",
"Loss: 8.95338e-06\n",
"Loss: 8.95192e-06\n",
"Loss: 8.94970e-06\n",
"Loss: 8.94864e-06\n",
"Loss: 8.94741e-06\n",
"Loss: 8.94483e-06\n",
"Loss: 8.94099e-06\n",
"Loss: 8.93886e-06\n",
"Loss: 8.93604e-06\n",
"Loss: 8.93477e-06\n",
"Loss: 8.93333e-06\n",
"Loss: 8.93223e-06\n",
"Loss: 8.92893e-06\n",
"Loss: 8.92737e-06\n",
"Loss: 8.92524e-06\n",
"Loss: 8.92348e-06\n",
"Loss: 8.92213e-06\n",
"Loss: 8.93882e-06\n",
"Loss: 8.92157e-06\n",
"Loss: 8.91992e-06\n",
"Loss: 8.91782e-06\n",
"Loss: 8.91508e-06\n",
"Loss: 8.91245e-06\n",
"Loss: 8.90985e-06\n",
"Loss: 8.91799e-06\n",
"Loss: 8.90938e-06\n",
"Loss: 8.90488e-06\n",
"Loss: 8.90224e-06\n",
"Loss: 8.89911e-06\n",
"Loss: 8.89737e-06\n",
"Loss: 8.89500e-06\n",
"Loss: 8.89366e-06\n",
"Loss: 8.89236e-06\n",
"Loss: 8.89081e-06\n",
"Loss: 8.88757e-06\n",
"Loss: 8.88534e-06\n",
"Loss: 9.11268e-06\n",
"Loss: 8.88448e-06\n",
"Loss: 8.88274e-06\n",
"Loss: 8.89211e-06\n",
"Loss: 8.88192e-06\n",
"Loss: 8.88033e-06\n",
"Loss: 8.88024e-06\n",
"Loss: 8.87923e-06\n",
"Loss: 8.87805e-06\n",
"Loss: 8.87664e-06\n",
"Loss: 8.87505e-06\n",
"Loss: 8.87910e-06\n",
"Loss: 8.87423e-06\n",
"Loss: 8.87336e-06\n",
"Loss: 8.87117e-06\n",
"Loss: 8.90460e-06\n",
"Loss: 8.87045e-06\n",
"Loss: 8.86858e-06\n",
"Loss: 8.86570e-06\n",
"Loss: 8.86292e-06\n",
"Loss: 8.86292e-06\n",
"Loss: 8.86106e-06\n",
"Loss: 8.85816e-06\n",
"Loss: 8.85369e-06\n",
"Loss: 8.84988e-06\n",
"Loss: 8.84619e-06\n",
"Loss: 8.84353e-06\n",
"Loss: 8.84156e-06\n",
"Loss: 8.84136e-06\n",
"Loss: 8.83993e-06\n",
"Loss: 8.83825e-06\n",
"Loss: 8.83597e-06\n",
"Loss: 8.83493e-06\n",
"Loss: 8.83748e-06\n",
"Loss: 8.83443e-06\n",
"Loss: 8.83304e-06\n",
"Loss: 8.83262e-06\n",
"Loss: 8.83206e-06\n",
"Loss: 8.83136e-06\n",
"Loss: 8.83041e-06\n",
"Loss: 8.86862e-06\n",
"Loss: 8.82827e-06\n",
"Loss: 8.82582e-06\n",
"Loss: 8.82383e-06\n",
"Loss: 8.82309e-06\n",
"Loss: 8.81976e-06\n",
"Loss: 8.81706e-06\n",
"Loss: 8.87776e-06\n",
"Loss: 8.81657e-06\n",
"Loss: 8.81442e-06\n",
"Loss: 8.81336e-06\n",
"Loss: 8.81289e-06\n",
"Loss: 8.81201e-06\n",
"Loss: 8.81020e-06\n",
"Loss: 8.80723e-06\n",
"Loss: 8.81157e-06\n",
"Loss: 8.80595e-06\n",
"Loss: 8.80254e-06\n",
"Loss: 8.80052e-06\n",
"Loss: 8.79784e-06\n",
"Loss: 8.79674e-06\n",
"Loss: 8.79452e-06\n",
"Loss: 8.79225e-06\n",
"Loss: 8.93904e-06\n",
"Loss: 8.79197e-06\n",
"Loss: 8.78959e-06\n",
"Loss: 8.79066e-06\n",
"Loss: 8.78875e-06\n",
"Loss: 8.78858e-06\n",
"Loss: 8.78675e-06\n",
"Loss: 8.78582e-06\n",
"Loss: 8.78406e-06\n",
"Loss: 8.78258e-06\n",
"Loss: 8.78432e-06\n",
"Loss: 8.78223e-06\n",
"Loss: 8.78157e-06\n",
"Loss: 8.77905e-06\n",
"Loss: 8.77819e-06\n",
"Loss: 8.77678e-06\n",
"Loss: 8.77592e-06\n",
"Loss: 8.77443e-06\n",
"Loss: 8.77685e-06\n",
"Loss: 8.77386e-06\n",
"Loss: 8.77240e-06\n",
"Loss: 8.77166e-06\n",
"Loss: 8.77088e-06\n",
"Loss: 8.77009e-06\n",
"Loss: 8.76879e-06\n",
"Loss: 8.76652e-06\n",
"Loss: 8.77087e-06\n",
"Loss: 8.76481e-06\n",
"Loss: 8.76213e-06\n",
"Loss: 8.75929e-06\n",
"Loss: 8.75647e-06\n",
"Loss: 8.77059e-06\n",
"Loss: 8.75602e-06\n",
"Loss: 8.75448e-06\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Loss: 8.75220e-06\n",
"Loss: 8.75746e-06\n",
"Loss: 8.75082e-06\n",
"Loss: 8.74753e-06\n",
"Loss: 8.74630e-06\n",
"Loss: 8.74488e-06\n",
"Loss: 8.74308e-06\n",
"Loss: 8.74583e-06\n",
"Loss: 8.74217e-06\n",
"Loss: 8.74048e-06\n",
"Loss: 8.73927e-06\n",
"Loss: 8.73823e-06\n",
"Loss: 8.73763e-06\n",
"Loss: 8.73646e-06\n",
"Loss: 8.73600e-06\n",
"Loss: 8.73517e-06\n",
"Loss: 8.73317e-06\n",
"Loss: 8.72939e-06\n",
"Loss: 8.73774e-06\n",
"Loss: 8.72746e-06\n",
"Loss: 8.72396e-06\n",
"Loss: 8.72319e-06\n",
"Loss: 8.71859e-06\n",
"Loss: 8.71745e-06\n",
"Loss: 8.71523e-06\n",
"Loss: 8.71347e-06\n",
"Loss: 8.71104e-06\n",
"Loss: 9.04086e-06\n",
"Loss: 8.71009e-06\n",
"Loss: 8.70934e-06\n",
"Loss: 8.70822e-06\n",
"Loss: 8.70764e-06\n",
"Loss: 8.70737e-06\n",
"Loss: 8.70661e-06\n",
"Loss: 8.70623e-06\n",
"Loss: 8.70592e-06\n",
"Loss: 8.70506e-06\n",
"Loss: 8.70370e-06\n",
"Loss: 8.70319e-06\n",
"Loss: 8.70179e-06\n",
"Loss: 8.70133e-06\n",
"Loss: 8.70062e-06\n",
"Loss: 8.69996e-06\n",
"Loss: 8.69900e-06\n",
"Loss: 8.69781e-06\n",
"Loss: 8.69635e-06\n",
"Loss: 8.69521e-06\n",
"Loss: 8.69193e-06\n",
"Loss: 8.69046e-06\n",
"Loss: 8.68932e-06\n",
"Loss: 8.68862e-06\n",
"Loss: 8.68648e-06\n",
"Loss: 8.68307e-06\n",
"Loss: 8.68034e-06\n",
"Loss: 8.68543e-06\n",
"Loss: 8.67983e-06\n",
"Loss: 8.67898e-06\n",
"Loss: 8.67736e-06\n",
"Loss: 8.67493e-06\n",
"Loss: 8.67280e-06\n",
"Loss: 8.67043e-06\n",
"Loss: 8.67443e-06\n",
"Loss: 8.66902e-06\n",
"Loss: 8.66784e-06\n",
"Loss: 8.66761e-06\n",
"Loss: 8.66723e-06\n",
"Loss: 8.66653e-06\n",
"Loss: 8.66541e-06\n",
"Loss: 8.66299e-06\n",
"Loss: 8.66867e-06\n",
"Loss: 8.66251e-06\n",
"Loss: 8.66108e-06\n",
"Loss: 8.65875e-06\n",
"Loss: 8.65616e-06\n",
"Loss: 8.65389e-06\n",
"Loss: 8.65102e-06\n",
"Loss: 8.64677e-06\n",
"Loss: 8.64472e-06\n",
"Loss: 8.64080e-06\n",
"Loss: 8.63914e-06\n",
"Loss: 8.63553e-06\n",
"Loss: 8.63337e-06\n",
"Loss: 8.63111e-06\n",
"Loss: 8.62785e-06\n",
"Loss: 8.66323e-06\n",
"Loss: 8.62665e-06\n",
"Loss: 8.62447e-06\n",
"Loss: 8.62297e-06\n",
"Loss: 8.62177e-06\n",
"Loss: 8.62108e-06\n",
"Loss: 8.62039e-06\n",
"Loss: 8.61833e-06\n",
"Loss: 8.61605e-06\n",
"Loss: 8.61394e-06\n",
"Loss: 8.61158e-06\n",
"Loss: 8.62298e-06\n",
"Loss: 8.61125e-06\n",
"Loss: 8.61004e-06\n",
"Loss: 8.60879e-06\n",
"Loss: 8.60761e-06\n",
"Loss: 8.60646e-06\n",
"Loss: 8.60528e-06\n",
"Loss: 8.60379e-06\n",
"Loss: 8.60201e-06\n",
"Loss: 8.60002e-06\n",
"Loss: 8.59842e-06\n",
"Loss: 8.59776e-06\n",
"Loss: 8.59462e-06\n",
"Loss: 8.59115e-06\n",
"Loss: 8.58944e-06\n",
"Loss: 8.58721e-06\n",
"Loss: 8.60682e-06\n",
"Loss: 8.58691e-06\n",
"Loss: 8.58597e-06\n",
"Loss: 8.58431e-06\n",
"Loss: 8.58262e-06\n",
"Loss: 8.58048e-06\n",
"Loss: 8.57804e-06\n",
"Loss: 8.57592e-06\n",
"Loss: 8.57327e-06\n",
"Loss: 8.57236e-06\n",
"Loss: 8.57557e-06\n",
"Loss: 8.57168e-06\n",
"Loss: 8.57000e-06\n",
"Loss: 8.56896e-06\n",
"Loss: 8.57162e-06\n",
"Loss: 8.56856e-06\n",
"Loss: 8.56727e-06\n",
"Loss: 8.56671e-06\n",
"Loss: 8.56572e-06\n",
"Loss: 8.56500e-06\n",
"Loss: 8.56474e-06\n",
"Loss: 8.56383e-06\n",
"Loss: 8.56219e-06\n",
"Loss: 8.56065e-06\n",
"Loss: 8.55946e-06\n",
"Loss: 8.55808e-06\n",
"Loss: 8.55670e-06\n",
"Loss: 8.55578e-06\n",
"Loss: 8.55498e-06\n",
"Loss: 8.55423e-06\n",
"Loss: 8.55354e-06\n",
"Loss: 8.55214e-06\n",
"Loss: 8.55346e-06\n",
"Loss: 8.55172e-06\n",
"Loss: 8.55060e-06\n",
"Loss: 8.54982e-06\n",
"Loss: 8.54909e-06\n",
"Loss: 8.54835e-06\n",
"Loss: 8.54773e-06\n",
"Loss: 8.54637e-06\n",
"Loss: 8.54460e-06\n",
"Loss: 8.54325e-06\n",
"Loss: 8.54249e-06\n",
"Loss: 8.54121e-06\n",
"Loss: 8.54026e-06\n",
"Loss: 8.53891e-06\n",
"Loss: 8.53817e-06\n",
"Loss: 8.53635e-06\n",
"Loss: 8.53521e-06\n",
"Loss: 8.53298e-06\n",
"Loss: 8.53215e-06\n",
"Loss: 8.53122e-06\n",
"Loss: 8.53078e-06\n",
"Loss: 8.52976e-06\n",
"Loss: 8.52916e-06\n",
"Loss: 8.52798e-06\n",
"Loss: 8.54376e-06\n",
"Loss: 8.52780e-06\n",
"Loss: 8.52675e-06\n",
"Loss: 8.52561e-06\n",
"Loss: 8.52496e-06\n",
"Loss: 8.52424e-06\n",
"Loss: 8.52320e-06\n",
"Loss: 8.52821e-06\n",
"Loss: 8.52291e-06\n",
"Loss: 8.52248e-06\n",
"Loss: 8.52166e-06\n",
"Loss: 8.52060e-06\n",
"Loss: 8.51923e-06\n",
"Loss: 8.51817e-06\n",
"Loss: 8.51694e-06\n",
"Loss: 8.81817e-06\n",
"Loss: 8.51662e-06\n",
"Loss: 8.51537e-06\n",
"Loss: 8.51408e-06\n",
"Loss: 8.51322e-06\n",
"Loss: 8.51156e-06\n",
"Loss: 8.51081e-06\n",
"Loss: 8.52440e-06\n",
"Loss: 8.51053e-06\n",
"Loss: 8.50863e-06\n",
"Loss: 8.50728e-06\n",
"Loss: 8.50583e-06\n",
"Loss: 8.50494e-06\n",
"Loss: 8.50235e-06\n",
"Loss: 8.50066e-06\n",
"Loss: 8.49915e-06\n",
"Loss: 8.49801e-06\n",
"Loss: 8.49648e-06\n",
"Loss: 8.49553e-06\n",
"Loss: 8.49333e-06\n",
"Loss: 8.49192e-06\n",
"Loss: 8.48932e-06\n",
"Loss: 8.49677e-06\n",
"Loss: 8.48630e-06\n",
"Loss: 8.48442e-06\n",
"Loss: 8.48256e-06\n",
"Loss: 8.48163e-06\n",
"Loss: 8.48074e-06\n",
"Loss: 8.47957e-06\n",
"Loss: 8.47715e-06\n",
"Loss: 8.47579e-06\n",
"Loss: 8.47678e-06\n",
"Loss: 8.47475e-06\n",
"Loss: 8.47347e-06\n",
"Loss: 8.47284e-06\n",
"Loss: 8.47359e-06\n",
"Loss: 8.47137e-06\n",
"Loss: 8.47052e-06\n",
"Loss: 8.46996e-06\n",
"Loss: 8.46834e-06\n",
"Loss: 8.46740e-06\n",
"Loss: 8.46583e-06\n",
"Loss: 8.46537e-06\n",
"Loss: 8.49537e-06\n",
"Loss: 8.46530e-06\n",
"Loss: 8.46480e-06\n",
"Loss: 8.46412e-06\n",
"Loss: 8.46471e-06\n",
"Loss: 8.46359e-06\n",
"Loss: 8.46327e-06\n",
"Loss: 8.46242e-06\n",
"Loss: 8.46124e-06\n",
"Loss: 8.46032e-06\n",
"Loss: 8.45867e-06\n",
"Loss: 8.45781e-06\n",
"Loss: 8.45682e-06\n",
"Loss: 8.45543e-06\n",
"Loss: 8.45409e-06\n",
"Loss: 8.45300e-06\n",
"Loss: 8.51497e-06\n",
"Loss: 8.45272e-06\n",
"Loss: 8.45070e-06\n",
"Loss: 8.44915e-06\n",
"Loss: 8.44827e-06\n",
"Loss: 8.44775e-06\n",
"Loss: 8.44774e-06\n",
"Loss: 8.44675e-06\n",
"Loss: 8.44633e-06\n",
"Loss: 8.44480e-06\n",
"Loss: 8.44340e-06\n",
"Loss: 8.44174e-06\n",
"Loss: 8.44054e-06\n",
"Loss: 8.43944e-06\n",
"Loss: 8.43821e-06\n",
"Loss: 8.43637e-06\n",
"Loss: 8.43428e-06\n",
"Loss: 8.43243e-06\n",
"Loss: 8.43718e-06\n",
"Loss: 8.43132e-06\n",
"Loss: 8.43079e-06\n",
"Loss: 8.42890e-06\n",
"Loss: 8.42722e-06\n",
"Loss: 8.42520e-06\n",
"Loss: 8.45041e-06\n",
"Loss: 8.42394e-06\n",
"Loss: 8.42285e-06\n",
"Loss: 8.42020e-06\n",
"Loss: 8.41884e-06\n",
"Loss: 8.41675e-06\n",
"Loss: 8.41374e-06\n",
"Loss: 8.41054e-06\n",
"Loss: 8.40748e-06\n",
"Loss: 8.44690e-06\n",
"Loss: 8.40686e-06\n",
"Loss: 8.40420e-06\n",
"Loss: 8.40201e-06\n",
"Loss: 8.40078e-06\n",
"Loss: 8.39840e-06\n",
"Loss: 8.41213e-06\n",
"Loss: 8.39717e-06\n",
"Loss: 8.39431e-06\n",
"Loss: 8.39292e-06\n",
"Loss: 8.41452e-06\n",
"Loss: 8.39210e-06\n",
"Loss: 8.39151e-06\n",
"Loss: 8.38994e-06\n",
"Loss: 8.38924e-06\n",
"Loss: 8.38804e-06\n",
"Loss: 8.38689e-06\n",
"Loss: 8.38585e-06\n",
"Loss: 8.38517e-06\n",
"Loss: 8.38471e-06\n",
"Loss: 8.38375e-06\n",
"Loss: 8.38222e-06\n",
"Loss: 8.38268e-06\n",
"Loss: 8.38155e-06\n",
"Loss: 8.38002e-06\n",
"Loss: 8.37835e-06\n",
"Loss: 8.37616e-06\n",
"Loss: 8.37474e-06\n",
"Loss: 8.37431e-06\n",
"Loss: 8.37218e-06\n",
"Loss: 8.37023e-06\n",
"Loss: 8.36867e-06\n",
"Loss: 8.36669e-06\n",
"Loss: 8.36577e-06\n",
"Loss: 8.36401e-06\n",
"Loss: 8.36300e-06\n",
"Loss: 8.36132e-06\n",
"Loss: 8.35984e-06\n",
"Loss: 8.35896e-06\n",
"Loss: 8.35512e-06\n",
"Loss: 8.35321e-06\n",
"Loss: 8.35073e-06\n",
"Loss: 8.35075e-06\n",
"Loss: 8.35010e-06\n",
"Loss: 8.34898e-06\n",
"Loss: 8.37802e-06\n",
"Loss: 8.34866e-06\n",
"Loss: 8.34706e-06\n",
"Loss: 8.34552e-06\n",
"Loss: 8.37113e-06\n",
"Loss: 8.34476e-06\n",
"Loss: 8.34283e-06\n",
"Loss: 8.34223e-06\n",
"Loss: 8.34032e-06\n",
"Loss: 8.33779e-06\n",
"Loss: 8.33628e-06\n",
"Loss: 8.33319e-06\n",
"Loss: 8.33091e-06\n",
"Loss: 8.32879e-06\n",
"Loss: 8.33955e-06\n",
"Loss: 8.32829e-06\n",
"Loss: 8.32656e-06\n",
"Loss: 8.32614e-06\n",
"Loss: 8.33504e-06\n",
"Loss: 8.32257e-06\n",
"Loss: 8.31921e-06\n",
"Loss: 8.31497e-06\n",
"Loss: 8.31229e-06\n",
"Loss: 8.30996e-06\n",
"Loss: 8.31829e-06\n",
"Loss: 8.30978e-06\n",
"Loss: 8.30833e-06\n",
"Loss: 8.30729e-06\n",
"Loss: 8.30612e-06\n",
"Loss: 8.30429e-06\n",
"Loss: 8.32776e-06\n",
"Loss: 8.30378e-06\n",
"Loss: 8.30217e-06\n",
"Loss: 8.30052e-06\n",
"Loss: 8.29937e-06\n",
"Loss: 8.29676e-06\n",
"Loss: 8.29400e-06\n",
"Loss: 8.29701e-06\n",
"Loss: 8.29338e-06\n",
"Loss: 8.29216e-06\n",
"Loss: 8.29154e-06\n",
"Loss: 8.29053e-06\n",
"Loss: 8.28899e-06\n",
"Loss: 8.28946e-06\n",
"Loss: 8.28835e-06\n",
"Loss: 8.28651e-06\n",
"Loss: 8.28501e-06\n",
"Loss: 8.28270e-06\n",
"Loss: 8.28311e-06\n",
"Loss: 8.28223e-06\n",
"Loss: 8.28111e-06\n",
"Loss: 8.28033e-06\n",
"Loss: 8.27995e-06\n",
"Loss: 8.27948e-06\n",
"Loss: 8.27841e-06\n",
"Loss: 8.27675e-06\n",
"Loss: 8.27554e-06\n",
"Loss: 8.27369e-06\n",
"Loss: 8.27232e-06\n",
"Loss: 8.27030e-06\n",
"Loss: 8.27046e-06\n",
"Loss: 8.26886e-06\n",
"Loss: 8.26745e-06\n",
"Loss: 8.26802e-06\n",
"Loss: 8.26603e-06\n",
"Loss: 8.26362e-06\n",
"Loss: 8.25932e-06\n",
"Loss: 8.25516e-06\n",
"Loss: 8.25568e-06\n",
"Loss: 8.25377e-06\n",
"Loss: 8.25201e-06\n",
"Loss: 8.25134e-06\n",
"Loss: 8.25067e-06\n",
"Loss: 8.24955e-06\n",
"Loss: 8.24843e-06\n",
"Loss: 8.24739e-06\n",
"Loss: 8.26247e-06\n",
"Loss: 8.24707e-06\n",
"Loss: 8.24617e-06\n",
"Loss: 8.24489e-06\n",
"Loss: 8.24340e-06\n",
"Loss: 8.24174e-06\n",
"Loss: 8.23982e-06\n",
"Loss: 8.23765e-06\n",
"Loss: 8.23643e-06\n",
"Loss: 8.23563e-06\n",
"Loss: 8.23462e-06\n",
"Loss: 8.23419e-06\n",
"Loss: 8.23349e-06\n",
"Loss: 8.23172e-06\n",
"Loss: 8.23349e-06\n",
"Loss: 8.23069e-06\n",
"Loss: 8.22961e-06\n",
"Loss: 8.22784e-06\n",
"Loss: 8.22669e-06\n",
"Loss: 8.22827e-06\n",
"Loss: 8.22585e-06\n",
"Loss: 8.22458e-06\n",
"Loss: 8.22312e-06\n",
"Loss: 8.22128e-06\n",
"Loss: 8.21949e-06\n",
"Loss: 8.21826e-06\n",
"Loss: 8.25615e-06\n",
"Loss: 8.21750e-06\n",
"Loss: 8.21610e-06\n",
"Loss: 8.21434e-06\n",
"Loss: 8.21160e-06\n",
"Loss: 8.20992e-06\n",
"Loss: 8.21513e-06\n",
"Loss: 8.20947e-06\n",
"Loss: 8.20802e-06\n",
"Loss: 8.20580e-06\n",
"Loss: 8.20313e-06\n",
"Loss: 8.19970e-06\n",
"Loss: 8.30404e-06\n",
"Loss: 8.19890e-06\n",
"Loss: 8.19566e-06\n",
"Loss: 8.19355e-06\n",
"Loss: 8.19174e-06\n",
"Loss: 8.19052e-06\n",
"Loss: 8.18883e-06\n",
"Loss: 8.19535e-06\n",
"Loss: 8.18752e-06\n",
"Loss: 8.18623e-06\n",
"Loss: 8.18501e-06\n",
"Loss: 8.18378e-06\n",
"Loss: 8.18281e-06\n",
"Loss: 8.18149e-06\n",
"Loss: 8.17989e-06\n",
"Loss: 8.17773e-06\n",
"Loss: 8.17535e-06\n",
"Loss: 8.17368e-06\n",
"Loss: 8.17246e-06\n",
"Loss: 8.17004e-06\n",
"Loss: 8.16885e-06\n",
"Loss: 8.16967e-06\n",
"Loss: 8.16577e-06\n",
"Loss: 8.16437e-06\n",
"Loss: 8.16224e-06\n",
"Loss: 8.15960e-06\n",
"Loss: 8.15735e-06\n",
"Loss: 8.15478e-06\n",
"Loss: 8.15918e-06\n",
"Loss: 8.15444e-06\n",
"Loss: 8.15325e-06\n",
"Loss: 8.15243e-06\n",
"Loss: 8.15157e-06\n",
"Loss: 8.14990e-06\n",
"Loss: 8.14731e-06\n",
"Loss: 8.34373e-06\n",
"Loss: 8.14675e-06\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Loss: 8.14438e-06\n",
"Loss: 8.14218e-06\n",
"Loss: 8.14124e-06\n",
"Loss: 8.13920e-06\n",
"Loss: 8.13751e-06\n",
"Loss: 8.13479e-06\n",
"Loss: 8.13668e-06\n",
"Loss: 8.13380e-06\n",
"Loss: 8.13208e-06\n",
"Loss: 8.13021e-06\n",
"Loss: 8.12589e-06\n",
"Loss: 8.12340e-06\n",
"Loss: 8.12149e-06\n",
"Loss: 8.12070e-06\n",
"Loss: 8.12024e-06\n",
"Loss: 8.11969e-06\n",
"Loss: 8.11883e-06\n",
"Loss: 8.12534e-06\n",
"Loss: 8.11857e-06\n",
"Loss: 8.11689e-06\n",
"Loss: 8.11504e-06\n",
"Loss: 8.11210e-06\n",
"Loss: 8.11076e-06\n",
"Loss: 8.10938e-06\n",
"Loss: 8.10635e-06\n",
"Loss: 8.10448e-06\n",
"Loss: 8.10232e-06\n",
"Loss: 8.10112e-06\n",
"Loss: 8.16004e-06\n",
"Loss: 8.10057e-06\n",
"Loss: 8.09927e-06\n",
"Loss: 8.09855e-06\n",
"Loss: 8.09766e-06\n",
"Loss: 8.09652e-06\n",
"Loss: 8.09473e-06\n",
"Loss: 8.09312e-06\n",
"Loss: 8.09229e-06\n",
"Loss: 8.09095e-06\n",
"Loss: 8.09016e-06\n",
"Loss: 8.08931e-06\n",
"Loss: 8.08697e-06\n",
"Loss: 8.08610e-06\n",
"Loss: 8.08664e-06\n",
"Loss: 8.08502e-06\n",
"Loss: 8.08407e-06\n",
"Loss: 8.08286e-06\n",
"Loss: 8.08128e-06\n",
"Loss: 8.08002e-06\n",
"Loss: 8.07864e-06\n",
"Loss: 8.07805e-06\n",
"Loss: 8.07735e-06\n",
"Loss: 8.07627e-06\n",
"Loss: 8.07456e-06\n",
"Loss: 8.07318e-06\n",
"Loss: 8.07835e-06\n",
"Loss: 8.07268e-06\n",
"Loss: 8.07073e-06\n",
"Loss: 8.06952e-06\n",
"Loss: 8.06843e-06\n",
"Loss: 8.06720e-06\n",
"Loss: 8.06631e-06\n",
"Loss: 8.06339e-06\n",
"Loss: 8.06190e-06\n",
"Loss: 8.06031e-06\n",
"Loss: 8.05883e-06\n",
"Loss: 8.05709e-06\n",
"Loss: 8.19403e-06\n",
"Loss: 8.05676e-06\n",
"Loss: 8.05589e-06\n",
"Loss: 8.05474e-06\n",
"Loss: 8.05402e-06\n",
"Loss: 8.05352e-06\n",
"Loss: 8.05236e-06\n",
"Loss: 8.10037e-06\n",
"Loss: 8.05201e-06\n",
"Loss: 8.05034e-06\n",
"Loss: 8.04786e-06\n",
"Loss: 8.04612e-06\n",
"Loss: 8.04401e-06\n",
"Loss: 8.05484e-06\n",
"Loss: 8.04330e-06\n",
"Loss: 8.04231e-06\n",
"Loss: 8.04085e-06\n",
"Loss: 8.03958e-06\n",
"Loss: 8.05128e-06\n",
"Loss: 8.03878e-06\n",
"Loss: 8.03779e-06\n",
"Loss: 8.03658e-06\n",
"Loss: 8.03499e-06\n",
"Loss: 8.03513e-06\n",
"Loss: 8.03430e-06\n",
"Loss: 8.03357e-06\n",
"Loss: 8.03273e-06\n",
"Loss: 8.03160e-06\n",
"Loss: 8.03269e-06\n",
"Loss: 8.03092e-06\n",
"Loss: 8.02991e-06\n",
"Loss: 8.02894e-06\n",
"Loss: 8.02800e-06\n",
"Loss: 8.02744e-06\n",
"Loss: 8.02668e-06\n",
"Loss: 8.02451e-06\n",
"Loss: 8.11939e-06\n",
"Loss: 8.02424e-06\n",
"Loss: 8.02344e-06\n",
"Loss: 8.02491e-06\n",
"Loss: 8.02328e-06\n",
"Loss: 8.02205e-06\n",
"Loss: 8.02129e-06\n",
"Loss: 8.02028e-06\n",
"Loss: 8.08899e-06\n",
"Loss: 8.01990e-06\n",
"Loss: 8.01869e-06\n",
"Loss: 8.01740e-06\n",
"Loss: 8.01629e-06\n",
"Loss: 8.01548e-06\n",
"Loss: 8.01430e-06\n",
"Loss: 8.01987e-06\n",
"Loss: 8.01352e-06\n",
"Loss: 8.01247e-06\n",
"Loss: 8.01138e-06\n",
"Loss: 8.00944e-06\n",
"Loss: 8.00875e-06\n",
"Loss: 8.00724e-06\n",
"Loss: 8.00665e-06\n",
"Loss: 8.00559e-06\n",
"Loss: 8.00411e-06\n",
"Loss: 8.00207e-06\n",
"Loss: 8.00055e-06\n",
"Loss: 7.99942e-06\n",
"Loss: 7.99905e-06\n",
"Loss: 7.99781e-06\n",
"Loss: 7.99676e-06\n",
"Loss: 7.99508e-06\n",
"Loss: 7.99319e-06\n",
"Loss: 7.99124e-06\n",
"Loss: 7.98868e-06\n",
"Loss: 7.98765e-06\n",
"Loss: 7.98711e-06\n",
"Loss: 7.98640e-06\n",
"Loss: 7.98547e-06\n",
"Loss: 7.98560e-06\n",
"Loss: 7.98388e-06\n",
"Loss: 7.98281e-06\n",
"Loss: 7.97958e-06\n",
"Loss: 8.05221e-06\n",
"Loss: 7.97932e-06\n",
"Loss: 7.97820e-06\n",
"Loss: 7.97699e-06\n",
"Loss: 7.97624e-06\n",
"Loss: 7.97562e-06\n",
"Loss: 7.97418e-06\n",
"Loss: 7.97206e-06\n",
"Loss: 7.97332e-06\n",
"Loss: 7.97160e-06\n",
"Loss: 7.97014e-06\n",
"Loss: 7.96915e-06\n",
"Loss: 7.96829e-06\n",
"Loss: 7.96710e-06\n",
"Loss: 7.96594e-06\n",
"Loss: 7.96653e-06\n",
"Loss: 7.96571e-06\n",
"Loss: 7.96486e-06\n",
"Loss: 7.96535e-06\n",
"Loss: 7.96442e-06\n",
"Loss: 7.96386e-06\n",
"Loss: 7.96310e-06\n",
"Loss: 7.96225e-06\n",
"Loss: 7.96106e-06\n",
"Loss: 7.96023e-06\n",
"Loss: 7.97776e-06\n",
"Loss: 7.95986e-06\n",
"Loss: 7.95853e-06\n",
"Loss: 7.95701e-06\n",
"Loss: 7.95655e-06\n",
"Loss: 7.95585e-06\n",
"Loss: 7.95571e-06\n",
"Loss: 7.95471e-06\n",
"Loss: 7.95432e-06\n",
"Loss: 7.95328e-06\n",
"Loss: 8.81473e-06\n",
"Loss: 7.95282e-06\n",
"Loss: 7.95200e-06\n",
"Loss: 7.94964e-06\n",
"Loss: 7.94816e-06\n",
"Loss: 7.95397e-06\n",
"Loss: 7.94683e-06\n",
"Loss: 7.94472e-06\n",
"Loss: 7.94055e-06\n",
"Loss: 7.93794e-06\n",
"Loss: 7.93532e-06\n",
"Loss: 7.93176e-06\n",
"Loss: 7.92916e-06\n",
"Loss: 7.92690e-06\n",
"Loss: 7.92522e-06\n",
"Loss: 7.92487e-06\n",
"Loss: 7.92187e-06\n",
"Loss: 7.92110e-06\n",
"Loss: 7.92010e-06\n",
"Loss: 7.91941e-06\n",
"Loss: 7.91866e-06\n",
"Loss: 7.91981e-06\n",
"Loss: 7.91812e-06\n",
"Loss: 7.91728e-06\n",
"Loss: 7.91596e-06\n",
"Loss: 7.91522e-06\n",
"Loss: 7.91455e-06\n",
"Loss: 7.91405e-06\n",
"Loss: 7.91295e-06\n",
"Loss: 7.91190e-06\n",
"Loss: 7.91306e-06\n",
"Loss: 7.91108e-06\n",
"Loss: 7.91215e-06\n",
"Loss: 7.91059e-06\n",
"Loss: 7.90989e-06\n",
"Loss: 7.90900e-06\n",
"Loss: 7.90755e-06\n",
"Loss: 7.90508e-06\n",
"Loss: 7.90262e-06\n",
"Loss: 8.11556e-06\n",
"Loss: 7.90235e-06\n",
"Loss: 7.90114e-06\n",
"Loss: 7.90021e-06\n",
"Loss: 7.89872e-06\n",
"Loss: 7.89815e-06\n",
"Loss: 7.89709e-06\n",
"Loss: 7.89664e-06\n",
"Loss: 7.89605e-06\n",
"Loss: 7.89473e-06\n",
"Loss: 7.89290e-06\n",
"Loss: 7.89172e-06\n",
"Loss: 7.89027e-06\n",
"Loss: 7.88900e-06\n",
"Loss: 7.88791e-06\n",
"Loss: 7.88903e-06\n",
"Loss: 7.88727e-06\n",
"Loss: 7.88597e-06\n",
"Loss: 7.88508e-06\n",
"Loss: 7.88397e-06\n",
"Loss: 7.89096e-06\n",
"Loss: 7.88380e-06\n",
"Loss: 7.88293e-06\n",
"Loss: 7.88225e-06\n",
"Loss: 7.88172e-06\n",
"Loss: 7.88047e-06\n",
"Loss: 7.87912e-06\n",
"Loss: 7.87814e-06\n",
"Loss: 7.87710e-06\n",
"Loss: 7.89519e-06\n",
"Loss: 7.87656e-06\n",
"Loss: 7.87597e-06\n",
"Loss: 7.87421e-06\n",
"Loss: 7.87137e-06\n",
"Loss: 7.86942e-06\n",
"Loss: 7.86671e-06\n",
"Loss: 7.86572e-06\n",
"Loss: 7.86508e-06\n",
"Loss: 7.86360e-06\n",
"Loss: 7.86162e-06\n",
"Loss: 7.86030e-06\n",
"Loss: 7.85915e-06\n",
"Loss: 7.85831e-06\n",
"Loss: 7.85746e-06\n",
"Loss: 7.85721e-06\n",
"Loss: 7.85608e-06\n",
"Loss: 7.85555e-06\n",
"Loss: 7.85508e-06\n",
"Loss: 7.85387e-06\n",
"Loss: 8.05472e-06\n",
"Loss: 7.85379e-06\n",
"Loss: 7.85266e-06\n",
"Loss: 7.85144e-06\n",
"Loss: 7.84990e-06\n",
"Loss: 7.84865e-06\n",
"Loss: 7.84743e-06\n",
"Loss: 7.84555e-06\n",
"Loss: 7.84731e-06\n",
"Loss: 7.84458e-06\n",
"Loss: 7.84368e-06\n",
"Loss: 7.84232e-06\n",
"Loss: 7.84885e-06\n",
"Loss: 7.84205e-06\n",
"Loss: 7.84149e-06\n",
"Loss: 7.84006e-06\n",
"Loss: 7.83834e-06\n",
"Loss: 7.83557e-06\n",
"Loss: 7.83428e-06\n",
"Loss: 7.83380e-06\n",
"Loss: 7.83058e-06\n",
"Loss: 7.82995e-06\n",
"Loss: 7.82855e-06\n",
"Loss: 7.82932e-06\n",
"Loss: 7.82824e-06\n",
"Loss: 7.82738e-06\n",
"Loss: 7.82594e-06\n",
"Loss: 7.82519e-06\n",
"Loss: 7.82496e-06\n",
"Loss: 7.82410e-06\n",
"Loss: 7.82304e-06\n",
"Loss: 7.82223e-06\n",
"Loss: 7.82064e-06\n",
"Loss: 7.81960e-06\n",
"Loss: 7.81783e-06\n",
"Loss: 7.81686e-06\n",
"Loss: 7.81498e-06\n",
"Loss: 7.81248e-06\n",
"Loss: 7.81072e-06\n",
"Loss: 7.80975e-06\n",
"Loss: 7.80900e-06\n",
"Loss: 7.80672e-06\n",
"Loss: 7.80253e-06\n",
"Loss: 7.79888e-06\n",
"Loss: 7.79563e-06\n",
"Loss: 7.79397e-06\n",
"Loss: 7.79135e-06\n",
"Loss: 7.78855e-06\n",
"Loss: 7.78356e-06\n",
"Loss: 7.78336e-06\n",
"Loss: 7.78040e-06\n",
"Loss: 7.77749e-06\n",
"Loss: 7.77619e-06\n",
"Loss: 7.77533e-06\n",
"Loss: 7.77436e-06\n",
"Loss: 7.77143e-06\n",
"Loss: 7.77096e-06\n",
"Loss: 7.76890e-06\n",
"Loss: 7.76709e-06\n",
"Loss: 7.76439e-06\n",
"Loss: 7.76319e-06\n",
"Loss: 7.76098e-06\n",
"Loss: 7.75958e-06\n",
"Loss: 7.87589e-06\n",
"Loss: 7.75944e-06\n",
"Loss: 7.75834e-06\n",
"Loss: 7.75636e-06\n",
"Loss: 7.75465e-06\n",
"Loss: 7.75332e-06\n",
"Loss: 7.75239e-06\n",
"Loss: 7.75182e-06\n",
"Loss: 7.75056e-06\n",
"Loss: 7.74965e-06\n",
"Loss: 7.74827e-06\n",
"Loss: 7.74801e-06\n",
"Loss: 7.74726e-06\n",
"Loss: 7.74550e-06\n",
"Loss: 7.74427e-06\n",
"Loss: 7.74345e-06\n",
"Loss: 7.74276e-06\n",
"Loss: 7.74229e-06\n",
"Loss: 7.74125e-06\n",
"Loss: 7.74080e-06\n",
"Loss: 7.73975e-06\n",
"Loss: 7.73899e-06\n",
"Loss: 7.73766e-06\n",
"Loss: 7.73675e-06\n",
"Loss: 7.73576e-06\n",
"Loss: 7.73510e-06\n",
"Loss: 7.73402e-06\n",
"Loss: 7.73227e-06\n",
"Loss: 7.73142e-06\n",
"Loss: 7.73063e-06\n",
"Loss: 7.73500e-06\n",
"Loss: 7.73043e-06\n",
"Loss: 7.72934e-06\n",
"Loss: 7.72833e-06\n",
"Loss: 7.72751e-06\n",
"Loss: 7.72608e-06\n",
"Loss: 7.72375e-06\n",
"Loss: 7.72212e-06\n",
"Loss: 7.72030e-06\n",
"Loss: 7.71914e-06\n",
"Loss: 7.71831e-06\n",
"Loss: 7.71991e-06\n",
"Loss: 7.71741e-06\n",
"Loss: 7.71537e-06\n",
"Loss: 7.71247e-06\n",
"Loss: 7.71067e-06\n",
"Loss: 7.71711e-06\n",
"Loss: 7.71005e-06\n",
"Loss: 7.70928e-06\n",
"Loss: 7.70819e-06\n",
"Loss: 7.70729e-06\n",
"Loss: 7.70622e-06\n",
"Loss: 7.70508e-06\n",
"Loss: 7.70449e-06\n",
"Loss: 7.70318e-06\n",
"Loss: 7.70120e-06\n",
"Loss: 7.71681e-06\n",
"Loss: 7.70080e-06\n",
"Loss: 7.69907e-06\n",
"Loss: 7.70660e-06\n",
"Loss: 7.69787e-06\n",
"Loss: 7.69637e-06\n",
"Loss: 7.70360e-06\n",
"Loss: 7.69555e-06\n",
"Loss: 7.69397e-06\n",
"Loss: 7.69308e-06\n",
"Loss: 7.69234e-06\n",
"Loss: 7.69157e-06\n",
"Loss: 7.69117e-06\n",
"Loss: 7.68963e-06\n",
"Loss: 7.68947e-06\n",
"Loss: 7.68875e-06\n",
"Loss: 7.68786e-06\n",
"Loss: 7.69349e-06\n",
"Loss: 7.68747e-06\n",
"Loss: 7.68650e-06\n",
"Loss: 7.68550e-06\n",
"Loss: 7.68424e-06\n",
"Loss: 7.68278e-06\n",
"Loss: 7.68352e-06\n",
"Loss: 7.68205e-06\n",
"Loss: 7.68099e-06\n",
"Loss: 7.68053e-06\n",
"Loss: 7.67984e-06\n",
"Loss: 7.67915e-06\n",
"Loss: 7.67713e-06\n",
"Loss: 7.73071e-06\n",
"Loss: 7.67636e-06\n",
"Loss: 7.67449e-06\n",
"Loss: 7.67204e-06\n",
"Loss: 7.67016e-06\n",
"Loss: 7.66803e-06\n",
"Loss: 7.66508e-06\n",
"Loss: 7.66226e-06\n",
"Loss: 7.66094e-06\n",
"Loss: 7.65905e-06\n",
"Loss: 7.65659e-06\n",
"Loss: 7.65445e-06\n",
"Loss: 7.65363e-06\n",
"Loss: 7.65259e-06\n",
"Loss: 7.65126e-06\n",
"Loss: 7.64968e-06\n",
"Loss: 7.64824e-06\n",
"Loss: 7.64725e-06\n",
"Loss: 7.64673e-06\n",
"Loss: 7.64850e-06\n",
"Loss: 7.64640e-06\n",
"Loss: 7.64619e-06\n",
"Loss: 7.64463e-06\n",
"Loss: 7.64384e-06\n",
"Loss: 7.64301e-06\n",
"Loss: 7.64241e-06\n",
"Loss: 7.64142e-06\n",
"Loss: 7.64056e-06\n",
"Loss: 7.64055e-06\n",
"Loss: 7.63995e-06\n",
"Loss: 7.63969e-06\n",
"Loss: 7.63948e-06\n",
"Loss: 7.63901e-06\n",
"Loss: 7.64315e-06\n",
"Loss: 7.63893e-06\n",
"Loss: 7.63857e-06\n",
"Loss: 7.63785e-06\n",
"Loss: 7.63716e-06\n",
"Loss: 7.63654e-06\n",
"Loss: 7.63680e-06\n",
"Loss: 7.63620e-06\n",
"Loss: 7.63512e-06\n",
"Loss: 7.63417e-06\n",
"Loss: 7.63315e-06\n",
"Loss: 7.63179e-06\n",
"Loss: 7.75129e-06\n",
"Loss: 7.63135e-06\n",
"Loss: 7.62988e-06\n",
"Loss: 7.62710e-06\n",
"Loss: 7.62590e-06\n",
"Loss: 7.62525e-06\n",
"Loss: 7.62833e-06\n",
"Loss: 7.62480e-06\n",
"Loss: 7.62449e-06\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Loss: 7.62363e-06\n",
"Loss: 7.62292e-06\n",
"Loss: 7.62210e-06\n",
"Loss: 7.62123e-06\n",
"Loss: 7.62028e-06\n",
"Loss: 7.64589e-06\n",
"Loss: 7.61978e-06\n",
"Loss: 7.61876e-06\n",
"Loss: 7.61649e-06\n",
"Loss: 7.61444e-06\n",
"Loss: 7.61251e-06\n",
"Loss: 7.61203e-06\n",
"Loss: 7.60998e-06\n",
"Loss: 7.60900e-06\n",
"Loss: 7.60765e-06\n",
"Loss: 7.60637e-06\n",
"Loss: 7.60751e-06\n",
"Loss: 7.60536e-06\n",
"Loss: 7.60682e-06\n",
"Loss: 7.60496e-06\n",
"Loss: 7.60397e-06\n",
"Loss: 7.60331e-06\n",
"Loss: 7.60288e-06\n",
"Loss: 7.60241e-06\n",
"Loss: 7.60202e-06\n",
"Loss: 7.60142e-06\n",
"Loss: 7.60075e-06\n",
"Loss: 7.60005e-06\n",
"Loss: 7.60945e-06\n",
"Loss: 7.59957e-06\n",
"Loss: 7.59933e-06\n",
"Loss: 7.59857e-06\n",
"Loss: 7.59798e-06\n",
"Loss: 7.60719e-06\n",
"Loss: 7.59781e-06\n",
"Loss: 7.59700e-06\n",
"Loss: 7.59647e-06\n",
"Loss: 7.59701e-06\n",
"Loss: 7.59631e-06\n",
"Loss: 7.59551e-06\n",
"Loss: 7.59477e-06\n",
"Loss: 7.59418e-06\n",
"Loss: 7.59350e-06\n",
"Loss: 7.62450e-06\n",
"Loss: 7.59248e-06\n",
"Loss: 7.59161e-06\n",
"Loss: 7.58993e-06\n",
"Loss: 7.59308e-06\n",
"Loss: 7.58990e-06\n",
"Loss: 7.58924e-06\n",
"Loss: 7.58858e-06\n",
"Loss: 7.58825e-06\n",
"Loss: 7.58767e-06\n",
"Loss: 7.58706e-06\n",
"Loss: 7.58694e-06\n",
"Loss: 7.58634e-06\n",
"Loss: 7.58575e-06\n",
"Loss: 7.58481e-06\n",
"Loss: 7.58442e-06\n",
"Loss: 7.58335e-06\n",
"Loss: 7.60550e-06\n",
"Loss: 7.58311e-06\n",
"Loss: 7.58232e-06\n",
"Loss: 7.58127e-06\n",
"Loss: 7.58001e-06\n",
"Loss: 7.57937e-06\n",
"Loss: 7.57769e-06\n",
"Loss: 7.57693e-06\n",
"Loss: 7.57602e-06\n",
"Loss: 7.57461e-06\n",
"Loss: 7.57397e-06\n",
"Loss: 7.57263e-06\n",
"Loss: 7.57160e-06\n",
"Loss: 7.57047e-06\n",
"Loss: 7.56942e-06\n",
"Loss: 7.56764e-06\n",
"Loss: 7.56624e-06\n",
"Loss: 7.56492e-06\n",
"Loss: 7.56360e-06\n",
"Loss: 7.56229e-06\n",
"Loss: 7.56277e-06\n",
"Loss: 7.56107e-06\n",
"Loss: 7.55869e-06\n",
"Loss: 7.55634e-06\n",
"Loss: 7.55534e-06\n",
"Loss: 7.55478e-06\n",
"Loss: 7.55191e-06\n",
"Loss: 7.55312e-06\n",
"Loss: 7.55121e-06\n",
"Loss: 7.54981e-06\n",
"Loss: 7.54914e-06\n",
"Loss: 7.54854e-06\n",
"Loss: 7.54794e-06\n",
"Loss: 7.54722e-06\n",
"Loss: 7.54808e-06\n",
"Loss: 7.54675e-06\n",
"Loss: 7.54649e-06\n",
"Loss: 7.54561e-06\n",
"Loss: 7.54514e-06\n",
"Loss: 7.54462e-06\n",
"Loss: 7.54409e-06\n",
"Loss: 7.54337e-06\n",
"Loss: 7.54271e-06\n",
"Loss: 7.54307e-06\n",
"Loss: 7.54172e-06\n",
"Loss: 7.54122e-06\n",
"Loss: 7.54057e-06\n",
"Loss: 7.54182e-06\n",
"Loss: 7.54017e-06\n",
"Loss: 7.53896e-06\n",
"Loss: 7.53652e-06\n",
"Loss: 7.53500e-06\n",
"Loss: 7.53422e-06\n",
"Loss: 7.53388e-06\n",
"Loss: 7.53340e-06\n",
"Loss: 7.53270e-06\n",
"Loss: 7.53156e-06\n",
"Loss: 7.53098e-06\n",
"Loss: 7.53431e-06\n",
"Loss: 7.53063e-06\n",
"Loss: 7.52971e-06\n",
"Loss: 7.52910e-06\n",
"Loss: 7.52776e-06\n",
"Loss: 7.52614e-06\n",
"Loss: 7.52469e-06\n",
"Loss: 7.52397e-06\n",
"Loss: 7.52253e-06\n",
"Loss: 7.52153e-06\n",
"Loss: 7.52008e-06\n",
"Loss: 7.51908e-06\n",
"Loss: 7.51823e-06\n",
"Loss: 7.51727e-06\n",
"Loss: 7.51609e-06\n",
"Loss: 7.51505e-06\n",
"Loss: 7.51407e-06\n",
"Loss: 7.51206e-06\n",
"Loss: 7.51092e-06\n",
"Loss: 7.50930e-06\n",
"Loss: 7.50785e-06\n",
"Loss: 7.50682e-06\n",
"Loss: 7.50578e-06\n",
"Loss: 7.50522e-06\n",
"Loss: 7.50534e-06\n",
"Loss: 7.50463e-06\n",
"Loss: 7.50395e-06\n",
"Loss: 7.50263e-06\n",
"Loss: 7.50736e-06\n",
"Loss: 7.50235e-06\n",
"Loss: 7.50198e-06\n",
"Loss: 7.50149e-06\n",
"Loss: 7.50123e-06\n",
"Loss: 7.50075e-06\n",
"Loss: 7.50021e-06\n",
"Loss: 7.50072e-06\n",
"Loss: 7.49989e-06\n",
"Loss: 7.49962e-06\n",
"Loss: 7.49899e-06\n",
"Loss: 7.49823e-06\n",
"Loss: 7.49749e-06\n",
"Loss: 7.49630e-06\n",
"Loss: 7.49484e-06\n",
"Loss: 7.49396e-06\n",
"Loss: 7.49255e-06\n",
"Loss: 7.49171e-06\n",
"Loss: 7.49035e-06\n",
"Loss: 7.48833e-06\n",
"Loss: 7.48586e-06\n",
"Loss: 7.48424e-06\n",
"Loss: 7.48318e-06\n",
"Loss: 7.48215e-06\n",
"Loss: 7.48132e-06\n",
"Loss: 7.48060e-06\n",
"Loss: 7.47941e-06\n",
"Loss: 7.47855e-06\n",
"Loss: 7.48539e-06\n",
"Loss: 7.47826e-06\n",
"Loss: 7.47757e-06\n",
"Loss: 7.47637e-06\n",
"Loss: 7.47545e-06\n",
"Loss: 7.47422e-06\n",
"Loss: 7.49415e-06\n",
"Loss: 7.47386e-06\n",
"Loss: 7.47298e-06\n",
"Loss: 7.47207e-06\n",
"Loss: 7.47112e-06\n",
"Loss: 7.46960e-06\n",
"Loss: 7.47041e-06\n",
"Loss: 7.46856e-06\n",
"Loss: 7.46673e-06\n",
"Loss: 7.46475e-06\n",
"Loss: 7.46209e-06\n",
"Loss: 7.45967e-06\n",
"Loss: 7.45683e-06\n",
"Loss: 7.45553e-06\n",
"Loss: 7.45401e-06\n",
"Loss: 7.45216e-06\n",
"Loss: 7.45109e-06\n",
"Loss: 7.45311e-06\n",
"Loss: 7.45028e-06\n",
"Loss: 7.44923e-06\n",
"Loss: 7.44852e-06\n",
"Loss: 7.44742e-06\n",
"Loss: 7.44585e-06\n",
"Loss: 7.44969e-06\n",
"Loss: 7.44557e-06\n",
"Loss: 7.44481e-06\n",
"Loss: 7.44416e-06\n",
"Loss: 7.44286e-06\n",
"Loss: 7.44260e-06\n",
"Loss: 7.44201e-06\n",
"Loss: 7.44130e-06\n",
"Loss: 7.44142e-06\n",
"Loss: 7.44101e-06\n",
"Loss: 7.44002e-06\n",
"Loss: 7.43938e-06\n",
"Loss: 7.43806e-06\n",
"Loss: 7.43758e-06\n",
"Loss: 7.43626e-06\n",
"Loss: 7.43846e-06\n",
"Loss: 7.43581e-06\n",
"Loss: 7.43469e-06\n",
"Loss: 7.43415e-06\n",
"Loss: 7.43356e-06\n",
"Loss: 7.43353e-06\n",
"Loss: 7.43265e-06\n",
"Loss: 7.43178e-06\n",
"Loss: 7.43116e-06\n",
"Loss: 7.42971e-06\n",
"Loss: 7.42884e-06\n",
"Loss: 7.42839e-06\n",
"Loss: 7.42739e-06\n",
"Loss: 7.42716e-06\n",
"Loss: 7.42788e-06\n",
"Loss: 7.42649e-06\n",
"Loss: 7.42548e-06\n",
"Loss: 7.42487e-06\n",
"Loss: 7.42378e-06\n",
"Loss: 7.42310e-06\n",
"Loss: 7.42195e-06\n",
"Loss: 7.42246e-06\n",
"Loss: 7.42063e-06\n",
"Loss: 7.41931e-06\n",
"Loss: 7.41869e-06\n",
"Loss: 7.41768e-06\n",
"Loss: 7.41626e-06\n",
"Loss: 7.41542e-06\n",
"Loss: 7.41400e-06\n",
"Loss: 7.41323e-06\n",
"Loss: 7.41255e-06\n",
"Loss: 7.41203e-06\n",
"Loss: 7.41054e-06\n",
"Loss: 7.40841e-06\n",
"Loss: 7.41891e-06\n",
"Loss: 7.40775e-06\n",
"Loss: 7.40549e-06\n",
"Loss: 7.40458e-06\n",
"Loss: 7.40419e-06\n",
"Loss: 7.40380e-06\n",
"Loss: 7.40308e-06\n",
"Loss: 7.40245e-06\n",
"Loss: 7.40227e-06\n",
"Loss: 7.40174e-06\n",
"Loss: 7.40157e-06\n",
"Loss: 7.40155e-06\n",
"Loss: 7.40110e-06\n",
"Loss: 7.40078e-06\n",
"Loss: 7.40046e-06\n",
"Loss: 7.39986e-06\n",
"Loss: 7.39965e-06\n",
"Loss: 7.39904e-06\n",
"Loss: 7.39865e-06\n",
"Loss: 7.43453e-06\n",
"Loss: 7.39836e-06\n",
"Loss: 7.39807e-06\n",
"Loss: 7.39786e-06\n",
"Loss: 7.39645e-06\n",
"Loss: 7.39586e-06\n",
"Loss: 7.39560e-06\n",
"Loss: 7.39487e-06\n",
"Loss: 7.39450e-06\n",
"Loss: 7.39362e-06\n",
"Loss: 7.39248e-06\n",
"Loss: 7.39182e-06\n",
"Loss: 7.39113e-06\n",
"Loss: 7.39087e-06\n",
"Loss: 7.39012e-06\n",
"Loss: 7.38954e-06\n",
"Loss: 7.38829e-06\n",
"Loss: 7.39088e-06\n",
"Loss: 7.38802e-06\n",
"Loss: 7.38686e-06\n",
"Loss: 7.38635e-06\n",
"Loss: 7.38444e-06\n",
"Loss: 7.38372e-06\n",
"Loss: 7.38253e-06\n",
"Loss: 7.38131e-06\n",
"Loss: 7.38008e-06\n",
"Loss: 7.37864e-06\n",
"Loss: 7.37613e-06\n",
"Loss: 7.37369e-06\n",
"Loss: 7.37211e-06\n",
"Loss: 7.36941e-06\n",
"Loss: 7.36682e-06\n",
"Loss: 7.36541e-06\n",
"Loss: 7.36345e-06\n",
"Loss: 7.36300e-06\n",
"Loss: 7.36254e-06\n",
"Loss: 7.36188e-06\n",
"Loss: 7.36041e-06\n",
"Loss: 7.36082e-06\n",
"Loss: 7.35959e-06\n",
"Loss: 7.37054e-06\n",
"Loss: 7.35931e-06\n",
"Loss: 7.35810e-06\n",
"Loss: 7.35783e-06\n",
"Loss: 7.35738e-06\n",
"Loss: 7.35649e-06\n",
"Loss: 7.35628e-06\n",
"Loss: 7.35562e-06\n",
"Loss: 7.35532e-06\n",
"Loss: 7.35500e-06\n",
"Loss: 7.35572e-06\n",
"Loss: 7.35490e-06\n",
"Loss: 7.35460e-06\n",
"Loss: 7.35400e-06\n",
"Loss: 7.35363e-06\n",
"Loss: 7.35264e-06\n",
"Loss: 7.35154e-06\n",
"Loss: 7.35246e-06\n",
"Loss: 7.35137e-06\n",
"Loss: 7.35062e-06\n",
"Loss: 7.34879e-06\n",
"Loss: 7.34715e-06\n",
"Loss: 7.34556e-06\n",
"Loss: 7.34379e-06\n",
"Loss: 7.34281e-06\n",
"Loss: 7.34200e-06\n",
"Loss: 7.34123e-06\n",
"Loss: 7.34556e-06\n",
"Loss: 7.34050e-06\n",
"Loss: 7.33979e-06\n",
"Loss: 7.33795e-06\n",
"Loss: 7.33653e-06\n",
"Loss: 7.33478e-06\n",
"Loss: 7.33347e-06\n",
"Loss: 7.33223e-06\n",
"Loss: 7.38299e-06\n",
"Loss: 7.33219e-06\n",
"Loss: 7.33143e-06\n",
"Loss: 7.32926e-06\n",
"Loss: 7.32666e-06\n",
"Loss: 7.32540e-06\n",
"Loss: 7.32456e-06\n",
"Loss: 7.32391e-06\n",
"Loss: 7.32305e-06\n",
"Loss: 7.32218e-06\n",
"Loss: 7.32110e-06\n",
"Loss: 7.31969e-06\n",
"Loss: 7.31804e-06\n",
"Loss: 7.31726e-06\n",
"Loss: 7.31621e-06\n",
"Loss: 7.31521e-06\n",
"Loss: 7.31394e-06\n",
"Loss: 7.31335e-06\n",
"Loss: 7.31199e-06\n",
"Loss: 7.31098e-06\n",
"Loss: 7.30978e-06\n",
"Loss: 7.31323e-06\n",
"Loss: 7.30952e-06\n",
"Loss: 7.30854e-06\n",
"Loss: 7.30801e-06\n",
"Loss: 7.30769e-06\n",
"Loss: 7.30726e-06\n",
"Loss: 7.30650e-06\n",
"Loss: 7.30605e-06\n",
"Loss: 7.30589e-06\n",
"Loss: 7.30557e-06\n",
"Loss: 7.30507e-06\n",
"Loss: 7.30705e-06\n",
"Loss: 7.30491e-06\n",
"Loss: 7.30422e-06\n",
"Loss: 7.30333e-06\n",
"Loss: 7.30183e-06\n",
"Loss: 7.30020e-06\n",
"Loss: 7.29805e-06\n",
"Loss: 7.30109e-06\n",
"Loss: 7.29741e-06\n",
"Loss: 7.29628e-06\n",
"Loss: 7.29534e-06\n",
"Loss: 7.29466e-06\n",
"Loss: 7.29377e-06\n",
"Loss: 7.29604e-06\n",
"Loss: 7.29360e-06\n",
"Loss: 7.29289e-06\n",
"Loss: 7.29185e-06\n",
"Loss: 7.29107e-06\n",
"Loss: 7.29054e-06\n",
"Loss: 7.28994e-06\n",
"Loss: 7.31935e-06\n",
"Loss: 7.28965e-06\n",
"Loss: 7.28878e-06\n",
"Loss: 7.28790e-06\n",
"Loss: 7.28728e-06\n",
"Loss: 7.28640e-06\n",
"Loss: 7.28544e-06\n",
"Loss: 7.28467e-06\n",
"Loss: 7.28442e-06\n",
"Loss: 7.28337e-06\n",
"Loss: 7.28285e-06\n",
"Loss: 7.28212e-06\n",
"Loss: 7.28159e-06\n",
"Loss: 7.28092e-06\n",
"Loss: 7.27852e-06\n",
"Loss: 7.27653e-06\n",
"Loss: 7.27494e-06\n",
"Loss: 7.27394e-06\n",
"Loss: 7.27348e-06\n",
"Loss: 7.27298e-06\n",
"Loss: 7.27208e-06\n",
"Loss: 7.27138e-06\n",
"Loss: 7.27035e-06\n",
"Loss: 7.26917e-06\n",
"Loss: 7.26829e-06\n",
"Loss: 7.26719e-06\n",
"Loss: 7.26678e-06\n",
"Loss: 7.26592e-06\n",
"Loss: 7.26491e-06\n",
"Loss: 7.26341e-06\n",
"Loss: 7.26176e-06\n",
"Loss: 7.26027e-06\n",
"Loss: 7.26719e-06\n",
"Loss: 7.25909e-06\n",
"Loss: 7.25894e-06\n",
"Loss: 7.25836e-06\n",
"Loss: 7.25825e-06\n",
"Loss: 7.25735e-06\n",
"Loss: 7.25629e-06\n",
"Loss: 7.25685e-06\n",
"Loss: 7.25529e-06\n",
"Loss: 7.25341e-06\n",
"Loss: 7.25037e-06\n",
"Loss: 7.24920e-06\n",
"Loss: 7.26084e-06\n",
"Loss: 7.24902e-06\n",
"Loss: 7.24839e-06\n",
"Loss: 7.24783e-06\n",
"Loss: 7.24702e-06\n",
"Loss: 7.25481e-06\n",
"Loss: 7.24711e-06\n",
"Loss: 7.24691e-06\n",
"Loss: 7.24629e-06\n",
"Loss: 7.24530e-06\n",
"Loss: 7.24408e-06\n",
"Loss: 7.24369e-06\n",
"Loss: 7.24445e-06\n",
"Loss: 7.24319e-06\n",
"Loss: 7.24297e-06\n",
"Loss: 7.24176e-06\n",
"Loss: 7.24065e-06\n",
"Loss: 7.23993e-06\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Loss: 7.24075e-06\n",
"Loss: 7.23943e-06\n",
"Loss: 7.23885e-06\n",
"Loss: 7.23762e-06\n",
"Loss: 7.25703e-06\n",
"Loss: 7.23729e-06\n",
"Loss: 7.23634e-06\n",
"Loss: 7.23490e-06\n",
"Loss: 7.24258e-06\n",
"Loss: 7.23459e-06\n",
"Loss: 7.23389e-06\n",
"Loss: 7.23211e-06\n",
"Loss: 7.23043e-06\n",
"Loss: 7.23476e-06\n",
"Loss: 7.22984e-06\n",
"Loss: 7.22844e-06\n",
"Loss: 7.22697e-06\n",
"Loss: 7.24065e-06\n",
"Loss: 7.22697e-06\n",
"Loss: 7.22686e-06\n",
"Loss: 7.22613e-06\n",
"Loss: 7.22561e-06\n",
"Loss: 7.22478e-06\n",
"Loss: 7.22409e-06\n",
"Loss: 7.22349e-06\n",
"Loss: 7.22129e-06\n",
"Loss: 7.22026e-06\n",
"Loss: 7.21855e-06\n",
"Loss: 7.31645e-06\n",
"Loss: 7.21838e-06\n",
"Loss: 7.21741e-06\n",
"Loss: 7.21634e-06\n",
"Loss: 7.21477e-06\n",
"Loss: 7.21364e-06\n",
"Loss: 7.21253e-06\n",
"Loss: 7.23043e-06\n",
"Loss: 7.21248e-06\n",
"Loss: 7.21169e-06\n",
"Loss: 7.21091e-06\n",
"Loss: 7.21012e-06\n",
"Loss: 7.20947e-06\n",
"Loss: 7.20766e-06\n",
"Loss: 7.20698e-06\n",
"Loss: 7.20516e-06\n",
"Loss: 7.20451e-06\n",
"Loss: 7.20345e-06\n",
"Loss: 7.20244e-06\n",
"Loss: 7.20451e-06\n",
"Loss: 7.20173e-06\n",
"Loss: 7.20060e-06\n",
"Loss: 7.19876e-06\n",
"Loss: 7.19822e-06\n",
"Loss: 7.21812e-06\n",
"Loss: 7.19797e-06\n",
"Loss: 7.19694e-06\n",
"Loss: 7.19556e-06\n",
"Loss: 7.19458e-06\n",
"Loss: 7.19362e-06\n",
"Loss: 7.19307e-06\n",
"Loss: 7.19173e-06\n",
"Loss: 7.19131e-06\n",
"Loss: 7.19108e-06\n",
"Loss: 7.19069e-06\n",
"Loss: 7.18979e-06\n",
"Loss: 7.19186e-06\n",
"Loss: 7.18950e-06\n",
"Loss: 7.18901e-06\n",
"Loss: 7.18864e-06\n",
"Loss: 7.18821e-06\n",
"Loss: 7.18993e-06\n",
"Loss: 7.18834e-06\n",
"Loss: 7.18839e-06\n",
"Loss: 7.18826e-06\n",
"Loss: 7.18821e-06\n",
"Loss: 7.18821e-06\n",
"Loss: 7.18821e-06\n",
"Loss: 7.18821e-06\n",
"Loss: 7.18821e-06\n",
"Loss: 7.18821e-06\n",
"Loss: 7.18821e-06\n",
"Loss: 7.18821e-06\n",
"Loss: 7.18821e-06\n",
"Loss: 7.18821e-06\n",
"Loss: 7.18821e-06\n",
"Loss: 7.18821e-06\n",
"Loss: 7.18821e-06\n",
"Loss: 7.18821e-06\n",
"Loss: 7.18821e-06\n",
"Loss: 7.18821e-06\n",
"Loss: 7.18821e-06\n",
"Loss: 7.18821e-06\n",
"Loss: 7.18821e-06\n",
"Loss: 7.18821e-06\n",
"Loss: 7.18821e-06\n",
"Loss: 7.18821e-06\n"
]
}
],
"source": [
"model.train_Adam_minibatch(50000, size = 100)\n",
"model.train() # BFGS training"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Finally, we can predict and compare to the exact results."
]
},
{
"cell_type": "code",
"execution_count": 19,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAA0EAAAFgCAYAAAB9knJjAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+17YcXAAAgAElEQVR4nOzdd5hU1fnA8e87dXtjWZbepIgg1YKKiKgg9q4RuxJNNGr0pzGxxcSWoolojJpYYldUNBYUGwQVlSJNAel1WdjeZ3fm/P6YWdyF7Tszd8r7eZ552L137p13dpd75r3nnPeIMQallFJKKaWUihc2qwNQSimllFJKqXDSJEgppZRSSikVVzQJUkoppZRSSsUVTYKUUkoppZRScUWTIKWUUkoppVRc0SRIKaWUUkopFVc0CVJhJSJ9ReSaUJ1PRJ4TkROsPlcz579eRLoHvj5dRB4P1rmVUkr9JNhtTTtfe6yIbOzE8ekicluD7+8TkcuCE12H4rlLRGZ24vi98YvIaSIyPnjRKdVxousEqXASkYnAo8aYEZF2vmDH1sT5NwJnG2MWh+L8Siml/EJ9PW/ltccCs4wx/Tt4fF9gpTEmNbiRdYyI3AVkG2OuC8K5nsH/3v7a+ciU6hztCVJ7ichkEVkoImtF5FsRGS4iCSKyRER+FXjOJBHZICJdRCRZRJ4XkU0iskVEPhORboHnHSEiXwfOtUxEzhGRycB/gMEi8oOIpO7z+p0+X+CYs0TkDyLyVINzTxGR+YGvrxSRNSLyo4isF5EzWzpX4JgTRWSRiKwO/HtiYPslIjJbRN4IxLZaRI5q4mf7DdATeENELg4c99/Avs9E5E4R+V/gff8pcOdtiYhsF5ErGpznBhH5LvAzekNEMgPb7xaRdYEY5olIv2D8TSilVLBFQFvjFpGHA23AahF5TUTSA/vafa0XkezAOTaIyA/AhQ1e6xkRuanB93t7VUTkQBH5NBDHKhG5VkSGAp8AyYHYhzY8h4gcKiILAq+/XEQuCWyfKCJfichTgfZtQ337tc97b6ltHC0inwfOvUpEjmni+MEi8mGD59zSYN+pgd/BmsDvdVLDn4GIPAqcAdwqIo+KyNsicmOD46eLyDut/wUpFSTGGH3oA6AfsBDIDHw/HvgRcAADgV3AZGAjcEjgOdcAj+NPpgWYF9iWAeQDRweeNwAoALKBicCKZmLo9PmAz4CzGjzHFdj+b+ByIAtYh/+uFoFtXwe+bu5cAwPnGh3YPgYoDGy/BKgARgT2XQfMbeb9bQTGBr6+BPhvg9d5O/Cz7g0Y4LrAvlOBDYGvzwWeARyB728GXgT6AjVAcmD79cDNVv9N6UMf+tDHvo8IaWvuBN4F3IHvHwMe6ui1PnAdfiwQmxuYDWwM7HsGuKnBa98FzAy83zXA9MD2LsBmYGTgml7W4JhngJsC73cXMDWwvT+wAzgq8H69wPGBfacAPzbx3ptrG9OAJUDvwPYhgXiy9ol5NXBV4DldgRX4k76BwG5gWGDf4cDOwDF7fwb7fD0Jf69QfWwfA1Os/hvVR/w8HCjldyIwGPhSROq3pQEDjTFrAnfn5uL/cP0tgDHm8cBdq0uAYcBwIBk4ElhvjJkfeN4G/Bd4Gpx7P8E8nzFmg4gsB6aIyBzgBOAGY0yZ+IcqHBt4rdMCr9GSqcBHxpilgXMvEZGPgSn4G8WvjDErAs9dAcxo5XxNedEYUwdsFZEK4L3A9rVA/V3Ms/D/LFYE3rcd8OFvaBYA80XkfeBz4NMOxKCUUqFmeVuDP0G43RhTEzjul4FjfknHrvVTgfHGGAPUiMjDwLOt/ByGAKnGmBcCr1WAP/mpHw7XlCPx3xSbEzhmo4i8gP9m2Xv4E6+5DeLL3fcEzbWN+BOpQcBHDX52TvyJYMOY040xTwXOtVv8c1tPw5+gfWyM+T6wbyFQPwe2yTdjjPlMRLwichj+ZK438FEz71
2poNPhcKqeHfjAGHNg/QP/naX1gf2H4L8DNE1EbAAi8hvgSfx3yp7Gf2et/ly1DU8uIiNEJKWlAIJ9vsA5LgCOBz4JJEBDgWX4G4dPgD+2cg7w393bl2mwvWqf7R3h3ecc3gZf17MDv2/w+xkBnGKM8RhjJgPn4797ehfwegfjUEqpULK8rdn3OBHJDLQNHb3WG9p+7U9sEENdwx2BoWZdWjg2WG3Rfm1jIJ7l+/xejgK+aOPrN/W7GCcizhbiAPg7cCX+BPeJQCKpVFhoEqTqfQScKCIHAYi/esu7gFdEpgEnA4cGnvu7wL/TgGeNMW8DxfgvmDbgK2BE4O4OIjIEf8+EHf8F095MDME+3xv4G9fL+Omu3BHAZmPM48Bi/Hf46v8fNHeuOcAJIjIy8Pqj8DceHzbzus1pKda2eA+4RkQyAt/fCNwWaPS/A/KMMU8DdwCjOvE6SikVKpHQ1swFrhURRyDR+jMwnY5f698F/k9EbCJixz9Urt5uoP69dgFOD2xfHdhWP++0G/4bc9ktxP4FMEACVUsDPUYXAv9tJb59NdU2fgkMbTCPZyD+EQYN41gNlEpgnmrg/VwNvIP/5z5NRA4I7DsaeJPGN/ho4r29BByL/ybeM+18H0p1ig6HUwAYY9aKyJXAyyLiwn/H7Wz83dn/Ak4zxpSLv8zl0sBEyr8DfwlMbNyK/8L6q8DzzwMeF5Fk/HenLjbGlIjIGiBFRNbinx9T1iCMTp9vn/dUKSLv4u/unxfY/B5wuYhswN84vQZkichF+Bvn/c5ljFknIhcDz4pIElAJXBTYfmQ7fsyfAbMDdzU74jn8wyW+EREf8ANwuTGmSETewv978eCfH/TLDr6GUkqFTIS0NX8AHsZ/DTX45yjda4yp6uC1/kb8c5bWAWX4k4d6jwGvBuLZSmCosjHGIyJnAn8Xkfvx9wr9LjAk0AVsCLRT0xr87IpE5DTgYfEXV6gD7jDGzBd/Nbw2aaptNMYUisgZgZ9zOv7hfxcFfhf1x9WJyKnAY+IviGDwJ6fPg38ZCODtQCJYApxljPHtMxxuAfCAiPQxxlxrjKkWfzGEVGNMUVvfg1LBoCWylVJKKaVU2ImIA/gWuNQYs8zqeFR80eFwSimllFIqrAJD79YDczQBUlbQniCllFJKKaVUXGlzT5D4F7F6uYnt48S/6NkXIjJLRNzBDVEppZQVRGRm4Pq+SERO2mffRPEvUjlP/AtZdqboR0di0zZJKaXiUAvX/7PFv5DvZ4HHmKaOr9dqEiQiWSKyGH/1lKbMxL+w45H467xf2Yb4lVJKRTARmQIMMMYcjn8dkscClbTq/Ql/YY6J+EvkTg1TXNomKaVUHGrD9X80MMMYMynwWNLS+VqtDmeMKQTGBiqPXN3EUwbUL2iGv7zkuU0EPYPAomKuRPvY7P6tlfCPPKV1CVaH0CEeT/QVABRPi4vcRSy7x+oIOsZeHZ1DYqU6+n7gpXW79xhjugbrfBOOSTBFhb4OH79qRe0qoLrBpieNMU8Gvh6Dv5QwxpgdIpIP9AE2BfYXAtmBxCgL/yr0IRfsNikpScb2Hxh910mllAqGVStqI6ZdaqVNasv1fzQwKFBx8SvgN4GF6JsUjCt/w3daAew39CDwBp4E6HlQhvnFa0cF4WVDy1vrY/6/17Poja1Ul9aSOror/S47iuS+La1jFnk2bQva33XYuLe4rA6hQ1I3R2cykb6+xuoQOsS1epvVIbTbnLx/bA7m+YoKfbzxXnaHjx/aZ2e1MWZcM7vryxfX2/f6PhP/+ih5gUekTGxuV5s0/GCX6czPMFyMMbz8fCXPP1PBrjwfI0c5ue6mVMaMi87rpVIqMgztszNi2qVW2qS2mAu8AuQDzwNXAE809+RgVIdreI5E/GuURL2371nB5iWFXPTYOG76cBLpw3ux7ObXqNld1vrBSikV/WqBhl3ge6/vgTVZHgcGG2MG4V9osaPrXwVbTLZJ/5xZzqxXKrnvLx
nM+zqH085K5JdXFrFqRa3VoSmllOXEvyDVU8aYncYYL/AicEhLxwQjCdrYYOLRCcCiIJzTUsU7q/jh011c8Lex5A5OIynDRa+zx5IzaSjb3/nO6vCUUioclgCTAUSkO5CDf7FH8C+SaPAviAj+u25hLYzQgphrk6qqDM/+q4KZT2YyeqyL1DQbp5+dxDW/SuHfT5RbHZ5SSkUCG7BMRDIC308AVrV0QIeGw4nIJUCuMeZB4FpgpojUAnuA2zpyzkiye0M5PQ5Mx53U+MeTMbIXeXNWWhSVUqo9jPGxx7OVirpikh2ZZLt6s8/K5aoFxpg5InKyiCwAnMB1wHQRyTXGPCgivwfmi0glUApcbFWssd4m5e30kp5uo2evxm3SYeNdvP5SpUVRKaXawxjD4m9qWbnCQ4+edo6ZnIDLpW1SZzW8/ovIzcBHgXZpHXBXS8e2OQkyxswD5gW+fq7B9kXAkR0JPFJ16ZNM3tpSamu8ON0/3dws/SGPxF6ZFkamlKrnGdqr2XlBHl8ViwrfBSDTlcuOqrWsk28Ym3kyLlt0FjmxgjHm2hb2PQM8E8Zw9n39uGmTcrrZKC7ykb/LS063n9qk75bU0k+LOigV8aqrDdfNKGL7tjqOOMrNp3Nr+Mt9ZTz9Yha9+uj/4fZq4fr/FvBWW88TjOFwMSerdxL9D+nCrNu+oySvirpaH3kfrSJvzgp6nDLK6vCUUq1YU/YVma5cxnc5mwPTJjC+y9mkObryY9lCq0NTqt2Sk22cd2ESv762mA3r6/D5DJ99XM3Mh8q47Kpkq8NTSrXi6SfKcbngnY+6cvs96fzn1S6c+7Mk7rytpPWDVchoEtSMs+4dSXpuIjPPmM894+aQ9+Eqhv/xTBJ7ZLR+sFLKMsYY8qrXMSB57N7hbyLCwJSx7Kxeb3F0SnXMDbekcuQEFxedU8CIgXnMfKiM+/+artXhlIoCH/y3mhm/TMHh+Gn428WXJ7NsaS3FxR1f5kB1jvbBNcOZYGfaLcOYevOBGK/h04JhVocUN2r6eKK2THY0Khnojtoy2c0xxmCTxvP0BRsGbWxUdLLbhWt+lcrV16VQVwdOp84lUCpaeH2mUQIEYLODzQY+b3QurxELtCeoFTabYHfqj0m1rqyvfigJN8/QXvttExFyEvqzqaLxsjWbKpfRzd0/XKEpFRIiogmQUlFm8gkJ/OffFRjzU8Lz5mtVDBrsIKtLpBTWjD/aE6SUijlDUsfzTeHblNblk+HsTpFnJ5XeEg7NOt3q0JRSSsWZGb9I4fILC7nwrAKOnuRmzQ91LPrGw79fzLI6tLimSZBSKuYk2lM5Kvs88qrXU15XRPfEA8hNOAC76CVPKaVUeKWm2XjpzS58OrealctqOWy8i3seSCc1TUcaWUk/ESgVY0q3ryH/+y+orSwlOacP3YYfgzs1du82NVcq2y5OeiYOtSAipZRS9VYs8/DMUxVs3uhl0BAHl/88mcFDnFaHFXZOpzBlWiJTpiVaHYoK0BRUqRiyZ+3XbP7f62T2G0Hv8Wdgd7hZ8+5MasoKrQ5NKaVUnPlifg1XX1bEmHEu7ro3jQMGObjkvEJWLPNYHZpS2hOkVKzweevYvugDBk2dQVJWDwBScvqCCLtWfEafI85q9thYrBCnlFLKWg89WMY9D6Qz+QT/ItUHj3KRnmHjkb+W89R/YneEgooO2hPURsfnrrY6BKVa5CkrxO5w7U2A6mX0G0H5rk3WBBUmTVWJU0opZR2Px7Dmh1omHedutP34ExNYulh7gpT1NAlSKkY4EpKpq6nA66lutL2mZDfOpDSLolJKKRWPnE5ISRW2bvE22r5pQx1dc/Tjp7Ke/hUqFURWrhXkSEgmvfdBbPnqrb2JUHXxLrYvnkPXA4+0LK5w0d4gpZSKHCLCzy5O5ve/LaGo0L9Q9a48L/fdXcrPLk62ODqldE5QzOvXazebtnW1OgwVJn2OPIstX7zOilf/iDMxlbqaSrqPPoGMPs
NaPVbnBSmllAqmX1yfwp/+WMqUo/PJ6WYnf5eX6ZclM/3SJKtDU0qTIBWZavp4cG9xWR1G2NXVVFJZsB1nYiqJmbntPt7udNP/mOnUVpVTV1WGOy0bmyN+SpE2Vy5bKaVU+1VVGZZ/5yElxcaw4Q5E2jfaweEQfnt3OtfemMrOnV569rKTkqKDkFRk0CRIqQiRt+wT8pZ/RmJWd2rKCnCnZjPg2ItwJqa2+1zOxBSciSkhiFIppVQ8mD2rkvvvKaX/QAdFhT7cbuHv/8yk/4D2f3RMS7eRlq7Jj4osYU+CMu2V4X5JpSJe8aYV7PnxW4adeTOu5AyMz8f2xe+zad7LDJo6w+rwoor2BimlVOd8v7KWv95fxguvd2HQECfGGF55oZJfXFHIe590xWazbv6rUsGiablSEWDP2q/pMfoEXMkZAIjNRo8xU6ks2IanvChscZQMdLf+pCigRRKUUqrj3nitkgsvTWLQEP9wahHh/OlJuN3C4m+1vLWKDZoEKRVkHakQV1dTiTMpvdE2m92BIyGFupqqYIWmQqzWV8Oemq2U1OZjjLE6HKWU6pCSYh85ufZG20SEbrl2iov02hYtqqsNC7+oYckiD16v/t72pUmQUhEgtfsgCtcvabStcs826moqScjIsSiq6Bbu3qDNFcuZt/sF1pcvZlnxXL4qeJ3KutKwxqCUUsFw+BFu3p1d1ehmzq48L0sXeRh7SPwU24lmH75fxaTD8/nbX8q4+7clTJ24m5XLa60OK6JoYQSlIkC34RNY/d+ZbJz3Mpn9RlBTuoddK+fR+7BTsdnD+980lkplh2t+UEHNdjZWfMcRXc4hyZGGMYZNlcv4rvhDxnc5u90VlZRSykqnnJ7IG69WcvVlRZx5biJFBT6efrKCK69JIauLvfUTKEtt3lTH3b8t5an/ZDH8YH/SOufdKn5xRSEfL8jB5dY2CbQnqF2Oz11tdQgd0q/XbqtD6JCaPvEz7tiRkMLQU68nISOH3asXUlW8i4HHXUbWwDFWh6baYHvVD/RPHk2SIw3wDxvplzSSWlNDWV2BxdEppVT7uBOEp1/qwtGT3Lz1ehWLvvVw573pzPilVh2NBm+/UcXpZyfuTYAApp6cyICBDuZ9Fhs3OYNBe4KUCoGyvkLq5vaNv3W4k+g+cjKMDFFQcSocvUG1poYEe+MV0EUEty2JWp82OEqp6JOYKFx4STIXXpLc+pNVRCkp8dGr9/49djnd7JQU+yyIKDJpT5BSaj+xUiWuXqjnB3Vx9WJH1ZpG4+cr6oqpqCsi3alzupRSSoXPEUe5eXd2NXV1P7VJxcU+5n9ezeFHxN9C9M2xJAk6O21J609SSqkgCmUi1CtxGFXecpYWzyGveh2bKpbxbeHbDEo9HIdNJxErpZQKn2Mmu8nuauPSCwr57+wqXnmhggvO2MM5FyTRq48OAqunPwmlVJNiqUBCvVANjXPYnByadTrbqn5gR9VanLYERmacQKare9BfSymllGqJ3S48+lQm/51dxUfvV+FOEG69PY2Jx8bWKI/O0iRIRbSaPh7cW6Kz67Yj84JU6IUyEeqXfDD9kg8O+rmVUkqp9nA6hTPPSeLMc5KsDiVi6ZwgpWJEbVU5RRuXUbJtNcbntTqciBbuNYSUUirelJf5mDunmk8/qqaqSm8IqsijSVCciNYy2aptdq2cz6pZD1CwbhE7l37Eitfuo3JP53s7Yq1AQkOaCCmlVGjMebeKyUfk8+qLlfzn6QqOHZ/P/+bF1vBqFf10OFw7HZ+7mrl5Q60OQ6m9yndtZNfKzxl2xk24UjIBKNq4jPWfPMvwc25DbLqwXXPCtZiqUkrFi+3b6rjrtyU892oXhg7zF4ZZ8q2Ha64o5MP5OWRk6P13FRn0L1FFvGheNLWsb+hXZS5Yt5icYRP2JkAAmf1H4khIoSxvQ6fPH8u9QeBPhLRXSCmlguP9d6qZdkri3gQIYMwhLo44ys3cD6otjEypxjQJUirK+W
prcLj3n/jocCfhq9XhB22lidD+RGSmiCwUkUUictI++94Xkc8Cjx9E5K9WxamUihxVVYb0Jnp70jNsVFbo3CDVeSIyXURebmH/mSLyTGvnsSwJ0rWCIo+vxkPJ+/PI++M/yLv/Cco++Qrj1Qn2kS6t11D2rP2mUTGEmtI9VOzeQkruwKC8Rqz3BtXTXqGfiMgUYIAx5nDgVOAxEdnbZhhjphljJgEnAz8C91kTqYpVXq/hlRcquOjcAs4/Yw9PPlZOZaWudh/pjpro5t3ZVVRU/PS7KiryF0mYMCk+2hIVGiKSJSKLgT+38JwM4Ia2nE/nBMWRfr12s2lb1yb3Ga+X/IefxZboJv3ME6DOS8l7n1O9dhNdr7kgzJHGllCXys4aMIrC9UtY895jdBl0CHVV5exe/SU9DzkJhzsxZK8by3SuEABjgLkAxpgdIpIP9AE27fO8e4F/GmMKwhueinW/vbmEbVvquOoXKSQkCi8+V8mV0wt57tUuOJ2hH2qsOmb0WCdHTHBz3qkFnHthEnV1hleer+Ss8xIZMFA/dqqOM8YUAmNFZCJwdTNPuw+4Hzi3tfPpX6MCoGrZakyNh663XInY/Dd7Ew4cyPbf/IWaTdtw99O745FKbHYOOP5yijYtp3TbGuzOBAYefznJ2b2D+jqxuHhqS+p7hCI5GSryJjGrdEwnzvBetogsarDhSWPMk4GvXUBhg30VQKPbuCIyABhjjGnTXTel2mr197Us/KKGD+fnkJDgT3gOPdzFhWcV8MmH1Uw9WW/wRCoR4ff3pzH/sxo+nlON3SHc82A6h42PzjX/VPt0rl1qsU1qlYhMAvKB79vyfE2COiAWK8TVrNtC0piD9iZAAOJ0kDhyKDU/brY8CYrmRVPDQWx2sgaMJmvAaKtDiTnRkAx1wh5jzLhm9tUCCQ2+TwT2zYIvAV4MRWAqvn232MPRx7j3JkAANptw3JQEli6u1SQowokIE49NYOKxCa0/WamftNQmtUhEEoGbgTOB3LYco4URFAD29FRqd+3Zb3vdrj3YM9IsiCi2hKNKXDjEy9ygptTPF4qjOUNLgMkAItIdyAG27vOcacAHYY5LxYGu3exs3Lj/nNRNG+vomqMfXZRS+zkY6AnMAV4BporIjS0doFcSBUDy+FFULVtN5eKVGGMwPh9ln39N7c7dJI060OrwlIoo8ZAMGWPmAFtEZAEwG7gOmC4itzZ4Wi/2T4yU6rQJx7jJ2+Hlpf9U4PUajDF8Oreajz+s4bSztBdIKeUnIpeIyK3GmK+NMaMCBXvOB+YYYx5u6VgdDhdnmiuOYE9LIef6iyl4+g0KX3oX6rzYs9LpdvMViDMy/kx0SFxkiLe5QS3ZNxGKtSFzxphrW9nfPVyxqPjicglPPZ/Fb35dzD/+Xo7LLbjdMPOJTLrm6ALQSsUzY8w8YF7g6+ea2L8ZuKy181j66fbstCWdnNSrgsl9QF+633sjdXl7wG7DmdPF6pBiSqirxIVTOBIhr7eWgsI11NZWkpkxgKSk7JC+XjC01DsUawmSUqHWf4CDV2dns21LHZ5a6D/AjkhsDC1W0aeuzvDl/2rYsd3LiJEuDhrhbP0gFdEi4xa/ihgigrN702W0lQqXsrIdLF/xH5KSuuJOSGfDxrnkdhvFAQOnRe2HIM/QXpBndRRKRZ9effSjirLWju1erryogNRUG4OHOHjysQpGjHTyl5kZWq49iumcoA46Pne11SGoKBQrBRIgdEUSjPGx6vtXGDjwREaPuoJhQ89m/GE3UVi0nt17VoXkNZVSSqnm3HFrMaeemcSrb2fzhz9lMGdeV8rLffzn6QqrQ1OdoElQHOrXa7fVIXRYTR+P1SGoBkKRCJWV7QARuuUcvHebw5FAn95HsWvXsqC/nlJKKdWcokIfy7+r5fKrkvduc7mEa65L5b23qyyMTHWWJkFKhVks9QaFgs94sduc+w17s9uc+Hx1FkWllFIqHnk8Brsd7PuMykxIFD
xaIyiqaRKklOqUYPcGpaX2pMZTRknJ5r3bfD4v23Z8TXa2lmtXSikVPjndbPTs5eD9/1bv3WaM4cXnKph0fPyunRcLLJ9tqBXiVHtpqezIE8xqcTabg6GDz2D5iufp1m0kbnc6+fkrcLqS6Z6r1wqllFLhIyLcfV8aV19WxFcLahg81MG8T2ooLvbx3CtaRTeaaU9QJ0RzcYRonhcUC3RIXMuys4cybtwvcTqTqakppV+/SYwccTE2m+X3bZRSSsWZESNdvPNRNgcMdrB9q5czzknk1dnZpKXrx+hoFvWfKCoKa/jwodWs/GgnGBh2XC5TbhpKanaC1aEpFVeCvXZQYkIm/fsdG7TzKRUOVVWGv/25jLffqKSiwnD0MW5uui2NAQOjvrlVKq51ybZz+YwUq8NQQRTVKay3zsczV32DO9nBr9+fxE0fTiK1q5unL1tIncdrdXgqhGKhSlws9gaFqmy2UtHi178sYvcuL7Pezearpd0Yd5iLS84roGCPtklKKRVJojoJWjs/H2eCjWm/GUZKtpvkLDdTfn0gqTkJfP/xLqvDUyouaSKk4tWa1bX8sKqWB/+WQa/eDlJSbVx2VQrHHOtm1itaSlcppSJJRCRBZ6ct6dBx+RvK6Tsma79Sun3HZJG/oSwYocU0nRdkvVjsDQJNhFR82vBjHQePcu23gvyYQ1ys/1HLuyulVCSJiCSoo3IGpLBlaRHGmEbbNy8tpOuA8IzbjObiCJHKV+PBszUPb3nLKzHHwpA4pVTsGHCAg+Xfeaira9wmLfnWw4BBOicoWnk8hrVrasnfpUMalYolUZ0EDT46B09lHR/86QfKC2qoKPLw4cOrKd1VzUHH5VodnmonYwwl789j+68fYM/jL7H9//5MwdNvYDy1VocWUtobpJR1irxJQTvXkAOdDB3m5JYbitm+rY7ych/P/quczz+p4ezzEoP2Oip83n6jksnj87n+6iJOOW43115VSHGxz+qwlFJBENVJkN1h47J/HUZVWS0PTf2Mvxz/CaW7qrni6cNxuOxWh6faqeKr76j4Ygnd776WHvf9ml5/uRVveQVFr75vdWiqgzQRUtEgmGvVPfRYBl1z7Jw5bQ/jR+7i24Uennu1C9ldtU+G4GkAACAASURBVE2KNou+9vDQg2U89XwWH3yWw7xvu9Gtu51bbyi2OjSlVBC0KQkSkZkislBEFonISfvsmygiX4vIPBF5XkQ6dKXv6Lyg5Cw3Z/1xJHd8M4U7v53KOQ+MIrWrlsduq0iaF1T2yVdknjcNR9csAGzJiXS55AzKv1yKr5neoFgZEhervUGgiZAKvlC0ScFKhJKSbNx2ZxpfL89l+bpcHvtXFgMO0KFw0ejlFyq4+roUhg5zApCQINz6uzRWLKtl21ad46VUtGs1CRKRKcAAY8zhwKnAYyLS8Lg/AZcbYyYCAkwNSaStEJH9CiSEUlVpLQtf3sQHf/6enMWf4PPoBbGzvMWlOHKzG22zpaUgdhumqtqiqFQwaCKkgiWUbdKs0jFB7RUKZ5tUXW148/VKHvxjKa+9VElFhQ7Z6qzd+T769mucwLrcQo+edvbs1p+vUtGuLT1BY4C5AMaYHUA+0KfB/kIgO9AIZQEFwQ4y0uxaV8Yjp81j85IikrPcLHpjK0uve4naUi2B2hnuQX2pXLyq0baatRuxJSdiS01u9jjtDYoOmgipIAl5mxTMRCgc8nd5OX3qbua8W02XLjbmf1bNKcfvYdsWvTnXGaPHOvn4w8Y34LZtqWPr5joGDdHePaWiXVv+F7vwNyr1KoCGn2ZmAu8CeYHHsn1PICIzgBkAPXpG/7jo//5xJZOuHsSh5/UFYMLlA/jHrVvZ8tLXDLz6GGuD64B+vXazaVtXq8Mg/ZRj2fXgU5gaD4kjBuPZupOS2R+TeeGpiC2qp6+pgJKBbtLX11gdhopuQW2T0rs3XbBgVumYDg/TDreH/1TGcSckcPNv0/Zue+LRch74QymPPp
VlYWTR7aLLkjn3tALsd5Vw4imJbN/m5dGHyrjm+hSSk7VNUirateV/cS3QcJJNIlADICLJwOPAYGPMIOBL4Df7nsAY86QxZpwxZlxmVnRfOKpKa9nxfQljz+y9d5uI0PPMMexZsM7CyKKfq2c3cn97Nd7iUgpfeIfqlT+Sfc3PSD5khNWhhU2s9waB9gipTgtqm5Sc6Wr2haKlR+jTj6q56PLGveXTL0ti3qc1eL2mmaNUa7K72nnlrS44nMJ9d5fy7uwqbr09jUuvDM8SHEqp0GpLT9AS4BfA30WkO5ADbA3sM4FHSeD7fCCzo8GcnbYk4hsdsQEGvHUGu/On7YekrmeVM7oTvEjgzM2my6Vntvu4mj4e3Fua/zATTcr6CqmbY/uDi/YIqU4IW5sE0dEj5HAKnprG1wyPB2w2COO0pJiU083Orbentf5EpVTUafVTuzFmDrBFRBYAs4HrgOkicqsxphL4PTBfROYDk4CHQhmw1RJSnPQ/tAsLnt2wd5u3zse8J38k55ihFkbWOZFUJU7Fh5KBbu0VUu1mRZsU7IIJwTbtlAQen1mOz+dPhIwx/PORcqaelIjNplmQUko1pU0z+4wx17aw7xngmaBF1IyqklqWvb+d4h1V9DwonQMn5+KwqOfllNuH89yMr1n3xW66D01j/cI9ZPZMovf5h1gSj/LT3qDopL1Cqr2sapMa9gpVVRk+eLeK9WvrGHCAgxNPSSApyZo26fqbU5lxSSGnT93DuMNcLFtSizGGfz2v84GUUqo5UTF+K29tKX8PVGNLTHOy8OXNPDX9S6rLml47JtQyuidy7VtHc/SVA8nun8zp9xzMxf88hKl911sSj1LRTnuEVLSYVTqGXXleTp+ymw/fqyY9w8bHH1Zz2gl72LHda0lMKak2XnyjC7+9O41+/R3ccEsqs97NJqtL9BciUkqpUIm4Go9NzQt6556VHHfdYMad5a+CevSVA3nz9uXM/9d6TrjRmiFodoeNocd0s+S1QyVSqsQpv3jqDQLtEVLR41e/d3DiKYnc8H+pe7c9+nAZf7mvlIce69QUpA4TEQ4/ws3hR+gNBaWUaouI7wmqKKwhf30Zo0/rtXebiDB+ej9++DTPwshUJIqVNYPqxUO1uIZ0npCKBj98uovMcw5vtO3iy5P5+KNqjImfGxdKKRXNIj4JErtgfGB8jRsWr8eHzRHx4SulOkATIRXJbE7BW2sajVrweAwOe3zdtFBKqWgWkVlEw3KkSekueo/M4MvnN+3d5q3zMe9f6xkxtbsF0bXs+NzVVofQKbFQJU57g2KDJkIqHErrElp/0j5GTOnBZ//8EWP8idDrJaN5fGY5J56SgGhNaqWUigoRNyeoKafdNYJnZ3zDmvn55A5OZd2Xe+jSJ4kjLx1gdWhKhUW8zQ+qV58I6VwhFUpz84a26wbWCTcM5blrvuEf5yyg75hMtnxXhIiTWS+1P6FSSilljahIgjJ7JvGr2UezZn4+xTuqOPjEHvQZnal33FSzYqlcttKiCSr02pMIJaY7mfHCEWxYuIf89eUMmdiNgeOz+cQmnE1kL6yqlFLKLyKHwwH7rdBtd9oYNjmXIy7qT98xWRGdAOmQOBUK8Tosrp4WTVChNjdvKHPz2lZx1GYTDjiiK0dc1J9BR3bduyhpJC+qqpRS6icRmwQp1VmxNjcINBECnSsUTiIyU0QWisgiETlpn31dRGSOiHwtIl+KSH+r4gy2tiZCzdFESCmlQkdEpovIy01sP0VEvgo8HmrtPJoEqSbFa2+Q8fmo2rqJyg0/4vPEXhIVK7RXKPREZAowwBhzOHAq8JiINGwz/g7cY4w5DLgZGGxBmCGjiZCKBMYYflhVyxfzaygt8VkdjlKWEpEsEVkM/LmZpzwInGCMGQ+MFpFRLZ0vKuYEKdVR7ZkbVJO3gx2vPofYbNgSEvEU7CZn2hmkHRxZH2bitUhCU7RwQkiNAeYCGGN2iEg+0AfYFNg/HDhLRO
4LbPuVBTGGVHsLJuyrPhHad3i3Um2xc4eX668uorDAR/eedtZ8X8vVv0rh8hkpVoemlCWMMYXAWBGZCFzdcJ+IOIDrjTFlIpIEpAMVLZ0vopOgs9OWRO3dtONzV3f6TqIKH+P1sv3Ff5N93ImkHjwWEaEmbwfbnvsn7tweuHNyrQ6xEU2EGovXZKi0LqGT15n3skVkUYMNTxpjngx87QIKG+yrANwAgQZmJP6eoJtE5HbgDuD/OhFMRKr/+XY2GdJESLXXTdcWMek4Nz+/NgWbTdi5w8vF5xYwaIiTCRO1J1xFps61Sy22SS0yxtQBc0XkOODfwHJ+umnXJB0Op5oVK0Pi2jI3qHL9Whxp6aSNHLe36IY7twfpYw+n9LtvQx1ih+j8oP3pELl222OMGdfg0bCxqQUa1nxOBOqzzBrAA7wV+P4N/D1HUcfjadu9QB0ep8Jpw7o6tm/zMuOXKXuLbnTvYWfGL1N445VKi6NTKmRaapNaJCIuEck0xnwM9AN2AVe1dEzEJ0F690yFg7e6Ekda+n7bHWnpeKu0wYkm9fOFNCHqtCXAZAAR6Q7kAFsBjDFe4BtgfOC5RwErLIgxKDZt69qm52kipMKlpMRHdlc7dnvjm13dcm2UFOvcIKWa0BP4SETsxhgDFLd2QMQnQdEs2ktlQ/z0BiX2HUjlhrXUVZTv3WZ8PkqXLSZ5QOTO99beoJZpMtRxxpg5wBYRWQDMBq4DpovIrYGnXAn8TUQ+B04E/mBJoEGiiZCKJAce5GTHdi8/rqlttP3tN6sYf5Re05SqJyKXiMitxpiN+EclLAy0W92Ap1s6NqLnBAVLTWUdG74uQAQGHJaNK9FudUgqwjjTM8g8/Gi2PvUImUcegy0hkZJFC7E5XaQMO9jq8Fqk84NaF69zhjrLGHNtC/vWAIeGMZyQq0+EWrv509mCCa8UjKLnioVUV8Nh412kpev9SNVYQoLwf79L5YrphVw2I5kePe28/041GzfUcfe9+49aUCqeGGPmAfMCXz/XYPsDwANtPU9UJEGdKZDw/cd5vHXXcnocmI4xhjdvX8bZ949i8IScIEepIl1rleK6TJpCQq++lC5bhK/WQ9rBo0kdOQ6xR37SrIlQ2zTsFdKESDVn07aubUqEoP09/lu+K+LlGxeT1TuJvumV/O7mYm65PY2zz0/qcLwqNp15ThIDBjqY9Uol3y70cMjhLu79czopqZo0KxUMUZEEdVRpfjVv3bWcS588jJ4H+e+cbPmuiOd/+S03vncMSRltK53cGbFQJa5fr91tHioS6VpLhJIHDSV5UHT+vjQRah9NiFRL2pIIQft6hWprvLx0/WLO+MPBDDnafyNuzOYKHr5kPiNGORky1NmpmFXsGTXGxagxof+solQ8iunbCSs+2MGw43L3JkAAfUZlMuiorqz8cKeFkSkVGjpHqGMaFlPQOUSqXrDnCf24YDc5B6TsTYAAsvsmM/zMAfz3zaoOxaiUUqpjoiYJ6kiVuJqKuiZ7e5LSXdRU1AUjrLgRKwUSoG0ls1V806RI1du0rWubkqG2JELNtkkZTpYXdelQfEoppTomapKgjhg8IYcVH+xolPBUldayau7OsM4JioUqcarzfJ4aylYto3TZYrwNqtAFm/YGBd++SZEmRvGnrYlQS8nQwMOzWffVbkrzq/duq6v18d072xk8IUcrx6mw8ngMn31czexZlezY7rU6HKXCLqbnBPUcns7gCV154mdfcMg5fTAGvn5lMwdP60G3QalWhxd14mluULBVrFvDzlkvkNCjFzaXi/z33iT7hJPJGDe+9YM7QOcHhV5bEyGdbxTZxCO4t7ja1EPc2XlCaTkJTLzyAJ742RccdkE/3MkOlry1lazeSQwODJGbVTpG18dTIbf6+1quvqyQXr0d5OTauP+eUn52cTK/uill74LhSsW6mE6CRIRTbh/Ojwt2s2puHgic8ruDGDg+O+yxxEKBBNUxvppqds56gR7nX0
ZSvwEAeAr3sPWpR0js0x93Tm5IXlcTocjQKFn63LIwVCvClQhNuHwgfUdn8t2726mt8XH0lQM58NhcbLafPnhqIqRCyeczXH9NETfdlsYppycCUFTo44Iz9zBqjJOJxyZYHKFS4RHTSRD4E6HBE3K0JHaQaG9Q+5Wv+Z7EXn33JkAArqxs0kYfStnyJbiPmxay19ZESKm2C1ci1Gd0Fn1GZ7V4rCZCKlSWf1eLyymcfNpPyU5mlo3LrkrhnbeqNAlScSNq5gTpWGkVCuEokmBqPdgS9m9UbO4EfLW1TRwRXOGYI+Sr8+CpKMH4fCF/LaVCqa03RoJZMKE52u6pUKiuMiSnyn7D3lJShZrq2Lhp5qkx7Mrz4vHExvtRoRE1SVAsiJUCCbFUKS4ckg4YSsWPP1BbWrx3m89TQ+myRaQMGRaWGEKVCPm8dWz9ajbLX/49P7z9MCtfu4+CdYtD8lpKhUt7eoiDUTChJZoIqWAbNdbF5o1evl/50004r9fw2kuVUd8LZIzh8UfKOPrQXZx90h6OOSyffz9RjjGaDKn9RfxwOG0AVKiFelicMz2DrKOPZ8sTfyNj3HjE6aJ06Tck9ulHYv8DQva6+wrF0LhtX7+Dp7yIg866FWdSGhW7t7D+k+dwJqSQ1mtIUF9LqXCqvyaEY3hca3RonAqmhAThrj+mceVFhZx5TiI5uXbee7uKlFThtLMSrQ6vU579VwWfzq3h9Xey6d3XwYb1ddz4iyKSk4XzpydbHZ6KMGFPgoq8SZrYxIBYmhsEoU+Eso48hqR+AylbsQRfRTldTzydpIGDw16FJ5iJkNdTTeH6xQw/5zYcCSkAJHftQ89x09i1ar4mQSomhGueUGvq201NhlQwTD05kcEHOnn7jUo2rKvjshnJHDclAYcjeivDGWN49qkKnnwui959/R9vBwx0cNe96dx+S7EmQWo/Ed8TFGu0Slz8SujZm4Seva0OY+/QuM4mQ7XV5dhdSXsToHqJWd3xLCvq1LmViiTtSYSg9SHDHU2EQHuFVPAMGOjgxlvSrA4jaHw+yN/lY9CQxh9thxzoYPtWXQdJ7U/nBKkOa62hr1q+hrx7H2fL1Xey865HqFi4LEyRdUw4iiREks7OE3IlZ+Cr81BVvKvR9tKtq0nKtj7ZUyqYQjFPqKN0NEXHLP/Ow5UXFTJ2WB4nTsrn+WcqdK5IDLHbhaHDHCyY13httv99VsOIkeFbF1BFD02CLBArBRJaUrVyLQVPzyJtygR6PfRbMs45keI3P6L8f4usDq1Fmgi1nc3uoPvo41k/92mKN6+kumQ3u1Z8zq6V88g9eFIQo1QqMmgiFL3WrK7l55cWMfWkBD75Mof7/5rBO29WMfOhcqtDU0H0q5tS+d3NJbz9RiWbN9XxxquV/OHOUq69MaX1g1Xc0SRIdUpzvUEl73xK5oWnkjRuOLakBBKHDyJ7xnmUvPOp3nmLMJ1JhHKGHeWfA7RyHj9++BQVe7Yx+MSrScwMzQKwSllNK8dFp2eerODKq5M5+/wkMjJsjBrj4tGnMnnhmQrKy7W0f6yYdFwCf34kg3ffruaKCwuZO6eamU9kcviR7tYPVnFH5wRZJNbnBtVu30XC0AGNtrkG9sZbXIrx1CLuyO2aDtciqpGkMwUTMvuPJLP/yCBHpFTk0spx0Wft6louvCSp0bZuuXa65tjZsd3L4CF6TzhWHH6kW5Me1Sb6v151WlMNvKNbNjXrtzbaVrt1J7aUJMTlDFdoHRZvw+LAnwiFY2FVpWJFexZWbQvtEQqdfgMcLFvaeHHqwgIv+flecnPtFkWllLKSJkEqJNKnTaTw+dlUr9mAMQbP5h3seeo10qZNDHtZaNU+mggp1XbtSYRCOU9IE6GWXXplMo8/Us4nH1Xj9Ro2bazjpmuLOfOcJNLS9aOQUvFI/+dbKJYKJOzbG5Q0bjiZZ0+l4Jm32HLl78j/+3OkHH0IqccdYVGE7RePvUH1NBFSqu0ipW
CCJkLNO3iUiwf/lsHjj5QzclAeF5xRwOhxLv7vd6lWh6aUsojOCVJtUrhoEzve/o6agnLSDuxO73PGkZCb3uIxyeNHkXT4SKjzgsMelT1A8Tg/qF4wF1ZVKta1dS0haNs8IZ0j1LLl33l49l8VbNpQx+ChTi6bkcyQoS0PtT7qaDdHHe3G4zE4nURlm6SUCh7tCbJYNPQG5c1ZydqHPiJ7wiAGXTcZe5KLpde/TNXOkkbPa6pRFxHE6Yjqxibee4S0V0jFMnsQ/3u7t7iCOk+oo5XjYr1H6Iv5NVxzeRFjxrn4/f3pDBrs4NLzC1n+Xdt+mS6XRHWbpJQKDk2CVIt8tV42Pr2A4X88g9wTDiLtwO4MuGICuVOGs+21b60OT4WJJkIqlgW7xzMSCibEciL00INl3PNAOtMvTWbESBdXXJ3Czbel8shfdc0fpVTbaRIUASK5N6h6Vwk2t4OUAY0b6+wJgyhZtWO/57elFGw0iufeoHraK6RimZWJUKjmCc0qHRNzyVBtrWH197UcM7lxCeTjpiSwdLFep5VSbadJkGqRMy2R2tJq6ipqGm2v2lqIq0tyk8doIhTbNBFSscqqRAi0YEJbORyQlm5j6xZvo+0bN9TRNUc/0iil2k6vGBEiUnuDnGmJZI8fyI8zP6Gu0p8EVG4pYOMzX9Dj1FEWRxd+mgj5aa+QilWaCEU2EeGCi5L4/W9LKCzwJ0K78rzcd3cpF17S9I05pZRqiiZBLfBUeclbW0pFUXx/8B10/WQAvv7Zk3xz2dN89+tX6X3OOLLHD2z2mFjtDQJNhBrSREjFokhNhHx1Xio27eHdlb06FEesJEK/uD6FwUOdTJ24m5Mn7+aU43Zz1EQ30y9Nsjo0pVQU0RLZzfjiPxuY9+Q6krPclO2p5sBJuZx6x3CcCfG3srQ90cWBv5mGp7iS2qJKEntmYHPpn47yq0+EtJy2iiWpm01Qk/z6RKgtN1GaKqGd/9lq1v/zc+yJLmpLqlh7UA9+/ueBJGe2r4R/LJTQdjiE2+5K45c3pJCX56VHTzspKXpPVynVPnrVaMLKD3fy7etb+PlLR3L9OxO5+aNjqa328t6D34f0dSN1SFw9V0YSyf2z25wAaW9QfNFeIRVrUjebiKgcV/rDTtb/83OG33M6hz57OeNf/TmJPTN5/Po1HYohVnqE0tJtDB7i1ARIKdUheuVowsKXNzHlxqF06eMfX5yQ4uTUO4azcs4OairqrA0uymgiFF90rpCKRVYnQjveXUbvcw8hdUguADaXgwFXTaBycwFvL8rREtpKKdUBmgQ1oWx3DV36Np5gmZThwplop6q0NqSvHem9QaoxTYSapolQbBCRmSKyUEQWichJ++w7W0RWi8hngUdMf6q2MhEq2V5LYs/MRtttDjsJuel4CioAXUtIKRU/RGS6iLzcxPZTAm3WQhF5XERazHM0CWpCn9GZfP9xXqNtW5cXYbPbSMtJsCiq6BXLvUGgiVBztFcouonIFGCAMeZw4FTgsX0alNHADGPMpMAjuieatIFViZB7UB82fbS10baa3WVUbi4gZeBPw+Y0EVJKxTIRyRKRxcCfm9gnwMPASYF2KweY2tL5NAlqwsSrDmDhS5v5+NE1bF1exKI3tvDyjUs44cYh2Oyh/1CnvUGRz1taTvmCxZR/uRRfRZUmQi3QZChqjQHmAhhjdgD5QJ8G+0cD14rIFyLyFxGJi2opViRCqceOp/qHDSy5/0tKVm4n/7PVLL91Fr3POwRHSuMbc5oIxafyMh//nV3Fm69Xsjvf2/oBSkUhY0yhMWYscH4Tu93A3caYgsD3pUCL1cw0CWpCdt9krnp+PJWFHt69dxVr5+dz9v0jGTmtp9WhRa1Y6g0qm/cNO37zF6qWr6Hy2xVsv+VPVC5ZpYlQKzQZikjZgaFu9Y8ZDfa5gKoG31fgb2TqzQWuB44GegBXhDzaCBHuRMiemkzu7dcgbhff/30BeR+tov/lR9Hngs
OafL4mQvFl3qfVTD4yn/ffqWLB5zWcdOxuXvpPhdVhKdURLbVJLTLGVBtjXgAQkauB3sAHLR0TF3fuOqJLn2ROvXOEZa9/fO7qDi+KF6n69drdpsUAI1lt3h6KZ31I7l3X4uyWDUDNpm3k//nfuAf3o6ZP+9YEiUdaUjt4PB5HZ/9P7THGjGtmXy3QsJshEaiBvcMOnjLGlAe+fxE4A3iiM8FEk3CX0Lanp5J57ol7v89u5cbS3Lyh7R5VEAvls+NNWamPW24o5olnsxg1xv83tG1rHeefXsC4w1wMHuK0OEIVbzrZLrXUJrVKRHKAfwNLgWnGmBarmWlPUATTYXGRp+LrZSSPH7U3AQJw9+tFwkGDqFzsL6GuPUJto71CEW8JMBlARLrjH19dPzHFBiwTkYzA9xOAVWGP0GKhSOQ7UkK7OdojFPs++7iacYe69iZAAL16OzjjnETef6fawsiUCq/AnNXXgbuMMXcaY1r9MKZJkAqraB8WZ+rqEPf+H1LE7YJaLZ/eXjpELnIZY+YAW0RkATAbuA6YLiK3GmO8wM3ARyLyOZAN/MOyYC2kiZCykscDiUn7X0OTkoSaGu1tV7FPRC4RkVuBkcDBwF8bVC09uqVjNQmKcLHYGxTNiVDSqAOp+HIpvsqf7rDVFZdSteR7EkcO2btNe4PaR5OhyGSMudYYc5Qx5jBjzHvGmOeMMQ8G9r1ljDnUGHOMMeZKY0yN1fFaRRMhZZUJx7iZ/2kNO3f8VAyhosLH7FlVTDrO3cKRSkUvY8w8Y8wFga+fM8Y8aIxZaozJbFCxdJIxZn5L52nTnCARmQkcEnj+XcaY9xrs6wK8CGQCXuBCY8zGDr4vpSKae2AfksYexM67HiFlwjhMXR3l8xeRdtJEHF2zGj23PhHSOUJt1zAR0jlDqjmR2CYFe44Q+K8dbbmhUp8ItXSDqT4Ras+NNZ0jFPm65dq59sZUzj1lD2edn0RCovDW65WMP8rNIYdp26NUS1pNghquFSEiPYAvReQDY4wv8JS/A/cYY74UkSOAwUBcJ0FVJbUsfmsr21eVkNEjkXFn9aZLn+TWD2yGFkmILJkXnEzS2OFULlmF2O10veES3P2arxxY08ejiVAHaAEF1ZRIbpOsTITAnww1lQh5a2rZNfcHSpZtZV16IqdPT6H70LQ2nVMToch38RXJHDrexXvvVFFUaLjngXQOPdyFv36JUqo5bRkO19paEcOBswLjwmcAXwU5xqhStruaf5y3gB0/lDDk6K5gDE9c+CUbFxW0fnCcidZhcSJCwpD+ZF1wMpnnnthiAlRPh8d1nA6VU/uI6DbJyqFxsP/wOG9VLctueo2CL9eReUg/nBmJPHnVYpa9v73N59ShcZFv6DAnN/0mjdvuTOOw8W5NgJRqg7YkQc2uFSEiSfgnIn1hjDkGWAfcse8JRGRGfc3viqLY/jA476l1DJvcjXMfHM2oU3ox5dcHcvrdI3jvvlUY0/HGMRbnBsUbTYQ6pz4Z0oQo7gW1Taqrio71VDqaCO14bxnu7BSG33sGuSccRN/p4zn4/rOYfd8aamvavqimJkJKqVjTliSo2bUiAv96gLcC37+B/y5dI8aYJ40x44wx47ypaczNG7rfI1b8uGA3Y87o3Wjb0EndKM2vpnxP3M4bbla09gZ1lCZCwaEJUVwLapvkSOz4UOXmhGoIZ0cSoaJvN9FtyvBGPQMpB+Tgzknj7QUZzR3eJE2ElFKxpC1JULNrRQTKpH4DjA889yhgRUcCaSoxisYEyZ3ioKKw8Qfd2iov3lqDM8HeqXPHam+QJkKqMzQhijthaZM6K1ISIXuSi7qSqkbbjc9QW1KFI8nd7jZWEyGlVKxotTCCMWaOiJwcWCvCyU9rReQGSqVeCTwvIpVAIXBVsINs7iIdiUnB6NN68cmja+j5+KG4kx34fIZPHl3LoKO6kpCqKzc3J5oLJXSEFksIDa0uF/sioU1qq1AUSoD2FUuQsUew5Z
V3yDq0P66sZIwxbJ+9FGd6Ikn9ugD+Nlarximl4k2bSmQbY65tYd8a4NCgRdQOLd3BsipBPt7SPwAAIABJREFUOuz8fuSvK+evUz6lz+gsdv1YRlq3BH72t7FBOX8sVoqLV5oIhda+Hz41KYodkdomNSWUiRC03rOcNOpAPJt38O3lz5B2UE9qdpVijGH4H05vNEROEyGlVLxpUxIUjazqPbLZhdPuGsGEKway84cSMnok0WNYWlArtcRqIhRvvUGgiVA4NfVBVBMjFQ6hSoSgbb1CGadNJvWYQ0kvXoEzLZG0g3oitv3j0URIKRVPYjYJak64kqOsXklk9UoK6jnjQbwmQqCLqlpBEyMVLlYnQvb0VMrTj2h1DqYmQkqpeBF3SVBzomneEcRubxDEZyIE2itUV1aKp2APri7ZOFLbtpBjKLT0QVUTJNUZVidC0PyCqg1pIqQUlBT7+HFNHbk9bPTqrR+XY5H+VlsRbcmRim7xmAgZr5f8d9+g7PtluLK74dmzi9SDRpJz0lmIvXMVFYOtrR9gNVlSzdFESKnIZoxh5kPlPP9MBQMHOdiyycuYcU4eeDiDlJS2FFVW0UJ/mx0U7aW8I128lc1uKN5KaBfMm0ttSRH9b7yDPlf9iv433kFtUREF8+ZaHVqHNSzbrSW81b5CmSS39SZKW3rbtXy2ikdvzaris4+ref/TrrzyVjaffZVDWrqNP9xRanVoKsg0CQqicK5zFA9JV7wnQvGSDJUs+oqcE0/HnuBf/9KekEDXaadTsnihxZEpFTqaCCkVmV57qZIbb0mla45/JII7QfjNHWl8/GE15WU+i6NTwaTD4cIg2EPq4iEBqhev84PqxcPwOG9lBY6MzEbbnOmZeCsrLIpof97qaspWLsVbVkpin/4kDhgU1IqPSgWbDo1TqmOKCn306Nl4KHZqmpCQIJSXG1JSLQqsgZpqw0cfVLN5Ux1Dhzk5ZrIbh0PbpPbSniALdaTnKJ4SoHrx3CMEsT88LqnfQMpWLG20rWzlUpL6DrQoosaqd2xj0yP3U7l+rX/+0gez2f7CU/jq6qwOTUW5UM8da0+PUGs3m7RHSMWLQw938d471Y22ff2Vh5RUIaeb9R+bt2+r4+TjdvP2m1V4vfDvf5Zz7ql7KCnWXqr20p6gCBSPiY5qWSz3CGUffxLbn3+K2uIiEvv0p2rLRkq+/ZKe06+yOjSMMeS99TJdp55K2sH+BY+7TJrC9hf/Rck3X5B5xESLI1TRLpSFEqDtPULQeq+Q9gipePDza1O48KwCykp9TJycwNrVtTzzZAX3PJCOrYn1tcLt3rtKOeu8RK6+zt8l9aubUrjzNyU8+nAZv/t9usXRRRfrU1ql2iDee4MgducJJfTsQ+8rr8NXVUnRgk/xVVXS+8rrSOjZ2+rQqC3cg6+qitQRP93VFrudzCOOoez75RZGpmJJJFUT1B4hFe969Xbw2jvZJCYJzzxRzvof6/jnM1kce3yC1aHhqTEsmF/DxVck790mIlxxdcr/s3ff4W1Wd//H30eyJVneduw4ibN3yCKEJBBKgABhJeyywuhgtAVKKS3dtH14SumAXx9GKaM8jBYeCmVDIGxCICUkJGTv6Tje25Is6fz+cOzY8ZBsjfu+pe/runJdseaJpejcH33P+d689Yanl3uK7kglSFhGsu8PapOIVSHHgEIKz77A6GH0QIPW0HEPkNYY/32gSCRmaJ3dRipCItkNLLLzw58Yd766ULTu+rNsU+07QypB/kYv2+5/j+UXPsiyRfex6fdv4q2oN2IowmKkItQqUatCZpOaNwC7O536tYcP4HQgQPXyD8g4appxAxNRFQz4KVm1hLXP/JbVT/yUbUv/TnN1adzHYYaOcW2iXRESQkTO4VSceJKT/33kcOMgrTWPPdTAgrOMr1RZTfwrQRq++vmLpA3JYcYDV2BzprD/xdV8eetzzPzbVdjTUuM+JGEtUhE6LBGrQmailGLg+Zex/+
lHqN+4FkdeAY2b15OaX0DOsccbPTwRJbuXPYff08TYM64j1Z1F5daVbHnzISaeewuO9Byjhxc1fa0IhdKXipBUg4SIjp/9OptvXF7JyhU+Jk9LZcXy1v/Tjz6VZ/DIrCfulaBAsw9/g4fxP1yAqygbR246I795AunD8yj7oH8to4VIZlIVii3XoCGMvPmnZEyYjM2VRuHCixh8+TdRKbKaOBEEA37q9m1i9PyrSMstIsXpZuDkE8kbOZ3yjcvjPh6zdIyD6J9HSPYHCRG5wUPsvPp2AV+/3E16uuI7N2fwzIv5ZGXLNv++ivtvLOjzkz25GHVEh43sqUNp3FUZ7+EIi5JlcV1JEIodm9NJ9tGzyJ93Ku6RY+QcQQlEB/y484uxpXQOBxmDRhmyJA4kCAkheudwKs5c2Noh7uRTXdjtMif1R9xDkC01hbqNJegjdnXVrS/BPTS3h3sJ0ZUEoa6kKiRE3yh7Ck2V+wkGOp/3qbFsN67sQoNGJUFICCFiLe4hyO52YEuxs+3+9/DVNOFv8rHnnyuo31JK4SkT4z0cYXEShLonQUiI8NjsKWQOGs3OD/6Br6GaYMBPxebPqNy6koKJxu77kiAkhBCxE/8FhAqm/O4Cgp4WVlzxCMsveID6LaVM+9PXSXHLBm/RdxKEuidVISHCM+LEy3Ck57DhxT+x+omfUrXjS8YuuBZnZuJvNJYgJIRIVobs7E3NTmP8j85g3G0LQNNlf5AQ0Nr2seTlL9n/0mq8FfVkjh/EiGuOJ2dKcZfbSse4nkkHOSF6Z0tJZeiccymevaj1HFA282wwjuX5g9r0pWtcqHMIQWJ3jXvjlWYe+WsDO7f7GTMulRtuyuDUBdKaWAgrMvSTXiklAUj0aM8/V1D61jom3H4mxz33HQadNYUNv3mFuk3db1aWilDPpCokRGitc5J5AlCbWC+L66toV4Ss4tWXmrnn7npu+2kWn6weyE23ZnDnr2p55y2P0UMTQvSD+T7thQAC3hb2Pf8FR92xiKyJg0hxOxg4fyLDrzyOvc993uP9JAj1TsKQENZkpv1BEN0gZJVlcQ/9TwN33ZPN3BOdpKfbmHeKi9/+Ppu//k+D0UMTQvSDhCBhSr7KRlLSHbiKsjtdnjN1KE27Knq9rwSh0CQICSGOJEGoZ8GgZvs2PzNndf4dzZztYNuWFoNGJYSIhIQgYUqOvHT8jT685fWdLq/bUELa0NCblSUIhSZVISGsJR7L4iQIdc9mUwwfYefLVZ0Dz5pVLYwcLSdOFsKKJAQJU7K7Uhm8cBob7nyNxt2V6ECQik+3s/N/P2HoxTPxlNWx558r2P7QB1R8uh0dCHZ5DAlC4ZEwJHqilLpPKfWZUmqlUursHm5zgVLq8XiPLVlZNQiFy8xB6NrvZvCz22r4cpUPrTUrV/j41U9qufa7GVRXBXni0Ubu+k0dr77UjM9rrn1cQiQSpdRipdQzPVyXq5T6XCk1PtTjSAgSpjXimrnkzxnF2h//i4/OuJfdT33KhB+dgb/Jxxc3PIW3op7UHDe7n1zOul++RLAl0PUxJAiFTYKQ6EgptQAYpbWeAywCHlBK2Y64TQ5wixHjE+YSKgglQqOEiy51863rM/jx92uYNKKUX/6khptuzWDkqBTOObWcDetbKCyy8fyzTVx6fgV1tV2/nBNC9J9SKk8p9QXwxx6uvw1YAUwO5/HiXsP1+VI6fVjKQaroibIphl02m2GXzUYHgii7jaA/wIorHuGo35zb3ip76MUzWXv785S+tZ7B50zt8jjSPjt8bUFIWmoLYAawFEBrXaKUKgOGAbs63OZ3wF3A1+M+uiRmtrbZ4Qq3dbaZ22ZfdKmbiy51Ewho7PbW1+Driyr40c8yOe8iNwDfvC6dn/6wlkf+2sAPf5Jl5HCFSCha6yrgGKXUPOCGbq7/E/AnpdT74Tye4QtZuzs4lWAkjqTsrV9A1285iCPH3elcQcpuY/Ci6ZQuWddtCAIJQn0lYc
galE9F+hoNUEqt7PDzw1rrhw/93QFUdbiuEXC2P7dSJwNlwIZIBiD6x4xBKJrnEDJzEALaA1BFeYDdO/0sPD+t/TqlFFd+I50ff7+GH/7EqBEKYYwI56Xe5qSoMzwEdUeCkeiJLcVOwOdHa41Shw8Agl4/KtXe630lCPWdhKGEV6G1ntnDdS1Ax7NApgFeAKVUGnAbcAFQFNMRxpjdI3s3emNkELICe4oiEISAH+wdpiCvR5MqH5tC9FVvc1LUWWZP0K59BV3+iOSTMaYQNJR/sLn9Mn+jl33Pr2TgKaHXnEuY7h9pnpCUVgHzAZRSg4BCYO+h66YCQ4AlwLPAGUqpHxgxyGQWr5OoGtUxzsxNEtrk5tqYdnQqjz/S2H5ZS4vmofsbOHtRWi/3FEIYzZSVoHAd+UErB7iJT9kUE39+Nut+8RKlS9bhLMyk8rMdFJw4jgEnjgvrMaQi1H9SGUoeWuslSqlzlFLLgFTgJmCxUqpIa303MB1AKTUc+LXW+l4DhxuR7O1eakc7Q9/QhOKxLK4/olURMvuyOIDf/j6bb19Zxfvvehg3PpVPPvYycVIKV3873eihCZHwlFJXA23zUp9YOgQdSZbRJYfMsQOZ9eQ3qfpsBy11Hoovmkn68Pw+PYYEochIGEoOWusbw7jNbuAbcRhOTFk5CMVDfxolhBOEwmH2IDSkOIVXlxaw7EMvB0oCXHxZGlOmyWejELGitf4Q+PDQ35/o5vqTw3mchApB3ZFqUWKyO1MpmBeyBXyv2t4LEob6T8KQSCRWDULxqgYZ2THO7FJSFCfNd4W+oRDCNCyzJyhaZF+ROJIE48jJniEhRHeSaX+QEMJaki4EHUlCkQAJQtEiYagrHQjQuH0L9RvWGj0UEYbs7V6jh9AvZm2SABKEhDATrTWrPvex5PVmSvZ3Pcl8Mkn45XB9Fc/lczoQBKVQNvNtak1Gsk8oemSZXCtvaQn7//l3UtIzsKdnGD0cEaZkXRang0EAlK3370djtT8oURolCGFWB0oCfPdbVbT4YMQoO3f8tNboIRlKQlAIHQ+KoxWImvZVs+NvH1D1n12oVBuFJ09k1HUnkpop64mNJvuEoqvjgVKyBSIdDFLyzOMMmH8mWdOOAWDLr241eFQiXFYNQv3hb6infMnLNGxYC1qTPmEyBWcsIjU7t8f7SKMEIaznp7fWcPqZLm64KQOlFHW1QWZNOWj0sAyT9Mvh+iIaS+da6j2sue05sqcNZe7LNzL7qWtRdsW6X7yI1nLSPrOQ5XHR17ZULlmWyzXv2YnN6WoPQELEQ1+XxelAgH1PPERKZhajfvRrRt3+XzgLi9j3+IMEW+L/fzWcZXFCiL47UBJgy2Y/3/5ORvvJ5rOykzsGJPe/PkL9CUQH315PztRihl40E7srFUeum7HfPxV/vYe6dftjOFrRVxKEYicZwlDQ68Xudhs9DBEBq+4P6ovGrRuxOZwMOH0h9jQ3dpeL/JMXkJpfQMP6Nb3eV/YHCWEdjQ1B0tMVqamyBaONLIeLknD3EjXvqyZr0uBOlymlyJw4iKZ91WRPKY7ZGEXfyfK42Erk5XJpw0dS+sI/8FWW48iX949VWXFZXF/2BvkqykgbOrz9m+E2aUNH4KuIzRdBsizOOvoaNuX1MK+Ro1MIaljxqZfZx7V+piX7CiQJQTHS014i94h8atbsY8h5R7dfpgNBar/az+Czp8Z1jCJ80jQh9hItENldaQw4/Rz2/v0BcmbNlcYIwpSchYOoWLcGrXWnINS8aztZM2aHvn8Mzh0EiXP+IKuJtMLW8f4SiMzFblfccWcWt3ynhosuTWPk6BSWvukxeliGkuVwcdBx2dzAUyfRsKWUXU98gq+6ieaSGjbd/SauoiwyJw4yeqiiF7I8Ln467h+y8rK5nJnHMeSKbxFoqMezZ6fRwxH9ZMVlceHuDXKPGQ8Kyl79Fy01Vfjrail782X89XVkTIrdF3OyLM5cnq+bEfXfZSweU0
TmxJNd/PPf+QQD8NkyHyeebK0qd7RJJSjO9lUXk/+j71Lx/BL2Xf0YNmcKA+dPZNwPTuuyHEH0na+qkYrl2wFN/nGjceZH99t3WR5nDCtXiVyDh+IaPBSAui9XGjwa0V9WXBYXDmWzUXzV9VS8+ya7H7oXgkEyJk2l+BvfxZYS3iFCf6tBydA2u642yDtve2hu0nxtnpNhI8x12BWPkNL2HGZ9jZLNyFEp/OjnWe0//+bndQaOxljm+t+YJFLycxhw/aXtP0uFITpK31rH9r9+QN6cUSgUOx9bxqjrTmTQmVOi/lyyPM44Rx5sWS0UCREv4e4Nsqe5GXjOhQw858I4jCp5LPvIyw9vrGbWcU6ysxX339vA5Ve5uenWTKOHZkiFRsKQMBsJQSYQi3MRJRtPWR3bH/qQo++/HHdxHgDN+6tZdeM/yT16GK6i7Kg/p1SFzEFCkYiXRK0GRYNUgzprbtb86OYaHnw0j2NmtX4m3Xp7gIsXVXLcXCczZxv3OWX0EjWzvVYiecmeIJOJ9DxEyari460UfG1sewACSBuSS8G8cZR/vDWmzy3B1VyO3E9k9X1FQgjrnT9o+cdeJk5KaQ9AAHn5di5b7OaNV5sNG5fRAaiNWcYhkpuEIBOTMBQ+7Q+iUu1dLrel2tH+QMyfX4KQ+UkwEtFgtSYJfT15aiT6W4WN1jxnpgPrlhZNqqPrUkSHQ+H3x388ZmxSYLbxiOQjIcgCpDoUWv7c0ZR/uBlvZUP7Zb7qRsre38yAuWPiMoYRxeUShiymp6qRhCTRG6sFISuIVrc4s5j7NSdffuFj6+aW9suamoI8988mTjsjvksqzRw2zDw2kfhkT5DFtE0UcrDdmbs4j+ILj2HVd55m4GmTADj4zgaGnH807mH5cR2LNE1IHOEGIdmHJMysLydPjVSszhsULrPsN8nMsvGL/8rmyq9Xcs65aWRl23j95WZmzXFwwrz4hSArhAxpmCCMIiHIoiQMdTXsstnkzR5Fxcdb0Bqm/O4CMkYXGjIWaZqQXKRqlHykSUL0JVqThIXnpTH96FRef6WZpibN7/6Uw4xjU+N2OgwrBKCOzPK6ieQhIcjipLNcZxmjCsgYZZ7gIWFICJFsYl0NCicImcXQ4SnccFP8W2JbLQC1kSAk4kn2BCUQ2TdkXhJQhUg8VtobFM8GCZFIxCYJom/ktRPxIiEoAUkYMidpnCCEENFhpSYJ8ZYIISIR/g3C/CQEJTAJQ+aUTEFIB4M0f7WFqmdeo+bld2gpqzR6SEJElZWqQfEUSbMQqQaJWFr9hY8//a6Oe/9Qx6YNLaHvIBJW3EOQ8imcexzSTSmOJAyZTzJUhXQwSMVfn6H6/17HnplOsMlD6W8foPE/a40emhBJySpL4sIl1aCuEin4xeLf8of/ruOHN9XgSmttTnHtlVX8/eGGEPcSiSqsxghKqfuAYw/d/g6t9evd3OYCYKHW+hvhPvmRQUg6LMWWdJQzHys0TvDu3Efj8tVor4+0qeNJmzEJZQv9/UnT51/hr6hm0B03oVJbP2oy5s7g4N2PkjZtAjanfBEi+idWc5Iwj3A6xYVDNtpbW3ev347tfl58ronKyiAzZzs4e2EaTlfojntfrfHx5qvNvPxWAVnZrXPY5Vels+j0ck4/00XxUOkVlmxCHskopRYAo7TWc4BFwANKKdsRt8kBbol0MG0VIqkUxZZUhszHrMG0/p1PKf/Lk9gy3TiGDab29Q8of+Af6GAw5H2bVm8k46RZ7QEIwDFsMKlDi/Bu3hnLYYsEFs85KVyyJK578ZjHpRp0WCJVgTrq+O96920Piy+qBAXTjnbw2ovNXHVpJU1Noeek95Z6WXRBWnsAAhhYZGf+6S4+el/+DyejcJbDzQCWAmitS4AyYNgRt/kdcFdPD6CUuk4ptVIptTLQ2Bj24CQUxZaEIXMx2xK5QH0jNf9+i6Kf30DOovlknnocRT+/gUBlDc2rNoS8v0q1o31d11trXw
uk2GMxZJEcojon+VrCn5OENSVqOEgmz9fNoKVF8+uf1fLgY7n88CdZXHKFm8f+kUdBgY1nn24K+RipqeDxdL3c49Gkpsbn3E3CXMIJQQ6gucPPjUD7GeKUUifTOgn1eFSktX5Yaz1Taz3Tnp7e37FKIIoRCUPmYpYw5Nm4Hef4kaQU5LVfplJSyDhhJs1rQ5+jI332dOrf+ZRAw+GDzOa1mwlU1eIaNzImYxZJIapzkiO1/3OSFVlpX1C485JUg5LDg5+PJjfPxvQZh48BlVJcfJmbj94LXck5c2Ear/y7mb27/e2XbVzfwicfejl1gSsmYxbmFs4CyBag47sjDfACKKXSgNuAC4CiqI+uF7KfKPqitQZbRMeI4nJDw6lyOAg2Nne5PNDUhHI6u7lHZ66jxuCeNYWSn95D2tTxBOob8e3aT8FNi1FSCRL9Z8o5KXu7l9rRof9fJJtYnzg1XLI3yPpS0+wcrEkhGNTYbIcrN3W1mjR36ErOyFEpfP+2DC48u4KvneykxQcrlnv57d3Z5OZJs+RkFM6rvgqYD6CUGgQUAnsPXTcVGAIsAZ4FzlBK/SAG4wxJqkTRIVUhczGyKpR21Bj8ZZU0fbmx/TJ/RTUN760g/fijQ95fKUXuhQso+uV3cY4dTsbcGQz5449xjR0Rw1GLJGCJOUlEh1SDRJuCURmk5zn58cP57ZfV1AR5+MEGzr0wLazHuHRxOq+/V8Bxc52ccpqTtz4uZMFZ4d1XJJ6QlSCt9RKl1DlKqWVAKnATsFgpVaS1vhuYDqCUGg78Wmt9b0xHHIaOQcgM30BZkXSSMxcjusip1BQKbryS8vufoq4wH1t6Gp5NO8m58HScI4vDfpzUwnxSC/ND31CIMFhxThLmkMjVoGTY96SU4ut/PJqnv/c5F77qY0ixnRWfern4UjcLzgp/OVtBoZ2LLnXHcKTCKsLqB6i1vjGM2+wGTNeKVAJRZCQMmUu8l8g5xwxjyJ9ux7N+G0Gvj/xvXoQ9M7n2UCSz3lpRK6UWAj879OOnWutb4zUus85JVlkSl7lbUz9cNoInkouyViVFEBowPJ2bX57Hzs8rOap5Mz/5VRaDh8jy6mSjlFoMnK21vuyIy2cC9wMB4ABwhda6xw1jSbUIUpbM9Z8skTOPeC+RUykppE2bQPqsqRKAkkgYrajvBk7XWh8HHK2Umm7EOEXikyVxoiObXTF6zgA8J8+VAJRklFJ5SqkvgD/2cJP7gJu01nOBEuDbvT1eUoWgjiQQ9Z3sFzIXs3SREwmrx1bUSqkU4Pta63qllBvIprVLmxBdmGmeTYZqSTKR1zO5aK2rtNbHAJf2cJNRWuvPD/19Ca0rGXqUtCGoIwlEfSNhyFysEoaCzV4alq+m/v0V+MurjB6OaDWg7Xw5h/5c1+G6HltRa639WuulSqlTgY3AfmBXvAYtRE+kGpR8+huEvB7NW28088xTjWzZ3PWcdsIQvc1J4eh41txOp0/oTlh7gpKJ7CEKn7TUNhcjmieEq3nDNioe/CfOMcOxpadR88JbZJ42l5xz5xs9NEuz+yI+70uF1npmD9f11oraAaRrrd9RSo0AHgGuBR6MZDBCxEMiN0gQ4dmyuYXrrqpixMgUhg6z8+BfGjhpvpPf3JXdqf226LsI56Xe5qRwdCzutM9ZPZEQ1AsJRKFJ4wTzMVsYCvpaqHjoWQq+dwWuiaMBCNQ1cOA39+OaOBrXuBFxG4tvbynNazahHCm4j51KSm5W3J7bglYB3wX+0k0r6iHAc0qpOVrrgFKqxqhBmo1VmiNYjXzpJnrTl2CrteZHN9dw822ZXHBxa5e4pqYgV19SxasvNnPuhfHrHLdvj5+3l3jQQZi/wMWIkXJYHqGdSqkZWutVwOnAyt5uLMvhwiTL5XpnlgNucZhZDhg8G7aROriwPQAB2LMyyJx/HI2ffRm3cVQ/v4SyPz9GoLYe395SDvziXhr/szZuz281WuslwJ5Drahf4nAr6t
u11juBF4DPDl0/EPi7caMV4rBwlsTJXpLEE+5rum2Ln4YGzfkXHT4/kNtt41s3pPPay55YDa+LZ59u5KKFFezZFWD//gCXnV/J4480xO35E4VS6mql1O2HfrwRuE8p9QGte1gf6e2+Ejn7SKpDPZOqkPmYoirkD6AcqV0uVqkp4A/EZQierbto/GwNg+68BXtGa4c73+lzOXjX30g7aiy2dDlZXnd6a0Wttf498Ps4DkcI0YtkaZPdm3AqQi0t4HS2nneoI5dL0eKLaHlx2Er2B7j3D/W88NoAioe1Hopf/70Mzj+zgnmnuBg1Wg7Pe6O1/hD48NDfn+hw+UpgbriPI5WgCEh1qHvSOMF8jGye4Jo0Bt+23bSUlLVfFvS10PDR56QdPTEuY2j6/Csy581qD0AAjqGDcI4fSfNXm+MyBiGEELEXKgiOn5iCxwPLPz68XSQQ0PzzySZOOT38k65G4p23PJx2hqs9AAEMLLKz8Lw03n4zftWoZCdRMwragpBUhjqTNdzmY0RlyOZ2kXvFIkrv+hvpxx+NPd1Nw6ercY4aStq0OHVyis+Xe0KIHjj3OOI6Ry4tncBpRZt6vY00SEhOdrviv/+Qza031nD6mS6GDrPz1hseMjIUl1wev/1AwnhSCYoiqQx1JVUhc2qrDMUrpGbMnUHRz7+DLc1FsMlD3pXnkv/ti1G2+HwEuY+dQv2H/yHQ0NR+mW9vKd7NO0mbMj4uYxBCREbmEhGuUNWg405w8tKSAQwptlNeHuT6GzN45Kk8nK74dIY7dYGLpUs87Nvjb7/sYGmAV19q5vQz4lONElIJignZN9SVVIXMK17VodSiAeScd2pMn6MnrnEjSJ89jQM/vxf37Kloj5emL9aTd80Fsh9ICJEwZF/QYaEqfQOL7Fz3vYw4juiwwUPs/OBHmVx4TgVnnpOGzQ5vvurh2u+mM2qMHJrHi/ymY0yWyh0mjRPMzRRNFGIo9+IzSJ8zvbVF9oBcBp1/urTIFiIdS0tUAAAgAElEQVTJyZK4xGbm1/fSK9M5YZ6Tt95sbZH9zIv50iI7zuS3HSdGh6Gg10fDR5/T/NUWbC4nGXNnxG8/xhGkKmRuiRyGHEOLcAwtMnoYQiQ9HQjQuHw1TV+sB6VInzUV9+ypUV0iG86+IJH4zByEioel8K3rjalGCdkTFHdG7BvSvhbK/vgYnnVbyZx3LK6jxlD1z9eoefnduI6jo0Q8wE408d43JIRIDlprKh56loYPPyd9znTcx06h7u1lVP79BaOHlhDMesBvJFkiKLojIcgg8QxCjSvWoBypFNxyNe5jJpM5bxZFP7ue+rc+JlBTH7dxHEmaJliHhCEhRLR4N+3At7eUgbdfS/qcaWQcfzQDf3Idno3b8e7c1+t9YzFnyAGyEMlJQpCB4lUV8mzcTvqcaZ1ODGbPzsQ5fiSerbti/vyhSBCyDqkOCdG72tFOo4dgep5NO3AfO7n1hMmH2JwO3DOOwrNxu4EjE4lMwq44koQgE4h1GLJluPFX1Xa5PFBViz3DHD3xpSpkPRKGhBD9YctwE+h2TqrBnpnezT36b2mpMXtfjSZL4ronQUh0JCHIRGIVhjK+NpP69z7Dt6cEaF2PXf/eZwS9PpzjR0b9+SIhQch6pDokhOiL9NnTaF6zieb1W9sva1q9Ac+WXbhnTjZkTHJwnDzktRZtpDucCUX7zNqOoYPIu/wcDv7xMVIG5BJsbEY5Uym85eq4nayyL6R7nHV1fN0k0AohumPPyqDge1dQ8chzrefpCmqCXh8FN1+FLU1OFBktcs6gnpm5Y5yIn7iHILsPMndrAOqHx+fMvFYU7Zba6XOmkzbjKHw792FzOUkdNqjTHiGzkSBkfRKIhBA9cU0czZA//hjfzn2gFI6Rxab8Uk4kLglCwtBKUFsYgvgFIq01TTu20rB+DShF5qSppI0aa9pAEM2qkM2Ristky996I0EocUggEsmgv00RGsv3ULVtFQG/l+
yhE8kZNjkpAoGy23GOGR7z5wn3fEGJeFAs1SAhemaaT9nM3br9TyyVL3mZstdewJFfgCNvAAdfe4HyJa/E9DkjZcS5hcxCDpgTT8c9RBJyRbI7uO4jtr/zOClpGbjzh1C65l12vP8UOhiM2XNafRWGzAsiWiQgJjfThKCOYhWIPAf20bB+DcOuv4XcuSeRO/ckhl13C/XrVuMtLYnqc8VCMgchmfQSlwQikaxamus5sPotJiy8mUHTT6Vw0gmMP+cmfPVV1O7dYPTwkpIcFCcfec2TlylDUEfRDERNWzeROXk6dlda+2X2tDQyJ0+nccvGiB8/HqQqZH6BZh+VK3ZQvXoPOhC7b3MTkVSJRDKpL9lG5qAxODJy2y+z2VPIHzuT2r3WmJOE+S1yfsG25eVsWVZGiyfQ78dJ5HbjEoSSk6W6w0W6h0g5HAQqK7pcHmxuRuXlRzS2eIt2BzkRHQff3ci2+98jY0whgSYfvqoGJv1qEVkTBxk9NEvqLghZJQyL5NKf/UC2VAd+b3OXy/3eJmwpyfllV6yEuy8o0Xz2iZfbbq7BPcSLLUXx/E/XcP5vpzLx5IF9epy2ANQxCCXa7zMR94SJ3pm+EtST/lSHMo+aTsOmdXhK9rZf5tm/l4bN68g8anq0hxhzyVgVMvMBcNOeSrb/9X2m33MJ0/54MTMeuIIxN81n/R0vE/T5jR5ewjiyWiQVI2FVWYPH4akto3bP4aVv3vpKKjZ/Rv6YYwwcWfRZ6Uu7RKkK1NYEueW71fzpvhzefsXFt//3OK568Fj+/Ys11JV5In78paUT2v8kikR57UV4LFUJ6k5fqkMpmVkUnXcJ+574G67BQwGNp2QfReddRkpGZoxHGjvJVhUya9e4g+9spGjBZNJHDmi/bMDxY9j/4moqV+yk4GtjDRxdYpOKkTBSf7vC2VJSGT3/Gna89wTOtQOwO100lO5k8DFn4h5QHN1BHmL1pggifEuXeJhzvJM5xx9+fxZPyeGo04pY+0YJJ1wzKnrPlUCVNqkIJQ/Lh6COwjn/UMbEKYwaNY7G7ZtRSuEeNQ6bs38TmJkkWxAyI3+jF1dRdpfLHTluAk1eA0aU3HoLyhKQhFlkDBzB5K//nPqSrQT9PkZ87VJSXOlGD8v0+vNlWF8O1BPhQLi+PkjegMMLftraZafnOfE0tIT9OOFWetpulwhhKBFefxGaZZfD9SZUMwWb00nmpKlkTJySEAGoTTItjzPjQWzuzBGUvbuRoP/wxlNfdRNVK3eRc/QwA0cmjtTdkjpZWieMYrOnkD10Irkjp0kAElEz90QnS9/0UF93uEGPrznAV2+WMG5u7ObQRFoeJxJbQlWCuhNOdSjRJEtVyGzL4vJnjaR0yTq+/MH/MeisKQSaW9j/4iqGXDADV2GW0cMTYQr3PWXGIC7ir79L4YQxEmnZVijjxqdy1sI0Lj2vkiuucWO3K/719DsMP6aIodNzQz9ABBKhKiTVoMSX8CGoTbKFoWQJQmai7DaO+tVCyj/cTOWnO7A5Uxh36+nkShUoIfUngO+OwTiECFeyzH/RkggHwT/5VSYfve/gjVc96CB89/uZVM+ZilLxeS9YPXQmwntA9CxpQlCbZApDyRCEzFYNUnYbhadMpPCUiUYPRQgRY1IFEmanlGLeKS7mneLqcOnquHZBkyAkzCoh9wSFI1onYDW7ZNkjJIQQIrn0d0lqX/asJGrL5Hgf1Ft9n1Civg+SXdKGoDZWCUO+ijIatmygpaa6z/dNpoYJQggRD1arAkV79UNLTTUNWzbgqyiL6uOaUbIfAEerimP1cwol+/sgESXdcriemHWZXNDr5cAL/8CzbzfOgYPxlOwlc9JUCs+5EGW39+mxEnV5nNmWxAkhRKLSgQBlr71A/Ya1uAYPxXuwBFfxMAZduNhS3VatvkQrGtpaZseblX/3sjQusSR9JehIZqsMlb/1Cjank1G3/pLiq69n1K2/xFdVSfXyD/r1eFIREk
KIyCRzFah6+Yf4qio6zUk2Zxrlb70ctefoi3h1aZQqQHRJRUiYgYSgHpghDOlAgLq1qyhYsAiV0lq0szmdFJx+NrWr/tPvx03EICTtioUQ8WC1ABRttatWUHD6Oe1VH5WSQsGChdStXY0OBELc21z6eiCeiAe/4VY1YlG5kSAkjCYhKAQjg5AO+CEYwJ7m7nS5PSOLoKc5osdOxCAkhBAitoKeZuwZnc97Zk9zQzCA9vsNGZN8CRYZI5d3SRASRpIQFAajqkI2hxPnoGLq16/pdHndlytxjx4X8eNLEBJC9EYpdZ9S6jOl1Eql1NlHXLfw0HWfKaX+qpRK+PnEilWgaO9zdY8eR92Xn3d+jg1rcQ4qttSeoDZSDTKeBCERrhBz0nyl1CeHrn9YKRWy70HCT1rRZEQYKliwiPI3XqR86WvUb1jLwVefp+Y/y8g/eUFUHl+CkBCiO0qpBcAorfUcYBHwQFvQUa1nWrwXOPvQ9YXAGYYNNg6sGIBiIf+UM6j5zyccfPV56jespWLp65S//m8KFiw0dFyRVIMkCIVXDYplMwMJQiKUEHOSDXgYOPfQ9VnAFaEeU0JQP8QyCAWaGil78yV23HMnO//ff9O4ZQPF13wHAkHqvlyJPT2D4TfciiM/euV/CULJoWlvFQff3UjtV/vQ2jzNP4RpzQCWAmitS4AyYNih65zAr7XWlYd+rgP61q5SxFw0qkDBlhYq3lvCzr/cxY4//xfVyz9kyFXXY0/PoG7NSnQgwNBrbyZt2MheH8fs84wEofjzltdT9v4mqr7YhQ4EJQiJUHqbkwYAdVrrikM/fwQcH+oBpUV2P8Wipbb2+9n3xEM4BxczZPG3IRig8qN3KXvzZYqvuYHWL19jo22CsnILbWmV3T0dCLL5nrep+s9OcqYW07irErszhcl3no8jL93o4YkI2D2a7O3eSB5igFJqZYefH9ZaP3zo7w6gqsN1jbSGH7TWHuBpAKXUDcBQ4M1IBmJmyVoF0lpz4P+eAJuNQRdfic3hoGbFMkqfe5Jh1/8Am8NcwUbmgMiE0zL7tKJNEYcVrTW7Hv+EklfXkDN9KN6yerbWv8uUO89jKdI+2+oinJf6NScB5UCWUmoSsBVYCBwM9WRSCYpQNKtCDZvWYXO6GLjo6zgLi3AWDWHQRYsJNNbTvGt71J6nN2b/tk703f6XVuMprWP2099m0i8XMvPRq8mZMZwt/2+p0UMTxqvQWs/s8OfhDte1AK4OP6cB7TObUqpQKfUqMBg4S2ttzK54ETPekr34yksZfMnVuAYX4xhQSOHZF5Cak0f9utVGD69bsiwuMvE4iK/8dDsVy7Yy63+/wVF3LGLGA1cw9OvHsuHO19FaS0UoufVrTtKty1uuBO4DlgCrkBAUH9HaK+Qp2Yd79LhOFR9ls+EeNRbvgf0RP364JAgZ77SiTVH7NuzgOxsYcdVx2J2pACilGL54DjVf7qWl3hOV5xAJaRUwH0ApNYjWfT97D/1sA/4F3KG1/pXW2rol5BCsWgWKxioFz4H9pI0c2+XE3O4x4+M6J/WVBKHYinRuOrh0A0MvOZbU7MOdbwedPYVAs4/GHa2VPAlCohs9zkmHzKf1C7n5wEAOLZ3rjYSgKIo0CKXm5Xc7sXgP7Cc1Ny+ix+4rCULG6TjBtIWhSCadgKeFlExXp8tsjhRUio2gT768F93TWi8B9iillgEvATcBi5VStwPTgKnAn5VS7x/6c6KBw40JqwagaEnNycN7YH+XPYTeA/tIzc03aFThkSDUf7FukhDwtJCS0fn/llKKlEwXgeaW9sskCImOQsxJALuBFUqpj4Bq4N1QjykhKMoiCUJZU47Gs28P1Z99RNDvJ+jzUvn+W/gbG0gfNymKowyPBCFz6W8Yyjt2JAdeX9vpsoplW3EVZMqeINErrfWNWusTtNaztdava62f0FrfrbVerbXO1Vqf3OHPR0aPN5qsHICitVfVPWos6CAV77xO0OtB+/
3UrPyUxm2byZo+s8+PF+85RYKQOeXPGsmBN9ahg4ePl+q3HMRbVk/muIGdbitBSHTU05x06LontdbTtdYnaq1/pMPoACUhKAb6G4RsThfF19xA46b1bL/rF2y/+w48Jfsovvr6LssR4kWCUHyFE3L6GoaGXT6b6lW7Wf/rlznwxlq2PfAeW//nHcbcND+mzTaEENambDaGXHkdLZUVbP/Dr9l21y+oX7uK4quvx+62xhcoEoT6J5bVoKKzpuBv9LLmtucoeW0NOx77mK9+9gJjb56PzdG1X5eVg5Awt7h3h7N7kqM1b3+7xzkGFFJ8zXcIej2gFDaH8d9GOvc4LN01LlF1nIB6myQcOW5mPLCYg0s3ULu+BFdhFjMeXIyrMKvH+wiRzKQKdFhKZhaDL72GoM+LDmrsLlfoO/XCiPkkkq5xS0v71q0skTqEhdMtrj/szlSm/eEiyj/cQs2avaRkuZj250tIH97zEsu+vg5mkUjvh0RkSIvsttZ5Vp5owpW5W/drUrI5I5took2CUOxF8gHfdt+ewlCK28GQc6f3+/GFSBbJMC/1hxm+kIuEBKHY6G/LbJsjhYGnTWLgaeEv9bdyEIL4dN4TfWPocrjs7d5Iz3FhCbE8uaoQHUWzq5wQycbqASjaVaBYMWqZtSyN6zuzHbhbeWlcorwnEokp9gQlQxiSICTiScKQEH1j9QBkNUYGof6GIQlC3Yv3XCNBSESLKUJQm0QPQ1YPQtIkwXokDPVOa031qt1sve9dtj34PrUbSowekhD9YpUqUEdGzikShKwtkYPQV2t83PXbOn77i1o+/sDTpUW9iB5ThaA2bWEoEQNRtE6sKkRfRON8Q4lGa822+99j6/+8i6swi9QcNxvvfI1dT31q9NBEnEkVyDgShKzBbNUgSMwg9PeHG7jx2mqyshTFw+z84c56fnZbrQShGAkrBCml7lNKfaaUWqmUOvuI6xYeuu4zpdRfD51JPGoSOQwJYQQJQ63qNx6gasUOZjy4mKGXHMvwy2cz44HFlLy0muaSGqOHJ3oRzTkp4LJeBeVIVqwCdWR0EOpPGJIg1JUEob458j1ReiDA3+5r4LlXBvC9WzL55nUZPPfqANas9vHZJ9KYKhZCBhal1AJglNZ6DrAIeKBtUlGtJxm5Fzj70PWFwBmxGKgEIXOQJXGJI9nDUOVnOyicP4kU9+H3tCPXzYC5Y6hcscPAkYnemGVOEtHl3OMwPAz11dLSCX06CE+EIBSKBKG+6fie+OQjL1872cnAosPnhUxLU5x7gZsP3k28Y2AzCKdqMwNYCqC1LgHKgGGHrnMCv9ZaVx76uQ6I2Vk9E7EqZMUg1Ma3r5SKx/7Fgd/cT/lDz+Lduc/oIYl+SNalcjZXKoHGrp8n/iYfdleqASMSYTLNnGQGVq8CHSmSMOSvqKbq6Vc48Jv7KfufJ2lev7VP949HVcjqQchs3eLaJEIQcqUp6uu6HhM2NARxpcV7VMkhnBDkAJo7/NxI60SD1tqjtX4aQCl1AzAUePPIB1BKXXdo2cJKX0tjxINOtDBkxSDk3bGXg3c/QmpRAXmLF+EcPZSyex6necM2o4cmIpBMYajwpPEcfG8jjbsr2y+r23SA6i92M2DuGANHJkKI6pzkb458ThLR1xaGwg1E/vIqSu98EOVIJe+KhbiPnkTlY8/T8PHKPj+3BKHemXFZHFg/CM07xcma1T4+X3H4+Hbvbj//fq6Zc86VFBQL4ZwstQXoeObONKD9FVJKFQKPAauBs7TW/iMfQGv9MPAwQFZmcdSO+BPppKv9PamqUWr+vZScixaQOW8WAM7Rw0jJy6bmX0tIu+NGg0dnPWYLHh3HY+WJpTdpg3MY852TWX3zM+RMLUYHgtRtPMCE288gNUsmHBOL6pyUXjDUet9CHWKlOSMSHYNQTyftrn3zI9JPmEnuRQta7zNmOI5hgym753HSjzsaldK3gmBbEOrLCVb7cjLPZDqhajxZ9YSqAE
uCM7nn/k+5+foaJk1Owe1WfLbcx20/zWLseFmdEAvhVIJWAfMBlFKDaF1jvffQzzbgX8AdWutfaa0N2bmVKFUhq1SEnHsceLftxj1zcqfL046ehG93Cdrf5ZhDWFgiV4cGnjaJ2U99i8JTJlB0xmRm/+Na8ueMNnpYonemn5NE7HSsEHUKR1t34Z55VKfbOoYPRjlS8ZdX9fv5+loVSpaKkFmrQWDtL+5Kph3Hu8sLuOQKN6eflcbbHxVyyRVuo4eVsEKGIK31EmCPUmoZ8BJwE7BYKXU7MA2YCvxZKfX+oT8nxnTEPUiUJXJWCUL2nEz8pRWdLvOXV2FLTwN7Qi/BT1qJuncoNSuNwpMnUHDiuE5NEoQ5WWVOirVkqQKF0haGUp3ZsL6203XBZg+BhiZsmZEdRPZ1r5AEIeNZOQi94Z/J6WemsfC8NHLzTHkmm4QRznI4tNa9rW/KjdJYoiJ7u9fyy+OssDQu85Q5VP3jVQpuvpKUnCwCDY1UPfkSmSfPprVBk0hkybBcTpiXleYkER85s06gfMlLDBk0BMeAQoJeLwc+eAX39AnYM9Kj8hy79hWEvTyu7XMxnC+NEnVp3GlFmwydH6y8NC5R3xNmE1YIsppE2Ctk9iCUeerxBOobOfCze7DnZROoqiV97gyyz51v2Jj6snZbRE+8A5G3vJ5dT35K9cqd2NMcDDz9KIZePBNll2/MRHIw89xglIwJR+GvrWbvo/dhT8/AX19H+tgJDFx0CbY9jh73EvVVX4IQhH8gbtWD3ouyVvVazUqGINRc18L7D21lw7ulKKWYsmAQ864fg9Md2SG2Vd8TVpKQIaiN1atCZg5Crn0uci9cQPZZ8/BXVGPPy8aeLutWk12sA1FLXTOrb3mWwlMmMO3Pl9BS28zOx5fRtLuSCbefGfXnE0JYR87sE8iaMYuWygrsGZmkZGS2XxdOc4Vw9bVpQrIHIaPFMggFWoI8/u0VDJqQxVUPHkswoPno0e089d3P+ebf52CzRXYMZ9X3hFUk/FenVt8rZPY9QrY0F46hgyQARcCq5fpQYrGHqPTNdeRMKWbUt75G2uAcsiYOYvJvz6Pq850076+O2vMIYVZm/WLMLGypDpxFgzsFoCP1tf12T2KxT8jMYaK/zDDHxaoatemDg6Q4bZz3mykUjs6kaFwWF/1+Op66FnasqAz9AGFIxPeEWSR8CGojQUgI43QMRJFMiA3by8g9Zniny+yuVLInD6FhuyyHFEL0TaRhqC9NExI5CJm5W1ybWAShko11jDm+oNNeaJtNMfq4AZRuqu3lnn3zfN0MS74vzC5pQhBIEBLCLPobiFxF2dRvPdjpMh0I0rCtHNeg7GgOUQjTkSpQ7EQjDIVDgpCxoh2E8ordHNjYNeyUbKgltzj6K2Ss+L4ws6QKQWDt5XEShEQi6kuVaNDZUyl7byMH392IDgRpqfew7f73cA7MInPswDiNWAiRqCIJQxKErCGaQWjyGYMo2VDL8qd20uIN4GsO8MHftlJX5mX8SbGZk+R9ET1JF4LaSBCKXKTrqYXoTm+hyDUwi8m/PY/9L61m2bn3s+Lyh/E3+zjq14sMGq0Q8SFVoPjqbxhK9iBkhWoQRC8IOd0pfOPR2Wz+qIzfnbCUu05cyt61NVzzyCxSUmN3iG2194VZJXR3uFCs3j1OWJ9ZJgQz6/I7KoKlky7H3+BBpdqxO1ONGZgQIuG1BaG+dJQLt3tconaNM3vb7DbR6ho3YEQG33hkNt5GPygibo0dLqu9L8woaStBbaxYETJTNUgII5xWtIkzx+zijOHbo96BTgizkSqQ8fpTGQqnKpSoFSGriGYYc6anxC0AtZH3RWSSPgSBNYOQiIy3vJ6SV76k5LW1+KobjR6OiIIjl9FJOBJCRFusglBTjY+VL+zhs2d2UbWvqdvbWemA1yrL4iA+J/mOJSu9L8xGQtAhEoT6x4r7gkpe+ZKV1z1J3aZSar/ax+ffeJyD72zo8f
aNuyvZ/+IqDi7dgL8pOmcdB3NNAomsp3Akv39hBVIFMp++VoVCBaGK5dv4w5kfs215BQc21vHQpcv48JFt3d72+boZlOwP8MxTjfzrmSaqq4J9Gns8SRCKH2mh3T9JvSfoSFbaI5S5W8vk2A9N+6rZ9cRyZjy4mLRDLZUbd1fy5S3PknP0MJz5Ge231Vqz/a8fUPb+JgYcPwZfVSPb//YBR/36XLInDzHqnyCiKNQkbPWJUYhE05fl4LGeI517HGHvFdq1r6DbPUL+Ri+b//gWU+66kKwJRZxWtIlTbx7HXy/5hDHHDWDI5JxOt//kyR388eFyTjnNhdej+dPv6vjt3dksOCstKv+maLPK/iCI3h4hI8k+ob6REHQEKwUh0XcVH22h8JQJ7QEIIH14PvlzRlGxbBtDzp3efnnlp9upXrWbWY9/k5SM1vdE5YodbPzv15j99LUouxRSE11fJkSzTOQicSTjF13R3PPa02NF8/fal8YJ3QWhqv/sJHvyYLImFAGHD8RnXjiUtW+WdApBpZvr+PjvO/jO8yfzrXEbAdi4voVrLq1k9vFOcnJkToqUBKHkIv9jumGVpXFmaZBgpSVxOhhEpdi7XG5LtaMDnZcVlH+wmeLzZ7QHIID82aNIzXFTu74k5mMV1tLbsjtZfidEV5m7dZc/RjxvNIQ7Dx65NE4Hus5JS0snYHfYCAY6j+2rtw4w47xisovS2qsrE49KZfbxTt5f6olg9LFlpWVxkBhfaMnSuPBICOqBVYKQ6Jv848dQ9t5GfDWHN556y+spX7aV/ONGd7qt9gdRqT0EJn8gonGY7UNfCCFiyajAE45ojak/QSjv2BHUrN5D076q9sv8jV6W/esgk+YXdbpf0B/E3uHcM20Hug4n+P2RjDz2JAjFnwSh0CQE9cIKQchME4kVZIwqYNBZU/ni+qfY8ejHbP/bh3zx3acZdvnsTkvkAPLnjqHklTUEfYdnl7qNB2guqZE9QUII0QuzBp5QIh1vuE0T2oJQarab0TecxJfff5ZtD77Pzsc/4YvrnyRv1ki2Fh/f6T6T5hex+uV9NNe1tF/28IYJfPy+l3mnyDL+aJMglPhkT1AIskco8Yy4+ngGzB1D+bKt2Bx2pv3p66QPz+9yu8KTxlO5fBtffOdpCk4aj6+ykfKPNjP+R2dgc8h/HZH4lFL3AcfSOlfcobV+/Yjrc4G3gcVa680GDFGYhJWCTjja/j393T8UTtOEtj1CRWdMJnvKEMre30zQ52fiz84ma9JgoPMelaHTcpl8+iDuv/Bjpi8cgt8b4MtX93PKTUdROLCyX+OMJys1SWjTNh6zVar6ou13ngj7hHqbk5RS84A/AB5gD3CN1rrXZTtSCQqD2StCZph8rLQvCCBjTCEjr5nLiKuO7zYAASi7jYm/OIcx3zuZoKcFV1EWxzx0JQOOWDbXV1b+MBXJQym1ABiltZ4DLAIeUErZOlx/G7ACmGzQEIWBrFrp6atI/n19qQilDcll+OI5jPzmCe0BqDtn3DaRy+5pPah1ZqTy7f89jtmXDu/X+IxgtWVxbcwWzvrD6lWhUHMSrQHom1rreYACzgj1mBKCwmT2ICRiQylF7ozhjLr2RIZdOgtXYZbRQxIimgYopVZ2+HNdh+tmAEsBtNYlQBkwrO1KrfWftNbjgM/iOmJhmGQIPT3p77+7L0GoJ0cegBdPyeG0m8dzynfGUjCq9bQOVj/AtQIJQnHR7zkJqDp0fxuQB4Qsj8Z9TY/y+HBs2odvQnG8nzpisjROCGEmbZ+nEajQWs/s4ToHrZNKm0ZAPgCTSDKGnVD6s0yuL0vjehJO62artEa24rK4NtJCOzTl8eHYuLe/d49kTroPeA0oPfRnTagnM6wSFOHEbRizVoS6m6x8VZVUfvA25W+9StOOrWgd2wnNakvijGD1D0+RVFoAV4ef0xXZkqQAABWJSURBVABzfgCKqIj1EreWpjpKv3qffSteoWb3OnQwGP
pOJtXX308sKkLdscA3/YB1l8WBVIQM1OOcpJRKB/4KjNNajwWWAz8J9YCGLodzbNpnyTBkhSBUt3YVex7+fwSaGrE5nRx87QUOvviMpScdIURcrQLmAyilBgGFQL+/3hN9F8ulZ/FuWV1/YDsbXvwT3ppyUlzpHPjyHba+9TBBf0voO5tUX39vEoQ6kyBkrOfrZljmvXJIb3OSPvSn9tDPZUDXc5wcwRQtrqy4PM7MS+MCHg9lr7/A0G/eiHPgIABy557Enof/QuPmDWRMlH3MQojeaa2XKKXOUUotA1KBm4DFSqkirfXdBg8vqXQ80O5PtzKjl7VpHWTXx//HiBMvI3voRAAGTjmZbUv/TvmmTxk4+URDxxepzN067NclGkvjhDkkwtI4Kwk1JymlfgN8pJRqAuqAq0I9pilCEEgQipbM3ZoDTdtwDR7WHoAAbKkOcmYeR8OmdTENQeF8wAshrEFrfWMYtzk5HmMRrY7ck2J0wAlHc3UpSimyig9/e65sNgonzaX0q/ctH4IgvkFI9geZhwSh+OptTtJaPw483pfHM1V3OCsujzPl0ji7Dd3NEoOg34+ym+olj6pQywiMJh+UQohosVKXNmWzowMBWlerHBYM+FEq5IqVHmVv9/brT6xEe2lcb2RZnHmYOaSJ3pnyiFiCUPiaKkvY8f7TrH/+bra+9Qh1+7cw0D4GX0UZTTu2tt8u0NhA7X+WkTnFGh+KQgghrMdbX8nuZf9i/Qt/YPMbD1K140tc2YXYnWlUbl3Zfrug38fBr94nb/TRIR8z2mEmlqGoL+E0VBCS/UGHSRASsWCa5XBHstryOCOWxjVV7GPrWw9TNG0+g6afSlPFPnZ99CzFsxcx6OIrOfDck7iGjcTudtO4aT3Zs+biHjkm5uOSJXFCCJF8fA3VbH7tfgaMm83Ikxfjq6tk/8o38DVUM3Le5Wx9+1Gqtq/CmTWA2r0byBo8jvyx3XfDjeeXix2fK1rzeLjL40LNl9FonS3iQ14L6zFtCALrBaF4K1n9NoNnnEHBxOMBSMstwpGZx+6P/4+jRv4E1w9+TsOmdQS9XvJOPBVH3gCDR5y85INRCJHoDq77iLzRMxh8TOuJ2t15g0nLH8LGl+6hYOLxTL74p9TuWU9Lcz0FE4/HnTe4/b5mWVreNo5ohKF4BaFQZH9Q/LSNT+Z8azDlcriOrLQ0Lt4f4k0Ve9s77bTJGDgSv6eBgK+Z7FInWdNmkjNrrgQgIYQQMdVYvofsoZM6XebMzMORkYuntgybPYXckdMonHRCewCK9T6d/orWuOKxNE6WxZmP2cOaaGX6EAQShHqSmp5Dc83BTpe1NNaAsmFPNVfXOiGEEInNkZ6D54g5Kej34WusITUtq/2yeDQpiJZojDNaQag3iXTQLUFIxIslQhBYKwjFy8Cjvsa+z17GU1MGtJ6Ne9fHz1Ewfg7K1tpxx6juQZF2vhFCCGEthZPmcuDLpTSW7QYg4Gtm9ycvkDV4HAWlLssEn+5EOvZozMWRNkqwSjUokUgQMjdT7wk6klX2CMWrSULe6Bm0NDew+fUHsKWkEvB5yB83q309thBCCBEvGUWjKJ69iB3vPYUmSMDnpTBvIuPGnmv00KImkvk9nD1Csj+oVSLsD2ojDRPMy5QhyBdspiXoxW3PQqnOxSoJQp0NnHwiBROPp6WplhRXRrfL4PpyEjcRffLhJ4S1+b3N+JvrcWTmYbObcto0jbxRRzMiOAGPp4aUlDRSU9OMHlLURdI8IdZBKJEOuCUIiVgz1XI4X9DD6uo3+aj8n3xe9Soflj9NqWd7l9vJ0rjObPYUnJn5ptsHJEvihBBWFvS3sPvj51j33J1se/tRvnr2vyjb8InRwzKttiVjStlIS8tLyADUUX+Xx4WzNC6WjRKstCwuUfYHgSyNMyNTfaW1pmYp6Sk5nFx4FXaVSrXvAKtrlpBmzyQ7tbDTba1QETLi3EE9SZZqUKRLBYQQos3eFS/j9z
Uz+es/J8XpprnmINuX/h2HO4ucEVOMHp5pWHWfTzT0d56PRkUoElZZFpdopIW2uZimEtTor6HBX8mEzOOxq1QAch2DGJk+nb1N67u9jxUqQsk8OQghhFUFfB6qtq9m+NyLSHG6AUjLGciQmWdRtmGZwaMznpU6vMWaUb8DaZvdyoqBQqpC5mCaEOQJNOK2Z2NT9k6Xp9tz8QQae7yfBKHwGdEpTpbECSGsyO9twp7qJMWV3ulyV85AfE21Bo3KeBJ8utef34vRy+KsRIKQiAXThKDM1Hzq/VVdAk+Zdyc5joG93tcKQcgsjGqZnazM/sHcXNfCy7/5iv+a8xa/nvEmz/5wFdX7m4welhCGc6RnA9BYsbfT5TW715FROMKAERlLwk94jAhCvbHa/iCfV/Pn39dx/NEHmTzqAN++soqN61sACUIi+kwTghw2FyPSp7Ky+lVKPdupbTnIxrplVPr2Mcw9OfT9TR6EZPIQZqO15qnvfY7Wmu+/Mo/bPziVgeMyeewbn+Ft9Bs9PCEMpWx2hsw8ix3vPEHl1pU0VezjwOqllK3/mKKppxg9vLiR8NN38Q5Coc4fFIqZgtDPf1zD9q1+nnkxn8/XF3HqAiffWlzF/n2JOydJEDKOaUIQwOj0mYxKP4Y9TetYV/sBoJiddwEOW3gdZiQIhSfe1SBZEmdOOz+vwlPfwrl3TCGr0EVaVionXz+WwZOyWfPafqOHJ4Th8sfOZPgJF1O980t2ffwsvoZqxp9zI66cwtB3tjgJP5GJRRDqTSIsi9u3x8+yD33c80Auw0ekkJamuHRxOuddlMYzT7auUEjEahC0vkZWeZ0Siam6wymlGJw2lsFpY/v9GGbvGmemjnEiuVXsbGDY9FyU6tyhaPiMXMp3NHS5fW+Tj5m+SRQimrKKx5NVPN7oYcSN1YKPY+Pe0DcCfBOHxngkXfV1vg/VMS6SbnGhzlNjhm5xO3cGmDgpBZer8+9gxkwHL/zf4WXaiXT+oCPJ+YTiy1SVoGiRilBoibw3KNKlAcmiYFQGe76sRuvO74Xdq6opGJ3R/vNFWatCTo5ttwnntkII87FC5cexcW+XP/G4bySi/TtN5GVxI0fZ2bjBT3Nz5zlp1Uofo8d0/s4+UStCYJ3KXSJIyBAEEoSECGXEzDxcmam8dMdX1JY201Tr472/bqVkQy3Tzh4SUaCRQCSENZg5/MQ6tMQrEPXl9xvL/UFmP7guHprC1+Y5ufV71eza6aexMcgzTzXy8gvNXHaV2+jhxZXZX6tEkbAhCMwfhIwWz2rQkR/a/soafPsPooPBuI1BdKaU4soHjsWeqrjvgo/44/x3Kd9ez7f+dw7O9OitlJUqkRDmY9bwE+9KTRu9bjPetV8SDBq/Ad/I/UFGV4Pu/EM24yaksPiiSuZMPcgH73p57Ok8hhR3nZMSuRoEEoTiwVR7gmLBzHuEknF/kL+yhopHn6Nl30FsbhfaHyDvyvNwT5f/7EZIy0pl0S+nsOiXU+L2nEdOXEZPukIkE7MGH6O0BL2sq/uAKt9+HLY0Wpa9yriM2RTOOD2qz2Om/UGhGLk/yOFU/ODHWfzgx1lh3T6R9wfB4SBk9UBnVgldCWpj5oqQ0RNSPKtBWmvK/vIEromjKb73Zwy5+0cMuPbrVD72L1oOlMdtHCK0eE6AUiUSIvbMVvkxquJzpHV17+OwuTip4Eq+NuAyjs1dyLaGldR/+WnUx2aW/UHhhAIrfTmV6BUhkKpQrCRFCAIJQmagl5WAP0D2wlNQKXYAXBNGkTFvFg0ffm7w6IQZyNI5IaLLrOHHDDyBBqp8B5iQORe7SgUgMyWf0enHsKd5PRD98UZ7f1BvrLw/KNokCInuJE0IAglCPYlXNcjfUEfKwAFdWjKnDswnUFsX1eeSDnGJQUKREP1jpvBjlqrPkbzBZlz2dOyq886A9JQcvMGmTpeZNQjF8jx8iVQNShQShKIrqUIQSBAykqt4GN4tOwk0Hp5ctN
Y0fbEe59gRxg1MWIaEIiF6Z8bwY1YZKbl4Ao00+ms6XX7Qu4Oc1IFdbm9UEAolUbvF9VUyLIuDxHvdjJR0IQgkCHUnHtWg1OxcsqfNpuwPj9K0ch2ezTuofPRf+CuqST/+6Jg/vwiPlcLFkaHISmMXIpok/PSdXaUwJuNYVta8TknzFqp9pWyqX85Bz05GuKd1ex8j/l2xnJ/N3i2ur5IpCEkYilzCd4friXSNM8aABQux7f8P9e9/RrDZS9rU8eRdvhCbKzH/vSL+pPucSCZmCT5gbJe3/hrunozbnsmepvV4g03kOgYxJ+98XPb0Hu/T9u/0TRwa0XP3Za6PpFvcrn0FjCjuf/MhI7vFxUKkHeM6Vtci+b1Gw9LSCQkT7IyQtCEIJAgdKdSHbDQopcg4/mgypPIj4qSnyVvCkbAyCT/RU+AcToFzeJ/v59i4N65BKJT+BqFEO5AO1TYbotc62wyBKNFev3hK6hAE5g5CQgBU7mlkxbO7qdjZQOHoTGZfPpzcwcl19uxY6C4cSTASZifhx1ziGYTi8UVlTzpWg8rLAjzzVBPr1rYwpNjOZVe5GTc+1ZBx9SScINRfI4rLu91r1XaZEWFIglD/JOWeoCOZdY+QmSa7aIplN5uOjO4QF40PpP3ranh48XJSHDZmXTIcFDx06SeUbo5uNz3Rqrs9RrLXSJiB7Pkxr3j+LiLpFheNcwft2+vn4oUVVFcFuXSxm8KBNq65tIplH5njvdlRLPcH9RZ0jDr2kH1CfRdWJUgpdR9w7KHb36G1fr3DdTOB+4EAcAC4Qmttvv8NIZi1IhTvZXFGftMkunrr3k2cfssEjrmg9ZvGCScNJHdIGkv/spkrHzw26s8nB/s96+13k6gVJLN+9pt1XLFgluADUvnpTaQVoXjN9ZEui/vbfQ2cf7Gb79+WCcApp7k4akoqv/9NHa++0/UUGEYLVRGK1rK4I0W6DysSiVwVCvHZ/waQdujHIuANrfUPe3u8kJUgpdQCYJTWeg6wCHhAKdXxfvf9//buLsSKOozj+Pcx1F2jF9JEezESKiLJML1Ii4rAxJeL6qZIEqlEqKCoi4LKCDK8CcrIEMKLjAKTLkraMkrFd1cpiiAKNQiz9QXaqDR1ny7ObM2u58ycdec/M2fO7wMHzsycnXmeGc/z+J+Z3QGecPdZwCHgkSHkUyq6IiRl0tfnHNhzjKnzLxswf+q8y9m/+2hBUUk9SVeQWvVKUllrf1njypqu/LSe4e6jZo93kc8O+mIrLLinc8C82+4YzZEjZzh6pC/YdkM61wFD2iDn4C+XFnpVqGrSar+7z3X3O4H5wI/A8rR1NnM73DRgY7SBQ0APMCm2fLK774ned1EbobWssg6E8pTHn8vO65a4VmYGHReMpLdnYGPs7TlB50Xaf62mBQdCZa39ZY0rExr8tLa89ldRt8V1XjiKdfsnD5j3R69z+jSMOb9cV4H6NVN7Qw2EQLfHZSit9vd7BXjb3Y+lrbCZ2+FGAcdj038C8Wu2fQnLADCzJcCSaPJk1+G3vmtiu8U5HHTt44Chn8bflHkcRTm3/M/Rz3ltqI7N9WcPOf/X5nxVd/7zUzbUnT8cz2e+xrPkevxL6LosV9Z7+shnXYffGjeMVXSYWXdserW7r47eD7v2B5J5T9r7ztPl7klhtft3ErLeB78N42c3ZRVEskG9cUD+DXrXf9Y8vIs1debffP1wEg8tsV9G+WffU/uV8P8icdn3pd9WnWtfGk5PwswmA9Pc/clmNtbMIOgU0BGb7gTip6lGJCwDIEpgdRRgt7tPbya4KlL+yl/5t3f+Wa7P3edkub5Bhl37A1FPylC75w/aB8pf+We5voB9Ka32AywC3mt2hc3cDrcPuAvAzCYC44H49d4DZtb/W2ezgUx3poiIFKKstb+scYmISDhptR9gLvBpsytMvRLk7l1mNt/MtgIjgSeAhWY2wd1XAI8DK83sFLXLqc81u3ERESmnstb+ss
YlIiLhNFH7Aa7g7IFRQ039iWx3fzxhWTcwq9kNEt2C0MaUf3tT/u2tpfLPuPZnRj0pU+2eP2gfKP/21jL5J9X+aPnEoazP3MP/JTAREREREZGyaOZ3gkRERERERCpDgyAREREREWkrQQdBZrbSzHaaWbeZzRu0bHq0bJuZfWhmeT1jIjcp+S+Ilu00s1WDnnheCUn5xz5zr5nVe+RAy0s5/mPNrMvMdpnZdjO7uqg4Q0nJ//Yo981m9q6ZnVdUnCGZ2UIze7/O/MrXvzJST1JPUk9ST1JPUk/6j7sHeQF3Axui95cBB4ERseU7gBnR+zeAx0LFUsQrKX/AgJ+AsdH0emBu0THnefyj+RcDW4A1Rcebd/7AWmBm9H4mcHfRMeec/y7ghti+mFd0zBnnfwmwF/gVeL/O8krXvzK+1JPUk9ST1JMS8ldPqnD9a/QKeaZnGrARwN0PAT3ApNjyye6+J3rfBcwIGEsRkvIfDbzk7sei6V6gamcd0o4/wHLg1Zzjykta/lOA+8xsE7Un1+/IO8DA0vI/DoyLzjZfAhw7aw0tzN2Pu/vNwP0NPlL1+ldG6knqSepJqCepJ9VV9fpXV8hB0Cjg79j0n9QKbb++hGVV0DB/dz/h7msBzGwpcCVDeLhTi0g8/mZ2J7Ui9H3OceWlYf5mNgaYCmxz9zuonYF9Ie8AA0v7/q8EPgF+AC4AvskvtFKoev0rI/Uk9ST1pP+pJ6knxVW9/tUVchB0CuiITXcCJxtse/CyKkjM38zGm9nH1C7LznX30znHF1rD/M2sE3iG6p5xg+TjfxL4B/goml5P7SxVlSQd//OBVcC17n4NsB14NvcIi1X1+ldG6knqSepJ/1NPUk+Kq3r9qyvkIGgfcBeAmU0ExjPwKa4HzKz/SzYb6A4YSxEa5h9dbl0HLHP3F939n8KiDCfp+N8IXE7tkusHwBwze6qIIANqmL+7nwF2A7dEn70V+LaAGENKOv4evX6Ppnuo3q03aape/8pIPUk9ST0J9ST1pLqqXv/qCvqwVDN7E7gJGAm8DIwDJrj7CjObDrxObXR+FHjQ3Ss18myUP/A58CXwdezjy9x9S+5BBpR0/GOfuYraveiLi4kynJR//9cB7wJ/UbsX+dHY/fiVkJL/YuAxavn3Ag+5+/HCgg3EzG4Hlrr7A2a2iDaqf2WknqSehHqSepJ6knpSJOggSEREREREpGwq9xwAERERERGRJBoEiYiIiIhIW9EgSERERERE2ooGQSIiIiIi0lY0CBIRERERkbaiQZCIiIiIiLQVDYJERERERKSt/AsjdjmOFHxcowAAAABJRU5ErkJggg==\n",
"text/plain": [
"<Figure size 864x360 with 4 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"T_star, CV_star = model.predict(X,Y)\n",
"\n",
"fig = plt.figure()\n",
"fig.set_size_inches((12,5))\n",
"plt.subplot(121)\n",
"plt.contourf(X_m, Y_m, T_star.reshape(X_m.shape))\n",
"plt.colorbar()\n",
"plt.scatter(X_train, Y_train, facecolors = 'none', edgecolor = 'k') \n",
"plt.xlim([0,1])\n",
"plt.ylim([0,1])\n",
"plt.title('predicted activation times')\n",
"\n",
"plt.subplot(122)\n",
"plt.contourf(X_m, Y_m, CV_star.reshape(X_m.shape))\n",
"plt.colorbar()\n",
"plt.scatter(X_train, Y_train, facecolors = 'none', edgecolor = 'k') \n",
"plt.xlim([0,1])\n",
"plt.ylim([0,1])\n",
"plt.title('predicted conduction velocity')\n",
" \n",
"plt.tight_layout()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.3"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
| Unknown |
3D | fsahli/EikonalNet | models_para_tf.py | .py | 22,923 | 562 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 11 12:30:53 2019
@author: fsc
"""
import tensorflow as tf
import timeit
import numpy as np
import time
from pyDOE import lhs
tf.random.set_random_seed(1234)
np.random.seed(1234)
class Eikonal2DnetCV2RPF:
    """Physics-informed network for the 2-D eikonal equation with unknown,
    spatially varying conduction velocity (CV), trained as an ensemble of
    `Batch` randomized-prior networks (RPF) in TensorFlow 1.x.

    Two networks are learned per ensemble member: one for the activation
    time T(x, y) and one for CV(x, y); each is the sum of a trainable net
    and a frozen, randomly initialized "prior" net.
    """

    # Initialize the class
    def __init__(self, x, y, x_e, y_e, T_e, layers, CVlayers, Batch,
                 C = 1.0, alpha = 1e-6, alphaL2 = 1e-6, jobs = 8, noise_level = 0.01):
        """
        x, y        : (N, 1) collocation points where the PDE residual is enforced.
        x_e, y_e    : (N_data, 1) measurement locations.
        T_e         : (N_data, 1) measured activation times.
        layers      : layer widths of the T network.
        CVlayers    : layer widths of the CV network.
        Batch       : ensemble size (number of randomized-prior networks).
        C           : upper bound of CV (CV = C * sigmoid(raw net output)).
        alpha       : weight of the CV-smoothness residual in the loss.
        alphaL2     : L2 weight-decay coefficient on the T-net weights.
        jobs        : CPU thread count for the TF session.
        noise_level : amplitude of the per-member perturbation of the data.
        """
        self.Batch = Batch
        self.noise_level = noise_level
        X = np.concatenate([x, y], 1)
        # X_e = np.concatenate([x_e, t_e], 1)
        self.lb = X.min(0)  # lower bound used to rescale inputs to [-1, 1]
        self.ub = X.max(0)  # upper bound
        self.X = X
        # self.X_e = X_e
        # normalization is applied inside neural_net, not here
        self.x = x  # 2.0*(x - self.lb[0])/(self.ub[0] - self.lb[0]) - 1.0
        self.y = y  # 2.0*(y - self.lb[1])/(self.ub[1] - self.lb[1]) - 1.0
        self.N_data = T_e.shape[0]
        # Replicate the measurements once per ensemble member, each copy with
        # its own noise. NOTE(review): np.random.rand is uniform in [0, 1)
        # (not zero-mean), while add_point() uses randn — confirm intended.
        T_e = np.tile(T_e[None,:], (self.Batch,1,1)) + self.noise_level*np.random.rand(self.Batch, self.N_data, 1)
        self.T_e = T_e
        self.x_e = x_e  # 2.0*(x_e - self.lb[0])/(self.ub[0] - self.lb[0]) - 1.0
        self.y_e = y_e  # 2.0*(y_e - self.lb[1])/(self.ub[1] - self.lb[1]) - 1.0
        self.layers = layers
        self.CVlayers = CVlayers
        # Initialize NN: trainable nets plus frozen random priors
        self.weights, self.biases = self.initialize_NN(layers)
        self.prior_weights, self.prior_biases = self.initialize_NN(layers, trainable = False)
        self.CVweights, self.CVbiases = self.initialize_NN(CVlayers)
        # NOTE(review): the CV prior is built from `layers`, not `CVlayers`;
        # this only works when both architectures coincide — confirm.
        self.CVprior_weights, self.CVprior_biases = self.initialize_NN(layers, trainable = False)
        self.C = tf.constant(C)
        self.alpha = tf.constant(alpha)
        self.alphaL2 = alphaL2
        # tf placeholders and graph
        self.sess = tf.Session(config=tf.ConfigProto(allow_soft_placement=True,
                                                     log_device_placement=True,
                                                     intra_op_parallelism_threads=jobs,
                                                     inter_op_parallelism_threads=jobs,
                                                     device_count={'CPU': jobs}))
        self.x_tf = tf.placeholder(tf.float32, shape=[None, self.x.shape[1]])
        self.y_tf = tf.placeholder(tf.float32, shape=[None, self.y.shape[1]])
        self.T_e_tf = tf.placeholder(tf.float32, shape=[Batch, None, self.x_e.shape[1]])
        self.x_e_tf = tf.placeholder(tf.float32, shape=[None, self.x_e.shape[1]])
        self.y_e_tf = tf.placeholder(tf.float32, shape=[None, self.y_e.shape[1]])
        # Dummy placeholder consumed by the forward-gradient trick (fwd_gradients_0).
        self.dummy_x0_tf = tf.placeholder(tf.float32, shape=(None, Batch))
        self.T_pred, self.CV_pred, self.f_T_pred, self.f_CV_pred = self.net_eikonal(self.x_tf, self.y_tf)
        self.T_e_pred = self.net_T(self.x_e_tf, self.y_e_tf)
        # Loss = data misfit + eikonal residual + CV smoothness + L2 decay.
        self.loss = np.float32(self.Batch)*(tf.reduce_mean(tf.square(self.T_e_tf - self.T_e_pred)) + \
                    tf.reduce_mean(tf.square(self.f_T_pred)) + \
                    self.alpha*tf.reduce_mean(tf.square(self.f_CV_pred))) + \
                    sum([self.alphaL2*tf.nn.l2_loss(w) for w in self.weights])
        self.optimizer_Adam = tf.train.AdamOptimizer()
        self.train_op_Adam = self.optimizer_Adam.minimize(self.loss)
        # Define optimizer (use L-BFGS for better accuracy)
        self.optimizer = tf.contrib.opt.ScipyOptimizerInterface(self.loss,
                                method = 'L-BFGS-B',
                                options = {'maxiter': 10000,
                                           'maxfun': 10000,
                                           'maxcor': 50,
                                           'maxls': 50,
                                           'ftol' : 1.0 * np.finfo(float).eps})
        self.lossit = []  # loss history filled by callback / Adam loops
        # Initialize Tensorflow variables
        init = tf.global_variables_initializer()
        self.sess.run(init)

    # Initialize network weights and biases using Xavier initialization
    def initialize_NN(self, layers, trainable = True):
        """Create one [Batch, in, out] weight and [Batch, 1, out] bias per layer."""
        # Xavier initialization (spread adjusted by the ensemble size)
        def xavier_init(size):
            in_dim = size[0]
            out_dim = size[1]
            xavier_stddev = 1. / np.sqrt((in_dim + out_dim + self.Batch) / 3.)
            return tf.Variable(tf.random_normal([self.Batch, in_dim, out_dim], dtype=tf.float32) * xavier_stddev, dtype=tf.float32, trainable = trainable)
        weights = []
        biases = []
        num_layers = len(layers)
        for l in range(0,num_layers-1):
            W = xavier_init(size=[layers[l], layers[l+1]])
            b = tf.Variable(tf.zeros([self.Batch, 1, layers[l+1]], dtype=tf.float32), dtype=tf.float32, trainable = trainable)
            weights.append(W)
            biases.append(b)
        return weights, biases

    def neural_net(self, X, weights, biases):
        """Batched MLP with tanh hidden layers; X is rescaled to [-1, 1] first."""
        num_layers = len(weights) + 1
        H = 2.0*(X - self.lb)/(self.ub - self.lb) - 1.0
        for l in range(0,num_layers-2):
            W = weights[l]
            b = biases[l]
            H = tf.tanh(tf.add(tf.matmul(H, W), b))
        W = weights[-1]
        b = biases[-1]
        Y = tf.add(tf.matmul(H, W), b)  # linear output layer
        return Y

    def net_eikonal(self, x, y):
        """Evaluate T, CV and the PDE residuals at collocation points (x, y)."""
        X = tf.concat([x,y], 1)
        X_temp = tf.tile(tf.expand_dims(X, 0),[self.Batch,1,1])
        # Randomized prior: trainable net + frozen random net.
        T = self.neural_net(X_temp, self.weights, self.biases)\
            + self.neural_net(X_temp, self.prior_weights, self.prior_biases)
        CV = self.neural_net(X_temp, self.CVweights, self.CVbiases)\
            + self.neural_net(X_temp, self.CVprior_weights, self.CVprior_biases)
        T = tf.squeeze(T, [2])
        T = tf.transpose(T)
        CV = tf.squeeze(CV, [2])
        CV = tf.transpose(CV)
        CV = self.C*tf.sigmoid(CV)  # bound CV to (0, C)
        T_x = self.fwd_gradients_0(T, x)
        T_y = self.fwd_gradients_0(T, y)
        CV_x = self.fwd_gradients_0(CV, x)
        CV_y = self.fwd_gradients_0(CV, y)
        f_T = tf.sqrt(T_x**2 + T_y**2) - 1.0/CV  # eikonal residual |grad T| - 1/CV
        f_CV = tf.sqrt(CV_x**2 + CV_y**2)        # CV roughness (smoothness penalty)
        return T, CV, tf.transpose(f_T), tf.transpose(f_CV)

    def net_T(self, x, y):
        """Activation-time prediction (trainable + prior) at data locations."""
        X = tf.concat([x,y], 1)
        X_temp = tf.tile(tf.expand_dims(X, 0),[self.Batch,1,1])
        T = self.neural_net(X_temp, self.weights, self.biases)\
            + self.neural_net(X_temp, self.prior_weights, self.prior_biases)
        return T

    def fwd_gradients_0(self, U, x):
        """Forward-mode gradient of U w.r.t. x via the double-backward trick."""
        g = tf.gradients(U, x, grad_ys=self.dummy_x0_tf)[0]
        return tf.gradients(g, self.dummy_x0_tf)[0]

    def callback(self, loss):
        # L-BFGS progress callback: record and print the current loss.
        self.lossit.append(loss)
        print('Loss: %.5e' % (loss))

    def train(self):
        """Full-batch training with SciPy's L-BFGS-B."""
        tf_dict = {self.x_tf: self.x, self.y_tf: self.y,
                   self.x_e_tf: self.x_e, self.y_e_tf: self.y_e, self.T_e_tf: self.T_e,
                   self.dummy_x0_tf: np.ones((self.x.shape[0], self.Batch))}
        # Call SciPy's L-BFGS optimizer
        self.optimizer.minimize(self.sess,
                                feed_dict = tf_dict,
                                fetches = [self.loss],
                                loss_callback = self.callback)

    def train_Adam(self, nIter):
        """Run nIter full-batch Adam steps, then refine with L-BFGS."""
        self.lossit = []
        tf_dict = {self.x_tf: self.x, self.y_tf: self.y,
                   self.x_e_tf: self.x_e, self.y_e_tf: self.y_e, self.T_e_tf: self.T_e,
                   self.dummy_x0_tf: np.ones((self.x.shape[0], self.Batch))}
        start_time = time.time()
        for it in range(nIter):
            self.sess.run(self.train_op_Adam, tf_dict)
            loss_value = self.sess.run(self.loss, tf_dict)
            self.lossit.append(loss_value)
            # Print progress every 10 iterations
            if it % 10 == 0:
                elapsed = time.time() - start_time
                # NOTE(review): prints exp(C) although C is never stored in
                # log space — confirm the intended report.
                C_value = np.exp(self.sess.run(self.C))
                print('It: %d, Loss: %.3e, C: %.3f, Time: %.2f' %
                      (it, loss_value, C_value, elapsed))
                start_time = time.time()
        self.optimizer.minimize(self.sess,
                                feed_dict = tf_dict,
                                fetches = [self.loss],
                                loss_callback = self.callback)

    def train_Adam_minibatch(self, nIter, size = 50):
        """Adam training with fresh Latin-hypercube collocation points per step."""
        init_start_time = time.time()
        start_time = time.time()
        for it in range(nIter):
            X = lhs(2, size)  # new random collocation points in [0, 1]^2
            tf_dict = {self.x_tf: X[:,:1], self.y_tf: X[:,1:],
                       self.x_e_tf: self.x_e, self.y_e_tf: self.y_e, self.T_e_tf: self.T_e,
                       self.dummy_x0_tf: np.ones((size, self.Batch))}
            self.sess.run(self.train_op_Adam, tf_dict)
            loss_value = self.sess.run(self.loss, tf_dict)
            self.lossit.append(loss_value)
            # Print progress every 10 iterations
            if it % 10 == 0:
                elapsed = time.time() - start_time
                C_value = np.exp(self.sess.run(self.C))
                print('It: %d, Loss: %.3e, C: %.3f, Time: %.2f' %
                      (it, loss_value, C_value, elapsed))
                start_time = time.time()
        total_time = time.time() - init_start_time
        print('total time:', total_time)
        return total_time

    def predict(self, x_star, y_star):
        """Return per-ensemble-member T and CV predictions at (x_star, y_star)."""
        # x_star = 2.0*(x_star - self.lb[0])/(self.ub[0] - self.lb[0]) - 1.0
        # y_star = 2.0*(y_star - self.lb[1])/(self.ub[1] - self.lb[1]) - 1.0
        tf_dict = {self.x_tf: x_star, self.y_tf: y_star,
                   self.x_e_tf: self.x_e, self.y_e_tf: self.y_e}
        T_star = self.sess.run(self.T_pred, tf_dict)
        CV_star = self.sess.run(self.CV_pred, tf_dict)
        return T_star, CV_star

    def get_adaptive_points(self, N = 1000, M = 10):
        """Pick the M of N random points with the largest eikonal residual.

        NOTE(review): f_T_pred is built on fwd_gradients_0, which reads the
        dummy_x0_tf placeholder; it is not fed here — verify this runs.
        """
        X = lhs(2, N)
        tf_dict = {self.x_tf: X[:,:1], self.y_tf: X[:,1:],
                   self.x_e_tf: self.x_e, self.y_e_tf: self.y_e}
        f_T_star = self.sess.run(self.f_T_pred, tf_dict)
        ind = f_T_star[:,0].argsort()[-M:]
        return X[ind], f_T_star[ind]

    def add_point(self,x_new, y_new, T_new):
        """Append one new measurement, perturbed independently per member."""
        self.x_e = np.concatenate((self.x_e, x_new[:,None]))
        self.y_e = np.concatenate((self.y_e, y_new[:,None]))
        T_new = np.tile(T_new[None,:,None], [self.Batch, 1,1]) + self.noise_level*np.random.randn(self.Batch,1,1)
        self.T_e = np.concatenate((self.T_e, T_new), axis = 1)
#%%
class Eikonalnet3DRPF:
    """Physics-informed network for the 3-D eikonal equation with unknown
    conduction velocity, trained as an ensemble of `Batch` randomized-prior
    networks (TensorFlow 1.x).

    Compared with the 2-D class: activation times are normalized by Tmax and
    an extra residual (f_N) penalizes propagation along the surface normals.
    """

    # Initialize the class
    def __init__(self, X, normals, X_e, T_e,
                 layers, CVlayers, Batch, Tmax,
                 C = 1.0, alpha = 1e-5, alphaL2 = 1e-6, jobs = 4, noise_level = 0.01):
        """
        X       : (N, 3) collocation points.
        normals : (N, 3) surface normals at the collocation points.
        X_e     : (N_data, 3) measurement locations.
        T_e     : (N_data, 1) measured activation times.
        Tmax    : time scale used to de-normalize the gradients in the residuals.
        Remaining arguments as in Eikonal2DnetCV2RPF.
        """
        self.Batch = Batch
        self.noise_level = noise_level
        self.Tmax = Tmax
        # X_e = np.concatenate([x_e, t_e], 1)
        self.lb = X.min(0)  # input rescaling bounds used by neural_net
        self.ub = X.max(0)
        self.X = X
        self.normals = normals
        # self.X_e = X_e
        self.N_data = T_e.shape[0]
        # Replicate the data per ensemble member with independent noise.
        # NOTE(review): uniform rand here vs randn in add_point — confirm.
        T_e = np.tile(T_e[None,:], (self.Batch,1,1)) + self.noise_level*np.random.rand(self.Batch, self.N_data, 1)
        self.T_e = T_e
        self.X_e = X_e
        self.layers = layers
        self.CVlayers = CVlayers
        # Initialize NN: trainable nets plus frozen random priors
        self.weights, self.biases = self.initialize_NN(layers)
        self.prior_weights, self.prior_biases = self.initialize_NN(layers, trainable = False)
        self.CVweights, self.CVbiases = self.initialize_NN(CVlayers)
        # NOTE(review): the CV prior is built from `layers`, not `CVlayers`.
        self.CVprior_weights, self.CVprior_biases = self.initialize_NN(layers, trainable = False)
        self.C = tf.constant(C)
        self.alpha = tf.constant(alpha)
        self.alphaL2 = alphaL2
        # tf placeholders and graph
        self.sess = tf.Session(config=tf.ConfigProto(allow_soft_placement=True,
                                                     log_device_placement=True,
                                                     intra_op_parallelism_threads=jobs,
                                                     inter_op_parallelism_threads=jobs,
                                                     device_count={'CPU': jobs}))
        self.x_tf = tf.placeholder(tf.float32, shape=[None, 1])
        self.y_tf = tf.placeholder(tf.float32, shape=[None, 1])
        self.z_tf = tf.placeholder(tf.float32, shape=[None, 1])
        self.normals_tf = tf.placeholder(tf.float32, shape=[None, self.normals.shape[1]])
        self.T_e_tf = tf.placeholder(tf.float32, shape=[Batch, None, self.T_e.shape[-1]])
        self.X_e_tf = tf.placeholder(tf.float32, shape=[None, self.X_e.shape[1]])
        # Dummy placeholder consumed by the forward-gradient trick.
        self.dummy_x0_tf = tf.placeholder(tf.float32, shape=(None, Batch))
        self.CV_pred, self.f_T_pred, self.f_CV_pred, self.f_N_pred, self.gradT = self.net_eikonal(self.x_tf, self.y_tf, self.z_tf, self.normals_tf)
        self.T_e_pred, self.CV_e_pred = self.net_data(self.X_e_tf)
        self.pde_loss = tf.reduce_mean(tf.square(self.f_T_pred))
        self.normal_loss = 1e-3*tf.reduce_mean(tf.square(self.f_N_pred))
        # Loss = data misfit + eikonal residual + normal-direction penalty
        # + CV smoothness + L2 decay. NOTE(review): unlike the 2-D class the
        # L2 term sits inside the Batch multiplier — confirm intended.
        self.loss = np.float32(self.Batch)*(tf.reduce_mean(tf.square(self.T_e_tf - self.T_e_pred)) + \
                    self.pde_loss + \
                    self.normal_loss + \
                    self.alpha*tf.reduce_mean(tf.square(self.f_CV_pred)) + \
                    sum([self.alphaL2*tf.nn.l2_loss(w) for w in self.weights]))
        self.optimizer_Adam = tf.train.AdamOptimizer()
        self.train_op_Adam = self.optimizer_Adam.minimize(self.loss)
        # Define optimizer (use L-BFGS for better accuracy)
        self.optimizer = tf.contrib.opt.ScipyOptimizerInterface(self.loss,
                                method = 'L-BFGS-B',
                                options = {'maxiter': 10000,
                                           'maxfun': 50000,
                                           'maxcor': 50,
                                           'maxls': 50,
                                           'ftol' : 1.0 * np.finfo(float).eps})
        self.lossit = []  # loss history
        # Initialize Tensorflow variables
        init = tf.global_variables_initializer()
        self.sess.run(init)

    # Initialize network weights and biases using Xavier initialization
    def initialize_NN(self, layers, trainable = True):
        """Create one [Batch, in, out] weight and [Batch, 1, out] bias per layer."""
        # Xavier initialization (divisor 2 here vs 3 in the 2-D class)
        def xavier_init(size):
            in_dim = size[0]
            out_dim = size[1]
            xavier_stddev = 1. / np.sqrt((in_dim + out_dim + self.Batch) / 2.)
            return tf.Variable(tf.random_normal([self.Batch, in_dim, out_dim], dtype=tf.float32) * xavier_stddev, dtype=tf.float32, trainable = trainable)
        weights = []
        biases = []
        num_layers = len(layers)
        for l in range(0,num_layers-1):
            W = xavier_init(size=[layers[l], layers[l+1]])
            b = tf.Variable(tf.zeros([self.Batch, 1, layers[l+1]], dtype=tf.float32), dtype=tf.float32, trainable = trainable)
            weights.append(W)
            biases.append(b)
        return weights, biases

    def neural_net(self, X, weights, biases):
        """Batched MLP with tanh hidden layers; X is rescaled to [-1, 1] first."""
        num_layers = len(weights) + 1
        H = 2.0*(X - self.lb)/(self.ub - self.lb) - 1.0
        for l in range(0,num_layers-2):
            W = weights[l]
            b = biases[l]
            H = tf.tanh(tf.add(tf.matmul(H, W), b))
        W = weights[-1]
        b = biases[-1]
        Y = tf.add(tf.matmul(H, W), b)  # linear output layer
        return Y

    def net_eikonal(self, x, y, z, normals):
        """Evaluate CV, residuals and grad(T) at collocation points (x, y, z)."""
        X_temp = tf.tile(tf.expand_dims(tf.concat((x,y,z), axis = 1), 0),[self.Batch,1,1])
        # Randomized prior: trainable net + frozen random net.
        T = self.neural_net(X_temp, self.weights, self.biases)\
            + self.neural_net(X_temp, self.prior_weights, self.prior_biases)
        CV = self.neural_net(X_temp, self.CVweights, self.CVbiases)\
            + self.neural_net(X_temp, self.CVprior_weights, self.CVprior_biases)
        T = tf.squeeze(T, [2])
        T = tf.transpose(T)
        CV = tf.squeeze(CV, [2])
        CV = tf.transpose(CV)
        CV = self.C*tf.sigmoid(CV)  # bound CV to (0, C)
        T_x = self.fwd_gradients_0(T, x) # batch, N_data,
        T_y = self.fwd_gradients_0(T, y) # batch, N_data,
        T_z = self.fwd_gradients_0(T, z) # batch, N_data,
        #
        CV_x = self.fwd_gradients_0(CV, x) # batch, N_data,
        CV_y = self.fwd_gradients_0(CV, y) # batch, N_data,
        CV_z = self.fwd_gradients_0(CV, z) # batch, N_data,
        #
        gradT = tf.stack((tf.transpose(T_x), tf.transpose(T_y), tf.transpose(T_z)), axis = -1)
        # Eikonal residual in normalized time: CV * Tmax * |grad T| - 1
        f_T = CV*self.Tmax*tf.sqrt((T_x**2 + T_y**2 + T_z**2)) - 1.0
        # f_T = CV*tf.norm(self.Tmax*(gradT), axis = -1, keepdims = True) - 1.0
        f_CV = tf.sqrt(CV_x**2 + CV_y**2 + CV_z**2)  # CV roughness penalty
        # Penalize the component of grad T along the surface normals.
        f_N = self.C*self.Tmax*(T_x*normals[:,0:1] + T_y*normals[:,1:2] + T_z*normals[:,2:])
        # f_N = self.C*self.Tmax*tf.reduce_sum(gradT*normals, axis = -1)
        # f_N = 0.0
        # f_T = f_CV = f_N = T_x = 0.0
        return CV, f_T, f_CV, f_N, gradT

    def net_data(self,X):
        """Predict T and CV at data locations.

        NOTE(review): unlike net_eikonal, the frozen prior nets are NOT
        added here — confirm this asymmetry is intentional.
        """
        X_temp = tf.tile(tf.expand_dims(X, 0),[self.Batch,1,1])
        T = self.neural_net(X_temp, self.weights, self.biases)
        CV = self.C*tf.sigmoid(self.neural_net(X_temp, self.CVweights, self.CVbiases))
        return T, CV

    def fwd_gradients_0(self, U, x):
        """Forward-mode gradient of U w.r.t. x via the double-backward trick."""
        g = tf.gradients(U, x, grad_ys=self.dummy_x0_tf)[0]
        return tf.gradients(g, self.dummy_x0_tf)[0]

    def batch_grad(self,y, X):
        """Per-output-column gradient of y w.r.t. X, stacked with map_fn."""
        J = tf.map_fn(lambda m: tf.gradients(y[:,m:m+1], X)[0], tf.range(tf.shape(y)[-1]), tf.float32)
        return J

    def callback(self, loss):
        # L-BFGS progress callback: record and print the current loss.
        self.lossit.append(loss)
        print('Loss: %.5e' % (loss))

    def train(self):
        """Full-batch training with SciPy's L-BFGS-B."""
        tf_dict = {self.x_tf: self.X[:,0:1], self.y_tf: self.X[:,1:2], self.z_tf: self.X[:,2:],
                   self.X_e_tf: self.X_e,
                   self.T_e_tf: self.T_e,
                   self.dummy_x0_tf: np.ones((self.X.shape[0], self.Batch)),
                   self.normals_tf: self.normals
                   }
        # Call SciPy's L-BFGS optimizer
        self.optimizer.minimize(self.sess,
                                feed_dict = tf_dict,
                                fetches = [self.loss],
                                loss_callback = self.callback)

    def train_Adam(self, nIter):
        """Run nIter full-batch Adam steps, then refine with L-BFGS."""
        tf_dict = {self.x_tf: self.X[:,0:1], self.y_tf: self.X[:,1:2], self.z_tf: self.X[:,2:],
                   self.X_e_tf: self.X_e,
                   self.T_e_tf: self.T_e,
                   self.dummy_x0_tf: np.ones((self.X.shape[0], self.Batch)),
                   self.normals_tf: self.normals
                   }
        start_time = time.time()
        for it in range(nIter):
            self.sess.run(self.train_op_Adam, tf_dict)
            loss_value = self.sess.run(self.loss, tf_dict)
            self.lossit.append(loss_value)
            # Print progress every 10 iterations
            if it % 10 == 0:
                elapsed = time.time() - start_time
                # NOTE(review): prints exp(C) although C is never stored in
                # log space — confirm the intended report.
                C_value = np.exp(self.sess.run(self.C))
                print('It: %d, Loss: %.3e, C: %.3f, Time: %.2f' %
                      (it, loss_value, C_value, elapsed))
                start_time = time.time()
        self.optimizer.minimize(self.sess,
                                feed_dict = tf_dict,
                                fetches = [self.loss],
                                loss_callback = self.callback)

    def train_Adam_minibatch(self, nEpoch, size = 50):
        """Mini-batch Adam over the collocation points for nEpoch epochs.

        NOTE(review): the collocation points are shuffled once, before the
        epoch loop, so every epoch visits the same mini-batch split.
        """
        init_start_time = time.time()
        start_time = time.time()
        idx_global = np.arange(self.X.shape[0])
        np.random.shuffle(idx_global)
        splits = np.array_split(idx_global, idx_global.shape[0]//size)
        for ep in range(nEpoch):
            for it, idx in enumerate(splits):
                tf_dict = {self.x_tf: self.X[idx,0:1], self.y_tf: self.X[idx,1:2], self.z_tf: self.X[idx,2:],
                           self.X_e_tf: self.X_e,
                           self.T_e_tf: self.T_e,
                           self.dummy_x0_tf: np.ones((idx.shape[0], self.Batch)),
                           self.normals_tf: self.normals[idx]
                           }
                self.sess.run(self.train_op_Adam, tf_dict)
                loss_value = self.sess.run(self.loss, tf_dict)
                self.lossit.append(loss_value)
                # Print progress every 10 mini-batches
                if it % 10 == 0:
                    elapsed = time.time() - start_time
                    pde_loss = self.sess.run(self.pde_loss, tf_dict)
                    normal_loss = self.sess.run(self.normal_loss, tf_dict)
                    print('Epoch: %d, It: %d, Loss: %.3e, pde loss: %.3e, normal loss: %.3e, Time: %.2f' %
                          (ep, it + ep*idx_global.shape[0]//size, loss_value, pde_loss, normal_loss,elapsed))
                    start_time = time.time()
        print('total time:', time.time() - init_start_time)

    def predict(self, X_star):
        """Return per-ensemble-member T and CV predictions at X_star (N, 3)."""
        tf_dict = {self.X_e_tf: X_star}
        T_star = self.sess.run(self.T_e_pred, tf_dict)
        CV_star = self.sess.run(self.CV_e_pred, tf_dict)
        return T_star, CV_star

    def get_adaptive_points(self, N = 1000, M = 10):
        """Pick the M of N random points with the largest eikonal residual.

        NOTE(review): copied from the 2-D class — it references self.x_e_tf,
        self.y_e_tf, self.x_e and self.y_e, none of which exist on this
        class, and samples 2-D points for a 3-D domain. This would raise
        AttributeError as written; verify before use.
        """
        X = lhs(2, N)
        tf_dict = {self.x_tf: X[:,:1], self.y_tf: X[:,1:],
                   self.x_e_tf: self.x_e, self.y_e_tf: self.y_e}
        f_T_star = self.sess.run(self.f_T_pred, tf_dict)
        ind = f_T_star[:,0].argsort()[-M:]
        return X[ind], f_T_star[ind]

    def add_point(self,X_new, T_new):
        """Append one new measurement, perturbed independently per member."""
        self.X_e = np.concatenate((self.X_e, X_new[None,:]))
        T_new = np.tile(T_new[None,:,None], [self.Batch, 1,1]) + self.noise_level*np.random.randn(self.Batch,1,1)
        self.T_e = np.concatenate((self.T_e, T_new), axis = 1)
| Python |
3D | zhangjun001/ICNet | Code/Train.py | .py | 4,602 | 108 | import os
import glob
import sys
from argparse import ArgumentParser
import numpy as np
import torch
from torch.autograd import Variable
from Models import ModelFlow_stride,SpatialTransform,antifoldloss,mse_loss,smoothloss
from Functions import Dataset,generate_grid
import torch.utils.data as Data
# Command-line hyper-parameters for inverse-consistent registration training.
parser = ArgumentParser()
parser.add_argument("--lr", type=float,
                    dest="lr", default=5e-4,help="learning rate")
parser.add_argument("--iteration", type=int,
                    dest="iteration", default=20001,
                    help="number of total iterations")
parser.add_argument("--inverse", type=float,
                    dest="inverse", default=0.05,
                    help="Inverse consistent:suggested range 0.001 to 0.1")
parser.add_argument("--antifold", type=float,
                    dest="antifold", default=100000,
                    help="Anti-fold loss: suggested range 100000 to 1000000")
parser.add_argument("--smooth", type=float,
                    dest="smooth", default=0.5,
                    help="Gradient smooth loss: suggested range 0.1 to 10")
parser.add_argument("--checkpoint", type=int,
                    dest="checkpoint", default=4000,
                    help="frequency of saving models")
parser.add_argument("--start_channel", type=int,
                    dest="start_channel", default=8,
                    help="number of start channels")
parser.add_argument("--datapath", type=str,
                    dest="datapath", default='../Dataset',
                    help="data path for training images")
opt = parser.parse_args()
# Unpack into the module-level names read by train().
lr = opt.lr
iteration = opt.iteration
start_channel = opt.start_channel
inverse = opt.inverse
antifold = opt.antifold
n_checkpoint = opt.checkpoint
smooth = opt.smooth
datapath = opt.datapath
def train():
    """Train the inverse-consistent registration network.

    Reads the command-line hyper-parameters parsed at module level plus the
    globals `imgshape` and `range_flow` set in the __main__ block.
    Saves checkpoints to ../Model every `n_checkpoint` steps and the full
    loss history to ../Model/loss.npy.
    """
    model =ModelFlow_stride(2,3,start_channel).cuda()
    # Loss components: similarity, inverse consistency, anti-folding, smoothness.
    loss_similarity =mse_loss
    loss_inverse = mse_loss
    loss_antifold = antifoldloss
    loss_smooth = smoothloss
    transform = SpatialTransform().cuda()
    # The warping layer has no trainable parameters; freeze it explicitly.
    # NOTE(review): Variable.volatile is a no-op in modern PyTorch.
    for param in transform.parameters():
        param.requires_grad = False
        param.volatile=True
    names = glob.glob(datapath + '/*.gz')
    grid = generate_grid(imgshape)
    # Add a leading batch axis and move the grid to the GPU once.
    grid = Variable(torch.from_numpy(np.reshape(grid, (1,) + grid.shape))).cuda().float()
    print(grid.type())
    optimizer = torch.optim.Adam(model.parameters(),lr=lr)
    model_dir = '../Model'
    if not os.path.isdir(model_dir):
        os.mkdir(model_dir)
    lossall = np.zeros((5,iteration))
    # Dataset length == iteration, so one DataLoader pass is the whole run.
    training_generator = Data.DataLoader(Dataset(names,iteration,True), batch_size=1,
                                         shuffle=False, num_workers=2)
    step=0
    for X,Y in training_generator:
        X = X.cuda().float()
        Y = Y.cuda().float()
        F_xy = model(X,Y)  # flow X -> Y
        F_yx = model(Y,X)  # flow Y -> X
        X_Y = transform(X,F_xy.permute(0,2,3,4,1)*range_flow,grid)
        Y_X = transform(Y,F_yx.permute(0,2,3,4,1)*range_flow,grid)
        # Note that, the generation of inverse flow depends on the definition of transform.
        # The generation strategies are slightly different for the backward warping and forward warping
        F_xy_ = transform(-F_yx,F_xy.permute(0,2,3,4,1)*range_flow,grid)
        F_yx_ = transform(-F_xy,F_yx.permute(0,2,3,4,1)*range_flow,grid)
        loss1 = loss_similarity(Y,X_Y) + loss_similarity(X,Y_X)  # image similarity
        loss2 = loss_inverse(F_xy*range_flow,F_xy_*range_flow) + loss_inverse(F_yx*range_flow,F_yx_*range_flow)  # inverse consistency
        loss3 = loss_antifold(F_xy*range_flow) + loss_antifold(F_yx*range_flow)  # anti-folding
        loss4 = loss_smooth(F_xy*range_flow) + loss_smooth(F_yx*range_flow)      # smoothness
        loss = loss1+inverse*loss2 + antifold*loss3 + smooth*loss4
        optimizer.zero_grad()  # clear gradients for this training step
        loss.backward()        # backpropagation, compute gradients
        optimizer.step()       # apply gradients
        lossall[:,step] = np.array([loss.item(),loss1.item(),loss2.item(),loss3.item(),loss4.item()])
        sys.stdout.write("\r" + 'step "{0}" -> training loss "{1:.4f}" - sim "{2:.4f}" - inv "{3:.4f}" \
- ant "{4:.4f}" -smo "{5:.4f}" '.format(step, loss.item(),loss1.item(),loss2.item(),loss3.item(),loss4.item()))
        sys.stdout.flush()
        if(step % n_checkpoint == 0):
            modelname = model_dir + '/' + str(step) + '.pth'
            torch.save(model.state_dict(), modelname)
        step+=1
    np.save(model_dir+'/loss.npy',lossall)
if __name__ == '__main__':
    # Globals consumed by train(): the fixed volume shape and the maximum
    # voxel displacement used to scale the network's tanh output.
    imgshape = (144, 192, 160)
    range_flow = 7
    train()
3D | zhangjun001/ICNet | Code/Functions.py | .py | 2,001 | 69 | import SimpleITK as sitk
import numpy as np
import torch.utils.data as Data
def generate_grid(imgshape):
    """Build an integer coordinate grid of shape ``imgshape + (3,)``.

    The last axis stores the voxel coordinates in *reversed* axis order:
    ``grid[i, j, k] == [k, j, i]``, which is the layout the flow-warping
    code expects.
    """
    axis0, axis1, axis2 = np.meshgrid(np.arange(imgshape[0]),
                                      np.arange(imgshape[1]),
                                      np.arange(imgshape[2]),
                                      indexing='ij')
    # Stack the per-axis index volumes channel-last, reversed.
    return np.stack((axis2, axis1, axis0), axis=3)
def load_4D(name):
    """Read an image file and return it as a float32 array with a leading
    channel axis, i.e. shape (1, D, H, W) for a 3-D volume."""
    X = sitk.GetArrayFromImage(sitk.ReadImage(name, sitk.sitkFloat32 ))
    X = np.reshape(X, (1,)+ X.shape)  # prepend the channel axis
    return X
def load_5D(name):
    """Read an image file and return it as a float32 array with leading
    batch and channel axes, i.e. shape (1, 1, D, H, W) for a 3-D volume."""
    X = sitk.GetArrayFromImage(sitk.ReadImage(name, sitk.sitkFloat32 ))
    X = np.reshape(X, (1,)+(1,)+ X.shape)  # prepend batch and channel axes
    return X
def imgnorm(N_I, index1=0.0001, index2=0.0001):
    """Robust min-max normalization of an image to [0, 1].

    The lower/upper bounds are taken at the `index1` / `index2` fractional
    positions of the sorted intensities, which discards extreme outliers
    before rescaling; values outside the range are clipped.

    Parameters
    ----------
    N_I : np.ndarray
        Input image, any shape.
    index1, index2 : float
        Fraction of values to clip at the low / high end.

    Returns
    -------
    np.ndarray
        float32 array with values in [0, 1].
    """
    I_sort = np.sort(N_I.flatten())
    I_min = I_sort[int(index1 * len(I_sort))]
    # Bug fix: for small inputs int(index2 * len) is 0 and the former
    # I_sort[-0] aliased I_sort[0] (the minimum), producing a zero-width
    # range and NaN/Inf output. Fall back to the true maximum instead.
    hi = int(index2 * len(I_sort))
    I_max = I_sort[-hi] if hi > 0 else I_sort[-1]
    N_I = 1.0 * (N_I - I_min) / (I_max - I_min)
    N_I[N_I > 1.0] = 1.0
    N_I[N_I < 0.0] = 0.0
    N_I2 = N_I.astype(np.float32)
    return N_I2
def Norm_Zscore(img):
    """Standardize an image to zero mean and unit variance (z-score)."""
    return (img - np.mean(img)) / np.std(img)
def save_img(I_img,savename):
    """Write a scalar volume to disk; the format is inferred from `savename`."""
    I2 = sitk.GetImageFromArray(I_img,isVector=False)
    sitk.WriteImage(I2,savename)
def save_flow(I_img,savename):
    """Write a vector field (last axis = components) to disk."""
    I2 = sitk.GetImageFromArray(I_img,isVector=True)
    sitk.WriteImage(I2,savename)
class Dataset(Data.Dataset):
    """Random-pair dataset: each item is two distinct images from `names`.

    The nominal length equals the number of training iterations, so a
    single DataLoader pass drives the whole training loop; the requested
    index is ignored and a fresh random pair is drawn every time.
    """
    def __init__(self, names,iterations,norm=True):
        'Initialization'
        self.names = names            # list of image file paths
        self.norm = norm              # apply robust min-max + z-score normalization
        self.iterations = iterations  # nominal dataset length
    def __len__(self):
        'Denotes the total number of samples'
        return self.iterations
    def __getitem__(self, step):
        'Generates one sample of data'
        # Select sample: a permutation guarantees two *distinct* images.
        index_pair = np.random.permutation(len(self.names)) [0:2]
        img_A = load_4D(self.names[index_pair[0]])
        img_B = load_4D(self.names[index_pair[1]])
        if self.norm:
            return Norm_Zscore(imgnorm(img_A)) , Norm_Zscore(imgnorm(img_B))
        else:
            return img_A, img_B
| Python |
3D | zhangjun001/ICNet | Code/Models.py | .py | 6,569 | 160 | import torch
import torch.nn as nn
import torch.nn.functional as F
class ModelFlow_stride(nn.Module):
    """3-D U-Net-style registration network.

    Takes two volumes concatenated on the channel axis and predicts a
    dense `n_classes`-channel flow field in [-1, 1] (tanh output), to be
    scaled by `range_flow` by the caller.
    """
    def __init__(self, in_channel, n_classes,start_channel):
        """
        in_channel    : number of input channels (2: moving + fixed volume).
        n_classes     : number of output channels (3: one per displacement axis).
        start_channel : width of the first encoder level; deeper levels scale it.
        """
        self.in_channel = in_channel
        self.n_classes = n_classes
        self.start_channel = start_channel
        super(ModelFlow_stride, self).__init__()
        # Encoder: stride-2 convolutions (ec2/ec4/ec6/ec8) halve resolution.
        self.eninput = self.encoder(self.in_channel, self.start_channel, bias=False)
        self.ec1 = self.encoder(self.start_channel, self.start_channel, bias=False)
        self.ec2 = self.encoder(self.start_channel, self.start_channel*2, stride=2, bias=False)
        self.ec3 = self.encoder(self.start_channel*2, self.start_channel*2, bias=False)
        self.ec4 = self.encoder(self.start_channel*2, self.start_channel*4, stride=2, bias=False)
        self.ec5 = self.encoder(self.start_channel*4, self.start_channel*4, bias=False)
        self.ec6 = self.encoder(self.start_channel*4, self.start_channel*8, stride=2, bias=False)
        self.ec7 = self.encoder(self.start_channel*8, self.start_channel*8, bias=False)
        self.ec8 = self.encoder(self.start_channel*8, self.start_channel*16, stride=2, bias=False)
        self.ec9 = self.encoder(self.start_channel*16, self.start_channel*8, bias=False)
        # Decoder: each dcN pair refines the concatenation of the upsampled
        # deeper features with the matching encoder skip connection.
        self.dc1 = self.encoder(self.start_channel*8+self.start_channel*8, self.start_channel*8, kernel_size=3, stride=1, bias=False)
        self.dc2 = self.encoder(self.start_channel*8, self.start_channel*4, kernel_size=3, stride=1, bias=False)
        self.dc3 = self.encoder(self.start_channel*4+self.start_channel*4, self.start_channel*4, kernel_size=3, stride=1, bias=False)
        self.dc4 = self.encoder(self.start_channel*4, self.start_channel*2, kernel_size=3, stride=1, bias=False)
        self.dc5 = self.encoder(self.start_channel*2+self.start_channel*2, self.start_channel*4, kernel_size=3, stride=1, bias=False)
        self.dc6 = self.encoder(self.start_channel*4, self.start_channel*2, kernel_size=3, stride=1, bias=False)
        self.dc7 = self.encoder(self.start_channel*2+self.start_channel*1, self.start_channel*2, kernel_size=3, stride=1, bias=False)
        self.dc8 = self.encoder(self.start_channel*2, self.start_channel*2, kernel_size=3, stride=1, bias=False)
        # Final 1x1x1 projection to the flow channels, tanh-bounded.
        self.dc9 = self.outputs(self.start_channel*2, self.n_classes, kernel_size=1, stride=1,padding=0, bias=False)
        # Transposed convolutions used to upsample between decoder levels.
        self.up1 = self.decoder(self.start_channel*8, self.start_channel*8)
        self.up2 = self.decoder(self.start_channel*4, self.start_channel*4)
        self.up3 = self.decoder(self.start_channel*2, self.start_channel*2)
        self.up4 = self.decoder(self.start_channel*2, self.start_channel*2)
    def encoder(self, in_channels, out_channels, kernel_size=3, stride=1, padding=1,
                bias=False, batchnorm=False):
        """Conv3d (+ optional BatchNorm) + ReLU building block."""
        if batchnorm:
            layer = nn.Sequential(
                nn.Conv3d(in_channels, out_channels, kernel_size, stride=stride, padding=padding, bias=bias),
                nn.BatchNorm3d(out_channels),
                nn.ReLU())
        else:
            layer = nn.Sequential(
                nn.Conv3d(in_channels, out_channels, kernel_size, stride=stride, padding=padding, bias=bias),
                nn.ReLU())
        return layer
    def decoder(self, in_channels, out_channels, kernel_size=2, stride=2, padding=0,
                output_padding=0, bias=True):
        """ConvTranspose3d + ReLU upsampling block (doubles resolution)."""
        layer = nn.Sequential(
            nn.ConvTranspose3d(in_channels, out_channels, kernel_size, stride=stride,
                               padding=padding, output_padding=output_padding, bias=bias),
            nn.ReLU())
        return layer
    def outputs(self, in_channels, out_channels, kernel_size=3, stride=1, padding=0,
                bias=False, batchnorm=False):
        """Conv3d (+ optional BatchNorm) + Tanh output head, range (-1, 1)."""
        if batchnorm:
            layer = nn.Sequential(
                nn.Conv3d(in_channels, out_channels, kernel_size, stride=stride, padding=padding, bias=bias),
                nn.BatchNorm3d(out_channels),
                nn.Tanh())
        else:
            layer = nn.Sequential(
                nn.Conv3d(in_channels, out_channels, kernel_size, stride=stride, padding=padding, bias=bias),
                nn.Tanh())
        return layer
    def forward(self, x,y):
        """Predict the flow warping x towards y; both are (B, 1, D, H, W)."""
        x_in=torch.cat((x, y), 1)  # stack the two volumes channel-wise
        # Encoder path (e1..e4 at 1/2, 1/4, 1/8, 1/16 resolution).
        e0 = self.eninput(x_in)
        e0 = self.ec1(e0)
        e1 = self.ec2(e0)
        e1 = self.ec3(e1)
        e2 = self.ec4(e1)
        e2 = self.ec5(e2)
        e3 = self.ec6(e2)
        e3 = self.ec7(e3)
        e4 = self.ec8(e3)
        e4 = self.ec9(e4)
        # Decoder path with skip connections from the encoder.
        d0 = torch.cat((self.up1(e4), e3), 1)
        d0 = self.dc1(d0)
        d0 = self.dc2(d0)
        d1 = torch.cat((self.up2(d0), e2), 1)
        d1 = self.dc3(d1)
        d1 = self.dc4(d1)
        d2 = torch.cat((self.up3(d1), e1), 1)
        d2 = self.dc5(d2)
        d2 = self.dc6(d2)
        d3 = torch.cat((self.up4(d2), e0), 1)
        d3 = self.dc7(d3)
        d3 = self.dc8(d3)
        d3 = self.dc9(d3)  # (B, n_classes, D, H, W) flow in [-1, 1]
        return d3
class SpatialTransform(nn.Module):
def __init__(self):
super(SpatialTransform, self).__init__()
def forward(self, x,flow,sample_grid):
sample_grid = sample_grid+flow
size_tensor = sample_grid.size()
sample_grid[0,:,:,:,0] = (sample_grid[0,:,:,:,0]-((size_tensor[3]-1)/2))/size_tensor[3]*2
sample_grid[0,:,:,:,1] = (sample_grid[0,:,:,:,1]-((size_tensor[2]-1)/2))/size_tensor[2]*2
sample_grid[0,:,:,:,2] = (sample_grid[0,:,:,:,2]-((size_tensor[1]-1)/2))/size_tensor[1]*2
flow = torch.nn.functional.grid_sample(x, sample_grid,mode = 'bilinear')
return flow
def antifoldloss(y_pred):
    """Anti-folding penalty: neighbor gaps larger than 1 voxel contribute cubically."""
    dy = y_pred[:, :, :-1, :, :] - y_pred[:, :, 1:, :, :] - 1
    dx = y_pred[:, :, :, :-1, :] - y_pred[:, :, :, 1:, :] - 1
    dz = y_pred[:, :, :, :, :-1] - y_pred[:, :, :, :, 1:] - 1
    # relu gates negative gaps to zero; |d*d| makes the surviving term cubic in d.
    penalties = [F.relu(d) * torch.abs(d * d) for d in (dx, dy, dz)]
    return sum(torch.mean(p) for p in penalties) / 3.0
def smoothloss(y_pred):
    """Mean squared first-order finite difference of the field along each spatial axis."""
    grad_h = y_pred[:, :, :, 1:, :] - y_pred[:, :, :, :-1, :]
    grad_d = y_pred[:, :, 1:, :, :] - y_pred[:, :, :-1, :, :]
    grad_w = y_pred[:, :, :, :, 1:] - y_pred[:, :, :, :, :-1]
    # abs() before squaring in the original was redundant; squaring alone is identical.
    return (grad_h.pow(2).mean() + grad_d.pow(2).mean() + grad_w.pow(2).mean()) / 3.0
def mse_loss(input, target):
    """Mean squared error between two tensors with the same number of elements.

    NOTE: the original flattened `input` as y_true and `target` as y_pred —
    swapped relative to convention — but MSE is symmetric so the value is identical.
    """
    residual = input.view(-1) - target.view(-1)
    return (residual * residual).mean()
| Python |
3D | zhangjun001/ICNet | Code/Test.py | .py | 3,525 | 84 | import os
from argparse import ArgumentParser
import numpy as np
import torch
from torch.autograd import Variable
from Models import ModelFlow_stride,SpatialTransform
from Functions import generate_grid,load_5D,save_img,save_flow
import timeit
# Command-line interface for the registration test script.
parser = ArgumentParser()
parser.add_argument("--modelpath", type=str,
                    dest="modelpath", default='../Model/model.pth',
                    help="path to the trained model checkpoint")  # fixed: help text was a copy-paste error ("frequency of saving models")
parser.add_argument("--savepath", type=str,
                    dest="savepath", default='../Result',
                    help="path for saving images")
parser.add_argument("--start_channel", type=int,
                    dest="start_channel", default=8,
                    help="number of start channels")
parser.add_argument("--fixed", type=str,
                    dest="fixed", default='../Dataset/image_A.nii.gz',
                    help="fixed image")
parser.add_argument("--moving", type=str,
                    dest="moving", default='../Dataset/image_B.nii.gz',
                    help="moving image")
opt = parser.parse_args()
savepath = opt.savepath
# Create the output directory up front so the saves in test() cannot fail on a missing path.
if not os.path.isdir(savepath):
    os.mkdir(savepath)
def test():
    """Run symmetric registration A<->B with a trained model and save warped images and flows.

    Relies on module globals set under __main__: `imgshape` (volume shape) and
    `range_flow` (scale from the network's normalized flow to voxel units —
    presumably; TODO confirm against training code). Requires a CUDA device.
    """
    model =ModelFlow_stride(2,3,opt.start_channel).cuda()
    transform = SpatialTransform().cuda()
    model.load_state_dict(torch.load(opt.modelpath))
    model.eval()
    transform.eval()
    # Base sampling grid of absolute voxel coordinates, reshaped to (1, ...) batch form.
    grid = generate_grid(imgshape)
    grid = Variable(torch.from_numpy(np.reshape(grid, (1,) + grid.shape))).cuda().float()
    start = timeit.default_timer()
    A = Variable(torch.from_numpy( load_5D(opt.fixed))).cuda().float()
    B = Variable(torch.from_numpy( load_5D(opt.moving))).cuda().float()
    start2 = timeit.default_timer()
    print('Time for loading data: ', start2 - start)
    # Forward and backward flow predictions (A->B and B->A).
    pred_AB = model(A,B)
    start2 = timeit.default_timer() # record only one forward pass
    pred_BA = model(B,A)
    # Channels-last flow, scaled to voxel units for saving.
    F_AB = pred_AB.permute(0,2,3,4,1).data.cpu().numpy()[0, :, :, :, :]
    F_AB = F_AB.astype(np.float32)*range_flow
    warped_A = transform(A,pred_AB.permute(0,2,3,4,1)*range_flow,grid).data.cpu().numpy()[0, 0, :, :, :]
    start3 = timeit.default_timer()
    print('Time for registration: ', start3 - start2)
    # Approximate inverse flow: warp the negated reverse flow by the forward flow.
    warped_F_AB = transform(-pred_BA,pred_AB.permute(0,2,3,4,1)*range_flow,grid).permute(0,2,3,4,1).data.cpu().numpy()[0, :, :, :, :]
    warped_F_AB = warped_F_AB.astype(np.float32)*range_flow
    start4 = timeit.default_timer()
    print('Time for generating inverse flow: ', start4 - start3)
    save_flow(F_AB,savepath+'/flow_A_B.nii.gz')
    save_flow(warped_F_AB,savepath+'/inverse_flow_A_B.nii.gz')
    save_img(warped_A,savepath+'/warped_A.nii.gz')
    start5 = timeit.default_timer()
    print('Time for saving results: ', start5 - start4)
    # Same pipeline in the opposite direction (B registered to A).
    F_BA = pred_BA.permute(0,2,3,4,1).data.cpu().numpy()[0, :, :, :, :]
    F_BA = F_BA.astype(np.float32)*range_flow
    warped_B = transform(B,pred_BA.permute(0,2,3,4,1)*range_flow,grid).data.cpu().numpy()[0, 0, :, :, :]
    warped_F_BA = transform(-pred_AB,pred_BA.permute(0,2,3,4,1)*range_flow,grid).permute(0,2,3,4,1).data.cpu().numpy()[0, :, :, :, :]
    warped_F_BA = warped_F_BA.astype(np.float32)*range_flow
    save_flow(F_BA,savepath+'/flow_B_A.nii.gz')
    save_flow(warped_F_BA,savepath+'/inverse_flow_B_A.nii.gz')
    save_img(warped_B,savepath+'/warped_B.nii.gz')
if __name__ == '__main__':
    # Globals consumed by test(): volume shape and flow scale.
    imgshape = (144, 192, 160)  # must match the shape produced by load_5D/generate_grid — TODO confirm
    range_flow = 7  # scale factor from normalized network flow to voxel displacement
    test()
| Python |
3D | yuhui-zh15/TransSeg | src/model.py | .py | 15,811 | 405 | import json
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
import pytorch_lightning as pl
import datetime
import pickle
from utils import (
eval_metrics,
eval_metrics_per_img,
get_img_num_slices,
to_list,
get_linear_schedule_with_warmup,
)
from monai.losses import DiceCELoss, DiceFocalLoss
from monai.networks.nets.vit import ViT
from backbones.encoders.beit3d import BEiT3D
from backbones.encoders.swin_transformer import SwinTransformer
from backbones.encoders.swin_transformer_3d import SwinTransformer3D
from backbones.encoders.dino3d import VisionTransformer3D
from backbones.decoders.upernet import UPerHead
from backbones.decoders.setrpup import SetrPupHead
from backbones.decoders.convtrans import ConvTransHead
from backbones.decoders.unetr import UnetrHead
class SegmentationModel(pl.LightningModule):
def __init__(
self,
force_2d: bool = False, # if set to True, the model will be trained on 2D images by only using the center slice as the input
use_pretrained: bool = True, # whether to use pretrained backbone (only applied to BEiT)
bootstrap_method: str = "centering", # whether to inflate or center weights from 2D to 3D
in_channels: int = 1,
out_channels: int = 14, # number of classes
patch_size: int = 16, # no depthwise
img_size: tuple = (512, 512, 5),
hidden_size: int = 768,
mlp_dim: int = 3072,
num_heads: int = 12,
num_layers: int = 12,
encoder: str = "beit",
decoder: str = "upernet",
loss_type: str = "ce",
save_preds: bool = False,
dropout_rate: float = 0.0,
learning_rate: float = 1e-4,
weight_decay: float = 1e-5,
warmup_steps: int = 500,
max_steps: int = 20000,
adam_epsilon: float = 1e-8,
):
super().__init__()
self.modified_loss = (
True # TODO: set True to debug (need to modify MONAI codes)
)
self.save_hyperparameters()
self.feat_size = (img_size[0] // patch_size, img_size[1] // patch_size, 1)
if encoder == "vit":
self.encoder = ViT(
in_channels=in_channels,
img_size=img_size if not force_2d else (img_size[0], img_size[1], 1),
patch_size=(patch_size, patch_size, img_size[-1])
if not force_2d
else (patch_size, patch_size, 1),
hidden_size=hidden_size,
mlp_dim=mlp_dim,
num_layers=num_layers,
num_heads=num_heads,
pos_embed="perceptron",
classification=False,
dropout_rate=dropout_rate,
)
elif encoder == "beit":
self.encoder = BEiT3D(
img_size=img_size if not force_2d else (img_size[0], img_size[1], 1),
patch_size=(patch_size, patch_size, img_size[-1])
if not force_2d
else (patch_size, patch_size, 1),
in_chans=in_channels,
embed_dim=hidden_size,
depth=num_layers,
num_heads=num_heads,
mlp_ratio=mlp_dim // hidden_size,
qkv_bias=True,
init_values=1,
use_abs_pos_emb=False,
use_rel_pos_bias=True,
)
if use_pretrained:
self.encoder.init_weights(bootstrap_method=bootstrap_method)
elif encoder == "swint":
self.encoder = SwinTransformer(
pretrain_img_size=(img_size[2], img_size[0], img_size[1])
if not force_2d
else (1, img_size[0], img_size[1]),
patch_size=(img_size[2], 4, 4) if not force_2d else (1, 4, 4),
in_chans=in_channels,
embed_dim=128,
depths=[2, 2, 18, 2],
num_heads=[4, 8, 16, 32],
window_size=7,
mlp_ratio=4.0,
qkv_bias=True,
qk_scale=None,
drop_rate=0.0,
attn_drop_rate=0.0,
drop_path_rate=0.3,
ape=False,
patch_norm=True,
out_indices=(0, 1, 2, 3),
use_checkpoint=False,
)
if use_pretrained:
self.encoder.init_weights(bootstrap_method=bootstrap_method)
elif encoder == "dino":
self.encoder = VisionTransformer3D(
img_size=img_size if not force_2d else (img_size[0], img_size[1], 1),
patch_size=(patch_size, patch_size, img_size[-1])
if not force_2d
else (patch_size, patch_size, 1),
in_chans=in_channels,
embed_dim=768,
depth=12,
num_heads=12,
mlp_ratio=4,
qkv_bias=True,
norm_layer=partial(nn.LayerNorm, eps=1e-6),
)
if use_pretrained:
self.encoder.init_weights(bootstrap_method=bootstrap_method)
elif encoder == "swint3d":
self.encoder = SwinTransformer3D(
pretrained2d=False,
patch_size=(2, 4, 4),
embed_dim=128,
depths=[2, 2, 18, 2],
num_heads=[4, 8, 16, 32],
window_size=(8, 7, 7),
mlp_ratio=4.0,
qkv_bias=True,
qk_scale=None,
drop_rate=0.0,
attn_drop_rate=0.0,
drop_path_rate=0.2,
patch_norm=True,
in_chans=in_channels,
)
if use_pretrained:
self.encoder.init_weights()
else:
raise
if decoder == "upernet":
self.decoder = UPerHead(
layer_idxs=[3, 5, 7, 11],
in_channels=[hidden_size, hidden_size, hidden_size, hidden_size],
channels=hidden_size,
num_classes=out_channels,
dropout_ratio=0.1,
fpns=True,
)
elif decoder == "upernet-swint":
self.decoder = UPerHead(
layer_idxs=[0, 1, 2, 3],
in_channels=[128, 256, 512, 1024],
channels=512,
num_classes=out_channels,
dropout_ratio=0.1,
fpns=False,
)
elif decoder == "setrpup":
self.decoder = SetrPupHead(
channels=hidden_size, num_classes=out_channels, norm_name="instance"
)
elif decoder == "convtrans":
self.decoder = ConvTransHead(
channels=hidden_size, num_classes=out_channels, norm_name="instance"
)
elif decoder == "unetr":
self.decoder = UnetrHead()
else:
raise
if loss_type == "dicece":
self.criterion = DiceCELoss(to_onehot_y=True, softmax=True)
elif loss_type == "dicefocal":
self.criterion = DiceFocalLoss(to_onehot_y=True, softmax=True)
elif loss_type == "ce":
self.criterion = DiceCELoss(to_onehot_y=True, softmax=True, lambda_dice=0)
else:
raise
    def forward(self, inputs):  # inputs: B x Cin x H x W x D
        """Encode a (B, Cin, H, W, D) volume and decode it to per-pixel class logits."""
        # Move depth next to channels — (B, Cin, D, H, W) — as the 3D encoders expect.
        x = inputs.permute(0, 1, 4, 2, 3).contiguous().float()  # x: B x Cin x D x H x W
        xs = self.encoder(x)  # hiddens: list of B x T x hidden, where T = H/P x W/P
        # ViT-style encoders return flat token sequences; reshape each to a 2D feature
        # map. Swin encoders already return spatial maps, so they skip this step.
        if self.hparams.encoder not in ["swint", "swint3d"]:
            xs = [
                xs[i]
                .view(inputs.shape[0], self.feat_size[0], self.feat_size[1], -1)
                .permute(0, 3, 1, 2)
                .contiguous()
                for i in range(len(xs))
            ]  # xs: list of B x hidden x H/P x W/P
        x = self.decoder(xs)  # x: B x Cout x H x W
        return x
def training_step(self, batch, batch_idx):
inputs, labels = batch["image"], batch["label"]
n_slices = inputs.shape[-1]
assert n_slices == self.hparams.img_size[-1]
if self.hparams.force_2d:
inputs = inputs[:, :, :, :, n_slices // 2 : n_slices // 2 + 1].contiguous()
labels = labels[:, :, :, :, n_slices // 2].contiguous()
outputs = self(inputs)
if self.modified_loss:
loss, (dice_loss, ce_loss) = self.criterion(outputs, labels)
else:
loss = self.criterion(outputs, labels)
dice_loss, ce_loss = torch.tensor(0), torch.tensor(0)
result = {
"train/loss": loss.item(),
"train/dice_loss": dice_loss.item(),
"train/ce_loss": ce_loss.item(),
}
self.log_dict(result)
return loss
def validation_step(self, batch, batch_idx):
inputs, labels = batch["image"], batch["label"]
n_slices = inputs.shape[-1]
assert n_slices == self.hparams.img_size[-1]
if self.hparams.force_2d:
inputs = inputs[:, :, :, :, n_slices // 2 : n_slices // 2 + 1].contiguous()
labels = labels[:, :, :, :, n_slices // 2].contiguous()
outputs = self(inputs)
if self.modified_loss:
loss, (dice_loss, ce_loss) = self.criterion(outputs, labels)
else:
loss = self.criterion(outputs, labels)
dice_loss, ce_loss = torch.tensor(0), torch.tensor(0)
return {
"loss": loss.item(),
"dice_loss": dice_loss.item(),
"ce_loss": ce_loss.item(),
"labels": to_list(labels.squeeze(dim=1)),
"preds": to_list(outputs.argmax(dim=1)),
}
    def validation_epoch_end(self, outputs):
        """Aggregate per-step outputs into epoch metrics; optionally dump predictions to disk."""
        loss = np.array([x["loss"] for x in outputs]).mean()
        dice_loss = np.array([x["dice_loss"] for x in outputs]).mean()
        ce_loss = np.array([x["ce_loss"] for x in outputs]).mean()
        labels = [label for x in outputs for label in x["labels"]]  # N of image shape
        preds = [pred for x in outputs for pred in x["preds"]]  # N of image shape
        # Raw inputs are not collected during validation; placeholders keep dump records aligned.
        inputs = [None] * len(preds)
        acc, accs, ious, dices = eval_metrics(
            preds, labels, self.hparams.out_channels, metrics=["mIoU", "mDice"]
        )
        result = {
            "val/loss": loss,
            "val/dice_loss": dice_loss,
            "val/ce_loss": ce_loss,
            "val/acc": acc,
            "val/macc": accs.mean(),
            "val/miou": ious.mean(),
            "val/mdice": dices.mean(),
            "val/mdice_nobg": dices[1:].mean(),  # exclude background class 0
        }
        if len(dices) == 14:
            # 14 classes => presumably the BCV dataset; also report its 8-organ subset — TODO confirm indices
            print(dices[[8, 4, 3, 2, 6, 11, 1, 7]])
            dice_8 = dices[[1, 2, 3, 4, 6, 7, 8, 11]].mean()
            result["val/mdice_8"] = dice_8
        self.log_dict(result, sync_dist=True)
        if self.hparams.save_preds:
            cur_time = datetime.datetime.today().strftime("%Y%m%d-%H%M%S")
            with open(f"dumps/val-{cur_time}.pkl", "wb") as fout:
                pickler = pickle.Pickler(fout)
                for input, pred, label in zip(inputs, preds, labels):
                    pickler.dump({"input": input, "pred": pred, "label": label})
        return loss
    def test_step(self, batch, batch_idx):
        """Predict on a test batch; compute losses too when ground-truth labels are present."""
        inputs = batch["image"]
        n_slices = inputs.shape[-1]
        assert n_slices == self.hparams.img_size[-1]
        if self.hparams.force_2d:
            inputs = inputs[:, :, :, :, n_slices // 2 : n_slices // 2 + 1].contiguous()
        outputs = self(inputs)
        if "label" in batch:
            labels = batch["label"]
            # NOTE(review): labels are reduced to the center slice unconditionally here,
            # whereas validation_step does so only when force_2d is set — confirm intended.
            labels = labels[:, :, :, :, n_slices // 2].contiguous()
            if self.modified_loss:
                loss, (dice_loss, ce_loss) = self.criterion(outputs, labels)
            else:
                loss = self.criterion(outputs, labels)
                dice_loss, ce_loss = torch.tensor(0), torch.tensor(0)
            return {
                "loss": loss.item(),
                "dice_loss": dice_loss.item(),
                "ce_loss": ce_loss.item(),
                "labels": to_list(labels.squeeze(dim=1)),
                "preds": to_list(outputs.argmax(dim=1)),
            }
        else:
            # Unlabeled test data: only predictions can be produced.
            return {
                "preds": to_list(outputs.argmax(dim=1)),
            }
    def test_epoch_end(self, outputs):
        """Aggregate test predictions; compute metrics when labels were present, optionally dump."""
        preds = [pred for x in outputs for pred in x["preds"]]  # N of image shape
        # Raw inputs are not collected; placeholders keep dump records aligned.
        inputs = [None] * len(preds)
        if "labels" in outputs[0]:
            loss = np.array([x["loss"] for x in outputs]).mean()
            dice_loss = np.array([x["dice_loss"] for x in outputs]).mean()
            ce_loss = np.array([x["ce_loss"] for x in outputs]).mean()
            labels = [
                label for x in outputs for label in x["labels"]
            ]  # N of image shape
            acc, accs, ious, dices = eval_metrics(
                preds, labels, self.hparams.out_channels, metrics=["mIoU", "mDice"]
            )
            result = {
                "test/loss": loss,
                "test/dice_loss": dice_loss,
                "test/ce_loss": ce_loss,
                "test/acc": acc,
                "test/macc": accs.mean(),
                "test/miou": ious.mean(),
                "test/mdice": dices.mean(),
                "test/mdice_nobg": dices[1:].mean(),  # exclude background class 0
            }
            if len(dices) == 14:
                # 14 classes => presumably BCV; also report its 8-organ subset — TODO confirm indices
                print(dices[[8, 4, 3, 2, 6, 11, 1, 7]])
                dice_8 = dices[[1, 2, 3, 4, 6, 7, 8, 11]].mean()
                result["test/mdice_8"] = dice_8
            self.log_dict(result, sync_dist=True)
            if self.hparams.save_preds:
                cur_time = datetime.datetime.today().strftime("%Y%m%d-%H%M%S")
                with open(f"dumps/test-{cur_time}.pkl", "wb") as fout:
                    pickler = pickle.Pickler(fout)
                    for input, pred, label in zip(inputs, preds, labels):
                        pickler.dump({"input": input, "pred": pred, "label": label})
            return loss
        else:
            # NOTE(review): this branch implicitly returns None (no loss without labels).
            assert self.hparams.save_preds
            cur_time = datetime.datetime.today().strftime("%Y%m%d-%H%M%S")
            with open(f"dumps/test-{cur_time}.pkl", "wb") as fout:
                pickler = pickle.Pickler(fout)
                for input, pred in zip(inputs, preds):
                    pickler.dump({"input": input, "pred": pred})
def configure_optimizers(self):
"Prepare optimizer and schedule (linear warmup and decay)"
model = nn.ModuleList([self.encoder, self.decoder])
no_decay = ["bias", "LayerNorm.weight"]
optimizer_grouped_parameters = [
{
"params": [
p
for n, p in model.named_parameters()
if not any(nd in n for nd in no_decay)
],
"weight_decay": self.hparams.weight_decay,
},
{
"params": [
p
for n, p in model.named_parameters()
if any(nd in n for nd in no_decay)
],
"weight_decay": 0.0,
},
]
optimizer = torch.optim.AdamW(
optimizer_grouped_parameters,
lr=self.hparams.learning_rate,
eps=self.hparams.adam_epsilon,
)
scheduler = get_linear_schedule_with_warmup(
optimizer,
num_warmup_steps=self.hparams.warmup_steps,
num_training_steps=self.hparams.max_steps,
)
scheduler = {"scheduler": scheduler, "interval": "step", "frequency": 1}
return [optimizer], [scheduler]
if __name__ == "__main__":
model = SegmentationModel()
| Python |
3D | yuhui-zh15/TransSeg | src/compute_flops.py | .py | 3,016 | 92 | import logging
import sys
import json
from argparse import ArgumentParser
import pytorch_lightning as pl
from data import NIIDataLoader
from model import SegmentationModel
import utils
import torch
from torchprofile import profile_macs
def parse_args(args=None):
    """Build and parse the command-line arguments for the FLOPs computation.

    Args:
        args: optional list of argument strings; None parses sys.argv.
    """
    p = ArgumentParser()
    # Required parameters for the model module.
    p.add_argument("--force_2d", default=0, type=int)
    p.add_argument("--use_pretrained", default=0, type=int)
    p.add_argument("--bootstrap_method", default="centering", type=str)
    p.add_argument("--in_channels", default=1, type=int)
    p.add_argument("--out_channels", default=14, type=int)
    p.add_argument("--patch_size", default=16, type=int)
    p.add_argument("--img_size", default=(512, 512, 5), type=int, nargs="+")
    p.add_argument("--encoder", default="beit", type=str)
    p.add_argument("--decoder", default="upernet", type=str)
    p.add_argument("--loss_type", default="dicefocal", type=str)
    p.add_argument("--dropout_rate", default=0.0, type=float)
    p.add_argument("--learning_rate", default=3e-5, type=float)
    p.add_argument("--weight_decay", default=0.05, type=float)
    p.add_argument("--warmup_steps", default=20, type=int)
    p.add_argument("--max_steps", default=25000, type=int)
    return p.parse_args(args)
def compute_flops_2d(args):
    """Profile the encoder's MACs for a single 2D (center-slice) input and print FLOPs."""
    model = SegmentationModel(
        force_2d=1,
        use_pretrained=args.use_pretrained,
        bootstrap_method=args.bootstrap_method,
        in_channels=args.in_channels,
        out_channels=args.out_channels,
        patch_size=args.patch_size,
        img_size=args.img_size,
        encoder=args.encoder,
        decoder=args.decoder,
        loss_type=args.loss_type,
        dropout_rate=args.dropout_rate,
        learning_rate=args.learning_rate,
        weight_decay=args.weight_decay,
        warmup_steps=args.warmup_steps,
        max_steps=args.max_steps,
    )
    model.eval()
    # Encoder expects (B, Cin, D, H, W); a single slice means D == 1.
    inputs = torch.randn(1, 1, 1, 512, 512)
    macs = profile_macs(model.encoder, inputs)
    print("2D FLOPS:", macs * 2)  # 1 MAC = 2 FLOPs (multiply + add)
def compute_flops_3d(args):
    """Profile the encoder's MACs for a 5-slice 3D input and print FLOPs."""
    model = SegmentationModel(
        force_2d=0,
        use_pretrained=args.use_pretrained,
        bootstrap_method=args.bootstrap_method,
        in_channels=args.in_channels,
        out_channels=args.out_channels,
        patch_size=args.patch_size,
        img_size=args.img_size,
        encoder=args.encoder,
        decoder=args.decoder,
        loss_type=args.loss_type,
        dropout_rate=args.dropout_rate,
        learning_rate=args.learning_rate,
        weight_decay=args.weight_decay,
        warmup_steps=args.warmup_steps,
        max_steps=args.max_steps,
    )
    model.eval()
    # Encoder expects (B, Cin, D, H, W); depth matches the default img_size[-1] == 5.
    inputs = torch.randn(1, 1, 5, 512, 512)
    macs = profile_macs(model.encoder, inputs)
    print("3D FLOPS:", macs * 2)  # 1 MAC = 2 FLOPs (multiply + add)
if __name__ == "__main__":
args = parse_args()
compute_flops_2d(args)
compute_flops_3d(args)
| Python |
3D | yuhui-zh15/TransSeg | src/data.py | .py | 7,598 | 241 | import os
import json
import pytorch_lightning as pl
import torch
from functools import partial
import random
import copy
import numpy as np
from monai.transforms import (
AsDiscrete,
AddChanneld,
Compose,
CropForegroundd,
LoadImaged,
Orientationd,
RandFlipd,
RandCropByPosNegLabeld,
RandSpatialCropSamplesd,
RandShiftIntensityd,
NormalizeIntensityd,
ScaleIntensityRanged,
Spacingd,
RandRotate90d,
ToTensord,
Resized,
RandZoomd,
RandSpatialCropd,
SpatialPadd,
MapTransform,
Randomizable,
)
from monai.data import (
DataLoader,
CacheDataset,
load_decathlon_datalist,
decollate_batch,
)
class NIIDataLoader(pl.LightningDataModule):
    def __init__(
        self,
        data_dir: str = "data/bcv30/bcv18-12-5slices/",
        split_json="dataset_5slices.json",
        img_size: tuple = (512, 512, 5),
        in_channels: int = 1,
        clip_range: tuple = (-175, 250),
        mean_std: tuple = None,
        train_batch_size: int = 2,
        eval_batch_size: int = 2,
    ):
        """Lightning DataModule for NIfTI volumes described by a decathlon-style split JSON.

        Args:
            data_dir: directory containing the split JSON (and the image paths it references).
            split_json: decathlon-format file listing training/validation(/local_test) items.
            img_size: target (H, W, D) that all volumes are resized to.
            in_channels: image channel count; a channel dim is added when it is 1.
            clip_range: intensity window (a_min, a_max) — presumably Hounsfield units for CT; TODO confirm.
            mean_std: optional normalization stats (currently unused — the transform is commented out).
            train_batch_size / eval_batch_size: per-split DataLoader batch sizes.
        """
        super().__init__()
        self.data_dir = data_dir
        self.split_json = split_json
        self.img_size = img_size
        self.in_channels = in_channels
        self.clip_range = clip_range
        self.mean_std = mean_std  # TODO: remove
        self.train_batch_size = train_batch_size
        self.eval_batch_size = eval_batch_size
        # Transform pipelines are built once and reused by setup().
        self.train_transforms, self.val_transforms = self.create_transforms()
    def create_transforms(self):
        """Build the MONAI preprocessing/augmentation pipelines.

        Returns:
            (train_transforms, val_transforms): the training pipeline adds random
            zoom/crop/flip/rotate/intensity-shift augmentation; validation only
            resizes and windows intensities.
        """
        train_transforms = Compose(
            [
                LoadImaged(keys=["image", "label"]),
                # Multi-channel images already carry a channel dim; only labels need one then.
                AddChanneld(keys=["image", "label"])
                if self.in_channels == 1
                else AddChanneld(keys=["label"]),
                Resized(
                    keys=["image", "label"],
                    spatial_size=self.img_size,
                    mode=["area", "nearest"],  # nearest keeps labels integral
                ),
                # Scale augmentation: zoom then crop back to the target size.
                RandZoomd(
                    keys=["image", "label"],
                    min_zoom=0.5,
                    max_zoom=2.0,
                    prob=1,
                    mode=["area", "nearest"],
                    keep_size=False,
                ),
                RandSpatialCropd(
                    keys=["image", "label"], roi_size=self.img_size, random_size=False
                ),
                # Window intensities to clip_range, then rescale into [0, 1].
                ScaleIntensityRanged(
                    keys=["image"],
                    a_min=self.clip_range[0],
                    a_max=self.clip_range[1],
                    b_min=0.0,
                    b_max=1.0,
                    clip=True,
                ),
                # NormalizeIntensityd(
                #     keys=["image"],
                #     subtrahend=None if self.mean_std is None else [self.mean_std[0]],
                #     divisor=None if self.mean_std is None else [self.mean_std[1]],
                #     nonzero=False,
                #     channel_wise=True,
                # ),
                RandFlipd(
                    keys=["image", "label"],
                    spatial_axis=[0],
                    prob=0.10,
                ),
                RandFlipd(
                    keys=["image", "label"],
                    spatial_axis=[1],
                    prob=0.10,
                ),
                RandFlipd(
                    keys=["image", "label"],
                    spatial_axis=[2],
                    prob=0.10,
                ),
                RandRotate90d(
                    keys=["image", "label"],
                    prob=0.10,
                    max_k=3,
                ),
                RandShiftIntensityd(
                    keys=["image"],
                    offsets=0.10,
                    prob=0.50,
                ),
                # Pad back up in case the zoom/crop produced something smaller than img_size.
                SpatialPadd(keys=["image", "label"], spatial_size=self.img_size),
                ToTensord(keys=["image", "label"]),
            ]
        )
        val_transforms = Compose(
            [
                LoadImaged(keys=["image", "label"]),
                AddChanneld(keys=["image", "label"])
                if self.in_channels == 1
                else AddChanneld(keys=["label"]),
                Resized(
                    keys=["image", "label"],
                    spatial_size=self.img_size,
                    mode=["area", "nearest"],
                ),
                ScaleIntensityRanged(
                    keys=["image"],
                    a_min=self.clip_range[0],
                    a_max=self.clip_range[1],
                    b_min=0.0,
                    b_max=1.0,
                    clip=True,
                ),
                # NormalizeIntensityd(
                #     keys=["image"],
                #     subtrahend=None if self.mean_std is None else [self.mean_std[0]],
                #     divisor=None if self.mean_std is None else [self.mean_std[1]],
                #     nonzero=False,
                #     channel_wise=True,
                # ),
                ToTensord(keys=["image", "label"]),
            ]
        )
        return train_transforms, val_transforms
    def setup(self, stage=None):
        """Load the decathlon split JSON and build cached datasets for each split.

        The "local_test" split is optional in the config; an empty test set is
        used when it is absent.
        """
        data_config_file = f"{self.data_dir}/{self.split_json}"
        data_config = json.load(open(data_config_file))
        print(f"Loading data config from {data_config_file}...")
        train_files = load_decathlon_datalist(
            data_config_file, data_list_key="training"
        )
        val_files = load_decathlon_datalist(
            data_config_file, data_list_key="validation"
        )
        if "local_test" in data_config:
            test_files = load_decathlon_datalist(
                data_config_file, data_list_key="local_test"
            )
        else:
            test_files = []
        # CacheDataset keeps up to cache_num preprocessed items in memory.
        self.train_ds = CacheDataset(
            data=train_files,
            transform=self.train_transforms,
            num_workers=6,
            cache_num=64,
        )
        self.val_ds = CacheDataset(
            data=val_files, transform=self.val_transforms, num_workers=3, cache_num=64
        )
        self.test_ds = CacheDataset(
            data=test_files, transform=self.val_transforms, num_workers=3, cache_num=64
        )
        print(
            f"# Train: {len(self.train_ds)}, # Val: {len(self.val_ds)}, # Test: {len(self.test_ds)}..."
        )
def train_dataloader(self):
return DataLoader(
self.train_ds,
batch_size=self.train_batch_size,
shuffle=True,
num_workers=6,
pin_memory=True,
)
def val_dataloader(self):
return DataLoader(
self.val_ds,
batch_size=self.eval_batch_size,
shuffle=False,
num_workers=3,
pin_memory=True,
)
def test_dataloader(self):
return DataLoader(
self.test_ds,
batch_size=self.eval_batch_size,
shuffle=False,
num_workers=3,
pin_memory=True,
)
if __name__ == "__main__":
dm = NIIDataLoader(data_dir="jsons/", split_json="dataset.json")
dm.setup()
# print(dm.train_ds[0]['image'].shape, dm.train_ds[0]['label'].shape)
print(dm.val_ds[0]["image"].shape, dm.val_ds[0]["label"].shape)
input("To be continued...")
for batch in dm.train_dataloader():
print([key for key in batch])
print(
[
(key, batch[key].shape)
for key in batch
if isinstance(batch[key], torch.Tensor)
]
)
break
| Python |
3D | yuhui-zh15/TransSeg | src/utils.py | .py | 15,253 | 433 | import json
from collections import OrderedDict
import mmcv
import numpy as np
from torch.optim.lr_scheduler import LambdaLR
# FIXME: This should have been a member var of the model class
# But putting it in utils for now to avoid interface mismatch with old checkpoints
# Format: val/test -> list(int) of number of slices per image
IMG_NUM_SLICES = None


def load_img_num_slices(data_dir, split_json):
    """Populate (once) and return the module-level IMG_NUM_SLICES cache.

    Parses label file names of the form ``*_<img_id>_<slice_id>*.nii.gz`` and
    counts, per split, how many slices each 3D image contributes.

    Fixes vs. the original:
    - It returned None on the first (populating) call and the dict only on
      subsequent calls; it now always returns the cache.
    - It raised KeyError when a split (e.g. "local_test") was absent from the
      data config; missing splits now yield an empty list, matching the
      optional-"local_test" handling in data.py.
    """
    global IMG_NUM_SLICES
    if IMG_NUM_SLICES is not None:
        return IMG_NUM_SLICES
    IMG_NUM_SLICES = dict()
    data_config = json.load(open(f"{data_dir}/{split_json}"))
    for split in ["validation", "local_test"]:
        img_id_to_num_slices = OrderedDict()
        for img_config in data_config.get(split, []):  # tolerate missing splits
            img_id, slice_id = (
                img_config["label"]
                .split("/")[-1]
                .replace(".nii.gz", "")
                .split("_")[1:3]
            )
            img_id, slice_id = int(img_id), int(slice_id)
            if img_id not in img_id_to_num_slices:
                img_id_to_num_slices[img_id] = 0
            img_id_to_num_slices[img_id] += 1
        # Preserve first-seen image order (OrderedDict) when flattening to a list.
        IMG_NUM_SLICES[split] = []
        for img_id, num_slices in img_id_to_num_slices.items():
            IMG_NUM_SLICES[split].append(num_slices)
    return IMG_NUM_SLICES  # previously fell through and returned None


def get_img_num_slices(split):
    """Return the cached slice counts for `split`; load_img_num_slices must have run."""
    return IMG_NUM_SLICES[split]
def to_list(tensor):
    """Split a tensor into a list of per-item numpy arrays along dim 0."""
    array = tensor.detach().cpu().numpy()
    return [array[idx] for idx in range(array.shape[0])]
def get_linear_schedule_with_warmup(
    optimizer, num_warmup_steps, num_training_steps, last_epoch=-1
):
    """
    Create a schedule whose learning rate increases linearly from 0 to the
    optimizer's initial lr over `num_warmup_steps`, then decreases linearly
    back to 0 by `num_training_steps`.

    Args:
        optimizer (:class:`~torch.optim.Optimizer`): the optimizer to schedule.
        num_warmup_steps (:obj:`int`): steps in the warmup phase.
        num_training_steps (:obj:`int`): total number of training steps.
        last_epoch (:obj:`int`, `optional`, defaults to -1): index of the last
            epoch when resuming training.

    Return:
        :obj:`torch.optim.lr_scheduler.LambdaLR` with the appropriate schedule.
    """

    def lr_lambda(current_step: int):
        if current_step < num_warmup_steps:
            # Linear ramp-up; max(1, ...) guards against zero warmup steps.
            return float(current_step) / float(max(1, num_warmup_steps))
        # Linear decay, clamped at 0 once training steps are exhausted.
        remaining = float(num_training_steps - current_step)
        span = float(max(1, num_training_steps - num_warmup_steps))
        return max(0.0, remaining / span)

    return LambdaLR(optimizer, lr_lambda, last_epoch)
def intersect_and_union(
    pred_label,
    label,
    num_classes,
    ignore_index,
    label_map=dict(),
    reduce_zero_label=False,
):
    """Calculate intersection and Union.

    Args:
        pred_label (ndarray): Prediction segmentation map (or path to a .npy file).
        label (ndarray): Ground truth segmentation map (or path to an image file).
        num_classes (int): Number of categories.
        ignore_index (int): Index that will be ignored in evaluation.
        label_map (dict): Mapping old labels to new labels. Default: dict().
        reduce_zero_label (bool): Whether to ignore the zero label. Default: False.

    Returns:
        ndarray: The intersection of prediction and ground truth histogram
            on all classes.
        ndarray: The union of prediction and ground truth histogram on all
            classes.
        ndarray: The prediction histogram on all classes.
        ndarray: The ground truth histogram on all classes.
    """
    if isinstance(pred_label, str):
        pred_label = np.load(pred_label)
    if isinstance(label, str):
        label = mmcv.imread(label, flag="unchanged", backend="pillow")
    # modify if custom classes
    if label_map is not None:
        for old_id, new_id in label_map.items():
            label[label == old_id] = new_id
    if reduce_zero_label:
        # avoid using underflow conversion
        label[label == 0] = 255
        label = label - 1
        label[label == 254] = 255
    # Drop ignored pixels before histogramming.
    mask = label != ignore_index
    pred_label = pred_label[mask]
    label = label[mask]
    intersect = pred_label[pred_label == label]
    area_intersect, _ = np.histogram(intersect, bins=np.arange(num_classes + 1))
    area_pred_label, _ = np.histogram(pred_label, bins=np.arange(num_classes + 1))
    area_label, _ = np.histogram(label, bins=np.arange(num_classes + 1))
    area_union = area_pred_label + area_label - area_intersect
    return area_intersect, area_union, area_pred_label, area_label


def total_intersect_and_union(
    results,
    gt_seg_maps,
    num_classes,
    ignore_index,
    label_map=dict(),
    reduce_zero_label=False,
):
    """Calculate Total Intersection and Union over a list of images.

    Args:
        results (list[ndarray]): List of prediction segmentation maps.
        gt_seg_maps (list[ndarray]): List of ground truth segmentation maps.
        num_classes (int): Number of categories.
        ignore_index (int): Index that will be ignored in evaluation.
        label_map (dict): Mapping old labels to new labels. Default: dict().
        reduce_zero_label (bool): Whether to ignore the zero label. Default: False.

    Returns:
        ndarray: The intersection of prediction and ground truth histogram
            on all classes.
        ndarray: The union of prediction and ground truth histogram on all
            classes.
        ndarray: The prediction histogram on all classes.
        ndarray: The ground truth histogram on all classes.
    """
    num_imgs = len(results)
    assert len(gt_seg_maps) == num_imgs
    # np.float64 replaces the deprecated np.float alias, which was removed in
    # NumPy 1.24 and raised AttributeError there.
    total_area_intersect = np.zeros((num_classes,), dtype=np.float64)
    total_area_union = np.zeros((num_classes,), dtype=np.float64)
    total_area_pred_label = np.zeros((num_classes,), dtype=np.float64)
    total_area_label = np.zeros((num_classes,), dtype=np.float64)
    for i in range(num_imgs):
        area_intersect, area_union, area_pred_label, area_label = intersect_and_union(
            results[i],
            gt_seg_maps[i],
            num_classes,
            ignore_index,
            label_map,
            reduce_zero_label,
        )
        total_area_intersect += area_intersect
        total_area_union += area_union
        total_area_pred_label += area_pred_label
        total_area_label += area_label
    return (
        total_area_intersect,
        total_area_union,
        total_area_pred_label,
        total_area_label,
    )
def mean_iou(
    results,
    gt_seg_maps,
    num_classes,
    ignore_index,
    nan_to_num=None,
    label_map=dict(),
    reduce_zero_label=False,
):
    """Calculate Mean Intersection over Union (mIoU).

    Thin wrapper around eval_metrics restricted to the "mIoU" metric.

    Returns:
        float: Overall accuracy on all images.
        ndarray: Per category accuracy, shape (num_classes, ).
        ndarray: Per category IoU, shape (num_classes, ).
    """
    return tuple(
        eval_metrics(
            results=results,
            gt_seg_maps=gt_seg_maps,
            num_classes=num_classes,
            ignore_index=ignore_index,
            metrics=["mIoU"],
            nan_to_num=nan_to_num,
            label_map=label_map,
            reduce_zero_label=reduce_zero_label,
        )
    )
def mean_dice(
    results,
    gt_seg_maps,
    num_classes,
    ignore_index,
    nan_to_num=None,
    label_map=dict(),
    reduce_zero_label=False,
):
    """Calculate Mean Dice (mDice).

    Thin wrapper around eval_metrics restricted to the "mDice" metric.

    Returns:
        float: Overall accuracy on all images.
        ndarray: Per category accuracy, shape (num_classes, ).
        ndarray: Per category Dice, shape (num_classes, ).
    """
    return tuple(
        eval_metrics(
            results=results,
            gt_seg_maps=gt_seg_maps,
            num_classes=num_classes,
            ignore_index=ignore_index,
            metrics=["mDice"],
            nan_to_num=nan_to_num,
            label_map=label_map,
            reduce_zero_label=reduce_zero_label,
        )
    )
def eval_metrics(
    results,
    gt_seg_maps,
    num_classes,
    ignore_index=255,
    metrics=None,
    nan_to_num=None,
    label_map=None,
    reduce_zero_label=False,
):
    """Calculate evaluation metrics over a whole dataset.

    Args:
        results (list[ndarray]): List of prediction segmentation maps.
        gt_seg_maps (list[ndarray]): List of ground truth segmentation maps.
        num_classes (int): Number of categories.
        ignore_index (int): Index that will be ignored in evaluation.
        metrics (list[str] | str | None): Metrics to be evaluated, 'mIoU'
            and/or 'mDice'. Default: None, meaning ['mIoU'].
        nan_to_num (int, optional): If specified, NaN values will be replaced
            by the numbers defined by the user. Default: None.
        label_map (dict, optional): Mapping old labels to new labels.
            Default: None (no remapping).
        reduce_zero_label (bool): Whether to ignore the zero label.
            Default: False.

    Returns:
        list: ``[all_acc, acc, <one entry per requested metric>]`` where
            ``all_acc`` (float) is the overall pixel accuracy, ``acc``
            (ndarray, shape (num_classes,)) the per-category accuracy, and
            each metric entry an ndarray of shape (num_classes,).

    Raises:
        KeyError: If an unsupported metric name is requested.
    """
    # Bug fix: the original used mutable default arguments
    # (metrics=["mIoU"], label_map=dict()); use None sentinels instead.
    # Behavior for callers is unchanged.
    if metrics is None:
        metrics = ["mIoU"]
    if label_map is None:
        label_map = dict()
    if isinstance(metrics, str):
        metrics = [metrics]
    allowed_metrics = ["mIoU", "mDice"]
    if not set(metrics).issubset(set(allowed_metrics)):
        raise KeyError("metrics {} is not supported".format(metrics))
    (
        total_area_intersect,
        total_area_union,
        total_area_pred_label,
        total_area_label,
    ) = total_intersect_and_union(
        results, gt_seg_maps, num_classes, ignore_index, label_map, reduce_zero_label
    )
    # Classes absent from both prediction and GT yield 0/0 -> NaN; callers
    # may replace those via ``nan_to_num``.
    all_acc = total_area_intersect.sum() / total_area_label.sum()
    acc = total_area_intersect / total_area_label
    ret_metrics = [all_acc, acc]
    for metric in metrics:
        if metric == "mIoU":
            iou = total_area_intersect / total_area_union
            ret_metrics.append(iou)
        elif metric == "mDice":
            dice = 2 * total_area_intersect / (total_area_pred_label + total_area_label)
            ret_metrics.append(dice)
    if nan_to_num is not None:
        ret_metrics = [np.nan_to_num(metric, nan=nan_to_num) for metric in ret_metrics]
    return ret_metrics
def eval_metrics_per_img(
    results,
    gt_seg_maps,
    num_classes,
    img_num_slices,
    ignore_index=255,
    metrics=None,
    nan_to_num=None,
    label_map=None,
    reduce_zero_label=False,
):
    """Calculate evaluation metrics, grouped by each 3D image.

    Metrics are computed independently per image (all slices of one patient
    form a group), then averaged across images with ``np.nanmean`` so an
    image with an undefined value (e.g. a class absent from both prediction
    and GT) does not poison the global mean.

    Args:
        results (list[ndarray]): List of prediction segmentation maps.
        gt_seg_maps (list[ndarray]): List of ground truth segmentation maps.
        num_classes (int): Number of categories.
        img_num_slices (list[int]): List of number of slices for each patient.
        ignore_index (int): Index that will be ignored in evaluation.
        metrics (list[str] | str | None): Metrics to be evaluated, 'mIoU'
            and/or 'mDice'. Default: None, meaning ['mIoU'].
        nan_to_num (int, optional): If specified, NaN values will be replaced
            by the numbers defined by the user. Default: None.
        label_map (dict, optional): Mapping old labels to new labels.
            Default: None (no remapping).
        reduce_zero_label (bool): Whether to ignore the zero label.
            Default: False.

    Returns:
        list: ``[all_acc, acc, <one entry per requested metric>]`` averaged
            over images; ``acc`` and metric entries have shape (num_classes,).

    Raises:
        KeyError: If an unsupported metric name is requested.
        ValueError: If ``sum(img_num_slices) != len(results)``.
    """
    # Bug fix: avoid mutable default arguments (the original used a shared
    # list/dict for ``metrics`` and ``label_map``).
    if metrics is None:
        metrics = ["mIoU"]
    if label_map is None:
        label_map = dict()
    if isinstance(metrics, str):
        metrics = [metrics]
    allowed_metrics = ["mIoU", "mDice"]
    if not set(metrics).issubset(set(allowed_metrics)):
        raise KeyError("metrics {} is not supported".format(metrics))
    if sum(img_num_slices) != len(results):
        # Bug fix: the original message concatenated to "...results.Got ..."
        # (missing space between the sentences).
        raise ValueError(
            "Total number of image slices must be equal to results. "
            f"Got {sum(img_num_slices)} != {len(results)}."
        )
    ret_all_acc = []
    ret_acc = []
    ret_iou = []
    ret_dice = []
    idx_start = 0
    for num_slices in img_num_slices:
        # Slice out this patient's contiguous chunk of 2D maps.
        idx_end = idx_start + num_slices
        img_results = results[idx_start:idx_end]
        img_gt_seg_maps = gt_seg_maps[idx_start:idx_end]
        (
            img_area_intersect,
            img_area_union,
            img_area_pred_label,
            img_area_label,
        ) = total_intersect_and_union(
            img_results,
            img_gt_seg_maps,
            num_classes,
            ignore_index,
            label_map,
            reduce_zero_label,
        )
        # 0/0 divisions produce NaN for classes absent from this image; the
        # np.nanmean calls below skip them.
        img_all_acc = img_area_intersect.sum() / img_area_label.sum()
        img_acc = img_area_intersect / img_area_label
        img_iou = img_area_intersect / img_area_union
        img_dice = 2 * img_area_intersect / (img_area_pred_label + img_area_label)
        ret_all_acc.append(img_all_acc)
        ret_acc.append(img_acc)
        ret_iou.append(img_iou)
        ret_dice.append(img_dice)
        idx_start = idx_end
    # If an image has NaN metric, then skip it in the global mean.
    ret_metrics = [np.nanmean(ret_all_acc), np.nanmean(ret_acc, axis=0)]
    for metric in metrics:
        if metric == "mIoU":
            ret_metrics.append(np.nanmean(ret_iou, axis=0))
        elif metric == "mDice":
            ret_metrics.append(np.nanmean(ret_dice, axis=0))
    if nan_to_num is not None:
        ret_metrics = [np.nan_to_num(metric, nan=nan_to_num) for metric in ret_metrics]
    return ret_metrics
if __name__ == "__main__":
    # Smoke test: two all-ones 3x3 slices treated as two single-slice
    # volumes, evaluated with both supported metrics.
    results = [np.ones((3, 3)), np.ones((3, 3))]
    gt_seg_maps = [np.ones((3, 3)), np.ones((3, 3))]
    num_classes = 5
    img_num_slices = [1, 1]
    metrics = eval_metrics_per_img(
        results, gt_seg_maps, num_classes, img_num_slices, metrics=["mIoU", "mDice"]
    )
    # metrics = eval_metrics(results, gt_seg_maps, num_classes, metrics=["mIoU", "mDice"])
    print(metrics)
    # NOTE(review): load_img_num_slices / get_img_num_slices are defined
    # elsewhere in this module (not visible in this chunk); this exercises
    # slice-count loading for two dataset splits of the MSD Pancreas task.
    load_img_num_slices(
        data_dir="data/msd/processed/Task07_Pancreas/",
        split_json="dataset_5slices.json",
    )
    print(get_img_num_slices("validation"))
    print(get_img_num_slices("local_test"))
| Python |
3D | yuhui-zh15/TransSeg | src/main.py | .py | 6,240 | 167 | import logging
import sys
import json
from argparse import ArgumentParser
import pytorch_lightning as pl
from data import NIIDataLoader
from model import SegmentationModel
def parse_args(args=None):
    """Assemble the command-line interface and parse the given argument list.

    Args:
        args (list[str] | None): Tokens to parse; ``None`` falls back to
            ``sys.argv[1:]``.

    Returns:
        argparse.Namespace: Parsed options.
    """
    # (flag, add_argument kwargs) specs, grouped by the module they configure.
    specs = [
        # --- data module ---
        ("--data_dir", dict(default="jsons/", type=str)),
        ("--split_json", dict(default="dataset.json", type=str)),
        ("--train_batch_size", dict(default=1, type=int)),
        ("--eval_batch_size", dict(default=1, type=int)),
        ("--clip_range", dict(default=(-175, 250), type=int, nargs="+")),
        ("--mean_std", dict(default=None, type=float, nargs="+")),
        # --- model module ---
        ("--force_2d", dict(default=0, type=int)),
        ("--use_pretrained", dict(default=1, type=int)),
        ("--bootstrap_method", dict(default="centering", type=str)),
        ("--in_channels", dict(default=1, type=int)),
        ("--out_channels", dict(default=14, type=int)),
        ("--patch_size", dict(default=16, type=int)),
        ("--img_size", dict(default=(512, 512, 5), type=int, nargs="+")),
        ("--encoder", dict(default="beit", type=str)),
        ("--decoder", dict(default="upernet", type=str)),
        ("--loss_type", dict(default="dicefocal", type=str)),
        ("--dropout_rate", dict(default=0.0, type=float)),
        ("--learning_rate", dict(default=3e-5, type=float)),
        ("--weight_decay", dict(default=0.05, type=float)),
        ("--warmup_steps", dict(default=20, type=int)),
        ("--max_steps", dict(default=25000, type=int)),
        # --- trainer module ---
        ("--default_root_dir", dict(default=".", type=str)),
        ("--gpus", dict(default=-1, type=int)),
        ("--val_check_interval", dict(default=1.0, type=float)),
        ("--check_val_every_n_epoch", dict(default=100, type=int)),
        ("--gradient_clip_val", dict(default=1.0, type=float)),
        ("--accumulate_grad_batches", dict(default=1, type=int)),
        ("--log_every_n_steps", dict(default=1, type=int)),
        ("--precision", dict(default=32, type=int)),
        ("--accelerator", dict(default="ddp", type=str)),
        ("--seed", dict(default=1234, type=int)),
        # --- evaluation ---
        ("--evaluation", dict(default=0, type=int)),
        ("--model_path", dict(default=None, type=str)),
    ]
    parser = ArgumentParser()
    for flag, kwargs in specs:
        parser.add_argument(flag, **kwargs)
    return parser.parse_args(args)
def train(args):
    """Fit a segmentation model (from scratch or warm-started from
    ``args.model_path``), then run validation and test with the fitted model.
    """
    run_logger = pl.loggers.WandbLogger(
        project="MedicalSegmentation", config=vars(args), log_model=False
    )
    pl.seed_everything(args.seed)
    datamodule = NIIDataLoader(
        data_dir=args.data_dir,
        split_json=args.split_json,
        img_size=args.img_size,
        in_channels=args.in_channels,
        clip_range=args.clip_range,
        mean_std=args.mean_std,
        train_batch_size=args.train_batch_size,
        eval_batch_size=args.eval_batch_size,
    )
    # Build a fresh model unless a checkpoint to warm-start from was given.
    if args.model_path is not None:
        model = SegmentationModel.load_from_checkpoint(args.model_path)
    else:
        model = SegmentationModel(
            force_2d=args.force_2d,
            use_pretrained=args.use_pretrained,
            bootstrap_method=args.bootstrap_method,
            in_channels=args.in_channels,
            out_channels=args.out_channels,
            patch_size=args.patch_size,
            img_size=args.img_size,
            encoder=args.encoder,
            decoder=args.decoder,
            loss_type=args.loss_type,
            dropout_rate=args.dropout_rate,
            learning_rate=args.learning_rate,
            weight_decay=args.weight_decay,
            warmup_steps=args.warmup_steps,
            max_steps=args.max_steps,
        )
    callbacks = [
        # Keep only the checkpoint with the best validation mean-Dice.
        pl.callbacks.ModelCheckpoint(save_top_k=1, monitor="val/mdice", mode="max"),
        pl.callbacks.LearningRateMonitor(logging_interval="step"),
    ]
    trainer = pl.Trainer(
        default_root_dir=args.default_root_dir,
        gpus=args.gpus,
        val_check_interval=args.val_check_interval,
        # check_val_every_n_epoch=args.check_val_every_n_epoch,
        max_steps=args.max_steps,
        gradient_clip_val=args.gradient_clip_val,
        accumulate_grad_batches=args.accumulate_grad_batches,
        log_every_n_steps=args.log_every_n_steps,
        precision=args.precision,
        callbacks=callbacks,
        accelerator=args.accelerator,
        logger=run_logger,
        # limit_train_batches=1, # TODO: uncomment for debugging
        # limit_val_batches=1, # TODO: uncomment for debugging
        # limit_test_batches=1, # TODO: uncomment for debugging
    )
    trainer.fit(model, datamodule=datamodule)
    trainer.validate(datamodule=datamodule)
    trainer.test(datamodule=datamodule)
def evaluate(args):
    """Restore a trained checkpoint and run validation + test only.

    Sets ``save_preds`` on the model so predictions are written out, and uses
    ``num_sanity_val_steps=-1`` to run the full validation loop up front.
    """
    run_logger = pl.loggers.WandbLogger(
        project="MedicalSegmentation", config=vars(args), log_model=False
    )
    pl.seed_everything(args.seed)
    datamodule = NIIDataLoader(
        data_dir=args.data_dir,
        split_json=args.split_json,
        img_size=args.img_size,
        in_channels=args.in_channels,
        clip_range=args.clip_range,
        mean_std=args.mean_std,
        train_batch_size=args.train_batch_size,
        eval_batch_size=args.eval_batch_size,
    )
    model = SegmentationModel.load_from_checkpoint(args.model_path)
    # Ask the model to persist its predictions during eval.
    model.hparams.save_preds = True
    trainer = pl.Trainer(
        default_root_dir=args.default_root_dir,
        gpus=args.gpus,
        precision=args.precision,
        accelerator=args.accelerator,
        logger=run_logger,
        num_sanity_val_steps=-1,
        # limit_val_batches=1, # TODO: uncomment for debugging
        # limit_test_batches=1, # TODO: uncomment for debugging
    )
    trainer.validate(model, datamodule=datamodule)
    trainer.test(model, datamodule=datamodule)
if __name__ == "__main__":
    # CLI entry point: dispatch to training or evaluation based on the
    # integer --evaluation flag (0 = train, nonzero = evaluate).
    args = parse_args()
    if not args.evaluation:
        train(args)
    else:
        evaluate(args)
| Python |
3D | yuhui-zh15/TransSeg | src/backbones/decoders/unetr.py | .py | 4,003 | 128 | import torch
import torch.nn as nn
import torch.nn.functional as F
from monai.networks.layers.utils import get_act_layer, get_norm_layer
from monai.networks.blocks.dynunet_block import UnetOutBlock
from monai.networks.blocks.unetr_block import (
UnetrBasicBlock,
UnetrPrUpBlock,
UnetrUpBlock,
)
class UnetrHead(nn.Module):
    """UNETR decoder head (https://arxiv.org/abs/2103.10504).

    Fuses transformer hidden states (skip connections taken at increasing
    depths, plus the final layer) with a CNN decoder to produce a 3D
    segmentation map.

    Args:
        in_channels (int): Channels of the raw input volume (first skip).
        out_channels (int): Number of segmentation classes.
        hidden_size (int): Transformer embedding dimension.
        feature_size (int): Base channel count of the CNN decoder.
        conv_block (bool): Use conv blocks in the pre-upsample encoders.
        res_block (bool): Use residual blocks throughout.
        norm_name (str): Normalization type passed to the MONAI blocks.
        feat_size (tuple[int, int, int]): Spatial token grid of the
            transformer (image size // patch size per axis), required by
            ``proj_feat``. Default (16, 16, 16) -- TODO confirm against the
            encoder configuration actually used.
    """

    def __init__(
        self,
        in_channels=1,
        out_channels=14,
        hidden_size=768,
        feature_size=16,
        conv_block=False,
        res_block=True,
        norm_name="instance",
        feat_size=(16, 16, 16),
    ):
        super(UnetrHead, self).__init__()
        # Bug fix: forward() referenced self.hidden_size / self.feat_size /
        # self.proj_feat, none of which existed. Store the two attributes here
        # and define proj_feat below, following the reference MONAI UNETR.
        self.hidden_size = hidden_size
        self.feat_size = feat_size
        self.encoder1 = UnetrBasicBlock(
            spatial_dims=3,
            in_channels=in_channels,
            out_channels=feature_size,
            kernel_size=3,
            stride=1,
            norm_name=norm_name,
            res_block=res_block,
        )
        self.encoder2 = UnetrPrUpBlock(
            spatial_dims=3,
            in_channels=hidden_size,
            out_channels=feature_size * 2,
            num_layer=2,
            kernel_size=3,
            stride=1,
            upsample_kernel_size=2,
            norm_name=norm_name,
            conv_block=conv_block,
            res_block=res_block,
        )
        self.encoder3 = UnetrPrUpBlock(
            spatial_dims=3,
            in_channels=hidden_size,
            out_channels=feature_size * 4,
            num_layer=1,
            kernel_size=3,
            stride=1,
            upsample_kernel_size=2,
            norm_name=norm_name,
            conv_block=conv_block,
            res_block=res_block,
        )
        self.encoder4 = UnetrPrUpBlock(
            spatial_dims=3,
            in_channels=hidden_size,
            out_channels=feature_size * 8,
            num_layer=0,
            kernel_size=3,
            stride=1,
            upsample_kernel_size=2,
            norm_name=norm_name,
            conv_block=conv_block,
            res_block=res_block,
        )
        self.decoder5 = UnetrUpBlock(
            spatial_dims=3,
            in_channels=hidden_size,
            out_channels=feature_size * 8,
            stride=1,
            kernel_size=3,
            upsample_kernel_size=2,
            norm_name=norm_name,
            res_block=res_block,
        )
        self.decoder4 = UnetrUpBlock(
            spatial_dims=3,
            in_channels=feature_size * 8,
            out_channels=feature_size * 4,
            stride=1,
            kernel_size=3,
            upsample_kernel_size=2,
            norm_name=norm_name,
            res_block=res_block,
        )
        self.decoder3 = UnetrUpBlock(
            spatial_dims=3,
            in_channels=feature_size * 4,
            out_channels=feature_size * 2,
            stride=1,
            kernel_size=3,
            upsample_kernel_size=2,
            norm_name=norm_name,
            res_block=res_block,
        )
        self.decoder2 = UnetrUpBlock(
            spatial_dims=3,
            in_channels=feature_size * 2,
            out_channels=feature_size,
            stride=1,
            kernel_size=3,
            upsample_kernel_size=2,
            norm_name=norm_name,
            res_block=res_block,
        )
        self.out = UnetOutBlock(spatial_dims=3, in_channels=feature_size, out_channels=out_channels)  # type: ignore

    def proj_feat(self, x, hidden_size, feat_size):
        """Reshape a (B, N, hidden) token sequence into a (B, hidden, d1, d2, d3)
        feature volume, where N == prod(feat_size). Mirrors MONAI's UNETR."""
        x = x.view(x.size(0), feat_size[0], feat_size[1], feat_size[2], hidden_size)
        x = x.permute(0, 4, 1, 2, 3).contiguous()
        return x

    def forward(self, inputs):
        """Decode encoder hidden states into class logits.

        ``inputs[0]`` is taken as the raw input volume; deeper skip
        connections come from indices 3/6/9 and the final hidden state.
        """
        enc1 = self.encoder1(inputs[0])
        x2 = inputs[3]
        enc2 = self.encoder2(self.proj_feat(x2, self.hidden_size, self.feat_size))
        x3 = inputs[6]
        enc3 = self.encoder3(self.proj_feat(x3, self.hidden_size, self.feat_size))
        x4 = inputs[9]
        enc4 = self.encoder4(self.proj_feat(x4, self.hidden_size, self.feat_size))
        # Bug fix: the original referenced an undefined name `x` here; the
        # deepest decoder input is the final transformer hidden state.
        x = inputs[-1]
        dec4 = self.proj_feat(x, self.hidden_size, self.feat_size)
        dec3 = self.decoder5(dec4, enc4)
        dec2 = self.decoder4(dec3, enc3)
        dec1 = self.decoder3(dec2, enc2)
        out = self.decoder2(dec1, enc1)
        logits = self.out(out)
        return logits
| Python |
3D | yuhui-zh15/TransSeg | src/backbones/decoders/setrpup.py | .py | 1,948 | 58 | import torch
import torch.nn as nn
import torch.nn.functional as F
from monai.networks.layers.utils import get_act_layer, get_norm_layer
class SetrPupHead(nn.Module):
    """SETR progressive-upsampling (PUP) decoder head.

    Refines the final encoder feature map with four 3x3 conv + norm + ReLU
    stages, upsampling 2x after the first three and once more after the final
    1x1-style classification conv, for a total upsampling factor of 16.

    Args:
        channels (int): Channel width of the incoming feature map.
        num_classes (int): Number of output segmentation classes.
        norm_name (str): Normalization layer name resolved by MONAI's
            ``get_norm_layer``.
    """

    def __init__(
        self,
        channels=768,
        num_classes=14,
        norm_name="instance",
    ):
        super(SetrPupHead, self).__init__()
        # Attribute names are kept stable for checkpoint compatibility.
        self.decoder0 = nn.Conv2d(
            channels, channels, kernel_size=3, stride=1, padding=1
        )
        self.decoder1 = nn.Conv2d(
            channels, channels, kernel_size=3, stride=1, padding=1
        )
        self.decoder2 = nn.Conv2d(
            channels, channels, kernel_size=3, stride=1, padding=1
        )
        self.decoder3 = nn.Conv2d(
            channels, channels, kernel_size=3, stride=1, padding=1
        )
        self.decoder4 = nn.Conv2d(channels, num_classes, kernel_size=1, stride=1)
        self.norm0 = get_norm_layer(name=norm_name, spatial_dims=2, channels=channels)
        self.norm1 = get_norm_layer(name=norm_name, spatial_dims=2, channels=channels)
        self.norm2 = get_norm_layer(name=norm_name, spatial_dims=2, channels=channels)
        self.norm3 = get_norm_layer(name=norm_name, spatial_dims=2, channels=channels)

    def forward(self, inputs):
        """Progressively upsample the last feature map in ``inputs`` to logits."""
        feat = inputs[-1]
        # Three conv+norm+ReLU blocks, each followed by a 2x bilinear upsample.
        # NOTE(review): the int `size` passed to F.interpolate is applied to
        # every spatial dim, so this assumes square feature maps.
        for conv, norm in (
            (self.decoder0, self.norm0),
            (self.decoder1, self.norm1),
            (self.decoder2, self.norm2),
        ):
            feat = F.relu(norm(conv(feat)), inplace=True)
            feat = F.interpolate(feat, size=feat.shape[-1] * 2, mode="bilinear")
        # Final refinement without upsampling, then projection to classes and
        # one last 2x upsample.
        feat = F.relu(self.norm3(self.decoder3(feat)), inplace=True)
        logits = self.decoder4(feat)
        return F.interpolate(logits, size=logits.shape[-1] * 2, mode="bilinear")
| Python |
3D | yuhui-zh15/TransSeg | src/backbones/decoders/upernet.py | .py | 7,261 | 231 | import torch
import torch.nn as nn
from mmcv.cnn import ConvModule
from mmseg.ops import resize
from abc import ABCMeta, abstractmethod
import torch.nn.functional as F
class PPM(nn.ModuleList):
    """Pooling Pyramid Module used in PSPNet.

    Each branch adaptively average-pools the input to one of ``pool_scales``,
    projects it to ``channels`` with a 1x1 ConvModule, and (in ``forward``)
    upsamples the result back to the input's spatial size.

    Args:
        pool_scales (tuple[int]): Pooling scales used in Pooling Pyramid
            Module.
        in_channels (int): Input channels.
        channels (int): Channels after modules, before conv_seg.
        conv_cfg (dict|None): Config of conv layers.
        norm_cfg (dict|None): Config of norm layers.
        act_cfg (dict): Config of activation layers.
        align_corners (bool): align_corners argument of F.interpolate.
    """

    def __init__(
        self,
        pool_scales,
        in_channels,
        channels,
        conv_cfg,
        norm_cfg,
        act_cfg,
        align_corners=False,
    ):
        super(PPM, self).__init__()
        self.pool_scales = pool_scales
        self.align_corners = align_corners
        self.in_channels = in_channels
        self.channels = channels
        self.conv_cfg = conv_cfg
        self.norm_cfg = norm_cfg
        self.act_cfg = act_cfg
        # Register one pool -> 1x1 conv branch per scale, in order.
        for scale in pool_scales:
            branch = nn.Sequential(
                nn.AdaptiveAvgPool2d(scale),
                ConvModule(
                    in_channels,
                    channels,
                    1,
                    conv_cfg=conv_cfg,
                    norm_cfg=norm_cfg,
                    act_cfg=act_cfg,
                ),
            )
            self.append(branch)

    def forward(self, x):
        """Return the list of branch outputs, each upsampled to x's size."""
        return [
            resize(
                branch(x),
                size=x.size()[2:],
                mode="bilinear",
                align_corners=self.align_corners,
            )
            for branch in self
        ]
class UPerHead(nn.Module):
    """Unified Perceptual Parsing for Scene Understanding.
    This head is the implementation of `UPerNet
    <https://arxiv.org/abs/1807.10221>`_, adapted here to plain-ViT encoders:
    the selected same-resolution transformer features are first rescaled into
    a multi-resolution pyramid (``fpn1``..``fpn4``) before the usual PSP + FPN
    fusion.

    Args:
        layer_idxs (list[int]): Indices of the encoder hidden states to tap.
        in_channels (list[int]): Channels of each tapped feature map.
        channels (int): Working channel width of the head.
        num_classes (int): Number of segmentation classes.
        dropout_ratio (float): Dropout applied before the final 1x1 classifier.
        conv_cfg (dict | None): Config of conv layers (mmcv ConvModule).
        norm_cfg (dict | None): Config of norm layers.
        act_cfg (dict): Config of activation layers.
        pool_scales (tuple[int]): Pooling scales used in Pooling Pyramid
            Module applied on the last feature. Default: (1, 2, 3, 6).
        fpns (bool): If True, rescale the tapped ViT features (all at the same
            stride) into a 4x/2x/1x/0.5x pyramid before fusion.
    """

    # NOTE(review): the mutable default arguments (lists/dicts) below are an
    # anti-pattern, though none of them is mutated here.
    def __init__(
        self,
        layer_idxs=[3, 5, 7, 11],
        in_channels=[768, 768, 768, 768],
        channels=768,
        num_classes=14,
        dropout_ratio=0.1,
        conv_cfg=None,
        norm_cfg=dict(type="SyncBN", requires_grad=True),
        act_cfg=dict(type="ReLU"),
        pool_scales=[1, 2, 3, 6],
        fpns=True,
    ):
        super(UPerHead, self).__init__()
        self.layer_idxs = layer_idxs
        self.in_channels = in_channels
        self.channels = channels
        self.num_classes = num_classes
        self.conv_cfg = conv_cfg
        self.norm_cfg = norm_cfg
        self.act_cfg = act_cfg
        self.fpns = fpns
        self.dropout = nn.Dropout2d(dropout_ratio)
        self.conv_seg = nn.Conv2d(channels, num_classes, kernel_size=1)
        if self.fpns:
            # fpn1..fpn4 rescale the four same-resolution ViT feature maps to
            # 4x, 2x, 1x and 0.5x of their spatial size, emulating a CNN-style
            # feature pyramid.
            self.fpn1 = nn.Sequential(
                nn.ConvTranspose2d(channels, channels, kernel_size=2, stride=2),
                nn.SyncBatchNorm(channels),
                nn.GELU(),
                nn.ConvTranspose2d(channels, channels, kernel_size=2, stride=2),
            )
            self.fpn2 = nn.Sequential(
                nn.ConvTranspose2d(channels, channels, kernel_size=2, stride=2),
            )
            self.fpn3 = nn.Identity()
            self.fpn4 = nn.MaxPool2d(kernel_size=2, stride=2)
        # PSP Module
        self.psp_modules = PPM(
            pool_scales,
            self.in_channels[-1],
            self.channels,
            conv_cfg=self.conv_cfg,
            norm_cfg=self.norm_cfg,
            act_cfg=self.act_cfg,
        )
        self.bottleneck = ConvModule(
            self.in_channels[-1] + len(pool_scales) * self.channels,
            self.channels,
            3,
            padding=1,
            conv_cfg=self.conv_cfg,
            norm_cfg=self.norm_cfg,
            act_cfg=self.act_cfg,
        )
        # FPN Module
        self.lateral_convs = nn.ModuleList()
        self.fpn_convs = nn.ModuleList()
        # One 1x1 lateral conv + one 3x3 fusion conv per pyramid level
        # (the loop variable deliberately shadows self.in_channels' elements).
        for in_channels in self.in_channels[:-1]:  # skip the top layer
            l_conv = ConvModule(
                in_channels,
                self.channels,
                1,
                conv_cfg=self.conv_cfg,
                norm_cfg=self.norm_cfg,
                act_cfg=self.act_cfg,
                inplace=False,
            )
            fpn_conv = ConvModule(
                self.channels,
                self.channels,
                3,
                padding=1,
                conv_cfg=self.conv_cfg,
                norm_cfg=self.norm_cfg,
                act_cfg=self.act_cfg,
                inplace=False,
            )
            self.lateral_convs.append(l_conv)
            self.fpn_convs.append(fpn_conv)
        self.fpn_bottleneck = ConvModule(
            len(self.in_channels) * self.channels,
            self.channels,
            3,
            padding=1,
            conv_cfg=self.conv_cfg,
            norm_cfg=self.norm_cfg,
            act_cfg=self.act_cfg,
        )

    def cls_seg(self, feat):
        """Classify each pixel."""
        if self.dropout is not None:
            feat = self.dropout(feat)
        output = self.conv_seg(feat)
        return output

    def psp_forward(self, inputs):
        """Forward function of PSP module."""
        # Concatenate the raw top feature with all pooled branches, then fuse.
        x = inputs[-1]
        psp_outs = [x]
        psp_outs.extend(self.psp_modules(x))
        psp_outs = torch.cat(psp_outs, dim=1)
        output = self.bottleneck(psp_outs)
        return output

    def forward(self, inputs):
        """Forward function."""
        # Select the tapped hidden states and (optionally) rescale them into
        # a pyramid before fusion.
        inputs = [inputs[i] for i in self.layer_idxs]
        if self.fpns:
            ops = [self.fpn1, self.fpn2, self.fpn3, self.fpn4]
            for i in range(len(inputs)):
                inputs[i] = ops[i](inputs[i])
        # build laterals
        laterals = [
            lateral_conv(inputs[i]) for i, lateral_conv in enumerate(self.lateral_convs)
        ]
        laterals.append(self.psp_forward(inputs))
        # build top-down path
        # Each coarser level is upsampled and added (in place) into the finer
        # one; iteration order (top -> bottom) matters here.
        used_backbone_levels = len(laterals)
        for i in range(used_backbone_levels - 1, 0, -1):
            prev_shape = laterals[i - 1].shape[2:]
            laterals[i - 1] += resize(laterals[i], size=prev_shape, mode="bilinear")
        # build outputs
        fpn_outs = [
            self.fpn_convs[i](laterals[i]) for i in range(used_backbone_levels - 1)
        ]
        # append psp feature
        fpn_outs.append(laterals[-1])
        # Bring every level up to the finest resolution before concatenation.
        for i in range(used_backbone_levels - 1, 0, -1):
            fpn_outs[i] = resize(
                fpn_outs[i], size=fpn_outs[0].shape[2:], mode="bilinear"
            )
        fpn_outs = torch.cat(fpn_outs, dim=1)
        output = self.fpn_bottleneck(fpn_outs)
        output = self.cls_seg(output)
        # Final 4x upsample of the logits to the input resolution.
        # NOTE(review): the int `size` assumes square spatial dims.
        output = F.interpolate(output, size=output.shape[-1] * 4, mode="bilinear")
        return output
| Python |
3D | yuhui-zh15/TransSeg | src/backbones/decoders/convtrans.py | .py | 1,446 | 41 | import torch
import torch.nn as nn
import torch.nn.functional as F
from monai.networks.layers.utils import get_act_layer, get_norm_layer
class ConvTransHead(nn.Module):
    """Minimal decoder: four stride-2 transposed convolutions that upsample
    the final encoder feature map 16x while mapping it to per-class logits.

    Args:
        channels (int): Channel width of the incoming feature map.
        num_classes (int): Number of output segmentation classes.
        norm_name (str): Normalization layer name resolved by MONAI's
            ``get_norm_layer``.
    """

    def __init__(
        self,
        channels=768,
        num_classes=14,
        norm_name="instance",
    ):
        super(ConvTransHead, self).__init__()
        # Attribute names are kept stable for checkpoint compatibility.
        self.decoder1 = nn.ConvTranspose2d(
            channels, channels, kernel_size=(2, 2), stride=(2, 2)
        )
        self.decoder2 = nn.ConvTranspose2d(
            channels, channels, kernel_size=(2, 2), stride=(2, 2)
        )
        self.decoder3 = nn.ConvTranspose2d(
            channels, channels, kernel_size=(2, 2), stride=(2, 2)
        )
        self.decoder4 = nn.ConvTranspose2d(
            channels, num_classes, kernel_size=(2, 2), stride=(2, 2)
        )
        self.norm0 = get_norm_layer(name=norm_name, spatial_dims=2, channels=channels)
        self.norm1 = get_norm_layer(name=norm_name, spatial_dims=2, channels=channels)
        self.norm2 = get_norm_layer(name=norm_name, spatial_dims=2, channels=channels)
        self.norm3 = get_norm_layer(name=norm_name, spatial_dims=2, channels=channels)

    def forward(self, inputs):
        """Decode the last feature map in ``inputs`` to class logits."""
        # Normalize + activate the input once, then three norm-ReLU 2x
        # upsampling stages, and a final (un-normalized) projection stage.
        feat = F.relu(self.norm0(inputs[-1]))
        for upsample, norm in (
            (self.decoder1, self.norm1),
            (self.decoder2, self.norm2),
            (self.decoder3, self.norm3),
        ):
            feat = F.relu(norm(upsample(feat)))
        return self.decoder4(feat)
| Python |
3D | yuhui-zh15/TransSeg | src/backbones/encoders/beit3d.py | .py | 33,380 | 889 | # --------------------------------------------------------
# BEIT: BERT Pre-Training of Image Transformers (https://arxiv.org/abs/2106.08254)
# Github source: https://github.com/microsoft/unilm/tree/master/beit
# Copyright (c) 2021 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# By Hangbo Bao
# Based on timm, mmseg, setr, xcit and swin code bases
# https://github.com/rwightman/pytorch-image-models/tree/master/timm
# https://github.com/fudan-zvg/SETR
# https://github.com/facebookresearch/xcit/
# https://github.com/microsoft/Swin-Transformer
# --------------------------------------------------------'
import math
import torch
from functools import partial
import torch.nn as nn
import torch.nn.functional as F
import torch.utils.checkpoint as checkpoint
import numpy as np
from scipy import interpolate
from timm.models.layers import drop_path, to_2tuple, to_3tuple, trunc_normal_
from mmseg.utils import get_root_logger
from mmcv.parallel import is_module_wrapper
from mmcv.runner import get_dist_info
def load_state_dict(module, state_dict, strict=False, logger=None):
    """Load state_dict to a module.
    This method is modified from :meth:`torch.nn.Module.load_state_dict`.
    Default value for ``strict`` is set to ``False`` and the message for
    param mismatch will be shown even if strict is False.
    Args:
        module (Module): Module that receives the state_dict.
        state_dict (OrderedDict): Weights.
        strict (bool): whether to strictly enforce that the keys
            in :attr:`state_dict` match the keys returned by this module's
            :meth:`~torch.nn.Module.state_dict` function. Default: ``False``.
        logger (:obj:`logging.Logger`, optional): Logger to log the error
            message. If not specified, print function will be used.
    """
    # Accumulators filled in by the recursive `load` closure below.
    unexpected_keys = []
    all_missing_keys = []
    err_msg = []
    metadata = getattr(state_dict, "_metadata", None)
    # Shallow-copy so metadata can be re-attached without mutating the
    # caller's dict.
    state_dict = state_dict.copy()
    if metadata is not None:
        state_dict._metadata = metadata

    # use _load_from_state_dict to enable checkpoint version control
    def load(module, prefix=""):
        # recursively check parallel module in case that the model has a
        # complicated structure, e.g., nn.Module(nn.Module(DDP))
        if is_module_wrapper(module):
            module = module.module
        local_metadata = {} if metadata is None else metadata.get(prefix[:-1], {})
        module._load_from_state_dict(
            state_dict,
            prefix,
            local_metadata,
            True,
            all_missing_keys,
            unexpected_keys,
            err_msg,
        )
        for name, child in module._modules.items():
            if child is not None:
                load(child, prefix + name + ".")

    load(module)
    load = None  # break load->load reference cycle
    # ignore "num_batches_tracked" of BN layers
    missing_keys = [key for key in all_missing_keys if "num_batches_tracked" not in key]
    if unexpected_keys:
        err_msg.append(
            "unexpected key in source " f'state_dict: {", ".join(unexpected_keys)}\n'
        )
    if missing_keys:
        err_msg.append(
            f'missing keys in source state_dict: {", ".join(missing_keys)}\n'
        )
    rank, _ = get_dist_info()
    # Only rank 0 reports, to avoid duplicated messages under DDP.
    if len(err_msg) > 0 and rank == 0:
        err_msg.insert(0, "The model and loaded state dict do not match exactly\n")
        err_msg = "\n".join(err_msg)
        if strict:
            raise RuntimeError(err_msg)
        elif logger is not None:
            logger.warning(err_msg)
        else:
            print(err_msg)
def load_checkpoint(
    model,
    filename,
    bootstrap_method="centering",
    map_location="cpu",
    strict=False,
    logger=None,
):
    """Load checkpoint from a file or URI.
    Args:
        model (Module): Module to load checkpoint.
        filename (str): Accept local filepath, URL, ``torchvision://xxx``,
            ``open-mmlab://xxx``. Please refer to ``docs/model_zoo.md`` for
            details.
        bootstrap_method (str): How to lift 2D patch-embedding weights to 3D:
            "inflation" spreads weight evenly over the slice axis,
            "centering" puts all weight on the center slice.
        map_location (str): Same as :func:`torch.load`.
        strict (bool): Whether to allow different params for the model and
            checkpoint.
        logger (:mod:`logging.Logger` or None): The logger for error message.
    Returns:
        dict or OrderedDict: The loaded checkpoint.
    Raises:
        ValueError: If ``bootstrap_method`` is not one of the supported modes.
    """
    checkpoint = torch.load(filename, map_location=map_location)
    # --- starting inflate/center weights ---
    # Convert the 2D RGB patch embedding to a 3D embedding matching the
    # model's (n_chans, n_slices) input layout.
    n_slices = model.patch_embed.img_size[-1]
    n_chans = model.patch_embed.in_chans
    key = "patch_embed.proj.weight"
    emb = checkpoint["model"][key]
    print("Old:", emb.shape, emb.sum())
    emb = emb.sum(1, keepdim=True)  # from colored to grayed
    emb = (
        emb.repeat(1, n_chans, 1, 1) / n_chans
    )  # from 1-channel grayed to n-channel grayed
    emb = emb.unsqueeze(2).repeat(1, 1, n_slices, 1, 1)  # from 2D to 3D
    if bootstrap_method == "inflation":
        print("Inflation!!!")
        emb = emb / n_slices
    elif bootstrap_method == "centering":
        print("Centering!!!")
        center_idx = n_slices // 2
        all_idxs = list(range(n_slices))
        all_idxs.pop(center_idx)
        emb[:, :, all_idxs, :, :] = 0
    else:
        # Bug fix: a bare `raise` outside an except block fails with
        # "RuntimeError: No active exception to re-raise"; raise an
        # informative error instead.
        raise ValueError(f"Unsupported bootstrap_method: {bootstrap_method!r}")
    print("New:", emb.shape, emb.sum())
    checkpoint["model"][key] = emb
    # print('Remove "patch_embed" pre-trained weights!!!!')
    # removed_keys = ["patch_embed.proj.weight", "patch_embed.proj.bias"]
    # for key in removed_keys:
    #     checkpoint["model"].pop(key)
    # --- ending inflate/center weights ---
    # OrderedDict is a subclass of dict
    if not isinstance(checkpoint, dict):
        raise RuntimeError(f"No state_dict found in checkpoint file (unknown)")
    # get state_dict from checkpoint
    if "state_dict" in checkpoint:
        state_dict = checkpoint["state_dict"]
    elif "model" in checkpoint:
        state_dict = checkpoint["model"]
    elif "module" in checkpoint:
        state_dict = checkpoint["module"]
    else:
        state_dict = checkpoint
    # strip prefix of state_dict
    if list(state_dict.keys())[0].startswith("module."):
        state_dict = {k[7:]: v for k, v in state_dict.items()}
    # for MoBY, load model of online branch
    if sorted(list(state_dict.keys()))[0].startswith("encoder"):
        state_dict = {
            k.replace("encoder.", ""): v
            for k, v in state_dict.items()
            if k.startswith("encoder.")
        }
    # reshape absolute position embedding for Swin
    if state_dict.get("absolute_pos_embed") is not None:
        absolute_pos_embed = state_dict["absolute_pos_embed"]
        N1, L, C1 = absolute_pos_embed.size()
        N2, C2, H, W = model.absolute_pos_embed.size()
        if N1 != N2 or C1 != C2 or L != H * W:
            logger.warning("Error in loading absolute_pos_embed, pass")
        else:
            state_dict["absolute_pos_embed"] = absolute_pos_embed.view(
                N2, H, W, C2
            ).permute(0, 3, 1, 2)
    rank, _ = get_dist_info()
    if "rel_pos_bias.relative_position_bias_table" in state_dict:
        if rank == 0:
            print("Expand the shared relative position embedding to each layers. ")
        num_layers = model.get_num_layers()
        rel_pos_bias = state_dict["rel_pos_bias.relative_position_bias_table"]
        for i in range(num_layers):
            state_dict[
                "blocks.%d.attn.relative_position_bias_table" % i
            ] = rel_pos_bias.clone()
        state_dict.pop("rel_pos_bias.relative_position_bias_table")
    all_keys = list(state_dict.keys())
    for key in all_keys:
        if "relative_position_index" in key:
            # Index buffers are recomputed by the model; drop stale ones.
            state_dict.pop(key)
        if "relative_position_bias_table" in key:
            rel_pos_bias = state_dict[key]
            src_num_pos, num_attn_heads = rel_pos_bias.size()
            dst_num_pos, _ = model.state_dict()[key].size()
            dst_patch_shape = model.patch_embed.patch_shape
            if dst_patch_shape[0] != dst_patch_shape[1]:
                raise NotImplementedError()
            num_extra_tokens = dst_num_pos - (dst_patch_shape[0] * 2 - 1) * (
                dst_patch_shape[1] * 2 - 1
            ) * (dst_patch_shape[2] * 2 - 1)
            src_size = round((src_num_pos - num_extra_tokens) ** 0.5)
            dst_size = (
                dst_patch_shape[0] * 2 - 1
            )  # round((dst_num_pos - num_extra_tokens) ** 0.33333333)
            if src_size != dst_size:
                if rank == 0:
                    print(
                        "Position interpolate for %s from %dx%d to %dx%d"
                        % (key, src_size, src_size, dst_size, dst_size)
                    )
                extra_tokens = rel_pos_bias[-num_extra_tokens:, :]
                rel_pos_bias = rel_pos_bias[:-num_extra_tokens, :]

                def geometric_progression(a, r, n):
                    # Sum of a geometric series; used to find the growth
                    # factor q for the non-uniform interpolation grid.
                    return a * (1.0 - r**n) / (1.0 - r)

                # Bisect for q so the geometric grid spans the target radius.
                left, right = 1.01, 1.5
                while right - left > 1e-6:
                    q = (left + right) / 2.0
                    gp = geometric_progression(1, q, src_size // 2)
                    if gp > dst_size // 2:
                        right = q
                    else:
                        left = q
                # if q > 1.13492:
                #     q = 1.13492
                dis = []
                cur = 1
                for i in range(src_size // 2):
                    dis.append(cur)
                    cur += q ** (i + 1)
                r_ids = [-_ for _ in reversed(dis)]
                x = r_ids + [0] + dis
                y = r_ids + [0] + dis
                t = dst_size // 2.0
                dx = np.arange(-t, t + 0.1, 1.0)
                dy = np.arange(-t, t + 0.1, 1.0)
                # if rank == 0:
                #     print("x = {}".format(x))
                #     print("dx = {}".format(dx))
                all_rel_pos_bias = []
                for i in range(num_attn_heads):
                    z = rel_pos_bias[:, i].view(src_size, src_size).float().numpy()
                    # NOTE(review): scipy.interpolate.interp2d is deprecated
                    # in recent SciPy; consider RegularGridInterpolator when
                    # upgrading.
                    f = interpolate.interp2d(x, y, z, kind="cubic")
                    all_rel_pos_bias.append(
                        torch.Tensor(f(dx, dy))
                        .contiguous()
                        .view(-1, 1)
                        .to(rel_pos_bias.device)
                    )
                rel_pos_bias = torch.cat(all_rel_pos_bias, dim=-1).repeat(
                    dst_patch_shape[2], 1
                )  # inflate
                assert len(rel_pos_bias.shape) == 2
                new_rel_pos_bias = torch.cat((rel_pos_bias, extra_tokens), dim=0)
                state_dict[key] = new_rel_pos_bias
    if "pos_embed" in state_dict:
        pos_embed_checkpoint = state_dict["pos_embed"]
        embedding_size = pos_embed_checkpoint.shape[-1]
        num_patches = model.patch_embed.num_patches
        num_extra_tokens = model.pos_embed.shape[-2] - num_patches
        # height (== width) for the checkpoint position embedding
        orig_size = int((pos_embed_checkpoint.shape[-2] - num_extra_tokens) ** 0.5)
        # height (== width) for the new position embedding
        new_size = int(num_patches**0.5)
        # class_token and dist_token are kept unchanged
        if orig_size != new_size:
            if rank == 0:
                print(
                    "Position interpolate from %dx%d to %dx%d"
                    % (orig_size, orig_size, new_size, new_size)
                )
            extra_tokens = pos_embed_checkpoint[:, :num_extra_tokens]
            # only the position tokens are interpolated
            pos_tokens = pos_embed_checkpoint[:, num_extra_tokens:]
            pos_tokens = pos_tokens.reshape(
                -1, orig_size, orig_size, embedding_size
            ).permute(0, 3, 1, 2)
            pos_tokens = torch.nn.functional.interpolate(
                pos_tokens,
                size=(new_size, new_size),
                mode="bicubic",
                align_corners=False,
            )
            pos_tokens = pos_tokens.permute(0, 2, 3, 1).flatten(1, 2)
            new_pos_embed = torch.cat((extra_tokens, pos_tokens), dim=1)
            state_dict["pos_embed"] = new_pos_embed
    # interpolate position bias table if needed (no use)
    relative_position_bias_table_keys = [
        k for k in state_dict.keys() if "relative_position_bias_table" in k
    ]
    for table_key in relative_position_bias_table_keys:
        table_pretrained = state_dict[table_key]
        table_current = model.state_dict()[table_key]
        L1, nH1 = table_pretrained.size()
        L2, nH2 = table_current.size()
        # print(L1, nH1, L2, nH2)
        # input()
        if nH1 != nH2:
            logger.warning(f"Error in loading {table_key}, pass")
        else:
            if L1 != L2:
                S1 = round(L1**0.5)
                S2 = round(L2**0.33333333)
                table_pretrained_resized = F.interpolate(
                    table_pretrained.permute(1, 0).view(1, nH1, S1, S1),
                    size=(S2, S2),
                    mode="bicubic",
                )
                state_dict[table_key] = table_pretrained_resized.view(nH2, L2).permute(
                    1, 0
                )
    # load state_dict
    load_state_dict(model, state_dict, strict, logger)
    return checkpoint
class DropPath(nn.Module):
    """Stochastic-depth regularizer: randomly zeroes whole residual branches per sample.

    Delegates the actual masking to the module-level ``drop_path`` helper;
    it is a no-op at inference time (``self.training`` is False).
    """

    def __init__(self, drop_prob=None):
        super().__init__()
        # Probability of dropping the residual path for a given sample.
        self.drop_prob = drop_prob

    def forward(self, x):
        return drop_path(x, self.drop_prob, self.training)

    def extra_repr(self) -> str:
        # Shown in the module repr, e.g. "DropPath(p=0.1)".
        return f"p={self.drop_prob}"
class Mlp(nn.Module):
    """Transformer feed-forward block: Linear -> activation -> Linear -> Dropout.

    NOTE: unlike the stock timm Mlp, dropout is applied only after the second
    projection — the call after the activation is intentionally disabled to
    mirror the original BERT implementation.
    """

    def __init__(
        self,
        in_features,
        hidden_features=None,
        out_features=None,
        act_layer=nn.GELU,
        drop=0.0,
    ):
        super().__init__()
        # Fall back to the input width when hidden/output widths are not given.
        hidden_features = hidden_features or in_features
        out_features = out_features or in_features
        self.fc1 = nn.Linear(in_features, hidden_features)
        self.act = act_layer()
        self.fc2 = nn.Linear(hidden_features, out_features)
        self.drop = nn.Dropout(drop)

    def forward(self, x):
        # Dropout after the activation is deliberately omitted
        # (kept consistent with the original BERT implementation).
        return self.drop(self.fc2(self.act(self.fc1(x))))
class Attention(nn.Module):
    """Multi-head self-attention with an optional 3-D relative-position bias.

    When ``window_size`` (a 3-tuple Wh, Ww, Wd) is given, a learnable bias
    table indexed by pairwise relative position is added to the attention
    logits, with three extra entries for cls<->token and cls<->cls pairs.
    An externally computed ``rel_pos_bias`` can additionally be passed to
    ``forward`` and is summed onto the logits as well.
    """

    def __init__(
        self,
        dim,
        num_heads=8,
        qkv_bias=False,
        qk_scale=None,
        attn_drop=0.0,
        proj_drop=0.0,
        window_size=None,
        attn_head_dim=None,
    ):
        """
        Args:
            dim: token embedding width.
            num_heads: number of attention heads.
            qkv_bias: if True, learn separate q and v biases (k bias stays zero).
            qk_scale: override for the 1/sqrt(head_dim) logit scale.
            attn_drop: dropout on the attention weights.
            proj_drop: dropout on the output projection.
            window_size: (Wh, Ww, Wd) patch grid for relative position bias;
                None disables the bias.
            attn_head_dim: override for the per-head width.
        """
        super().__init__()
        self.num_heads = num_heads
        head_dim = dim // num_heads
        if attn_head_dim is not None:
            head_dim = attn_head_dim
        # Total q/k/v width; may differ from `dim` when attn_head_dim is set.
        all_head_dim = head_dim * self.num_heads
        # NOTE scale factor was wrong in my original version, can set manually to be compat with prev weights
        self.scale = qk_scale or head_dim**-0.5
        # Bias is handled manually in forward (q and v only), so bias=False here.
        self.qkv = nn.Linear(dim, all_head_dim * 3, bias=False)
        if qkv_bias:
            self.q_bias = nn.Parameter(torch.zeros(all_head_dim))
            self.v_bias = nn.Parameter(torch.zeros(all_head_dim))
        else:
            self.q_bias = None
            self.v_bias = None
        if window_size:
            self.window_size = window_size
            # One table row per distinct 3-D relative offset, plus 3 rows for
            # cls->token, token->cls and cls->cls.
            self.num_relative_distance = (2 * window_size[0] - 1) * (
                2 * window_size[1] - 1
            ) * (2 * window_size[2] - 1) + 3
            self.relative_position_bias_table = nn.Parameter(
                torch.zeros(self.num_relative_distance, num_heads)
            )  # 2*Wh-1 * 2*Ww-1, nH
            # cls to token & token 2 cls & cls to cls
            # get pair-wise relative position index for each token inside the window
            coords_h = torch.arange(window_size[0])
            coords_w = torch.arange(window_size[1])
            coords_d = torch.arange(window_size[2])
            coords = torch.stack(
                torch.meshgrid([coords_h, coords_w, coords_d])
            )  # 3, Wh, Ww, Wd
            coords_flatten = torch.flatten(coords, 1)  # 3, Wh*Ww*wd
            relative_coords = (
                coords_flatten[:, :, None] - coords_flatten[:, None, :]
            )  # 3, Wh*Ww*wd, Wh*Ww*wd
            relative_coords = relative_coords.permute(
                1, 2, 0
            ).contiguous()  # Wh*Ww*wd, Wh*Ww*wd, 3
            relative_coords[:, :, 0] += window_size[0] - 1  # shift to start from 0
            relative_coords[:, :, 1] += window_size[1] - 1
            relative_coords[:, :, 2] += window_size[2] - 1
            # Mixed-radix flattening so each (dh, dw, dd) maps to a unique table row.
            relative_coords[:, :, 0] *= (2 * window_size[1] - 1) * (
                2 * window_size[2] - 1
            )
            relative_coords[:, :, 1] *= 2 * window_size[2] - 1
            # Index matrix over (cls + all patches) x (cls + all patches);
            # row/col 0 correspond to the cls token.
            relative_position_index = torch.zeros(
                size=(window_size[0] * window_size[1] * window_size[2] + 1,) * 2,
                dtype=relative_coords.dtype,
            )
            relative_position_index[1:, 1:] = relative_coords.sum(
                -1
            )  # Wh*Ww*Wd, Wh*Ww*Wd
            relative_position_index[0, 0:] = self.num_relative_distance - 3
            relative_position_index[0:, 0] = self.num_relative_distance - 2
            relative_position_index[0, 0] = self.num_relative_distance - 1
            self.register_buffer("relative_position_index", relative_position_index)
            # trunc_normal_(self.relative_position_bias_table, std=.0)
        else:
            self.window_size = None
            self.relative_position_bias_table = None
            self.relative_position_index = None
        self.attn_drop = nn.Dropout(attn_drop)
        self.proj = nn.Linear(all_head_dim, dim)
        self.proj_drop = nn.Dropout(proj_drop)

    def forward(self, x, rel_pos_bias=None):
        """Apply self-attention to ``x`` of shape (B, N, C); returns (B, N, C)."""
        B, N, C = x.shape
        qkv_bias = None
        if self.q_bias is not None:
            # Assemble [q_bias, zeros (k), v_bias] — k intentionally gets no bias.
            qkv_bias = torch.cat(
                (
                    self.q_bias,
                    torch.zeros_like(self.v_bias, requires_grad=False),
                    self.v_bias,
                )
            )
        # qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4)
        qkv = F.linear(input=x, weight=self.qkv.weight, bias=qkv_bias)
        qkv = qkv.reshape(B, N, 3, self.num_heads, -1).permute(2, 0, 3, 1, 4)
        q, k, v = (
            qkv[0],
            qkv[1],
            qkv[2],
        )  # make torchscript happy (cannot use tensor as tuple)
        q = q * self.scale
        attn = q @ k.transpose(-2, -1)
        if self.relative_position_bias_table is not None:
            # Gather per-pair bias rows and add them to the logits.
            relative_position_bias = self.relative_position_bias_table[
                self.relative_position_index.view(-1)
            ].view(
                self.window_size[0] * self.window_size[1] * self.window_size[2] + 1,
                self.window_size[0] * self.window_size[1] * self.window_size[2] + 1,
                -1,
            )  # Wh*Ww*Wd,Wh*Ww*Wd,nH
            relative_position_bias = relative_position_bias.permute(
                2, 0, 1
            ).contiguous()  # nH, Wh*Ww, Wh*Ww
            attn = attn + relative_position_bias.unsqueeze(0)
        if rel_pos_bias is not None:
            # Optional shared bias supplied by the caller (e.g. RelativePositionBias).
            attn = attn + rel_pos_bias
        attn = attn.softmax(dim=-1)
        attn = self.attn_drop(attn)
        x = (attn @ v).transpose(1, 2).reshape(B, N, -1)
        x = self.proj(x)
        x = self.proj_drop(x)
        return x
class Block(nn.Module):
    """Pre-norm transformer block (BEiT style): biased attention + MLP.

    When ``init_values`` is given, each residual branch is scaled by a
    learnable per-channel gain (gamma_1 / gamma_2).
    """

    def __init__(
        self,
        dim,
        num_heads,
        mlp_ratio=4.0,
        qkv_bias=False,
        qk_scale=None,
        drop=0.0,
        attn_drop=0.0,
        drop_path=0.0,
        init_values=None,
        act_layer=nn.GELU,
        norm_layer=nn.LayerNorm,
        window_size=None,
        attn_head_dim=None,
    ):
        super().__init__()
        self.norm1 = norm_layer(dim)
        self.attn = Attention(
            dim,
            num_heads=num_heads,
            qkv_bias=qkv_bias,
            qk_scale=qk_scale,
            attn_drop=attn_drop,
            proj_drop=drop,
            window_size=window_size,
            attn_head_dim=attn_head_dim,
        )
        # NOTE: drop path for stochastic depth, we shall see if this is better than dropout here
        self.drop_path = DropPath(drop_path) if drop_path > 0.0 else nn.Identity()
        self.norm2 = norm_layer(dim)
        self.mlp = Mlp(
            in_features=dim,
            hidden_features=int(dim * mlp_ratio),
            act_layer=act_layer,
            drop=drop,
        )
        if init_values is None:
            self.gamma_1, self.gamma_2 = None, None
        else:
            # Learnable per-channel residual gains (both branches).
            self.gamma_1 = nn.Parameter(
                init_values * torch.ones((dim)), requires_grad=True
            )
            self.gamma_2 = nn.Parameter(
                init_values * torch.ones((dim)), requires_grad=True
            )

    def forward(self, x, rel_pos_bias=None):
        attn_out = self.attn(self.norm1(x), rel_pos_bias=rel_pos_bias)
        if self.gamma_1 is not None:
            attn_out = self.gamma_1 * attn_out
        x = x + self.drop_path(attn_out)
        mlp_out = self.mlp(self.norm2(x))
        if self.gamma_2 is not None:
            mlp_out = self.gamma_2 * mlp_out
        return x + self.drop_path(mlp_out)
class PatchEmbed(nn.Module):
"""Image to Patch Embedding"""
def __init__(self, img_size=224, patch_size=16, in_chans=3, embed_dim=768):
super().__init__()
if isinstance(img_size, int):
img_size = to_3tuple(img_size)
if isinstance(patch_size, int):
patch_size = to_3tuple(patch_size)
num_patches = (
(img_size[2] // patch_size[2])
* (img_size[1] // patch_size[1])
* (img_size[0] // patch_size[0])
)
self.patch_shape = (
img_size[0] // patch_size[0],
img_size[1] // patch_size[1],
img_size[2] // patch_size[2],
)
self.img_size = img_size
self.patch_size = patch_size
self.in_chans = in_chans
self.embed_dim = embed_dim
self.num_patches = num_patches
patch_size_transpose = (patch_size[2], patch_size[0], patch_size[1])
self.proj = nn.Conv3d(
in_chans,
embed_dim,
kernel_size=patch_size_transpose,
stride=patch_size_transpose,
) # TODO: check very carefully about the initialization
def forward(self, x, **kwargs):
B, C, D, H, W = x.shape
# FIXME look at relaxing size constraints
# assert H == self.img_size[0] and W == self.img_size[1], \
# f"Input image size ({H}*{W}) doesn't match model ({self.img_size[0]}*{self.img_size[1]})."
x = self.proj(x)
Dp, Hp, Wp = x.shape[2], x.shape[3], x.shape[4]
x = x.flatten(2).transpose(1, 2)
return x, (Dp, Hp, Wp)
# class HybridEmbed(nn.Module):
# """ CNN Feature Map Embedding
# Extract feature map from CNN, flatten, project to embedding dim.
# """
# def __init__(self, backbone, img_size=224, feature_size=None, in_chans=3, embed_dim=768):
# super().__init__()
# assert isinstance(backbone, nn.Module)
# img_size = to_2tuple(img_size)
# self.img_size = img_size
# self.backbone = backbone
# if feature_size is None:
# with torch.no_grad():
# # FIXME this is hacky, but most reliable way of determining the exact dim of the output feature
# # map for all networks, the feature metadata has reliable channel and stride info, but using
# # stride to calc feature dim requires info about padding of each stage that isn't captured.
# training = backbone.training
# if training:
# backbone.eval()
# o = self.backbone(torch.zeros(1, in_chans, img_size[0], img_size[1]))[-1]
# feature_size = o.shape[-2:]
# feature_dim = o.shape[1]
# backbone.train(training)
# else:
# feature_size = to_2tuple(feature_size)
# feature_dim = self.backbone.feature_info.channels()[-1]
# self.num_patches = feature_size[0] * feature_size[1]
# self.proj = nn.Linear(feature_dim, embed_dim)
# def forward(self, x):
# x = self.backbone(x)[-1]
# x = x.flatten(2).transpose(1, 2)
# x = self.proj(x)
# return x
# class RelativePositionBias(nn.Module):
# def __init__(self, window_size, num_heads):
# super().__init__()
# self.window_size = window_size
# self.num_relative_distance = (2 * window_size[0] - 1) * (2 * window_size[1] - 1) + 3
# self.relative_position_bias_table = nn.Parameter(
# torch.zeros(self.num_relative_distance, num_heads)) # 2*Wh-1 * 2*Ww-1, nH
# # cls to token & token 2 cls & cls to cls
# # get pair-wise relative position index for each token inside the window
# coords_h = torch.arange(window_size[0])
# coords_w = torch.arange(window_size[1])
# coords = torch.stack(torch.meshgrid([coords_h, coords_w])) # 2, Wh, Ww
# coords_flatten = torch.flatten(coords, 1) # 2, Wh*Ww
# relative_coords = coords_flatten[:, :, None] - coords_flatten[:, None, :] # 2, Wh*Ww, Wh*Ww
# relative_coords = relative_coords.permute(1, 2, 0).contiguous() # Wh*Ww, Wh*Ww, 2
# relative_coords[:, :, 0] += window_size[0] - 1 # shift to start from 0
# relative_coords[:, :, 1] += window_size[1] - 1
# relative_coords[:, :, 0] *= 2 * window_size[1] - 1
# relative_position_index = \
# torch.zeros(size=(window_size[0] * window_size[1] + 1,) * 2, dtype=relative_coords.dtype)
# relative_position_index[1:, 1:] = relative_coords.sum(-1) # Wh*Ww, Wh*Ww
# relative_position_index[0, 0:] = self.num_relative_distance - 3
# relative_position_index[0:, 0] = self.num_relative_distance - 2
# relative_position_index[0, 0] = self.num_relative_distance - 1
# self.register_buffer("relative_position_index", relative_position_index)
# # trunc_normal_(self.relative_position_bias_table, std=.02)
# def forward(self):
# relative_position_bias = \
# self.relative_position_bias_table[self.relative_position_index.view(-1)].view(
# self.window_size[0] * self.window_size[1] + 1,
# self.window_size[0] * self.window_size[1] + 1, -1) # Wh*Ww,Wh*Ww,nH
# return relative_position_bias.permute(2, 0, 1).contiguous() # nH, Wh*Ww, Wh*Ww
class BEiT3D(nn.Module):
    """Vision Transformer with support for patch or hybrid CNN input stage.

    3-D (volumetric) BEiT backbone: a Conv3d patch embedding followed by
    ``depth`` transformer blocks. ``forward`` returns the patch-token output
    of EVERY block (cls token stripped), not just ``out_indices`` — see
    the note in ``forward_features``.
    """

    def __init__(
        self,
        img_size=224,
        patch_size=16,
        in_chans=3,
        num_classes=80,
        embed_dim=768,
        depth=12,
        num_heads=12,
        mlp_ratio=4.0,
        qkv_bias=False,
        qk_scale=None,
        drop_rate=0.0,
        attn_drop_rate=0.0,
        drop_path_rate=0.0,
        hybrid_backbone=None,
        norm_layer=None,
        init_values=None,
        use_checkpoint=False,
        use_abs_pos_emb=True,
        use_rel_pos_bias=False,
        use_shared_rel_pos_bias=False,
        out_indices=[3, 5, 7, 11],  # NOTE(review): mutable default; only read here
    ):
        super().__init__()
        norm_layer = norm_layer or partial(nn.LayerNorm, eps=1e-6)
        self.num_classes = num_classes
        self.num_features = (
            self.embed_dim
        ) = embed_dim  # num_features for consistency with other models
        if hybrid_backbone is not None:
            # Hybrid CNN stem is not supported in this 3-D port (bare raise).
            raise
            # self.patch_embed = HybridEmbed(
            #     hybrid_backbone, img_size=img_size, in_chans=in_chans, embed_dim=embed_dim)
        else:
            self.patch_embed = PatchEmbed(
                img_size=img_size,
                patch_size=patch_size,
                in_chans=in_chans,
                embed_dim=embed_dim,
            )
        num_patches = self.patch_embed.num_patches
        self.out_indices = out_indices
        self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dim))
        # self.mask_token = nn.Parameter(torch.zeros(1, 1, embed_dim))
        if use_abs_pos_emb:
            # Learnable absolute position embedding (+1 for the cls token).
            self.pos_embed = nn.Parameter(torch.zeros(1, num_patches + 1, embed_dim))
        else:
            self.pos_embed = None
        self.pos_drop = nn.Dropout(p=drop_rate)
        if use_shared_rel_pos_bias:
            # Shared relative position bias is not supported here (bare raise).
            raise
            # self.rel_pos_bias = RelativePositionBias(window_size=self.patch_embed.patch_shape, num_heads=num_heads)
        else:
            self.rel_pos_bias = None
        dpr = [
            x.item() for x in torch.linspace(0, drop_path_rate, depth)
        ]  # stochastic depth decay rule
        self.use_rel_pos_bias = use_rel_pos_bias
        self.use_checkpoint = use_checkpoint
        self.blocks = nn.ModuleList(
            [
                Block(
                    dim=embed_dim,
                    num_heads=num_heads,
                    mlp_ratio=mlp_ratio,
                    qkv_bias=qkv_bias,
                    qk_scale=qk_scale,
                    drop=drop_rate,
                    attn_drop=attn_drop_rate,
                    drop_path=dpr[i],
                    norm_layer=norm_layer,
                    init_values=init_values,
                    window_size=self.patch_embed.patch_shape
                    if use_rel_pos_bias
                    else None,
                )
                for i in range(depth)
            ]
        )
        if self.pos_embed is not None:
            trunc_normal_(self.pos_embed, std=0.02)
        trunc_normal_(self.cls_token, std=0.02)
        # trunc_normal_(self.mask_token, std=.02)
        # NOTE(review): duplicate assignment — out_indices was already set above.
        self.out_indices = out_indices
        self.apply(self._init_weights)
        self.fix_init_weight()

    def fix_init_weight(self):
        """Rescale attention/MLP output projections by 1/sqrt(2*layer_id) (BEiT init)."""
        def rescale(param, layer_id):
            param.div_(math.sqrt(2.0 * layer_id))

        for layer_id, layer in enumerate(self.blocks):
            rescale(layer.attn.proj.weight.data, layer_id + 1)
            rescale(layer.mlp.fc2.weight.data, layer_id + 1)

    def _init_weights(self, m):
        """Default module init: truncated-normal Linear weights, unit LayerNorm."""
        if isinstance(m, nn.Linear):
            trunc_normal_(m.weight, std=0.02)
            if isinstance(m, nn.Linear) and m.bias is not None:
                nn.init.constant_(m.bias, 0)
        elif isinstance(m, nn.LayerNorm):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)

    def init_weights(
        self,
        pretrained="backbones/encoders/pretrained_models/beit_base_patch16_224_pt22k_ft22k.pth",
        bootstrap_method="centering",
    ):
        """Initialize the weights in backbone.
        Args:
            pretrained (str, optional): Path to pre-trained weights.
                Defaults to None.
        """

        def _init_weights(m):
            if isinstance(m, nn.Linear):
                trunc_normal_(m.weight, std=0.02)
                if isinstance(m, nn.Linear) and m.bias is not None:
                    nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.LayerNorm):
                nn.init.constant_(m.bias, 0)
                nn.init.constant_(m.weight, 1.0)

        if isinstance(pretrained, str):
            self.apply(_init_weights)
            logger = get_root_logger()
            # Load + inflate/center 2-D pretrained weights into the 3-D model.
            load_checkpoint(
                self,
                pretrained,
                bootstrap_method=bootstrap_method,
                strict=False,
                logger=logger,
            )
        elif pretrained is None:
            self.apply(_init_weights)
        else:
            raise TypeError("pretrained must be a str or None")

    def get_num_layers(self):
        """Number of transformer blocks."""
        return len(self.blocks)

    @torch.jit.ignore
    def no_weight_decay(self):
        """Parameter names to exclude from weight decay."""
        return {"pos_embed", "cls_token"}

    def forward_features(self, x):
        """Embed volume ``x`` (B, C, D, H, W) and return per-block patch features.

        NOTE(review): appends features after EVERY block; ``self.out_indices``
        is not consulted here — presumably filtering happens in the caller.
        """
        B, C, D, H, W = x.shape
        x, (Dp, Hp, Wp) = self.patch_embed(x)  # B x T x D
        batch_size, seq_len, _ = x.size()
        cls_tokens = self.cls_token.expand(
            batch_size, -1, -1
        )  # stole cls_tokens impl from Phil Wang, thanks
        x = torch.cat((cls_tokens, x), dim=1)  # B x (T + 1) x D
        if self.pos_embed is not None:
            x = x + self.pos_embed
        x = self.pos_drop(x)
        rel_pos_bias = self.rel_pos_bias() if self.rel_pos_bias is not None else None
        features = []
        for i, blk in enumerate(self.blocks):
            if self.use_checkpoint:
                # Trade compute for memory via activation checkpointing.
                x = checkpoint.checkpoint(blk, x, rel_pos_bias)
            else:
                x = blk(x, rel_pos_bias)
            # Strip the cls token before recording this block's output.
            xp = x[:, 1:, :]
            features.append(xp.contiguous())
        return features

    def forward(self, x):
        x = self.forward_features(x)
        return x
| Python |
3D | yuhui-zh15/TransSeg | src/backbones/encoders/dino3d.py | .py | 16,185 | 500 | # Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Mostly copy-paste from timm library.
https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/vision_transformer.py
"""
import math
from functools import partial
import torch.nn.functional as F
import torch
import torch.nn as nn
from timm.models.layers import DropPath, to_2tuple, to_3tuple
def load_checkpoint(
    model,
    pretrained_weights=None,
    checkpoint_key="teacher",
    map_location="cpu",
    bootstrap_method="centering",
):
    """Load 2-D DINO pretrained weights into a 3-D model, adapting as needed.

    Three adaptations are applied to the checkpoint before loading:
    1. The patch-embedding conv kernel is converted from (E, 3, kh, kw) to
       (E, in_chans, kh, kw, n_slices) by graying, channel-replicating, and
       either "inflation" (average over slices) or "centering" (all weight on
       the middle slice).
    2. If patch counts differ, the position embedding grid is bilinearly
       resized to the model's grid.
    3. ``module.`` / ``backbone.`` prefixes are stripped.

    Args:
        model: target model; must expose ``patch_embed.patch_size``,
            ``patch_embed.in_chans`` and ``patch_embed.num_patches``.
        pretrained_weights: checkpoint path. NOTE(review): the default of
            None would make ``torch.load(None)`` fail — callers are expected
            to always pass a path.
        checkpoint_key: sub-dict to select from the checkpoint (e.g. "teacher").
        map_location: passed to ``torch.load``.
        bootstrap_method: "inflation" or "centering" (anything else raises).
    """
    state_dict = torch.load(pretrained_weights, map_location=map_location)
    if checkpoint_key is not None and checkpoint_key in state_dict:
        print(f"Take key {checkpoint_key} in provided checkpoint dict")
        state_dict = state_dict[checkpoint_key]
    # --- starting inflate/center weights ---
    n_slices = model.patch_embed.patch_size[-1]
    n_chans = model.patch_embed.in_chans
    key = "patch_embed.proj.weight"
    emb = state_dict[key]
    print("Old:", emb.shape, emb.sum())
    emb = emb.sum(1, keepdim=True)  # from colored to grayed
    emb = (
        emb.repeat(1, n_chans, 1, 1) / n_chans
    )  # from 1-channel grayed to n-channel grayed
    # emb = emb.unsqueeze(2).repeat(1, 1, n_slices, 1, 1) # from 2D to 3D
    emb = emb.unsqueeze(4)
    emb = emb.repeat(1, 1, 1, 1, n_slices)  # from 2D to 3D
    if bootstrap_method == "inflation":
        print("Inflation!!!")
        # Spread the 2-D kernel evenly across slices (preserves response scale).
        emb = emb / n_slices
    elif bootstrap_method == "centering":
        print("Centering!!!")
        # Keep the full 2-D kernel only on the center slice; zero the rest.
        center_idx = n_slices // 2
        all_idxs = list(range(n_slices))
        all_idxs.pop(center_idx)
        emb[:, :, :, :, all_idxs] = 0
    else:
        raise
    print("New:", emb.shape, emb.sum())
    state_dict[key] = emb
    # --- ending inflate/center weights ---
    ori_num_patches = state_dict["pos_embed"].shape[1] - 1
    cur_num_patches = model.patch_embed.num_patches
    if ori_num_patches != cur_num_patches:
        # Resize the (square) patch position grid to the model's grid size.
        emb = state_dict["pos_embed"]
        cls_emb = emb[:, 0]
        emb = emb[:, 1:]
        ori_patch_size = int(ori_num_patches**0.5)
        cur_patch_size = int(cur_num_patches**0.5)
        feature_size = emb.shape[-1]
        emb_resize = emb.view(1, ori_patch_size, ori_patch_size, feature_size)
        emb_resize = emb_resize.permute(0, 3, 1, 2)
        emb_new = F.interpolate(emb_resize, (cur_patch_size, cur_patch_size))
        emb_new = emb_new.permute(0, 2, 3, 1)
        emb_new = emb_new.reshape(1, cur_patch_size * cur_patch_size, feature_size)
        emb_new = emb_new.squeeze(0)
        # NOTE(review): the cls position embedding is appended LAST here,
        # while the model prepends the cls token — verify this ordering is
        # intentional before relying on the resized embedding.
        emb_new = torch.cat((emb_new, cls_emb))
        emb_new = emb_new.unsqueeze(0)
        state_dict["pos_embed"] = emb_new
    # remove `module.` prefix
    state_dict = {k.replace("module.", ""): v for k, v in state_dict.items()}
    # remove `backbone.` prefix induced by multicrop wrapper
    state_dict = {k.replace("backbone.", ""): v for k, v in state_dict.items()}
    msg = model.load_state_dict(state_dict, strict=False)
    print(
        "Pretrained weights found at {} and loaded with msg: {}".format(
            pretrained_weights, msg
        )
    )
def drop_path(x, drop_prob: float = 0.0, training: bool = False):
    """Per-sample stochastic depth: zero a sample's whole path with prob ``drop_prob``.

    Identity when ``drop_prob`` is 0 or when not training. Surviving samples
    are rescaled by 1/keep_prob so the expected value is unchanged.
    """
    if not training or drop_prob == 0.0:
        return x
    keep_prob = 1 - drop_prob
    # Broadcastable per-sample mask: one Bernoulli draw per batch element,
    # regardless of tensor rank.
    mask_shape = (x.shape[0],) + (1,) * (x.ndim - 1)
    mask = torch.rand(mask_shape, dtype=x.dtype, device=x.device)
    mask.add_(keep_prob).floor_()  # binarize
    return x.div(keep_prob) * mask
def trunc_normal_(tensor, mean=0.0, std=1.0, a=-2.0, b=2.0):
    # type: (Tensor, float, float, float, float) -> Tensor
    """Fill ``tensor`` in-place from N(mean, std) truncated to [a, b]; returns ``tensor``."""
    return _no_grad_trunc_normal_(tensor, mean=mean, std=std, a=a, b=b)
def _no_grad_trunc_normal_(tensor, mean, std, a, b):
# Cut & paste from PyTorch official master until it's in a few official releases - RW
# Method based on https://people.sc.fsu.edu/~jburkardt/presentations/truncated_normal.pdf
def norm_cdf(x):
# Computes standard normal cumulative distribution function
return (1.0 + math.erf(x / math.sqrt(2.0))) / 2.0
if (mean < a - 2 * std) or (mean > b + 2 * std):
warnings.warn(
"mean is more than 2 std from [a, b] in nn.init.trunc_normal_. "
"The distribution of values may be incorrect.",
stacklevel=2,
)
with torch.no_grad():
# Values are generated by using a truncated uniform distribution and
# then using the inverse CDF for the normal distribution.
# Get upper and lower cdf values
l = norm_cdf((a - mean) / std)
u = norm_cdf((b - mean) / std)
# Uniformly fill tensor with values from [l, u], then translate to
# [2l-1, 2u-1].
tensor.uniform_(2 * l - 1, 2 * u - 1)
# Use inverse cdf transform for normal distribution to get truncated
# standard normal
tensor.erfinv_()
# Transform to proper mean, std
tensor.mul_(std * math.sqrt(2.0))
tensor.add_(mean)
# Clamp to ensure it's in the proper range
tensor.clamp_(min=a, max=b)
return tensor
class DropPath(nn.Module):
    """Per-sample stochastic depth (delegates to the module-level ``drop_path``).

    NOTE: this local class shadows the ``DropPath`` imported from
    ``timm.models.layers`` at the top of the file.
    """

    def __init__(self, drop_prob=None):
        super().__init__()
        self.drop_prob = drop_prob  # probability of zeroing the residual path

    def forward(self, x):
        return drop_path(x, self.drop_prob, self.training)
class Mlp(nn.Module):
    """Transformer MLP: Linear -> activation -> Dropout -> Linear -> Dropout."""

    def __init__(
        self,
        in_features,
        hidden_features=None,
        out_features=None,
        act_layer=nn.GELU,
        drop=0.0,
    ):
        super().__init__()
        # Fall back to the input width when hidden/output widths are not given.
        hidden_features = hidden_features or in_features
        out_features = out_features or in_features
        self.fc1 = nn.Linear(in_features, hidden_features)
        self.act = act_layer()
        self.fc2 = nn.Linear(hidden_features, out_features)
        # One Dropout module reused after both projections.
        self.drop = nn.Dropout(drop)

    def forward(self, x):
        hidden = self.drop(self.act(self.fc1(x)))
        return self.drop(self.fc2(hidden))
class Attention(nn.Module):
    """Multi-head self-attention that returns both the output and the attention map."""

    def __init__(
        self,
        dim,
        num_heads=8,
        qkv_bias=False,
        qk_scale=None,
        attn_drop=0.0,
        proj_drop=0.0,
    ):
        super().__init__()
        self.num_heads = num_heads
        head_dim = dim // num_heads
        # 1/sqrt(head_dim) unless an explicit scale is supplied.
        self.scale = qk_scale or head_dim**-0.5
        self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias)
        self.attn_drop = nn.Dropout(attn_drop)
        self.proj = nn.Linear(dim, dim)
        self.proj_drop = nn.Dropout(proj_drop)

    def forward(self, x):
        B, N, C = x.shape
        head_dim = C // self.num_heads
        # One projection for q, k, v: reshape to (3, B, heads, N, head_dim).
        qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, head_dim)
        q, k, v = qkv.permute(2, 0, 3, 1, 4).unbind(0)
        # Scaled dot-product attention over the token axis.
        attn = ((q @ k.transpose(-2, -1)) * self.scale).softmax(dim=-1)
        attn = self.attn_drop(attn)
        out = (attn @ v).transpose(1, 2).reshape(B, N, C)
        out = self.proj_drop(self.proj(out))
        # The (post-dropout) attention map is returned alongside the output.
        return out, attn
class Block(nn.Module):
    """Standard pre-norm transformer block: MHSA + MLP, each with a residual.

    ``forward(..., return_attention=True)`` short-circuits and returns the
    attention map of this block instead of the transformed tokens.
    """

    def __init__(
        self,
        dim,
        num_heads,
        mlp_ratio=4.0,
        qkv_bias=False,
        qk_scale=None,
        drop=0.0,
        attn_drop=0.0,
        drop_path=0.0,
        act_layer=nn.GELU,
        norm_layer=nn.LayerNorm,
    ):
        super().__init__()
        self.norm1 = norm_layer(dim)
        self.attn = Attention(
            dim,
            num_heads=num_heads,
            qkv_bias=qkv_bias,
            qk_scale=qk_scale,
            attn_drop=attn_drop,
            proj_drop=drop,
        )
        # Identity when no stochastic depth is requested.
        self.drop_path = DropPath(drop_path) if drop_path > 0.0 else nn.Identity()
        self.norm2 = norm_layer(dim)
        self.mlp = Mlp(
            in_features=dim,
            hidden_features=int(dim * mlp_ratio),
            act_layer=act_layer,
            drop=drop,
        )

    def forward(self, x, return_attention=False):
        y, attn = self.attn(self.norm1(x))
        if return_attention:
            # Caller only wants the attention map (e.g. for visualization).
            return attn
        x = x + self.drop_path(y)
        return x + self.drop_path(self.mlp(self.norm2(x)))
class PatchEmbed(nn.Module):
    """Volume to patch-token embedding via Conv3d (depth axis moved last)."""

    def __init__(self, img_size=224, patch_size=16, in_chans=3, embed_dim=768):
        super().__init__()
        if isinstance(patch_size, int):
            patch_size = to_3tuple(patch_size)
        # num_patches counts only the first spatial axis squared; the depth
        # axis is expected to collapse after the projection.
        side = img_size if type(img_size) == int else img_size[0]
        num_patches = (side // patch_size[0]) * (side // patch_size[0])
        self.img_size = img_size
        self.patch_size = patch_size
        self.num_patches = num_patches
        self.in_chans = in_chans
        self.proj = nn.Conv3d(
            in_chans, embed_dim, kernel_size=patch_size, stride=patch_size
        )

    def forward(self, x):
        B, C, D, H, W = x.shape
        # Move depth last: (B, C, D, H, W) -> (B, C, H, W, D) before projecting.
        x = self.proj(x.permute(0, 1, 3, 4, 2))
        # Drop the post-projection dim 2 if it is singleton, then flatten the
        # remaining spatial dims into the token axis.
        x = x.squeeze(2)
        return x.flatten(2).transpose(1, 2)
class VisionTransformer3D(nn.Module):
    """Vision Transformer

    DINO-style ViT adapted to volumetric input: a Conv3d patch embedding
    followed by ``depth`` transformer blocks; ``forward`` returns the cls
    token embedding after the final norm.
    """

    def __init__(
        self,
        img_size=[224],
        patch_size=16,
        in_chans=3,
        num_classes=0,
        embed_dim=768,
        depth=12,
        num_heads=12,
        mlp_ratio=4.0,
        qkv_bias=False,
        qk_scale=None,
        drop_rate=0.0,
        attn_drop_rate=0.0,
        drop_path_rate=0.0,
        norm_layer=nn.LayerNorm,
        **kwargs,
    ):
        super().__init__()
        self.num_features = self.embed_dim = embed_dim
        # Only the last element of img_size is used for the patch grid.
        self.patch_embed = PatchEmbed(
            img_size=img_size[-1],
            patch_size=patch_size,
            in_chans=in_chans,
            embed_dim=embed_dim,
        )
        num_patches = self.patch_embed.num_patches
        self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dim))
        # Learnable absolute position embedding (+1 for the cls token, at index 0).
        self.pos_embed = nn.Parameter(torch.zeros(1, num_patches + 1, embed_dim))
        self.pos_drop = nn.Dropout(p=drop_rate)
        dpr = [
            x.item() for x in torch.linspace(0, drop_path_rate, depth)
        ]  # stochastic depth decay rule
        self.blocks = nn.ModuleList(
            [
                Block(
                    dim=embed_dim,
                    num_heads=num_heads,
                    mlp_ratio=mlp_ratio,
                    qkv_bias=qkv_bias,
                    qk_scale=qk_scale,
                    drop=drop_rate,
                    attn_drop=attn_drop_rate,
                    drop_path=dpr[i],
                    norm_layer=norm_layer,
                )
                for i in range(depth)
            ]
        )
        self.norm = norm_layer(embed_dim)
        # Classifier head
        self.head = (
            nn.Linear(embed_dim, num_classes) if num_classes > 0 else nn.Identity()
        )
        trunc_normal_(self.pos_embed, std=0.02)
        trunc_normal_(self.cls_token, std=0.02)
        # self.apply(self._init_weights)

    def init_weights(
        self,
        pretrained="./backbones/encoders/pretrained_models/dino_vitbase16_pretrain.pth",
        bootstrap_method="centering",
    ):
        """Initialize the weights in backbone.
        Args:
            pretrained (str, optional): Path to pre-trained weights.
                Defaults to None.
        """

        def _init_weights(m):
            if isinstance(m, nn.Linear):
                trunc_normal_(m.weight, std=0.02)
                if isinstance(m, nn.Linear) and m.bias is not None:
                    nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.LayerNorm):
                nn.init.constant_(m.bias, 0)
                nn.init.constant_(m.weight, 1.0)

        if isinstance(pretrained, str):
            self.apply(_init_weights)
            # Load + inflate/center 2-D DINO weights into this 3-D model.
            load_checkpoint(
                self,
                pretrained_weights=pretrained,
                bootstrap_method=bootstrap_method,
                checkpoint_key="teacher",
            )
        elif pretrained is None:
            self.apply(_init_weights)
        else:
            raise TypeError("pretrained must be a str or None")

    def interpolate_pos_encoding(self, x, w, h):
        """Bicubically resize the patch position grid when input size differs.

        Returns ``self.pos_embed`` unchanged when the token count and aspect
        match the trained configuration.
        """
        npatch = x.shape[1] - 1
        N = self.pos_embed.shape[1] - 1
        if npatch == N and w == h:
            return self.pos_embed
        class_pos_embed = self.pos_embed[:, 0]
        patch_pos_embed = self.pos_embed[:, 1:]
        dim = x.shape[-1]
        # NOTE(review): self.patch_embed.patch_size is a 3-tuple here, so this
        # integer division would raise TypeError if this branch is reached —
        # confirm whether this path is ever taken with 3-D patch sizes.
        w0 = w // self.patch_embed.patch_size
        h0 = h // self.patch_embed.patch_size
        # we add a small number to avoid floating point error in the interpolation
        # see discussion at https://github.com/facebookresearch/dino/issues/8
        w0, h0 = w0 + 0.1, h0 + 0.1
        patch_pos_embed = nn.functional.interpolate(
            patch_pos_embed.reshape(
                1, int(math.sqrt(N)), int(math.sqrt(N)), dim
            ).permute(0, 3, 1, 2),
            scale_factor=(w0 / math.sqrt(N), h0 / math.sqrt(N)),
            mode="bicubic",
        )
        assert (
            int(w0) == patch_pos_embed.shape[-2]
            and int(h0) == patch_pos_embed.shape[-1]
        )
        patch_pos_embed = patch_pos_embed.permute(0, 2, 3, 1).view(1, -1, dim)
        return torch.cat((class_pos_embed.unsqueeze(0), patch_pos_embed), dim=1)

    def prepare_tokens(self, x):
        """Patch-embed volume ``x``, prepend cls token, add position encoding."""
        # B, nc, w, h = x.shape
        B, c, d, w, h = x.shape
        x = self.patch_embed(x)  # patch linear embedding
        # add the [CLS] token to the embed patch tokens
        cls_tokens = self.cls_token.expand(B, -1, -1)
        x = torch.cat((cls_tokens, x), dim=1)
        # add positional encoding to each token
        x = x + self.interpolate_pos_encoding(x, w, h)
        return self.pos_drop(x)

    def forward(self, x):
        """Return the final-norm cls token embedding, shape (B, embed_dim)."""
        x = self.prepare_tokens(x)
        for blk in self.blocks:
            x = blk(x)
        x = self.norm(x)
        return x[:, 0]

    def get_last_selfattention(self, x):
        """Return the attention map of the LAST block (for visualization)."""
        x = self.prepare_tokens(x)
        for i, blk in enumerate(self.blocks):
            if i < len(self.blocks) - 1:
                x = blk(x)
            else:
                # return attention of the last block
                return blk(x, return_attention=True)

    def get_intermediate_layers(self, x, n=1):
        """Return normalized token outputs of the last ``n`` blocks."""
        x = self.prepare_tokens(x)
        # we return the output tokens from the `n` last blocks
        output = []
        for i, blk in enumerate(self.blocks):
            x = blk(x)
            if len(self.blocks) - i <= n:
                output.append(self.norm(x))
        return output
if __name__ == "__main__":
    # Smoke test: build a ViT-Base 3-D encoder, bootstrap it from 2-D DINO
    # weights (requires the checkpoint file on disk), and run a random volume.
    force_2d = False
    img_size = (5, 512, 512)  # (depth, height, width)
    patch_size = 16
    bootstrap_method = "centering"
    encoder = VisionTransformer3D(
        img_size=img_size if not force_2d else (1, img_size[1], img_size[2]),
        # Patch depth equals the full volume depth, so the depth axis
        # collapses to a single token plane.
        patch_size=(patch_size, patch_size, img_size[0])
        if not force_2d
        else (patch_size, patch_size, 1),
        embed_dim=768,
        depth=12,
        in_chans=1,
        num_heads=12,
        mlp_ratio=4,
        qkv_bias=True,
        norm_layer=partial(nn.LayerNorm, eps=1e-6),
    )
    encoder.init_weights(bootstrap_method=bootstrap_method)
    x = torch.randn(4, 1, 5, 512, 512)
    out = encoder(x)
    # NOTE(review): forward returns a (B, embed_dim) tensor, so this iterates
    # over batch rows and prints each row's shape — confirm a list of feature
    # maps was not intended here.
    print([y.shape for y in out])
| Python |
3D | yuhui-zh15/TransSeg | src/backbones/encoders/swin_transformer.py | .py | 33,378 | 971 | # --------------------------------------------------------
# Swin Transformer
# Copyright (c) 2021 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ze Liu, Yutong Lin, Yixuan Wei
# --------------------------------------------------------
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.utils.checkpoint as checkpoint
import numpy as np
from timm.models.layers import DropPath, to_2tuple, to_3tuple, trunc_normal_
from mmseg.utils import get_root_logger
from mmcv.parallel import is_module_wrapper
from mmcv.runner import get_dist_info
def load_state_dict(module, state_dict, strict=False, logger=None):
    """Load state_dict to a module.
    This method is modified from :meth:`torch.nn.Module.load_state_dict`.
    Default value for ``strict`` is set to ``False`` and the message for
    param mismatch will be shown even if strict is False.
    Args:
        module (Module): Module that receives the state_dict.
        state_dict (OrderedDict): Weights.
        strict (bool): whether to strictly enforce that the keys
            in :attr:`state_dict` match the keys returned by this module's
            :meth:`~torch.nn.Module.state_dict` function. Default: ``False``.
        logger (:obj:`logging.Logger`, optional): Logger to log the error
            message. If not specified, print function will be used.
    """
    # Accumulators filled in by the recursive `load` closure below.
    unexpected_keys = []
    all_missing_keys = []
    err_msg = []
    metadata = getattr(state_dict, "_metadata", None)
    # Shallow-copy so we can attach metadata without mutating the caller's dict.
    state_dict = state_dict.copy()
    if metadata is not None:
        state_dict._metadata = metadata

    # use _load_from_state_dict to enable checkpoint version control
    def load(module, prefix=""):
        # recursively check parallel module in case that the model has a
        # complicated structure, e.g., nn.Module(nn.Module(DDP))
        if is_module_wrapper(module):
            module = module.module
        local_metadata = {} if metadata is None else metadata.get(prefix[:-1], {})
        module._load_from_state_dict(
            state_dict,
            prefix,
            local_metadata,
            True,
            all_missing_keys,
            unexpected_keys,
            err_msg,
        )
        for name, child in module._modules.items():
            if child is not None:
                load(child, prefix + name + ".")

    load(module)
    load = None  # break load->load reference cycle
    # ignore "num_batches_tracked" of BN layers
    missing_keys = [key for key in all_missing_keys if "num_batches_tracked" not in key]
    if unexpected_keys:
        err_msg.append(
            "unexpected key in source " f'state_dict: {", ".join(unexpected_keys)}\n'
        )
    if missing_keys:
        err_msg.append(
            f'missing keys in source state_dict: {", ".join(missing_keys)}\n'
        )
    # Only rank 0 reports/raises, to avoid duplicated logs in distributed runs.
    rank, _ = get_dist_info()
    if len(err_msg) > 0 and rank == 0:
        err_msg.insert(0, "The model and loaded state dict do not match exactly\n")
        err_msg = "\n".join(err_msg)
        if strict:
            raise RuntimeError(err_msg)
        elif logger is not None:
            logger.warning(err_msg)
        else:
            print(err_msg)
def load_checkpoint(
    model,
    filename,
    bootstrap_method="centering",
    map_location="cpu",
    strict=False,
    logger=None,
):
    """Load checkpoint from a file or URI and bootstrap 2D weights to 3D.

    The checkpoint's patch-embedding projection weight is converted from a
    colored 2D kernel to an ``n_chans``-channel grayscale 3D kernel before
    loading, using either weight inflation or weight centering.

    Args:
        model (Module): Module to load checkpoint.
        filename (str): Accept local filepath, URL, ``torchvision://xxx``,
            ``open-mmlab://xxx``. Please refer to ``docs/model_zoo.md`` for
            details.
        bootstrap_method (str): Either ``"inflation"`` (replicated slices are
            rescaled by ``1/n_slices``) or ``"centering"`` (only the center
            slice is kept non-zero). Default: ``"centering"``.
        map_location (str): Same as :func:`torch.load`.
        strict (bool): Whether to allow different params for the model and
            checkpoint.
        logger (:mod:`logging.Logger` or None): The logger for error message.

    Returns:
        dict or OrderedDict: The loaded checkpoint.

    Raises:
        RuntimeError: If the loaded file does not contain a dict.
        ValueError: If ``bootstrap_method`` is unknown.
    """
    checkpoint = torch.load(filename, map_location=map_location)
    # OrderedDict is a subclass of dict. Validate BEFORE indexing below
    # (the original code indexed checkpoint["model"] first and only then
    # checked it was a dict).
    if not isinstance(checkpoint, dict):
        raise RuntimeError(f"No state_dict found in checkpoint file {filename}")
    # --- starting inflate/center weights ---
    n_slices = model.patch_embed.patch_size[0]
    n_chans = model.patch_embed.in_chans
    key = "patch_embed.proj.weight"
    emb = checkpoint["model"][key]
    print("Old:", emb.shape, emb.sum())
    emb = emb.sum(1, keepdim=True)  # from colored to grayed
    emb = (
        emb.repeat(1, n_chans, 1, 1) / n_chans
    )  # from 1-channel grayed to n-channel grayed
    emb = emb.unsqueeze(2).repeat(1, 1, n_slices, 1, 1)  # from 2D to 3D
    if bootstrap_method == "inflation":
        print("Inflation!!!")
        # Every replicated slice contributes; rescale to keep activations stable.
        emb = emb / n_slices
    elif bootstrap_method == "centering":
        print("Centering!!!")
        # Zero out every slice except the center one.
        center_idx = n_slices // 2
        all_idxs = list(range(n_slices))
        all_idxs.pop(center_idx)
        emb[:, :, all_idxs, :, :] = 0
    else:
        # BUGFIX: the original used a bare `raise` with no active exception,
        # which itself raises "RuntimeError: No active exception to re-raise".
        raise ValueError(f"Unknown bootstrap_method: {bootstrap_method}")
    print("New:", emb.shape, emb.sum())
    checkpoint["model"][key] = emb
    # --- ending inflate/center weights ---
    # get state_dict from checkpoint
    if "state_dict" in checkpoint:
        state_dict = checkpoint["state_dict"]
    elif "model" in checkpoint:
        state_dict = checkpoint["model"]
    else:
        state_dict = checkpoint
    # strip prefix of state_dict
    if list(state_dict.keys())[0].startswith("module."):
        state_dict = {k[7:]: v for k, v in state_dict.items()}
    # for MoBY, load model of online branch
    if sorted(list(state_dict.keys()))[0].startswith("encoder"):
        state_dict = {
            k.replace("encoder.", ""): v
            for k, v in state_dict.items()
            if k.startswith("encoder.")
        }

    def _warn(msg):
        # logger may legitimately be None (see docstring); fall back to print
        # instead of crashing on logger.warning.
        if logger is not None:
            logger.warning(msg)
        else:
            print(msg)

    # reshape absolute position embedding
    if state_dict.get("absolute_pos_embed") is not None:
        absolute_pos_embed = state_dict["absolute_pos_embed"]
        N1, L, C1 = absolute_pos_embed.size()
        N2, C2, H, W = model.absolute_pos_embed.size()
        if N1 != N2 or C1 != C2 or L != H * W:
            _warn("Error in loading absolute_pos_embed, pass")
        else:
            # stored as (N, H*W, C); the model expects (N, C, H, W)
            state_dict["absolute_pos_embed"] = absolute_pos_embed.view(
                N2, H, W, C2
            ).permute(0, 3, 1, 2)
    # interpolate position bias table if needed
    relative_position_bias_table_keys = [
        k for k in state_dict.keys() if "relative_position_bias_table" in k
    ]
    for table_key in relative_position_bias_table_keys:
        table_pretrained = state_dict[table_key]
        table_current = model.state_dict()[table_key]
        L1, nH1 = table_pretrained.size()
        L2, nH2 = table_current.size()
        if nH1 != nH2:
            _warn(f"Error in loading {table_key}, pass")
        elif L1 != L2:
            # The bias table is a flattened (2*Wh-1, 2*Ww-1) grid; resize it
            # bicubically when the pretrained window size differs.
            S1 = int(L1**0.5)
            S2 = int(L2**0.5)
            table_pretrained_resized = F.interpolate(
                table_pretrained.permute(1, 0).view(1, nH1, S1, S1),
                size=(S2, S2),
                mode="bicubic",
            )
            state_dict[table_key] = table_pretrained_resized.view(nH2, L2).permute(
                1, 0
            )
    # load state_dict
    load_state_dict(model, state_dict, strict, logger)
    return checkpoint
class Mlp(nn.Module):
"""Multilayer perceptron."""
def __init__(
self,
in_features,
hidden_features=None,
out_features=None,
act_layer=nn.GELU,
drop=0.0,
):
super().__init__()
out_features = out_features or in_features
hidden_features = hidden_features or in_features
self.fc1 = nn.Linear(in_features, hidden_features)
self.act = act_layer()
self.fc2 = nn.Linear(hidden_features, out_features)
self.drop = nn.Dropout(drop)
def forward(self, x):
x = self.fc1(x)
x = self.act(x)
x = self.drop(x)
x = self.fc2(x)
x = self.drop(x)
return x
def window_partition(x, window_size):
    """Split a feature map into non-overlapping square windows.

    Args:
        x: (B, H, W, C)
        window_size (int): window size

    Returns:
        windows: (num_windows*B, window_size, window_size, C)
    """
    batch, height, width, channels = x.shape
    # Factor each spatial axis into (num_windows, window_size).
    grid = x.view(
        batch,
        height // window_size,
        window_size,
        width // window_size,
        window_size,
        channels,
    )
    # Bring the two window-grid axes together, then flatten them into batch.
    return (
        grid.permute(0, 1, 3, 2, 4, 5)
        .contiguous()
        .view(-1, window_size, window_size, channels)
    )
def window_reverse(windows, window_size, H, W):
    """Inverse of :func:`window_partition`: reassemble windows into a map.

    Args:
        windows: (num_windows*B, window_size, window_size, C)
        window_size (int): Window size
        H (int): Height of image
        W (int): Width of image

    Returns:
        x: (B, H, W, C)
    """
    # Recover the batch size from the total number of windows.
    batch = windows.shape[0] // (H * W // window_size // window_size)
    grid = windows.view(
        batch, H // window_size, W // window_size, window_size, window_size, -1
    )
    # Interleave window-grid axes with in-window axes and flatten back.
    return grid.permute(0, 1, 3, 2, 4, 5).contiguous().view(batch, H, W, -1)
class WindowAttention(nn.Module):
    """Window based multi-head self attention (W-MSA) module with relative position bias.
    It supports both of shifted and non-shifted window.
    Args:
        dim (int): Number of input channels.
        window_size (tuple[int]): The height and width of the window.
        num_heads (int): Number of attention heads.
        qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True
        qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set
        attn_drop (float, optional): Dropout ratio of attention weight. Default: 0.0
        proj_drop (float, optional): Dropout ratio of output. Default: 0.0
    """
    def __init__(
        self,
        dim,
        window_size,
        num_heads,
        qkv_bias=True,
        qk_scale=None,
        attn_drop=0.0,
        proj_drop=0.0,
    ):
        super().__init__()
        self.dim = dim
        self.window_size = window_size  # Wh, Ww
        self.num_heads = num_heads
        head_dim = dim // num_heads
        # Attention logits are scaled by 1/sqrt(head_dim) unless overridden.
        self.scale = qk_scale or head_dim**-0.5
        # define a parameter table of relative position bias:
        # one learnable bias per head for every possible (dh, dw) offset
        self.relative_position_bias_table = nn.Parameter(
            torch.zeros((2 * window_size[0] - 1) * (2 * window_size[1] - 1), num_heads)
        )  # 2*Wh-1 * 2*Ww-1, nH
        # get pair-wise relative position index for each token inside the window
        coords_h = torch.arange(self.window_size[0])
        coords_w = torch.arange(self.window_size[1])
        coords = torch.stack(torch.meshgrid([coords_h, coords_w]))  # 2, Wh, Ww
        coords_flatten = torch.flatten(coords, 1)  # 2, Wh*Ww
        relative_coords = (
            coords_flatten[:, :, None] - coords_flatten[:, None, :]
        )  # 2, Wh*Ww, Wh*Ww
        relative_coords = relative_coords.permute(
            1, 2, 0
        ).contiguous()  # Wh*Ww, Wh*Ww, 2
        relative_coords[:, :, 0] += self.window_size[0] - 1  # shift to start from 0
        relative_coords[:, :, 1] += self.window_size[1] - 1
        # Row-major flattening of the (dh, dw) offset into a single table index.
        relative_coords[:, :, 0] *= 2 * self.window_size[1] - 1
        relative_position_index = relative_coords.sum(-1)  # Wh*Ww, Wh*Ww
        # Registered as a buffer (not a parameter): fixed lookup indices.
        self.register_buffer("relative_position_index", relative_position_index)
        self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias)
        self.attn_drop = nn.Dropout(attn_drop)
        self.proj = nn.Linear(dim, dim)
        self.proj_drop = nn.Dropout(proj_drop)
        trunc_normal_(self.relative_position_bias_table, std=0.02)
        self.softmax = nn.Softmax(dim=-1)
    def forward(self, x, mask=None):
        """Forward function.
        Args:
            x: input features with shape of (num_windows*B, N, C)
            mask: (0/-inf) mask with shape of (num_windows, Wh*Ww, Wh*Ww) or None
        """
        B_, N, C = x.shape
        # Single linear pass produces q, k, v: shape (3, B_, nH, N, C // nH).
        qkv = (
            self.qkv(x)
            .reshape(B_, N, 3, self.num_heads, C // self.num_heads)
            .permute(2, 0, 3, 1, 4)
        )
        q, k, v = (
            qkv[0],
            qkv[1],
            qkv[2],
        )  # make torchscript happy (cannot use tensor as tuple)
        q = q * self.scale
        attn = q @ k.transpose(-2, -1)
        # Look up the learned bias for every token pair inside the window.
        relative_position_bias = self.relative_position_bias_table[
            self.relative_position_index.view(-1)
        ].view(
            self.window_size[0] * self.window_size[1],
            self.window_size[0] * self.window_size[1],
            -1,
        )  # Wh*Ww,Wh*Ww,nH
        relative_position_bias = relative_position_bias.permute(
            2, 0, 1
        ).contiguous()  # nH, Wh*Ww, Wh*Ww
        attn = attn + relative_position_bias.unsqueeze(0)
        if mask is not None:
            # Add the shifted-window mask per window (broadcast over batch
            # and heads), then flatten back before the softmax.
            nW = mask.shape[0]
            attn = attn.view(B_ // nW, nW, self.num_heads, N, N) + mask.unsqueeze(
                1
            ).unsqueeze(0)
            attn = attn.view(-1, self.num_heads, N, N)
            attn = self.softmax(attn)
        else:
            attn = self.softmax(attn)
        attn = self.attn_drop(attn)
        x = (attn @ v).transpose(1, 2).reshape(B_, N, C)
        x = self.proj(x)
        x = self.proj_drop(x)
        return x
class SwinTransformerBlock(nn.Module):
    """Swin Transformer Block.
    Args:
        dim (int): Number of input channels.
        num_heads (int): Number of attention heads.
        window_size (int): Window size.
        shift_size (int): Shift size for SW-MSA.
        mlp_ratio (float): Ratio of mlp hidden dim to embedding dim.
        qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True
        qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set.
        drop (float, optional): Dropout rate. Default: 0.0
        attn_drop (float, optional): Attention dropout rate. Default: 0.0
        drop_path (float, optional): Stochastic depth rate. Default: 0.0
        act_layer (nn.Module, optional): Activation layer. Default: nn.GELU
        norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm
    """
    def __init__(
        self,
        dim,
        num_heads,
        window_size=7,
        shift_size=0,
        mlp_ratio=4.0,
        qkv_bias=True,
        qk_scale=None,
        drop=0.0,
        attn_drop=0.0,
        drop_path=0.0,
        act_layer=nn.GELU,
        norm_layer=nn.LayerNorm,
    ):
        super().__init__()
        self.dim = dim
        self.num_heads = num_heads
        self.window_size = window_size
        self.shift_size = shift_size
        self.mlp_ratio = mlp_ratio
        assert (
            0 <= self.shift_size < self.window_size
        ), "shift_size must in 0-window_size"
        self.norm1 = norm_layer(dim)
        self.attn = WindowAttention(
            dim,
            window_size=to_2tuple(self.window_size),
            num_heads=num_heads,
            qkv_bias=qkv_bias,
            qk_scale=qk_scale,
            attn_drop=attn_drop,
            proj_drop=drop,
        )
        # Stochastic depth: randomly drops the residual branch during training.
        self.drop_path = DropPath(drop_path) if drop_path > 0.0 else nn.Identity()
        self.norm2 = norm_layer(dim)
        mlp_hidden_dim = int(dim * mlp_ratio)
        self.mlp = Mlp(
            in_features=dim,
            hidden_features=mlp_hidden_dim,
            act_layer=act_layer,
            drop=drop,
        )
        # Spatial resolution; assigned externally (BasicLayer.forward sets
        # blk.H / blk.W) before each forward pass.
        self.H = None
        self.W = None
    def forward(self, x, mask_matrix):
        """Forward function.
        Args:
            x: Input feature, tensor size (B, H*W, C).
            H, W: Spatial resolution of the input feature (read from
                ``self.H`` / ``self.W``, which the caller must set).
            mask_matrix: Attention mask for cyclic shift.
        """
        B, L, C = x.shape
        H, W = self.H, self.W
        assert L == H * W, "input feature has wrong size"
        shortcut = x
        x = self.norm1(x)
        x = x.view(B, H, W, C)
        # pad feature maps to multiples of window size
        pad_l = pad_t = 0
        pad_r = (self.window_size - W % self.window_size) % self.window_size
        pad_b = (self.window_size - H % self.window_size) % self.window_size
        x = F.pad(x, (0, 0, pad_l, pad_r, pad_t, pad_b))
        _, Hp, Wp, _ = x.shape
        # cyclic shift (SW-MSA); the matching mask was built by the caller
        if self.shift_size > 0:
            shifted_x = torch.roll(
                x, shifts=(-self.shift_size, -self.shift_size), dims=(1, 2)
            )
            attn_mask = mask_matrix
        else:
            shifted_x = x
            attn_mask = None
        # partition windows
        x_windows = window_partition(
            shifted_x, self.window_size
        )  # nW*B, window_size, window_size, C
        x_windows = x_windows.view(
            -1, self.window_size * self.window_size, C
        )  # nW*B, window_size*window_size, C
        # W-MSA/SW-MSA
        attn_windows = self.attn(
            x_windows, mask=attn_mask
        )  # nW*B, window_size*window_size, C
        # merge windows
        attn_windows = attn_windows.view(-1, self.window_size, self.window_size, C)
        shifted_x = window_reverse(attn_windows, self.window_size, Hp, Wp)  # B H' W' C
        # reverse cyclic shift
        if self.shift_size > 0:
            x = torch.roll(
                shifted_x, shifts=(self.shift_size, self.shift_size), dims=(1, 2)
            )
        else:
            x = shifted_x
        # drop the padding added before window partitioning
        if pad_r > 0 or pad_b > 0:
            x = x[:, :H, :W, :].contiguous()
        x = x.view(B, H * W, C)
        # FFN (both residual branches use the same stochastic-depth module)
        x = shortcut + self.drop_path(x)
        x = x + self.drop_path(self.mlp(self.norm2(x)))
        return x
class PatchMerging(nn.Module):
"""Patch Merging Layer
Args:
dim (int): Number of input channels.
norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm
"""
def __init__(self, dim, norm_layer=nn.LayerNorm):
super().__init__()
self.dim = dim
self.reduction = nn.Linear(4 * dim, 2 * dim, bias=False)
self.norm = norm_layer(4 * dim)
def forward(self, x, H, W):
"""Forward function.
Args:
x: Input feature, tensor size (B, H*W, C).
H, W: Spatial resolution of the input feature.
"""
B, L, C = x.shape
assert L == H * W, "input feature has wrong size"
x = x.view(B, H, W, C)
# padding
pad_input = (H % 2 == 1) or (W % 2 == 1)
if pad_input:
x = F.pad(x, (0, 0, 0, W % 2, 0, H % 2))
x0 = x[:, 0::2, 0::2, :] # B H/2 W/2 C
x1 = x[:, 1::2, 0::2, :] # B H/2 W/2 C
x2 = x[:, 0::2, 1::2, :] # B H/2 W/2 C
x3 = x[:, 1::2, 1::2, :] # B H/2 W/2 C
x = torch.cat([x0, x1, x2, x3], -1) # B H/2 W/2 4*C
x = x.view(B, -1, 4 * C) # B H/2*W/2 4*C
x = self.norm(x)
x = self.reduction(x)
return x
class BasicLayer(nn.Module):
    """A basic Swin Transformer layer for one stage.
    Args:
        dim (int): Number of feature channels
        depth (int): Depths of this stage.
        num_heads (int): Number of attention head.
        window_size (int): Local window size. Default: 7.
        mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4.
        qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True
        qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set.
        drop (float, optional): Dropout rate. Default: 0.0
        attn_drop (float, optional): Attention dropout rate. Default: 0.0
        drop_path (float | tuple[float], optional): Stochastic depth rate. Default: 0.0
        norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm
        downsample (nn.Module | None, optional): Downsample layer at the end of the layer. Default: None
        use_checkpoint (bool): Whether to use checkpointing to save memory. Default: False.
    """
    def __init__(
        self,
        dim,
        depth,
        num_heads,
        window_size=7,
        mlp_ratio=4.0,
        qkv_bias=True,
        qk_scale=None,
        drop=0.0,
        attn_drop=0.0,
        drop_path=0.0,
        norm_layer=nn.LayerNorm,
        downsample=None,
        use_checkpoint=False,
    ):
        super().__init__()
        self.window_size = window_size
        self.shift_size = window_size // 2
        self.depth = depth
        self.use_checkpoint = use_checkpoint
        # build blocks: even-indexed blocks use regular windows (W-MSA),
        # odd-indexed blocks use shifted windows (SW-MSA)
        self.blocks = nn.ModuleList(
            [
                SwinTransformerBlock(
                    dim=dim,
                    num_heads=num_heads,
                    window_size=window_size,
                    shift_size=0 if (i % 2 == 0) else window_size // 2,
                    mlp_ratio=mlp_ratio,
                    qkv_bias=qkv_bias,
                    qk_scale=qk_scale,
                    drop=drop,
                    attn_drop=attn_drop,
                    drop_path=drop_path[i]
                    if isinstance(drop_path, list)
                    else drop_path,
                    norm_layer=norm_layer,
                )
                for i in range(depth)
            ]
        )
        # patch merging layer
        if downsample is not None:
            self.downsample = downsample(dim=dim, norm_layer=norm_layer)
        else:
            self.downsample = None
    def forward(self, x, H, W):
        """Forward function.
        Args:
            x: Input feature, tensor size (B, H*W, C).
            H, W: Spatial resolution of the input feature.
        Returns:
            Tuple ``(x, H, W, x_down, Wh, Ww)``: features and resolution
            before and after the (optional) downsample step.
        """
        # calculate attention mask for SW-MSA on the padded resolution
        Hp = int(np.ceil(H / self.window_size)) * self.window_size
        Wp = int(np.ceil(W / self.window_size)) * self.window_size
        img_mask = torch.zeros((1, Hp, Wp, 1), device=x.device)  # 1 Hp Wp 1
        # Label each of the 3x3 cyclic-shift regions with a distinct id.
        h_slices = (
            slice(0, -self.window_size),
            slice(-self.window_size, -self.shift_size),
            slice(-self.shift_size, None),
        )
        w_slices = (
            slice(0, -self.window_size),
            slice(-self.window_size, -self.shift_size),
            slice(-self.shift_size, None),
        )
        cnt = 0
        for h in h_slices:
            for w in w_slices:
                img_mask[:, h, w, :] = cnt
                cnt += 1
        mask_windows = window_partition(
            img_mask, self.window_size
        )  # nW, window_size, window_size, 1
        mask_windows = mask_windows.view(-1, self.window_size * self.window_size)
        # Token pairs from different regions get a large negative logit so
        # they effectively cannot attend to each other after softmax.
        attn_mask = mask_windows.unsqueeze(1) - mask_windows.unsqueeze(2)
        attn_mask = attn_mask.masked_fill(attn_mask != 0, float(-100.0)).masked_fill(
            attn_mask == 0, float(0.0)
        )
        for blk in self.blocks:
            # Blocks read the spatial resolution from attributes, not args.
            blk.H, blk.W = H, W
            if self.use_checkpoint:
                x = checkpoint.checkpoint(blk, x, attn_mask)
            else:
                x = blk(x, attn_mask)
        if self.downsample is not None:
            x_down = self.downsample(x, H, W)
            Wh, Ww = (H + 1) // 2, (W + 1) // 2
            return x, H, W, x_down, Wh, Ww
        else:
            return x, H, W, x, H, W
class PatchEmbed(nn.Module):
    """Image to Patch Embedding via a strided 3D convolution.

    The depth axis is consumed entirely by the convolution (kernel/stride
    depth equals ``patch_size[0]``), so the output is a 2D feature map.

    Args:
        patch_size (int): Patch token size. Default: 4.
        in_chans (int): Number of input image channels. Default: 3.
        embed_dim (int): Number of linear projection output channels. Default: 96.
        norm_layer (nn.Module, optional): Normalization layer. Default: None
    """

    def __init__(self, patch_size=4, in_chans=3, embed_dim=96, norm_layer=None):
        super().__init__()
        if isinstance(patch_size, int):
            patch_size = to_3tuple(patch_size)
        self.patch_size = patch_size  # (pd, ph, pw)
        self.in_chans = in_chans
        self.embed_dim = embed_dim
        self.proj = nn.Conv3d(
            in_chans, embed_dim, kernel_size=patch_size, stride=patch_size
        )
        if norm_layer is not None:
            self.norm = norm_layer(embed_dim)
        else:
            self.norm = None

    def forward(self, x):
        """Forward function.

        Args:
            x: Input of shape (B, C, D, H, W). D is expected to equal the
                patch depth ``patch_size[0]`` so the depth axis collapses
                to 1 (see the squeeze below).
        """
        # Pad H and W up to multiples of the matching patch dimensions.
        # BUGFIX: patch_size is (pd, ph, pw); the original code indexed the
        # W padding with patch_size[1] and the H padding with patch_size[0],
        # which is wrong whenever the tuple components differ.
        _, _, D, H, W = x.size()
        if W % self.patch_size[2] != 0:
            x = F.pad(x, (0, self.patch_size[2] - W % self.patch_size[2]))
        if H % self.patch_size[1] != 0:
            x = F.pad(x, (0, 0, 0, self.patch_size[1] - H % self.patch_size[1]))
        x = self.proj(x)  # B C D Wh Ww
        x = x.squeeze(2)  # B C Wh Ww (assert D == 1)
        if self.norm is not None:
            Wh, Ww = x.size(2), x.size(3)
            x = x.flatten(2).transpose(1, 2)
            x = self.norm(x)
            x = x.transpose(1, 2).view(-1, self.embed_dim, Wh, Ww)
        return x
class SwinTransformer(nn.Module):
    """Swin Transformer backbone.
    A PyTorch impl of : `Swin Transformer: Hierarchical Vision Transformer using Shifted Windows` -
    https://arxiv.org/pdf/2103.14030
    Args:
        pretrain_img_size (int): Input image size for training the pretrained model,
            used in absolute postion embedding. Default 224.
        patch_size (int | tuple(int)): Patch size. Default: 4.
        in_chans (int): Number of input image channels. Default: 3.
        embed_dim (int): Number of linear projection output channels. Default: 96.
        depths (tuple[int]): Depths of each Swin Transformer stage.
        num_heads (tuple[int]): Number of attention head of each stage.
        window_size (int): Window size. Default: 7.
        mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4.
        qkv_bias (bool): If True, add a learnable bias to query, key, value. Default: True
        qk_scale (float): Override default qk scale of head_dim ** -0.5 if set.
        drop_rate (float): Dropout rate.
        attn_drop_rate (float): Attention dropout rate. Default: 0.
        drop_path_rate (float): Stochastic depth rate. Default: 0.2.
        norm_layer (nn.Module): Normalization layer. Default: nn.LayerNorm.
        ape (bool): If True, add absolute position embedding to the patch embedding. Default: False.
        patch_norm (bool): If True, add normalization after patch embedding. Default: True.
        out_indices (Sequence[int]): Output from which stages.
        frozen_stages (int): Stages to be frozen (stop grad and set eval mode).
            -1 means not freezing any parameters.
        use_checkpoint (bool): Whether to use checkpointing to save memory. Default: False.
    """
    def __init__(
        self,
        pretrain_img_size=224,
        patch_size=4,
        in_chans=3,
        embed_dim=96,
        depths=[2, 2, 6, 2],
        num_heads=[3, 6, 12, 24],
        window_size=7,
        mlp_ratio=4.0,
        qkv_bias=True,
        qk_scale=None,
        drop_rate=0.0,
        attn_drop_rate=0.0,
        drop_path_rate=0.2,
        norm_layer=nn.LayerNorm,
        ape=False,
        patch_norm=True,
        out_indices=(0, 1, 2, 3),
        frozen_stages=-1,
        use_checkpoint=False,
    ):
        super().__init__()
        self.pretrain_img_size = pretrain_img_size
        self.num_layers = len(depths)
        self.embed_dim = embed_dim
        self.ape = ape
        self.patch_norm = patch_norm
        self.out_indices = out_indices
        self.frozen_stages = frozen_stages
        # split image into non-overlapping patches
        self.patch_embed = PatchEmbed(
            patch_size=patch_size,
            in_chans=in_chans,
            embed_dim=embed_dim,
            norm_layer=norm_layer if self.patch_norm else None,
        )
        # absolute position embedding
        # NOTE(review): this branch treats pretrain_img_size/patch_size as
        # 2D; with the 3-tuples used in __main__ it is only safe because
        # ape defaults to False — confirm before enabling ape.
        if self.ape:
            pretrain_img_size = to_2tuple(pretrain_img_size)
            patch_size = to_2tuple(patch_size)
            patches_resolution = [
                pretrain_img_size[0] // patch_size[0],
                pretrain_img_size[1] // patch_size[1],
            ]
            self.absolute_pos_embed = nn.Parameter(
                torch.zeros(1, embed_dim, patches_resolution[0], patches_resolution[1])
            )
            trunc_normal_(self.absolute_pos_embed, std=0.02)
        self.pos_drop = nn.Dropout(p=drop_rate)
        # stochastic depth: per-block rates increase linearly with depth
        dpr = [
            x.item() for x in torch.linspace(0, drop_path_rate, sum(depths))
        ]  # stochastic depth decay rule
        # build layers; channel width doubles at every stage
        self.layers = nn.ModuleList()
        for i_layer in range(self.num_layers):
            layer = BasicLayer(
                dim=int(embed_dim * 2**i_layer),
                depth=depths[i_layer],
                num_heads=num_heads[i_layer],
                window_size=window_size,
                mlp_ratio=mlp_ratio,
                qkv_bias=qkv_bias,
                qk_scale=qk_scale,
                drop=drop_rate,
                attn_drop=attn_drop_rate,
                drop_path=dpr[sum(depths[:i_layer]) : sum(depths[: i_layer + 1])],
                norm_layer=norm_layer,
                downsample=PatchMerging if (i_layer < self.num_layers - 1) else None,
                use_checkpoint=use_checkpoint,
            )
            self.layers.append(layer)
        num_features = [int(embed_dim * 2**i) for i in range(self.num_layers)]
        self.num_features = num_features
        # add a norm layer for each output
        for i_layer in out_indices:
            layer = norm_layer(num_features[i_layer])
            layer_name = f"norm{i_layer}"
            self.add_module(layer_name, layer)
        self._freeze_stages()
    def _freeze_stages(self):
        # Freeze modules up to self.frozen_stages (see class docstring);
        # -1 freezes nothing.
        if self.frozen_stages >= 0:
            self.patch_embed.eval()
            for param in self.patch_embed.parameters():
                param.requires_grad = False
        if self.frozen_stages >= 1 and self.ape:
            self.absolute_pos_embed.requires_grad = False
        if self.frozen_stages >= 2:
            self.pos_drop.eval()
            for i in range(0, self.frozen_stages - 1):
                m = self.layers[i]
                m.eval()
                for param in m.parameters():
                    param.requires_grad = False
    def init_weights(
        self,
        pretrained="backbones/encoders/pretrained_models/swin_base_patch4_window7_224_22k.pth",
        bootstrap_method="centering",
    ):
        """Initialize the weights in backbone.
        Args:
            pretrained (str, optional): Path to pre-trained weights. If a
                string, weights are loaded from that checkpoint; if None,
                layers keep their trunc-normal / constant initialization.
            bootstrap_method (str): How 2D pretrained patch-embedding
                weights are bootstrapped to 3D; forwarded to
                :func:`load_checkpoint` ("centering" or "inflation").
        """
        def _init_weights(m):
            # Standard transformer init: trunc-normal linear weights,
            # zero biases, unit LayerNorm scale.
            if isinstance(m, nn.Linear):
                trunc_normal_(m.weight, std=0.02)
                if isinstance(m, nn.Linear) and m.bias is not None:
                    nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.LayerNorm):
                nn.init.constant_(m.bias, 0)
                nn.init.constant_(m.weight, 1.0)
        if isinstance(pretrained, str):
            self.apply(_init_weights)
            logger = get_root_logger()
            load_checkpoint(
                self,
                pretrained,
                bootstrap_method=bootstrap_method,
                strict=False,
                logger=logger,
            )
        elif pretrained is None:
            self.apply(_init_weights)
        else:
            raise TypeError("pretrained must be a str or None")
    def forward(self, x):
        """Forward function."""
        x = self.patch_embed(x)
        Wh, Ww = x.size(2), x.size(3)
        if self.ape:
            # interpolate the position embedding to the corresponding size
            absolute_pos_embed = F.interpolate(
                self.absolute_pos_embed, size=(Wh, Ww), mode="bicubic"
            )
            x = (x + absolute_pos_embed).flatten(2).transpose(1, 2)  # B Wh*Ww C
        else:
            x = x.flatten(2).transpose(1, 2)
        x = self.pos_drop(x)
        outs = []
        for i in range(self.num_layers):
            layer = self.layers[i]
            x_out, H, W, x, Wh, Ww = layer(x, Wh, Ww)
            if i in self.out_indices:
                norm_layer = getattr(self, f"norm{i}")
                x_out = norm_layer(x_out)
                # (B, H*W, C) -> (B, C, H, W) for dense prediction heads
                out = (
                    x_out.view(-1, H, W, self.num_features[i])
                    .permute(0, 3, 1, 2)
                    .contiguous()
                )
                outs.append(out)
        return tuple(outs)
    def train(self, mode=True):
        """Convert the model into training mode while keep layers freezed."""
        super(SwinTransformer, self).train(mode)
        self._freeze_stages()
if __name__ == "__main__":
    # Smoke test: build a Swin-B sized backbone (embed_dim 128, depths
    # 2/2/18/2) for 5-slice single-channel 512x512 volumes, load pretrained
    # 2D weights, run a random forward pass, and print the four pyramid
    # output shapes.
    model = SwinTransformer(
        pretrain_img_size=(5, 512, 512),
        patch_size=(5, 4, 4),
        in_chans=1,
        embed_dim=128,
        depths=[2, 2, 18, 2],
        num_heads=[4, 8, 16, 32],
        window_size=7,
        mlp_ratio=4.0,
        qkv_bias=True,
        qk_scale=None,
        drop_rate=0.0,
        attn_drop_rate=0.0,
        drop_path_rate=0.3,
        ape=False,
        patch_norm=True,
        out_indices=(0, 1, 2, 3),
        use_checkpoint=False,
    )
    model.init_weights("pretrained_models/swin_base_patch4_window7_224_22k.pth")
    x = torch.randn(4, 1, 5, 512, 512)
    out = model(x)
    print([y.shape for y in out])
| Python |
3D | yuhui-zh15/TransSeg | src/backbones/encoders/swin_transformer_3d.py | .py | 32,424 | 940 | import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.utils.checkpoint as checkpoint
import numpy as np
from timm.models.layers import DropPath, trunc_normal_
from mmcv.runner import load_state_dict
from mmseg.utils import get_root_logger
from functools import reduce, lru_cache
from operator import mul
from einops import rearrange
import re
def load_checkpoint(
    model,
    filename,
    map_location=None,
    strict=False,
    logger=None,
    revise_keys=[(r"^module\.", "")],
):
    """Load checkpoint from a file or URI.

    The checkpoint's patch-embedding projection weight is converted from a
    colored kernel to an ``n_chans``-channel grayscale kernel before loading.

    Args:
        model (Module): Module to load checkpoint.
        filename (str): Accept local filepath, URL, ``torchvision://xxx``,
            ``open-mmlab://xxx``. Please refer to ``docs/model_zoo.md`` for
            details.
        map_location (str): Same as :func:`torch.load`.
        strict (bool): Whether to allow different params for the model and
            checkpoint.
        logger (:mod:`logging.Logger` or None): The logger for error message.
        revise_keys (list): A list of customized keywords to modify the
            state_dict in checkpoint. Each item is a (pattern, replacement)
            pair of the regular expression operations. Default: strip
            the prefix 'module.' by [(r'^module\\.', '')]. (The default list
            is never mutated, so the mutable default is safe here.)

    Returns:
        dict or OrderedDict: The loaded checkpoint.
    """
    checkpoint = torch.load(filename, map_location=map_location)["state_dict"]
    # Drop the first dotted component of every key (e.g. "backbone.x" -> "x").
    # NOTE(review): assumes every key contains at least one '.'; a key
    # without a dot would raise IndexError — confirm against the checkpoints
    # actually used.
    checkpoint = {key.split(".", 1)[1]: checkpoint[key] for key in checkpoint}
    # --- starting inflate/center weights ---
    n_chans = model.patch_embed.in_chans
    key = "patch_embed.proj.weight"
    emb = checkpoint[key]
    print("Old:", emb.shape, emb.sum())
    emb = emb.sum(1, keepdim=True)  # from colored to grayed
    emb = (
        emb.repeat(1, n_chans, 1, 1, 1) / n_chans
    )  # from 1-channel grayed to n-channel grayed
    print("New:", emb.shape, emb.sum())
    checkpoint[key] = emb
    # --- ending inflate/center weights ---
    # OrderedDict is a subclass of dict
    if not isinstance(checkpoint, dict):
        raise RuntimeError(f"No state_dict found in checkpoint file {filename}")
    # get state_dict from checkpoint
    if "state_dict" in checkpoint:
        state_dict = checkpoint["state_dict"]
    else:
        state_dict = checkpoint
    # strip prefix of state_dict
    for p, r in revise_keys:
        state_dict = {re.sub(p, r, k): v for k, v in state_dict.items()}
    # load state_dict
    load_state_dict(model, state_dict, strict, logger)
    return checkpoint
class Mlp(nn.Module):
"""Multilayer perceptron."""
def __init__(
self,
in_features,
hidden_features=None,
out_features=None,
act_layer=nn.GELU,
drop=0.0,
):
super().__init__()
out_features = out_features or in_features
hidden_features = hidden_features or in_features
self.fc1 = nn.Linear(in_features, hidden_features)
self.act = act_layer()
self.fc2 = nn.Linear(hidden_features, out_features)
self.drop = nn.Dropout(drop)
def forward(self, x):
x = self.fc1(x)
x = self.act(x)
x = self.drop(x)
x = self.fc2(x)
x = self.drop(x)
return x
def window_partition(x, window_size):
    """Split a 3D feature volume into non-overlapping windows.

    Args:
        x: (B, D, H, W, C)
        window_size (tuple[int]): window size

    Returns:
        windows: (B*num_windows, window_size[0]*window_size[1]*window_size[2], C)
    """
    B, D, H, W, C = x.shape
    wd, wh, ww = window_size
    # Factor each spatial axis into (num_windows, window_size).
    blocks = x.view(B, D // wd, wd, H // wh, wh, W // ww, ww, C)
    # Group the three window-grid axes together, then flatten each window.
    windows = blocks.permute(0, 1, 3, 5, 2, 4, 6, 7).contiguous()
    return windows.view(-1, wd * wh * ww, C)
def window_reverse(windows, window_size, B, D, H, W):
    """Inverse of :func:`window_partition` for 3D volumes.

    Args:
        windows: (B*num_windows, window_size[0]*window_size[1]*window_size[2], C)
        window_size (tuple[int]): Window size
        B (int): Batch size
        D (int): Depth of volume
        H (int): Height of image
        W (int): Width of image

    Returns:
        x: (B, D, H, W, C)
    """
    wd, wh, ww = window_size
    grid = windows.view(B, D // wd, H // wh, W // ww, wd, wh, ww, -1)
    # Interleave window-grid axes with in-window axes and flatten back.
    return grid.permute(0, 1, 4, 2, 5, 3, 6, 7).contiguous().view(B, D, H, W, -1)
def get_window_size(x_size, window_size, shift_size=None):
    """Clamp window (and shift) sizes to the input extent per axis.

    Any axis whose input extent is not larger than the window gets a window
    equal to the full extent and (when ``shift_size`` is given) a zero
    shift, since shifting is meaningless when one window covers the axis.

    Args:
        x_size (tuple[int]): Input size per axis.
        window_size (tuple[int]): Desired window size per axis.
        shift_size (tuple[int], optional): Desired shift per axis.

    Returns:
        tuple: Adjusted window size, or a pair of (window size, shift size)
        when ``shift_size`` was given.
    """
    adjusted_window = list(window_size)
    adjusted_shift = None if shift_size is None else list(shift_size)
    for axis, extent in enumerate(x_size):
        if extent <= window_size[axis]:
            adjusted_window[axis] = extent
            if adjusted_shift is not None:
                adjusted_shift[axis] = 0
    if adjusted_shift is None:
        return tuple(adjusted_window)
    return tuple(adjusted_window), tuple(adjusted_shift)
class WindowAttention3D(nn.Module):
    """Window based multi-head self attention (W-MSA) module with relative position bias.
    It supports both of shifted and non-shifted window.
    Args:
        dim (int): Number of input channels.
        window_size (tuple[int]): The temporal length, height and width of the window.
        num_heads (int): Number of attention heads.
        qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: False
        qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set
        attn_drop (float, optional): Dropout ratio of attention weight. Default: 0.0
        proj_drop (float, optional): Dropout ratio of output. Default: 0.0
    """
    def __init__(
        self,
        dim,
        window_size,
        num_heads,
        qkv_bias=False,
        qk_scale=None,
        attn_drop=0.0,
        proj_drop=0.0,
    ):
        super().__init__()
        self.dim = dim
        self.window_size = window_size  # Wd, Wh, Ww
        self.num_heads = num_heads
        head_dim = dim // num_heads
        # Attention logits are scaled by 1/sqrt(head_dim) unless overridden.
        self.scale = qk_scale or head_dim**-0.5
        # define a parameter table of relative position bias:
        # one learnable bias per head for every possible (dd, dh, dw) offset
        self.relative_position_bias_table = nn.Parameter(
            torch.zeros(
                (2 * window_size[0] - 1)
                * (2 * window_size[1] - 1)
                * (2 * window_size[2] - 1),
                num_heads,
            )
        )  # 2*Wd-1 * 2*Wh-1 * 2*Ww-1, nH
        # get pair-wise relative position index for each token inside the window
        coords_d = torch.arange(self.window_size[0])
        coords_h = torch.arange(self.window_size[1])
        coords_w = torch.arange(self.window_size[2])
        coords = torch.stack(
            torch.meshgrid(coords_d, coords_h, coords_w)
        )  # 3, Wd, Wh, Ww
        coords_flatten = torch.flatten(coords, 1)  # 3, Wd*Wh*Ww
        relative_coords = (
            coords_flatten[:, :, None] - coords_flatten[:, None, :]
        )  # 3, Wd*Wh*Ww, Wd*Wh*Ww
        relative_coords = relative_coords.permute(
            1, 2, 0
        ).contiguous()  # Wd*Wh*Ww, Wd*Wh*Ww, 3
        relative_coords[:, :, 0] += self.window_size[0] - 1  # shift to start from 0
        relative_coords[:, :, 1] += self.window_size[1] - 1
        relative_coords[:, :, 2] += self.window_size[2] - 1
        # Row-major flattening of the 3D offset into a single table index.
        relative_coords[:, :, 0] *= (2 * self.window_size[1] - 1) * (
            2 * self.window_size[2] - 1
        )
        relative_coords[:, :, 1] *= 2 * self.window_size[2] - 1
        relative_position_index = relative_coords.sum(-1)  # Wd*Wh*Ww, Wd*Wh*Ww
        # Registered as a buffer (not a parameter): fixed lookup indices.
        self.register_buffer("relative_position_index", relative_position_index)
        self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias)
        self.attn_drop = nn.Dropout(attn_drop)
        self.proj = nn.Linear(dim, dim)
        self.proj_drop = nn.Dropout(proj_drop)
        trunc_normal_(self.relative_position_bias_table, std=0.02)
        self.softmax = nn.Softmax(dim=-1)
    def forward(self, x, mask=None):
        """Forward function.
        Args:
            x: input features with shape of (num_windows*B, N, C)
            mask: (0/-inf) mask with shape of (num_windows, N, N) or None
        """
        B_, N, C = x.shape
        # Single linear pass produces q, k, v: shape (3, B_, nH, N, C // nH).
        qkv = (
            self.qkv(x)
            .reshape(B_, N, 3, self.num_heads, C // self.num_heads)
            .permute(2, 0, 3, 1, 4)
        )
        q, k, v = qkv[0], qkv[1], qkv[2]  # B_, nH, N, C
        q = q * self.scale
        attn = q @ k.transpose(-2, -1)
        # N may be smaller than the full window volume (clamped windows),
        # hence the [:N, :N] slice of the index table.
        relative_position_bias = self.relative_position_bias_table[
            self.relative_position_index[:N, :N].reshape(-1)
        ].reshape(
            N, N, -1
        )  # Wd*Wh*Ww,Wd*Wh*Ww,nH
        relative_position_bias = relative_position_bias.permute(
            2, 0, 1
        ).contiguous()  # nH, Wd*Wh*Ww, Wd*Wh*Ww
        attn = attn + relative_position_bias.unsqueeze(0)  # B_, nH, N, N
        if mask is not None:
            # Add the per-window shift mask, broadcast over batch and heads.
            nW = mask.shape[0]
            attn = attn.view(B_ // nW, nW, self.num_heads, N, N) + mask.unsqueeze(
                1
            ).unsqueeze(0)
            attn = attn.view(-1, self.num_heads, N, N)
            attn = self.softmax(attn)
        else:
            attn = self.softmax(attn)
        attn = self.attn_drop(attn)
        x = (attn @ v).transpose(1, 2).reshape(B_, N, C)
        x = self.proj(x)
        x = self.proj_drop(x)
        return x
class SwinTransformerBlock3D(nn.Module):
    """Swin Transformer Block.

    One block = (shifted-)window attention + MLP, each behind a residual
    connection with pre-normalization and optional stochastic depth.

    Args:
        dim (int): Number of input channels.
        num_heads (int): Number of attention heads.
        window_size (tuple[int]): Window size.
        shift_size (tuple[int]): Shift size for SW-MSA.
        mlp_ratio (float): Ratio of mlp hidden dim to embedding dim.
        qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True
        qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set.
        drop (float, optional): Dropout rate. Default: 0.0
        attn_drop (float, optional): Attention dropout rate. Default: 0.0
        drop_path (float, optional): Stochastic depth rate. Default: 0.0
        act_layer (nn.Module, optional): Activation layer. Default: nn.GELU
        norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm
        use_checkpoint (bool, optional): If True, recompute activations in the
            backward pass via torch.utils.checkpoint to save memory. Default: False
    """

    def __init__(
        self,
        dim,
        num_heads,
        window_size=(2, 7, 7),
        shift_size=(0, 0, 0),
        mlp_ratio=4.0,
        qkv_bias=True,
        qk_scale=None,
        drop=0.0,
        attn_drop=0.0,
        drop_path=0.0,
        act_layer=nn.GELU,
        norm_layer=nn.LayerNorm,
        use_checkpoint=False,
    ):
        super().__init__()
        self.dim = dim
        self.num_heads = num_heads
        self.window_size = window_size
        self.shift_size = shift_size
        self.mlp_ratio = mlp_ratio
        self.use_checkpoint = use_checkpoint
        # The cyclic shift must stay strictly inside the window on every axis.
        assert (
            0 <= self.shift_size[0] < self.window_size[0]
        ), "shift_size must in 0-window_size"
        assert (
            0 <= self.shift_size[1] < self.window_size[1]
        ), "shift_size must in 0-window_size"
        assert (
            0 <= self.shift_size[2] < self.window_size[2]
        ), "shift_size must in 0-window_size"
        self.norm1 = norm_layer(dim)
        self.attn = WindowAttention3D(
            dim,
            window_size=self.window_size,
            num_heads=num_heads,
            qkv_bias=qkv_bias,
            qk_scale=qk_scale,
            attn_drop=attn_drop,
            proj_drop=drop,
        )
        # Stochastic depth: identity when the rate is zero.
        self.drop_path = DropPath(drop_path) if drop_path > 0.0 else nn.Identity()
        self.norm2 = norm_layer(dim)
        mlp_hidden_dim = int(dim * mlp_ratio)
        self.mlp = Mlp(
            in_features=dim,
            hidden_features=mlp_hidden_dim,
            act_layer=act_layer,
            drop=drop,
        )

    def forward_part1(self, x, mask_matrix):
        """Window attention branch: norm -> pad -> (shift) -> W-MSA -> unshift -> crop."""
        B, D, H, W, C = x.shape
        # Window/shift may be clipped per-dimension when the feature map is
        # smaller than the configured window (see get_window_size).
        window_size, shift_size = get_window_size(
            (D, H, W), self.window_size, self.shift_size
        )
        x = self.norm1(x)
        # pad feature maps to multiples of window size
        pad_l = pad_t = pad_d0 = 0
        pad_d1 = (window_size[0] - D % window_size[0]) % window_size[0]
        pad_b = (window_size[1] - H % window_size[1]) % window_size[1]
        pad_r = (window_size[2] - W % window_size[2]) % window_size[2]
        x = F.pad(x, (0, 0, pad_l, pad_r, pad_t, pad_b, pad_d0, pad_d1))
        _, Dp, Hp, Wp, _ = x.shape
        # cyclic shift (SW-MSA); the attention mask only applies when shifted
        if any(i > 0 for i in shift_size):
            shifted_x = torch.roll(
                x,
                shifts=(-shift_size[0], -shift_size[1], -shift_size[2]),
                dims=(1, 2, 3),
            )
            attn_mask = mask_matrix
        else:
            shifted_x = x
            attn_mask = None
        # partition windows
        x_windows = window_partition(shifted_x, window_size)  # B*nW, Wd*Wh*Ww, C
        # W-MSA/SW-MSA
        attn_windows = self.attn(x_windows, mask=attn_mask)  # B*nW, Wd*Wh*Ww, C
        # merge windows
        attn_windows = attn_windows.view(-1, *(window_size + (C,)))
        shifted_x = window_reverse(
            attn_windows, window_size, B, Dp, Hp, Wp
        )  # B D' H' W' C
        # reverse cyclic shift
        if any(i > 0 for i in shift_size):
            x = torch.roll(
                shifted_x,
                shifts=(shift_size[0], shift_size[1], shift_size[2]),
                dims=(1, 2, 3),
            )
        else:
            x = shifted_x
        # crop the padding back off so the output matches the input shape
        if pad_d1 > 0 or pad_r > 0 or pad_b > 0:
            x = x[:, :D, :H, :W, :].contiguous()
        return x

    def forward_part2(self, x):
        """MLP branch: norm -> MLP -> stochastic depth."""
        return self.drop_path(self.mlp(self.norm2(x)))

    def forward(self, x, mask_matrix):
        """Forward function.

        Args:
            x: Input feature, tensor size (B, D, H, W, C).
            mask_matrix: Attention mask for cyclic shift.
        """
        shortcut = x
        # NOTE: drop_path is applied to the attention branch outside
        # forward_part1 but inside forward_part2 for the MLP branch.
        if self.use_checkpoint:
            x = checkpoint.checkpoint(self.forward_part1, x, mask_matrix)
        else:
            x = self.forward_part1(x, mask_matrix)
        x = shortcut + self.drop_path(x)
        if self.use_checkpoint:
            x = x + checkpoint.checkpoint(self.forward_part2, x)
        else:
            x = x + self.forward_part2(x)
        return x
class PatchMerging(nn.Module):
"""Patch Merging Layer
Args:
dim (int): Number of input channels.
norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm
"""
def __init__(self, dim, norm_layer=nn.LayerNorm):
super().__init__()
self.dim = dim
self.reduction = nn.Linear(4 * dim, 2 * dim, bias=False)
self.norm = norm_layer(4 * dim)
def forward(self, x):
"""Forward function.
Args:
x: Input feature, tensor size (B, D, H, W, C).
"""
B, D, H, W, C = x.shape
# padding
pad_input = (H % 2 == 1) or (W % 2 == 1)
if pad_input:
x = F.pad(x, (0, 0, 0, W % 2, 0, H % 2))
x0 = x[:, :, 0::2, 0::2, :] # B D H/2 W/2 C
x1 = x[:, :, 1::2, 0::2, :] # B D H/2 W/2 C
x2 = x[:, :, 0::2, 1::2, :] # B D H/2 W/2 C
x3 = x[:, :, 1::2, 1::2, :] # B D H/2 W/2 C
x = torch.cat([x0, x1, x2, x3], -1) # B D H/2 W/2 4*C
x = self.norm(x)
x = self.reduction(x)
return x
# cache each stage results (unbounded lru_cache keyed on the exact
# (D, H, W, window_size, shift_size, device) tuple — one entry per stage)
@lru_cache()
def compute_mask(D, H, W, window_size, shift_size, device):
    """Build the additive attention mask (0 / -100) used by SW-MSA.

    Each axis is partitioned into three regions: [0, -ws), [-ws, -ss) and
    [-ss, end). Cells in different regions get different ids; after windowing,
    pairs of positions whose ids differ receive -100 so the softmax ignores
    attention across region boundaries introduced by the cyclic shift.

    Args:
        D, H, W: padded feature-map size (multiples of the window size).
        window_size (tuple[int]): window size per axis.
        shift_size (tuple[int]): cyclic shift per axis.
        device: device the mask is created on.

    Returns:
        Tensor of shape (nW, Wd*Wh*Ww, Wd*Wh*Ww) with 0 / -100 entries.
    """
    img_mask = torch.zeros((1, D, H, W, 1), device=device)  # 1 Dp Hp Wp 1
    cnt = 0
    # Assign a distinct integer id to every (d-region, h-region, w-region) cell.
    for d in (
        slice(-window_size[0]),
        slice(-window_size[0], -shift_size[0]),
        slice(-shift_size[0], None),
    ):
        for h in (
            slice(-window_size[1]),
            slice(-window_size[1], -shift_size[1]),
            slice(-shift_size[1], None),
        ):
            for w in (
                slice(-window_size[2]),
                slice(-window_size[2], -shift_size[2]),
                slice(-shift_size[2], None),
            ):
                img_mask[:, d, h, w, :] = cnt
                cnt += 1
    # window_partition is defined elsewhere in this module.
    mask_windows = window_partition(img_mask, window_size)  # nW, ws[0]*ws[1]*ws[2], 1
    mask_windows = mask_windows.squeeze(-1)  # nW, ws[0]*ws[1]*ws[2]
    # Pairwise id difference: non-zero => positions came from different regions.
    attn_mask = mask_windows.unsqueeze(1) - mask_windows.unsqueeze(2)
    attn_mask = attn_mask.masked_fill(attn_mask != 0, float(-100.0)).masked_fill(
        attn_mask == 0, float(0.0)
    )
    return attn_mask
class BasicLayer(nn.Module):
    """A basic Swin Transformer layer for one stage.

    Alternates regular (W-MSA) and shifted (SW-MSA) window attention blocks,
    then optionally downsamples with a PatchMerging layer.

    Args:
        dim (int): Number of feature channels
        depth (int): Depths of this stage.
        num_heads (int): Number of attention head.
        window_size (tuple[int]): Local window size. Default: (1,7,7).
        mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4.
        qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True
        qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set.
        drop (float, optional): Dropout rate. Default: 0.0
        attn_drop (float, optional): Attention dropout rate. Default: 0.0
        drop_path (float | tuple[float], optional): Stochastic depth rate. Default: 0.0
        norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm
        downsample (nn.Module | None, optional): Downsample layer at the end of the layer. Default: None
        use_checkpoint (bool, optional): Use activation checkpointing in the
            blocks to save memory. Default: False
    """

    def __init__(
        self,
        dim,
        depth,
        num_heads,
        window_size=(1, 7, 7),
        mlp_ratio=4.0,
        qkv_bias=False,
        qk_scale=None,
        drop=0.0,
        attn_drop=0.0,
        drop_path=0.0,
        norm_layer=nn.LayerNorm,
        downsample=None,
        use_checkpoint=False,
    ):
        super().__init__()
        self.window_size = window_size
        # Standard Swin shift: half the window size on every axis.
        self.shift_size = tuple(i // 2 for i in window_size)
        self.depth = depth
        self.use_checkpoint = use_checkpoint
        # build blocks: even blocks use W-MSA (no shift), odd blocks SW-MSA.
        self.blocks = nn.ModuleList(
            [
                SwinTransformerBlock3D(
                    dim=dim,
                    num_heads=num_heads,
                    window_size=window_size,
                    shift_size=(0, 0, 0) if (i % 2 == 0) else self.shift_size,
                    mlp_ratio=mlp_ratio,
                    qkv_bias=qkv_bias,
                    qk_scale=qk_scale,
                    drop=drop,
                    attn_drop=attn_drop,
                    drop_path=drop_path[i]
                    if isinstance(drop_path, list)
                    else drop_path,
                    norm_layer=norm_layer,
                    use_checkpoint=use_checkpoint,
                )
                for i in range(depth)
            ]
        )
        self.downsample = downsample
        # `downsample` is passed as a class (e.g. PatchMerging); instantiate it.
        if self.downsample is not None:
            self.downsample = downsample(dim=dim, norm_layer=norm_layer)

    def forward(self, x):
        """Forward function.

        Args:
            x: Input feature, tensor size (B, C, D, H, W).

        Returns:
            (x, out): `x` is the (possibly downsampled) input for the next
            stage, `out` the pre-downsample feature, both as (B, C, D, H, W).
        """
        # calculate attention mask for SW-MSA
        B, C, D, H, W = x.shape
        window_size, shift_size = get_window_size(
            (D, H, W), self.window_size, self.shift_size
        )
        x = rearrange(x, "b c d h w -> b d h w c")
        # Mask is built at the padded resolution used inside the blocks and
        # cached by compute_mask's lru_cache.
        Dp = int(np.ceil(D / window_size[0])) * window_size[0]
        Hp = int(np.ceil(H / window_size[1])) * window_size[1]
        Wp = int(np.ceil(W / window_size[2])) * window_size[2]
        attn_mask = compute_mask(Dp, Hp, Wp, window_size, shift_size, x.device)
        for blk in self.blocks:
            x = blk(x, attn_mask)
        x = x.view(B, D, H, W, -1)
        out = x
        out = rearrange(out, "b d h w c -> b c d h w")
        if self.downsample is not None:
            x = self.downsample(x)
        x = rearrange(x, "b d h w c -> b c d h w")
        return x, out
class PatchEmbed3D(nn.Module):
"""Video to Patch Embedding.
Args:
patch_size (int): Patch token size. Default: (2,4,4).
in_chans (int): Number of input video channels. Default: 3.
embed_dim (int): Number of linear projection output channels. Default: 96.
norm_layer (nn.Module, optional): Normalization layer. Default: None
"""
def __init__(self, patch_size=(2, 4, 4), in_chans=3, embed_dim=96, norm_layer=None):
super().__init__()
self.patch_size = patch_size
self.in_chans = in_chans
self.embed_dim = embed_dim
self.proj = nn.Conv3d(
in_chans, embed_dim, kernel_size=patch_size, stride=patch_size
)
if norm_layer is not None:
self.norm = norm_layer(embed_dim)
else:
self.norm = None
def forward(self, x):
"""Forward function."""
# padding
_, _, D, H, W = x.size()
if W % self.patch_size[2] != 0:
x = F.pad(x, (0, self.patch_size[2] - W % self.patch_size[2]))
if H % self.patch_size[1] != 0:
x = F.pad(x, (0, 0, 0, self.patch_size[1] - H % self.patch_size[1]))
if D % self.patch_size[0] != 0:
x = F.pad(x, (0, 0, 0, 0, 0, self.patch_size[0] - D % self.patch_size[0]))
x = self.proj(x) # B C D Wh Ww
if self.norm is not None:
D, Wh, Ww = x.size(2), x.size(3), x.size(4)
x = x.flatten(2).transpose(1, 2)
x = self.norm(x)
x = x.transpose(1, 2).view(-1, self.embed_dim, D, Wh, Ww)
return x
class SwinTransformer3D(nn.Module):
    """Swin Transformer backbone.
    A PyTorch impl of : `Swin Transformer: Hierarchical Vision Transformer using Shifted Windows` -
    https://arxiv.org/pdf/2103.14030

    Args:
        pretrained (str | None): Path to pretrained weights (set via init_weights).
        pretrained2d (bool): If True, inflate 2D pretrained weights to 3D.
        patch_size (int | tuple(int)): Patch size. Default: (4,4,4).
        in_chans (int): Number of input image channels. Default: 3.
        embed_dim (int): Number of linear projection output channels. Default: 96.
        depths (tuple[int]): Depths of each Swin Transformer stage.
        num_heads (tuple[int]): Number of attention head of each stage.
        window_size (int): Window size. Default: 7.
        mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4.
        qkv_bias (bool): If True, add a learnable bias to query, key, value. Default: True
        qk_scale (float): Override default qk scale of head_dim ** -0.5 if set.
        drop_rate (float): Dropout rate.
        attn_drop_rate (float): Attention dropout rate. Default: 0.
        drop_path_rate (float): Stochastic depth rate. Default: 0.2.
        norm_layer: Normalization layer. Default: nn.LayerNorm.
        patch_norm (bool): If True, add normalization after patch embedding. Default: False.
        frozen_stages (int): Stages to be frozen (stop grad and set eval mode).
            -1 means not freezing any parameters.
        use_checkpoint (bool): Use activation checkpointing in all stages. Default: False.
    """

    def __init__(
        self,
        pretrained=None,
        pretrained2d=True,
        patch_size=(4, 4, 4),
        in_chans=3,
        embed_dim=96,
        depths=[2, 2, 6, 2],
        num_heads=[3, 6, 12, 24],
        window_size=(2, 7, 7),
        mlp_ratio=4.0,
        qkv_bias=True,
        qk_scale=None,
        drop_rate=0.0,
        attn_drop_rate=0.0,
        drop_path_rate=0.2,
        norm_layer=nn.LayerNorm,
        patch_norm=False,
        frozen_stages=-1,
        use_checkpoint=False,
    ):
        super().__init__()
        self.pretrained = pretrained
        self.pretrained2d = pretrained2d
        self.num_layers = len(depths)
        self.embed_dim = embed_dim
        self.patch_norm = patch_norm
        self.frozen_stages = frozen_stages
        self.window_size = window_size
        self.patch_size = patch_size
        # split image into non-overlapping patches
        self.patch_embed = PatchEmbed3D(
            patch_size=patch_size,
            in_chans=in_chans,
            embed_dim=embed_dim,
            norm_layer=norm_layer if self.patch_norm else None,
        )
        self.pos_drop = nn.Dropout(p=drop_rate)
        # stochastic depth: rate increases linearly over all blocks
        dpr = [
            x.item() for x in torch.linspace(0, drop_path_rate, sum(depths))
        ]  # stochastic depth decay rule
        # build layers; channels double each stage, last stage has no downsample
        self.layers = nn.ModuleList()
        for i_layer in range(self.num_layers):
            layer = BasicLayer(
                dim=int(embed_dim * 2**i_layer),
                depth=depths[i_layer],
                num_heads=num_heads[i_layer],
                window_size=window_size,
                mlp_ratio=mlp_ratio,
                qkv_bias=qkv_bias,
                qk_scale=qk_scale,
                drop=drop_rate,
                attn_drop=attn_drop_rate,
                drop_path=dpr[sum(depths[:i_layer]) : sum(depths[: i_layer + 1])],
                norm_layer=norm_layer,
                downsample=PatchMerging if i_layer < self.num_layers - 1 else None,
                use_checkpoint=use_checkpoint,
            )
            self.layers.append(layer)
        self.num_features = int(embed_dim * 2 ** (self.num_layers - 1))
        # add a norm layer for each output
        # NOTE(review): self.norm is currently unused — the lines applying it
        # in forward() are commented out.
        self.norm = norm_layer(self.num_features)
        self._freeze_stages()

    def _freeze_stages(self):
        """Freeze patch embedding and the first `frozen_stages` layers (eval mode + no grad)."""
        if self.frozen_stages >= 0:
            self.patch_embed.eval()
            for param in self.patch_embed.parameters():
                param.requires_grad = False
        if self.frozen_stages >= 1:
            self.pos_drop.eval()
            for i in range(0, self.frozen_stages):
                m = self.layers[i]
                m.eval()
                for param in m.parameters():
                    param.requires_grad = False

    def inflate_weights(self, logger):
        """Inflate the swin2d parameters to swin3d.

        The differences between swin3d and swin2d mainly lie in an extra
        axis. To utilize the pretrained parameters in 2d model,
        the weight of swin2d models should be inflated to fit in the shapes of
        the 3d counterpart.

        Args:
            logger (logging.Logger): The logger used to print
                debugging information.
        """
        checkpoint = torch.load(self.pretrained, map_location="cpu")
        state_dict = checkpoint["model"]
        # delete relative_position_index since we always re-init it
        relative_position_index_keys = [
            k for k in state_dict.keys() if "relative_position_index" in k
        ]
        for k in relative_position_index_keys:
            del state_dict[k]
        # delete attn_mask since we always re-init it
        attn_mask_keys = [k for k in state_dict.keys() if "attn_mask" in k]
        for k in attn_mask_keys:
            del state_dict[k]
        # Inflate the 2D patch-embedding kernel across the temporal axis and
        # divide by its length so the summed response matches the 2D kernel.
        state_dict["patch_embed.proj.weight"] = (
            state_dict["patch_embed.proj.weight"]
            .unsqueeze(2)
            .repeat(1, 1, self.patch_size[0], 1, 1)
            / self.patch_size[0]
        )
        # bicubic interpolate relative_position_bias_table if not match
        relative_position_bias_table_keys = [
            k for k in state_dict.keys() if "relative_position_bias_table" in k
        ]
        for k in relative_position_bias_table_keys:
            relative_position_bias_table_pretrained = state_dict[k]
            relative_position_bias_table_current = self.state_dict()[k]
            L1, nH1 = relative_position_bias_table_pretrained.size()
            L2, nH2 = relative_position_bias_table_current.size()
            # L2 is recomputed as the spatial-only (H, W) table size; the
            # temporal extent is handled by the repeat below.
            L2 = (2 * self.window_size[1] - 1) * (2 * self.window_size[2] - 1)
            wd = self.window_size[0]
            if nH1 != nH2:
                logger.warning(f"Error in loading {k}, passing")
            else:
                if L1 != L2:
                    # Resize the (2H-1)x(2W-1) bias grid with bicubic interpolation.
                    S1 = int(L1**0.5)
                    relative_position_bias_table_pretrained_resized = (
                        torch.nn.functional.interpolate(
                            relative_position_bias_table_pretrained.permute(1, 0).view(
                                1, nH1, S1, S1
                            ),
                            size=(
                                2 * self.window_size[1] - 1,
                                2 * self.window_size[2] - 1,
                            ),
                            mode="bicubic",
                        )
                    )
                    relative_position_bias_table_pretrained = (
                        relative_position_bias_table_pretrained_resized.view(
                            nH2, L2
                        ).permute(1, 0)
                    )
            # Tile the 2D bias across the (2*Wd-1) temporal offsets.
            state_dict[k] = relative_position_bias_table_pretrained.repeat(
                2 * wd - 1, 1
            )
        msg = self.load_state_dict(state_dict, strict=False)
        logger.info(msg)
        logger.info(f"=> loaded successfully '{self.pretrained}'")
        del checkpoint
        torch.cuda.empty_cache()

    def init_weights(
        self,
        pretrained="backbones/encoders/pretrained_models/swin_base_patch244_window877_kinetics600_22k.pth",
    ):  # swin_base_patch244_window1677_sthv2.pth
        """Initialize the weights in backbone.

        Args:
            pretrained (str, optional): Path to pre-trained weights.
                Defaults to a Kinetics-600 Swin-B checkpoint path.
        """

        def _init_weights(m):
            # Standard truncated-normal init for Linear, unit LayerNorm.
            if isinstance(m, nn.Linear):
                trunc_normal_(m.weight, std=0.02)
                if isinstance(m, nn.Linear) and m.bias is not None:
                    nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.LayerNorm):
                nn.init.constant_(m.bias, 0)
                nn.init.constant_(m.weight, 1.0)

        if pretrained:
            self.pretrained = pretrained
        if isinstance(self.pretrained, str):
            # Random init first, then overwrite with checkpoint weights.
            # get_root_logger / load_checkpoint come from elsewhere in the
            # project (mmcv-style helpers) — not defined in this file.
            self.apply(_init_weights)
            logger = get_root_logger()
            logger.info(f"load model from: {self.pretrained}")
            if self.pretrained2d:
                # Inflate 2D model into 3D model.
                self.inflate_weights(logger)
            else:
                # Directly load 3D model.
                load_checkpoint(self, self.pretrained, strict=False, logger=logger)
        elif self.pretrained is None:
            self.apply(_init_weights)
        else:
            raise TypeError("pretrained must be a str or None")

    def forward(self, x):
        """Forward function.

        Returns a list with one feature map per stage: the center temporal
        frame of each stage's pre-downsample output, shape (B, C_i, H_i, W_i).
        """
        x = self.patch_embed(x)
        x = self.pos_drop(x)
        outs = []
        for layer in self.layers:
            x, out = layer(x.contiguous())
            # Keep only the middle frame along the temporal axis of x.
            outs.append(out[:, :, x.size(2) // 2, :, :].contiguous())
        # x = rearrange(x, 'n c d h w -> n d h w c')
        # x = self.norm(x)
        # x = rearrange(x, 'n d h w c -> n c d h w')
        return outs

    def train(self, mode=True):
        """Convert the model into training mode while keep layers freezed."""
        super(SwinTransformer3D, self).train(mode)
        # Re-apply freezing: train(True) would otherwise un-eval frozen stages.
        self._freeze_stages()
if __name__ == "__main__":
    # Smoke test: build a Swin-B-sized 3D backbone for single-channel
    # (in_chans=1) 5-slice volumes and run one CPU forward pass.
    model = SwinTransformer3D(
        pretrained2d=False,
        patch_size=(2, 4, 4),
        embed_dim=128,
        depths=[2, 2, 18, 2],
        num_heads=[4, 8, 16, 32],
        window_size=(8, 7, 7),
        mlp_ratio=4.0,
        qkv_bias=True,
        qk_scale=None,
        drop_rate=0.0,
        attn_drop_rate=0.0,
        drop_path_rate=0.2,
        patch_norm=True,
        in_chans=1,
    )
    # Loads a Kinetics-600 3D checkpoint directly (pretrained2d=False).
    model.init_weights(
        "pretrained_models/swin_base_patch244_window877_kinetics600_22k.pth"
    )  # swin_base_patch244_window1677_sthv2.pth
    # model.cuda()
    x = torch.randn(2, 1, 5, 512, 512)  # (B, C, D, H, W)
    out = model(x)
    print([y.shape for y in out])  # one per-stage feature map each
| Python |
3D | yuhui-zh15/TransSeg | src/backbones/encoders/pretrained_models/download_weights.sh | .sh | 487 | 6 | wget https://unilm.blob.core.windows.net/beit/beit_base_patch16_224_pt22k_ft22k.pth
# BEiT-Base/16, ImageNet-22k pretrained (no fine-tuning head).
wget https://unilm.blob.core.windows.net/beit/beit_base_patch16_224_pt22k.pth
# Swin-Base 2D (patch 4, window 7), ImageNet-22k pretrained.
wget https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_base_patch4_window7_224_22k.pth
# Video Swin-Base 3D (patch 2x4x4, window 8x7x7), Kinetics-600.
wget https://github.com/SwinTransformer/storage/releases/download/v1.0.4/swin_base_patch244_window877_kinetics600_22k.pth
# DINO self-supervised ViT-Base/16 pretrained weights.
wget https://dl.fbaipublicfiles.com/dino/dino_vitbase16_pretrain/dino_vitbase16_pretrain.pth
| Shell |
3D | yuhui-zh15/TransSeg | src/unetr/unetr.py | .py | 7,751 | 255 | import os
import shutil
import tempfile
import matplotlib.pyplot as plt
import numpy as np
from tqdm import tqdm
from monai.losses import DiceCELoss
from monai.inferers import sliding_window_inference
from monai.transforms import (
AsDiscrete,
EnsureChannelFirstd,
Compose,
CropForegroundd,
LoadImaged,
Orientationd,
RandFlipd,
RandCropByPosNegLabeld,
RandShiftIntensityd,
ScaleIntensityRanged,
Spacingd,
RandRotate90d,
)
from monai.config import print_config
from monai.metrics import DiceMetric
from monai.networks.nets import UNETR
from monai.data import (
DataLoader,
CacheDataset,
load_decathlon_datalist,
decollate_batch,
)
import torch
print_config()
# Checkpoint/output directory; falls back to a temp dir when the
# MONAI_DATA_DIRECTORY environment variable is unset.
directory = os.environ.get("MONAI_DATA_DIRECTORY")
root_dir = tempfile.mkdtemp() if directory is None else directory
print(root_dir)
# Training pipeline: resample to 1.5x1.5x2.0 mm, window CT intensities to
# [-175, 250] -> [0, 1], crop four 96^3 pos/neg patches per volume and apply
# light flip / rotate / intensity-shift augmentation.
train_transforms = Compose(
    [
        LoadImaged(keys=["image", "label"]),
        EnsureChannelFirstd(keys=["image", "label"]),
        Orientationd(keys=["image", "label"], axcodes="RAS"),
        Spacingd(
            keys=["image", "label"],
            pixdim=(1.5, 1.5, 2.0),
            mode=("bilinear", "nearest"),
        ),
        ScaleIntensityRanged(
            keys=["image"],
            a_min=-175,
            a_max=250,
            b_min=0.0,
            b_max=1.0,
            clip=True,
        ),
        CropForegroundd(keys=["image", "label"], source_key="image"),
        RandCropByPosNegLabeld(
            keys=["image", "label"],
            label_key="label",
            spatial_size=(96, 96, 96),
            pos=1,
            neg=1,
            num_samples=4,
            image_key="image",
            image_threshold=0,
        ),
        RandFlipd(
            keys=["image", "label"],
            spatial_axis=[0],
            prob=0.10,
        ),
        RandFlipd(
            keys=["image", "label"],
            spatial_axis=[1],
            prob=0.10,
        ),
        RandFlipd(
            keys=["image", "label"],
            spatial_axis=[2],
            prob=0.10,
        ),
        RandRotate90d(
            keys=["image", "label"],
            prob=0.10,
            max_k=3,
        ),
        RandShiftIntensityd(
            keys=["image"],
            offsets=0.10,
            prob=0.50,
        ),
    ]
)
# Validation pipeline: same preprocessing, no random augmentation or cropping.
val_transforms = Compose(
    [
        LoadImaged(keys=["image", "label"]),
        EnsureChannelFirstd(keys=["image", "label"]),
        Orientationd(keys=["image", "label"], axcodes="RAS"),
        Spacingd(
            keys=["image", "label"],
            pixdim=(1.5, 1.5, 2.0),
            mode=("bilinear", "nearest"),
        ),
        ScaleIntensityRanged(
            keys=["image"], a_min=-175, a_max=250, b_min=0.0, b_max=1.0, clip=True
        ),
        CropForegroundd(keys=["image", "label"], source_key="image"),
    ]
)
# BCV dataset in Decathlon-style layout; splits come from the JSON file.
data_dir = "data/MedicalImages/bcv30/RawData/UNETR_raw_data/Task250_BCV/"
split_JSON = "dataset_unetr.json"
datasets = data_dir + split_JSON
datalist = load_decathlon_datalist(datasets, True, "training")
val_files = load_decathlon_datalist(datasets, True, "validation")
# cache_rate=1.0 keeps every preprocessed volume in RAM.
train_ds = CacheDataset(
    data=datalist,
    transform=train_transforms,
    cache_rate=1.0,
    num_workers=8,
)
train_loader = DataLoader(
    train_ds, batch_size=1, shuffle=True, num_workers=8, pin_memory=True
)
val_ds = CacheDataset(
    data=val_files, transform=val_transforms, cache_rate=1.0, num_workers=4
)
val_loader = DataLoader(
    val_ds, batch_size=1, shuffle=False, num_workers=4, pin_memory=True
)
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# Standard UNETR configuration for 14-class BCV segmentation on 96^3 patches.
model = UNETR(
    in_channels=1,
    out_channels=14,
    img_size=(96, 96, 96),
    feature_size=16,
    hidden_size=768,
    mlp_dim=3072,
    num_heads=12,
    pos_embed="perceptron",
    norm_name="instance",
    res_block=True,
    dropout_rate=0.0,
).to(device)
loss_function = DiceCELoss(to_onehot_y=True, softmax=True)
torch.backends.cudnn.benchmark = True
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-4, weight_decay=1e-5)
def validation(epoch_iterator_val):
    """Run sliding-window inference over the validation loader; return mean Dice.

    Uses module-level globals: ``model``, ``post_label``, ``post_pred``,
    ``dice_metric`` and ``global_step`` (progress text only). Note this script
    calls ``.cuda()`` directly, so validation requires a GPU.

    Args:
        epoch_iterator_val: tqdm-wrapped validation DataLoader.

    Returns:
        float: mean Dice over the validation set.
    """
    model.eval()
    with torch.no_grad():
        for step, batch in enumerate(epoch_iterator_val):
            val_inputs, val_labels = (batch["image"].cuda(), batch["label"].cuda())
            # 96^3 ROI, 4 windows per inference batch; overlapping windows are
            # aggregated by sliding_window_inference.
            val_outputs = sliding_window_inference(val_inputs, (96, 96, 96), 4, model)
            val_labels_list = decollate_batch(val_labels)
            val_labels_convert = [
                post_label(val_label_tensor) for val_label_tensor in val_labels_list
            ]
            val_outputs_list = decollate_batch(val_outputs)
            val_output_convert = [
                post_pred(val_pred_tensor) for val_pred_tensor in val_outputs_list
            ]
            # Accumulate per-case Dice into the metric buffer.
            dice_metric(y_pred=val_output_convert, y=val_labels_convert)
            epoch_iterator_val.set_description(
                "Validate (%d / %d Steps)" % (global_step, 10.0)
            )
        mean_dice_val = dice_metric.aggregate().item()
        dice_metric.reset()
    return mean_dice_val
def train(global_step, train_loader, dice_val_best, global_step_best):
    """Run one pass over ``train_loader``, validating every ``eval_num`` steps.

    Uses module-level globals: ``model``, ``optimizer``, ``loss_function``,
    ``eval_num``, ``max_iterations``, ``epoch_loss_values``, ``metric_values``,
    ``val_loader`` and ``root_dir``. Saves the best-Dice checkpoint to
    ``root_dir/best_metric_model.pth``.

    Args:
        global_step (int): Current global optimizer step.
        train_loader: Training DataLoader.
        dice_val_best (float): Best validation Dice seen so far.
        global_step_best (int): Step at which ``dice_val_best`` was reached.

    Returns:
        tuple: updated ``(global_step, dice_val_best, global_step_best)``.
    """
    model.train()
    epoch_loss = 0
    step = 0
    epoch_iterator = tqdm(
        train_loader, desc="Training (X / X Steps) (loss=X.X)", dynamic_ncols=True
    )
    for step, batch in enumerate(epoch_iterator):
        step += 1
        x, y = (batch["image"].cuda(), batch["label"].cuda())
        logit_map = model(x)
        # BUG FIX: MONAI's DiceCELoss returns a single scalar loss tensor, so
        # the previous `loss, _ = loss_function(logit_map, y)` tuple-unpack
        # would raise ("iteration over a 0-d tensor") on the first step.
        loss = loss_function(logit_map, y)
        loss.backward()
        epoch_loss += loss.item()
        optimizer.step()
        optimizer.zero_grad()
        epoch_iterator.set_description(
            "Training (%d / %d Steps) (loss=%2.5f)" % (global_step, max_iterations, loss)
        )
        # Periodic validation (and at the final step); keep the best checkpoint.
        if (
            global_step % eval_num == 0 and global_step != 0
        ) or global_step == max_iterations:
            epoch_iterator_val = tqdm(
                val_loader, desc="Validate (X / X Steps) (dice=X.X)", dynamic_ncols=True
            )
            dice_val = validation(epoch_iterator_val)
            epoch_loss /= step
            epoch_loss_values.append(epoch_loss)
            metric_values.append(dice_val)
            if dice_val > dice_val_best:
                dice_val_best = dice_val
                global_step_best = global_step
                torch.save(
                    model.state_dict(), os.path.join(root_dir, "best_metric_model.pth")
                )
                print(
                    "Model Was Saved ! Current Best Avg. Dice: {} Current Avg. Dice: {}".format(
                        dice_val_best, dice_val
                    )
                )
            else:
                print(
                    "Model Was Not Saved ! Current Best Avg. Dice: {} Current Avg. Dice: {}".format(
                        dice_val_best, dice_val
                    )
                )
        global_step += 1
    return global_step, dice_val_best, global_step_best
max_iterations = 25000  # total optimizer steps
eval_num = 500  # validate (and maybe checkpoint) every 500 steps
# NOTE(review): `to_onehot=True, n_classes=...` is the legacy AsDiscrete API;
# newer MONAI versions expect `to_onehot=14` — confirm against the pinned
# MONAI version before upgrading.
post_label = AsDiscrete(to_onehot=True, n_classes=14)
post_pred = AsDiscrete(argmax=True, to_onehot=True, n_classes=14)
dice_metric = DiceMetric(include_background=True, reduction="mean", get_not_nans=False)
global_step = 0
dice_val_best = 0.0
global_step_best = 0
epoch_loss_values = []
metric_values = []
# Main training loop; `train` advances global_step and tracks the best Dice.
while global_step < max_iterations:
    global_step, dice_val_best, global_step_best = train(
        global_step, train_loader, dice_val_best, global_step_best
    )
# Reload the best checkpoint saved during training for the final report.
model.load_state_dict(torch.load(os.path.join(root_dir, "best_metric_model.pth")))
print(
    f"train completed, best_metric: {dice_val_best:.4f} "
    f"at iteration: {global_step_best}"
)
| Python |
3D | yuhui-zh15/TransSeg | src/unetr/unetr_eval.py | .py | 7,867 | 256 | from multiprocessing import reduction
import os
import shutil
import tempfile
import matplotlib.pyplot as plt
import numpy as np
from tqdm import tqdm
from monai.losses import DiceCELoss
from monai.inferers import sliding_window_inference
from monai.transforms import (
AsDiscrete,
EnsureChannelFirstd,
Compose,
CropForegroundd,
LoadImaged,
Orientationd,
RandFlipd,
RandCropByPosNegLabeld,
RandShiftIntensityd,
ScaleIntensityRanged,
Spacingd,
RandRotate90d,
)
from monai.config import print_config
from monai.metrics import DiceMetric
from monai.networks.nets import UNETR
from monai.data import (
DataLoader,
CacheDataset,
load_decathlon_datalist,
decollate_batch,
)
import torch
print_config()
# Evaluation-only variant of unetr.py: identical preprocessing/model setup,
# but the training dataset/loader construction is commented out below.
directory = os.environ.get("MONAI_DATA_DIRECTORY")
root_dir = tempfile.mkdtemp() if directory is None else directory
print(root_dir)
# Kept for parity with unetr.py; unused here since training is disabled.
train_transforms = Compose(
    [
        LoadImaged(keys=["image", "label"]),
        EnsureChannelFirstd(keys=["image", "label"]),
        Orientationd(keys=["image", "label"], axcodes="RAS"),
        Spacingd(
            keys=["image", "label"],
            pixdim=(1.5, 1.5, 2.0),
            mode=("bilinear", "nearest"),
        ),
        ScaleIntensityRanged(
            keys=["image"],
            a_min=-175,
            a_max=250,
            b_min=0.0,
            b_max=1.0,
            clip=True,
        ),
        CropForegroundd(keys=["image", "label"], source_key="image"),
        RandCropByPosNegLabeld(
            keys=["image", "label"],
            label_key="label",
            spatial_size=(96, 96, 96),
            pos=1,
            neg=1,
            num_samples=4,
            image_key="image",
            image_threshold=0,
        ),
        RandFlipd(
            keys=["image", "label"],
            spatial_axis=[0],
            prob=0.10,
        ),
        RandFlipd(
            keys=["image", "label"],
            spatial_axis=[1],
            prob=0.10,
        ),
        RandFlipd(
            keys=["image", "label"],
            spatial_axis=[2],
            prob=0.10,
        ),
        RandRotate90d(
            keys=["image", "label"],
            prob=0.10,
            max_k=3,
        ),
        RandShiftIntensityd(
            keys=["image"],
            offsets=0.10,
            prob=0.50,
        ),
    ]
)
val_transforms = Compose(
    [
        LoadImaged(keys=["image", "label"]),
        EnsureChannelFirstd(keys=["image", "label"]),
        Orientationd(keys=["image", "label"], axcodes="RAS"),
        Spacingd(
            keys=["image", "label"],
            pixdim=(1.5, 1.5, 2.0),
            mode=("bilinear", "nearest"),
        ),
        ScaleIntensityRanged(
            keys=["image"], a_min=-175, a_max=250, b_min=0.0, b_max=1.0, clip=True
        ),
        CropForegroundd(keys=["image", "label"], source_key="image"),
    ]
)
data_dir = "data/MedicalImages/bcv30/RawData/UNETR_raw_data/Task250_BCV/"
split_JSON = "dataset_unetr.json"
datasets = data_dir + split_JSON
datalist = load_decathlon_datalist(datasets, True, "training")
val_files = load_decathlon_datalist(datasets, True, "validation")
# Training loaders intentionally disabled — this script only evaluates.
# train_ds = CacheDataset(
#     data=datalist,
#     transform=train_transforms,
#     cache_rate=1.0,
#     num_workers=8,
# )
# train_loader = DataLoader(
#     train_ds, batch_size=1, shuffle=True, num_workers=8, pin_memory=True
# )
val_ds = CacheDataset(
    data=val_files, transform=val_transforms, cache_rate=1.0, num_workers=4
)
val_loader = DataLoader(
    val_ds, batch_size=1, shuffle=False, num_workers=4, pin_memory=True
)
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# Must match the architecture of the checkpoint loaded at the bottom.
model = UNETR(
    in_channels=1,
    out_channels=14,
    img_size=(96, 96, 96),
    feature_size=16,
    hidden_size=768,
    mlp_dim=3072,
    num_heads=12,
    pos_embed="perceptron",
    norm_name="instance",
    res_block=True,
    dropout_rate=0.0,
).to(device)
loss_function = DiceCELoss(to_onehot_y=True, softmax=True)
torch.backends.cudnn.benchmark = True
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-4, weight_decay=1e-5)
def validation(epoch_iterator_val):
    """Sliding-window validation; returns the PER-CLASS mean Dice.

    Unlike unetr.py, ``dice_metric`` here uses reduction="mean_batch", so
    ``aggregate()`` yields one value per class and no ``.item()`` is taken —
    the returned value is a tensor, which is also printed.

    Args:
        epoch_iterator_val: tqdm-wrapped validation DataLoader.

    Returns:
        Per-class mean Dice tensor.
    """
    model.eval()
    with torch.no_grad():
        for step, batch in enumerate(epoch_iterator_val):
            val_inputs, val_labels = (batch["image"].cuda(), batch["label"].cuda())
            val_outputs = sliding_window_inference(val_inputs, (96, 96, 96), 4, model)
            val_labels_list = decollate_batch(val_labels)
            val_labels_convert = [
                post_label(val_label_tensor) for val_label_tensor in val_labels_list
            ]
            val_outputs_list = decollate_batch(val_outputs)
            val_output_convert = [
                post_pred(val_pred_tensor) for val_pred_tensor in val_outputs_list
            ]
            dice_metric(y_pred=val_output_convert, y=val_labels_convert)
            epoch_iterator_val.set_description(
                "Validate (%d / %d Steps)" % (global_step, 10.0)
            )
        mean_dice_val = dice_metric.aggregate()
        print(mean_dice_val)
        dice_metric.reset()
    return mean_dice_val
def train(global_step, train_loader, dice_val_best, global_step_best):
    """Training step loop (dead code in this eval-only script).

    Kept in sync with unetr.py; the invoking while-loop below is commented
    out, so this function is never called here.
    """
    model.train()
    epoch_loss = 0
    step = 0
    epoch_iterator = tqdm(
        train_loader, desc="Training (X / X Steps) (loss=X.X)", dynamic_ncols=True
    )
    for step, batch in enumerate(epoch_iterator):
        step += 1
        x, y = (batch["image"].cuda(), batch["label"].cuda())
        logit_map = model(x)
        # NOTE(review): DiceCELoss returns a single scalar tensor, so this
        # tuple-unpack looks like it would raise on a 0-d tensor. Harmless
        # while the training loop is disabled — fix before re-enabling.
        loss, _ = loss_function(logit_map, y)
        loss.backward()
        epoch_loss += loss.item()
        optimizer.step()
        optimizer.zero_grad()
        epoch_iterator.set_description(
            "Training (%d / %d Steps) (loss=%2.5f)" % (global_step, max_iterations, loss)
        )
        if (
            global_step % eval_num == 0 and global_step != 0
        ) or global_step == max_iterations:
            epoch_iterator_val = tqdm(
                val_loader, desc="Validate (X / X Steps) (dice=X.X)", dynamic_ncols=True
            )
            dice_val = validation(epoch_iterator_val)
            epoch_loss /= step
            epoch_loss_values.append(epoch_loss)
            metric_values.append(dice_val)
            if dice_val > dice_val_best:
                dice_val_best = dice_val
                global_step_best = global_step
                torch.save(
                    model.state_dict(), os.path.join(root_dir, "best_metric_model.pth")
                )
                print(
                    "Model Was Saved ! Current Best Avg. Dice: {} Current Avg. Dice: {}".format(
                        dice_val_best, dice_val
                    )
                )
            else:
                print(
                    "Model Was Not Saved ! Current Best Avg. Dice: {} Current Avg. Dice: {}".format(
                        dice_val_best, dice_val
                    )
                )
        global_step += 1
    return global_step, dice_val_best, global_step_best
max_iterations = 25000
eval_num = 500
post_label = AsDiscrete(to_onehot=True, n_classes=14)
post_pred = AsDiscrete(argmax=True, to_onehot=True, n_classes=14)
# "mean_batch" keeps per-class Dice (14 values) rather than a single scalar.
dice_metric = DiceMetric(include_background=True, reduction="mean_batch", get_not_nans=False)
global_step = 0
dice_val_best = 0.0
global_step_best = 0
epoch_loss_values = []
metric_values = []
# Training disabled — this script only evaluates a saved checkpoint.
# while global_step < max_iterations:
#     global_step, dice_val_best, global_step_best = train(
#         global_step, train_loader, dice_val_best, global_step_best
#     )
# Checkpoint is expected in the working directory.
model.load_state_dict(torch.load("unetr.pth"))
epoch_iterator_val = tqdm(
    val_loader, desc="Validate (X / X Steps) (dice=X.X)", dynamic_ncols=True
)
print(validation(epoch_iterator_val))
| Python |
3D | yuhui-zh15/TransSeg | src/scripts/train_bcv_2d.sh | .sh | 518 | 23 | #!/bin/bash
#SBATCH --job-name=bcv
#SBATCH --cpus-per-task=32
#SBATCH --mem-per-cpu=3gb
#SBATCH --partition=pasteur
#SBATCH --gres=gpu:8
#SBATCH --time=24:00:00
#SBATCH --output=bcv_%A_%a.out
#SBATCH --mail-type=ALL
# BCV (14 output classes) training in forced-2D mode: 5-slice 512x512 inputs,
# CT intensities clipped to [-175, 250].
python main.py \
    --data_dir data/bcv/processed/ \
    --split_json dataset_5slices.json \
    --img_size 512 512 5 \
    --clip_range -175 250 \
    --in_channels 1 \
    --out_channels 14 \
    --max_steps 25000 \
    --train_batch_size 2 \
    --eval_batch_size 2 \
    --accumulate_grad_batches 1 \
--force_2d 1 | Shell |
3D | yuhui-zh15/TransSeg | src/scripts/eval_bcv.sh | .sh | 1,373 | 56 | python main.py \
--data_dir data/bcv/processed/ \
--split_json dataset_5slices.json \
--img_size 512 512 5 \
--clip_range -175 250 \
--in_channels 1 \
--out_channels 14 \
--max_steps 25000 \
--train_batch_size 2 \
--eval_batch_size 2 \
--accumulate_grad_batches 1 \
--evaluation 1 \
--model_path checkpoints/ours.pt
# Evaluate the remaining ablation checkpoints with settings identical to the
# run above; the loop replaces three copy-pasted invocations.
for ckpt in ourswod ourswot ourswotd; do
    python main.py \
        --data_dir data/bcv/processed/ \
        --split_json dataset_5slices.json \
        --img_size 512 512 5 \
        --clip_range -175 250 \
        --in_channels 1 \
        --out_channels 14 \
        --max_steps 25000 \
        --train_batch_size 2 \
        --eval_batch_size 2 \
        --accumulate_grad_batches 1 \
        --evaluation 1 \
        --model_path "checkpoints/${ckpt}.pt"
done
| Shell |
3D | yuhui-zh15/TransSeg | src/scripts/train_msd_07.sh | .sh | 524 | 23 | #!/bin/bash
#SBATCH --job-name=msd
#SBATCH --cpus-per-task=32
#SBATCH --mem-per-cpu=4gb
#SBATCH --partition=pasteur
#SBATCH --gres=gpu:4
#SBATCH --time=100:00:00
#SBATCH --output=msd_07_%A_%a.out
#SBATCH --mail-type=ALL
# MSD Task07 Pancreas (3 output classes): longer schedule (100k steps) with
# gradient accumulation of 2.
python main.py \
    --data_dir data/msd/processed/Task07_Pancreas/ \
    --split_json dataset_5slices.json \
    --img_size 512 512 5 \
    --clip_range -175 250 \
    --in_channels 1 \
    --out_channels 3 \
    --max_steps 100000 \
    --train_batch_size 2 \
    --eval_batch_size 2 \
    --accumulate_grad_batches 2
| Shell |
3D | yuhui-zh15/TransSeg | src/scripts/train_msd_02.sh | .sh | 519 | 23 | #!/bin/bash
#SBATCH --job-name=msd
#SBATCH --cpus-per-task=16
#SBATCH --mem-per-cpu=4gb
#SBATCH --partition=pasteur
#SBATCH --gres=gpu:4
#SBATCH --time=24:00:00
#SBATCH --output=msd_02_%A_%a.out
#SBATCH --mail-type=ALL
# MSD Task02 Heart (binary: 2 output classes), 320x320 5-slice inputs,
# intensity window [601, 1511].
python main.py \
    --data_dir data/msd/processed/Task02_Heart/ \
    --split_json dataset_5slices.json \
    --img_size 320 320 5 \
    --clip_range 601 1511 \
    --in_channels 1 \
    --out_channels 2 \
    --max_steps 25000 \
    --train_batch_size 4 \
    --eval_batch_size 4 \
    --accumulate_grad_batches 1
| Shell |
3D | yuhui-zh15/TransSeg | src/scripts/train_acdc_2d.sh | .sh | 521 | 24 | #!/bin/bash
#SBATCH --job-name=acdc
#SBATCH --cpus-per-task=16
#SBATCH --mem-per-cpu=4gb
#SBATCH --partition=pasteur
#SBATCH --gres=gpu:4
#SBATCH --time=24:00:00
#SBATCH --output=acdc_%A_%a.out
#SBATCH --mail-type=ALL
python main.py \
--data_dir data/acdc/processed/ \
--split_json dataset_5slices.json \
--img_size 512 512 5 \
--clip_range -175 250 \
--in_channels 1 \
--out_channels 4 \
--max_steps 25000 \
--train_batch_size 2 \
--eval_batch_size 2 \
--accumulate_grad_batches 2 \
--force_2d 1
| Shell |
3D | yuhui-zh15/TransSeg | src/scripts/train_msd_05.sh | .sh | 519 | 22 | #!/bin/bash
#SBATCH --job-name=msd
#SBATCH --cpus-per-task=16
#SBATCH --mem-per-cpu=4gb
#SBATCH --partition=pasteur
#SBATCH --gres=gpu:4
#SBATCH --time=24:00:00
#SBATCH --output=msd_05_%A_%a.out
#SBATCH --mail-type=ALL
python main.py \
--data_dir data/msd/processed/Task05_Prostate/ \
--split_json dataset_5slices.json \
--img_size 320 320 5 \
--clip_range 0 2152 \
--in_channels 2 \
--out_channels 3 \
--max_steps 25000 \
--train_batch_size 4 \
--eval_batch_size 4 \
--accumulate_grad_batches 1 | Shell |
3D | yuhui-zh15/TransSeg | src/scripts/train_msd_01.sh | .sh | 535 | 23 | #!/bin/bash
#SBATCH --job-name=msd
#SBATCH --cpus-per-task=16
#SBATCH --mem-per-cpu=4gb
#SBATCH --partition=pasteur
#SBATCH --gres=gpu:4
#SBATCH --time=100:00:00
#SBATCH --output=msd_01_%A_%a.out
#SBATCH --mail-type=ALL
python main.py \
--data_dir data/msd/processed/Task01_BrainTumour/ \
--split_json dataset_5slices.json \
--img_size 240 240 5 \
--clip_range -1000000 1000000 \
--in_channels 4 \
--out_channels 4 \
--max_steps 250000 \
--train_batch_size 4 \
--eval_batch_size 4 \
--accumulate_grad_batches 1
| Shell |
3D | yuhui-zh15/TransSeg | src/scripts/train_msd_09_2d.sh | .sh | 537 | 24 | #!/bin/bash
#SBATCH --job-name=msd
#SBATCH --cpus-per-task=16
#SBATCH --mem-per-cpu=4gb
#SBATCH --partition=pasteur
#SBATCH --gres=gpu:4
#SBATCH --time=24:00:00
#SBATCH --output=msd_09_%A_%a.out
#SBATCH --mail-type=ALL
python main.py \
--data_dir data/msd/processed/Task09_Spleen/ \
--split_json dataset_5slices.json \
--img_size 512 512 5 \
--clip_range -175 250 \
--in_channels 1 \
--out_channels 2 \
--max_steps 25000 \
--train_batch_size 2 \
--eval_batch_size 2 \
--accumulate_grad_batches 2 \
--force_2d 1
| Shell |
3D | yuhui-zh15/TransSeg | src/scripts/train_msd_07_2d.sh | .sh | 541 | 24 | #!/bin/bash
#SBATCH --job-name=msd
#SBATCH --cpus-per-task=32
#SBATCH --mem-per-cpu=4gb
#SBATCH --partition=pasteur
#SBATCH --gres=gpu:4
#SBATCH --time=100:00:00
#SBATCH --output=msd_07_%A_%a.out
#SBATCH --mail-type=ALL
python main.py \
--data_dir data/msd/processed/Task07_Pancreas/ \
--split_json dataset_5slices.json \
--img_size 512 512 5 \
--clip_range -175 250 \
--in_channels 1 \
--out_channels 3 \
--max_steps 100000 \
--train_batch_size 2 \
--eval_batch_size 2 \
--accumulate_grad_batches 2 \
--force_2d 1
| Shell |
3D | yuhui-zh15/TransSeg | src/scripts/train_msd_10_2d.sh | .sh | 536 | 24 | #!/bin/bash
#SBATCH --job-name=msd
#SBATCH --cpus-per-task=16
#SBATCH --mem-per-cpu=4gb
#SBATCH --partition=pasteur
#SBATCH --gres=gpu:4
#SBATCH --time=24:00:00
#SBATCH --output=msd_10_%A_%a.out
#SBATCH --mail-type=ALL
python main.py \
--data_dir data/msd/processed/Task10_Colon/ \
--split_json dataset_5slices.json \
--img_size 512 512 5 \
--clip_range -175 250 \
--in_channels 1 \
--out_channels 2 \
--max_steps 25000 \
--train_batch_size 2 \
--eval_batch_size 2 \
--accumulate_grad_batches 2 \
--force_2d 1
| Shell |
3D | yuhui-zh15/TransSeg | src/scripts/train_msd_03.sh | .sh | 518 | 22 | #!/bin/bash
#SBATCH --job-name=msd
#SBATCH --cpus-per-task=32
#SBATCH --mem-per-cpu=4gb
#SBATCH --partition=pasteur
#SBATCH --gres=gpu:4
#SBATCH --time=24:00:00
#SBATCH --output=msd_03_%A_%a.out
#SBATCH --mail-type=ALL
python main.py \
--data_dir data/msd/processed/Task03_Liver/ \
--split_json dataset_5slices.json \
--img_size 512 512 5 \
--clip_range -175 250 \
--in_channels 1 \
--out_channels 3 \
--max_steps 25000 \
--train_batch_size 2 \
--eval_batch_size 2 \
--accumulate_grad_batches 2 | Shell |
3D | yuhui-zh15/TransSeg | src/scripts/train_msd_10.sh | .sh | 519 | 23 | #!/bin/bash
#SBATCH --job-name=msd
#SBATCH --cpus-per-task=16
#SBATCH --mem-per-cpu=4gb
#SBATCH --partition=pasteur
#SBATCH --gres=gpu:4
#SBATCH --time=24:00:00
#SBATCH --output=msd_10_%A_%a.out
#SBATCH --mail-type=ALL
python main.py \
--data_dir data/msd/processed/Task10_Colon/ \
--split_json dataset_5slices.json \
--img_size 512 512 5 \
--clip_range -175 250 \
--in_channels 1 \
--out_channels 2 \
--max_steps 25000 \
--train_batch_size 2 \
--eval_batch_size 2 \
--accumulate_grad_batches 2
| Shell |
3D | yuhui-zh15/TransSeg | src/scripts/train_msd_03_2d.sh | .sh | 535 | 23 | #!/bin/bash
#SBATCH --job-name=msd
#SBATCH --cpus-per-task=32
#SBATCH --mem-per-cpu=4gb
#SBATCH --partition=pasteur
#SBATCH --gres=gpu:4
#SBATCH --time=24:00:00
#SBATCH --output=msd_03_%A_%a.out
#SBATCH --mail-type=ALL
python main.py \
--data_dir data/msd/processed/Task03_Liver/ \
--split_json dataset_5slices.json \
--img_size 512 512 5 \
--clip_range -175 250 \
--in_channels 1 \
--out_channels 3 \
--max_steps 25000 \
--train_batch_size 2 \
--eval_batch_size 2 \
--accumulate_grad_batches 2 \
--force_2d 1 | Shell |
3D | yuhui-zh15/TransSeg | src/scripts/train_msd_06.sh | .sh | 515 | 23 | #!/bin/bash
#SBATCH --job-name=msd
#SBATCH --cpus-per-task=16
#SBATCH --mem-per-cpu=4gb
#SBATCH --partition=pasteur
#SBATCH --gres=gpu:4
#SBATCH --time=24:00:00
#SBATCH --output=msd_06_%A_%a.out
#SBATCH --mail-type=ALL
python main.py \
--data_dir data/msd/processed/Task06_Lung \
--split_json dataset_5slices.json \
--img_size 512 512 5 \
--clip_range -175 250 \
--in_channels 1 \
--out_channels 2 \
--max_steps 25000 \
--train_batch_size 2 \
--eval_batch_size 2 \
--accumulate_grad_batches 2
| Shell |
3D | yuhui-zh15/TransSeg | src/scripts/train_msd_09.sh | .sh | 520 | 23 | #!/bin/bash
#SBATCH --job-name=msd
#SBATCH --cpus-per-task=16
#SBATCH --mem-per-cpu=4gb
#SBATCH --partition=pasteur
#SBATCH --gres=gpu:4
#SBATCH --time=24:00:00
#SBATCH --output=msd_09_%A_%a.out
#SBATCH --mail-type=ALL
python main.py \
--data_dir data/msd/processed/Task09_Spleen/ \
--split_json dataset_5slices.json \
--img_size 512 512 5 \
--clip_range -175 250 \
--in_channels 1 \
--out_channels 2 \
--max_steps 25000 \
--train_batch_size 2 \
--eval_batch_size 2 \
--accumulate_grad_batches 2
| Shell |
3D | yuhui-zh15/TransSeg | src/scripts/train_msd_04_2d.sh | .sh | 543 | 24 | #!/bin/bash
#SBATCH --job-name=msd
#SBATCH --cpus-per-task=16
#SBATCH --mem-per-cpu=4gb
#SBATCH --partition=pasteur
#SBATCH --gres=gpu:4
#SBATCH --time=24:00:00
#SBATCH --output=msd_04_%A_%a.out
#SBATCH --mail-type=ALL
python main.py \
--data_dir data/msd/processed/Task04_Hippocampus/ \
--split_json dataset_5slices.json \
--img_size 240 240 5 \
--clip_range 29 205431 \
--in_channels 1 \
--out_channels 3 \
--max_steps 25000 \
--train_batch_size 4 \
--eval_batch_size 4 \
--accumulate_grad_batches 1 \
--force_2d 1
| Shell |
3D | yuhui-zh15/TransSeg | src/scripts/train_msd_06_2d.sh | .sh | 532 | 24 | #!/bin/bash
#SBATCH --job-name=msd
#SBATCH --cpus-per-task=16
#SBATCH --mem-per-cpu=4gb
#SBATCH --partition=pasteur
#SBATCH --gres=gpu:4
#SBATCH --time=24:00:00
#SBATCH --output=msd_06_%A_%a.out
#SBATCH --mail-type=ALL
python main.py \
--data_dir data/msd/processed/Task06_Lung \
--split_json dataset_5slices.json \
--img_size 512 512 5 \
--clip_range -175 250 \
--in_channels 1 \
--out_channels 2 \
--max_steps 25000 \
--train_batch_size 2 \
--eval_batch_size 2 \
--accumulate_grad_batches 2 \
--force_2d 1
| Shell |
3D | yuhui-zh15/TransSeg | src/scripts/train_msd_05_2d.sh | .sh | 536 | 23 | #!/bin/bash
#SBATCH --job-name=msd
#SBATCH --cpus-per-task=16
#SBATCH --mem-per-cpu=4gb
#SBATCH --partition=pasteur
#SBATCH --gres=gpu:4
#SBATCH --time=24:00:00
#SBATCH --output=msd_05_%A_%a.out
#SBATCH --mail-type=ALL
python main.py \
--data_dir data/msd/processed/Task05_Prostate/ \
--split_json dataset_5slices.json \
--img_size 320 320 5 \
--clip_range 0 2152 \
--in_channels 2 \
--out_channels 3 \
--max_steps 25000 \
--train_batch_size 4 \
--eval_batch_size 4 \
--accumulate_grad_batches 1 \
--force_2d 1 | Shell |
3D | yuhui-zh15/TransSeg | src/scripts/train_msd_08_2d.sh | .sh | 546 | 24 | #!/bin/bash
#SBATCH --job-name=msd
#SBATCH --cpus-per-task=32
#SBATCH --mem-per-cpu=4gb
#SBATCH --partition=pasteur
#SBATCH --gres=gpu:4
#SBATCH --time=100:00:00
#SBATCH --output=msd_08_%A_%a.out
#SBATCH --mail-type=ALL
python main.py \
--data_dir data/msd/processed/Task08_HepaticVessel/ \
--split_json dataset_5slices.json \
--img_size 512 512 5 \
--clip_range -175 250 \
--in_channels 1 \
--out_channels 3 \
--max_steps 100000 \
--train_batch_size 2 \
--eval_batch_size 2 \
--accumulate_grad_batches 2 \
--force_2d 1
| Shell |
3D | yuhui-zh15/TransSeg | src/scripts/train_msd_01_2d.sh | .sh | 552 | 24 | #!/bin/bash
#SBATCH --job-name=msd
#SBATCH --cpus-per-task=16
#SBATCH --mem-per-cpu=4gb
#SBATCH --partition=pasteur
#SBATCH --gres=gpu:4
#SBATCH --time=100:00:00
#SBATCH --output=msd_01_%A_%a.out
#SBATCH --mail-type=ALL
python main.py \
--data_dir data/msd/processed/Task01_BrainTumour/ \
--split_json dataset_5slices.json \
--img_size 240 240 5 \
--clip_range -1000000 1000000 \
--in_channels 4 \
--out_channels 4 \
--max_steps 250000 \
--train_batch_size 4 \
--eval_batch_size 4 \
--accumulate_grad_batches 1 \
--force_2d 1
| Shell |
3D | yuhui-zh15/TransSeg | src/scripts/train_msd_02_2d.sh | .sh | 536 | 24 | #!/bin/bash
#SBATCH --job-name=msd
#SBATCH --cpus-per-task=16
#SBATCH --mem-per-cpu=4gb
#SBATCH --partition=pasteur
#SBATCH --gres=gpu:4
#SBATCH --time=24:00:00
#SBATCH --output=msd_02_%A_%a.out
#SBATCH --mail-type=ALL
python main.py \
--data_dir data/msd/processed/Task02_Heart/ \
--split_json dataset_5slices.json \
--img_size 320 320 5 \
--clip_range 601 1511 \
--in_channels 1 \
--out_channels 2 \
--max_steps 25000 \
--train_batch_size 4 \
--eval_batch_size 4 \
--accumulate_grad_batches 1 \
--force_2d 1
| Shell |
3D | yuhui-zh15/TransSeg | src/scripts/train_msd_08.sh | .sh | 529 | 23 | #!/bin/bash
#SBATCH --job-name=msd
#SBATCH --cpus-per-task=32
#SBATCH --mem-per-cpu=4gb
#SBATCH --partition=pasteur
#SBATCH --gres=gpu:4
#SBATCH --time=100:00:00
#SBATCH --output=msd_08_%A_%a.out
#SBATCH --mail-type=ALL
python main.py \
--data_dir data/msd/processed/Task08_HepaticVessel/ \
--split_json dataset_5slices.json \
--img_size 512 512 5 \
--clip_range -175 250 \
--in_channels 1 \
--out_channels 3 \
--max_steps 100000 \
--train_batch_size 2 \
--eval_batch_size 2 \
--accumulate_grad_batches 2
| Shell |
3D | yuhui-zh15/TransSeg | src/scripts/train_bcv.sh | .sh | 502 | 23 | #!/bin/bash
#SBATCH --job-name=bcv
#SBATCH --cpus-per-task=32
#SBATCH --mem-per-cpu=3gb
#SBATCH --partition=pasteur
#SBATCH --gres=gpu:8
#SBATCH --time=24:00:00
#SBATCH --output=bcv_%A_%a.out
#SBATCH --mail-type=ALL
# Train on the BCV multi-organ dataset: 5-slice 512x512 inputs;
# 14 output classes = background + 13 organs (see the BCV label map in
# src/data/bcv/split_data_to_slices_nii.py).
python main.py \
    --data_dir data/bcv/processed/ \
    --split_json dataset_5slices.json \
    --img_size 512 512 5 \
    --clip_range -175 250 \
    --in_channels 1 \
    --out_channels 14 \
    --max_steps 25000 \
    --train_batch_size 2 \
    --eval_batch_size 2 \
    --accumulate_grad_batches 1
| Shell |
3D | yuhui-zh15/TransSeg | src/scripts/train_acdc.sh | .sh | 504 | 23 | #!/bin/bash
#SBATCH --job-name=acdc
#SBATCH --cpus-per-task=16
#SBATCH --mem-per-cpu=4gb
#SBATCH --partition=pasteur
#SBATCH --gres=gpu:4
#SBATCH --time=24:00:00
#SBATCH --output=acdc_%A_%a.out
#SBATCH --mail-type=ALL
python main.py \
--data_dir data/acdc/processed/ \
--split_json dataset_5slices.json \
--img_size 512 512 5 \
--clip_range -175 250 \
--in_channels 1 \
--out_channels 4 \
--max_steps 25000 \
--train_batch_size 2 \
--eval_batch_size 2 \
--accumulate_grad_batches 2
| Shell |
3D | yuhui-zh15/TransSeg | src/scripts/train_msd_04.sh | .sh | 525 | 22 | #!/bin/bash
#SBATCH --job-name=msd
#SBATCH --cpus-per-task=16
#SBATCH --mem-per-cpu=4gb
#SBATCH --partition=pasteur
#SBATCH --gres=gpu:4
#SBATCH --time=24:00:00
#SBATCH --output=msd_04_%A_%a.out
#SBATCH --mail-type=ALL
python main.py \
--data_dir data/msd/processed/Task04_Hippocampus/ \
--split_json dataset_5slices.json \
--img_size 240 240 5 \
--clip_range 29 205431 \
--in_channels 1 \
--out_channels 3 \
--max_steps 25000 \
--train_batch_size 4 \
--eval_batch_size 4 \
--accumulate_grad_batches 1 | Shell |
3D | yuhui-zh15/TransSeg | src/data/bcv/split_data_to_slices_nii.py | .py | 3,849 | 140 | import os
import sys
from shutil import copyfile
from PIL import Image
import cv2
import nibabel as nib
import numpy as np
import json
# BCV dataset constants: 30 labeled training volumes; the indices listed in
# `val_slc` are held out for validation, the rest train.
N = 30
val_slc = [1, 2, 3, 4, 8, 22, 25, 29, 32, 35, 36, 38]
basedir = "RawData/Training/"
outputdir = f"processed"  # NOTE(review): f-string has no placeholders; a plain string would do
file_idxs = list(range(1, 11)) + list(range(21, 41))
def ensure_dir(file_path):
    """Make sure the parent directory of ``file_path`` exists; return ``file_path``
    unchanged so the call can be inlined inside ``nib.save(...)``."""
    parent = os.path.dirname(file_path)
    if not os.path.exists(parent):
        os.makedirs(parent)
    return file_path
print("# Data:", len(file_idxs))
json_metadata = {"training": [], "validation": []}
# --- Training/validation volumes: export overlapping 5-slice stacks ---
for i, idx in enumerate(file_idxs):
    if idx not in val_slc:
        splitdir = "training"
    elif idx in val_slc:
        splitdir = "validation"
    else:
        raise  # NOTE(review): unreachable branch; a bare `raise` here would be a RuntimeError anyway
    # NOTE(review): get_data() is deprecated in recent nibabel (use get_fdata()) — confirm pinned version
    img = nib.load(f"{basedir}/img/img{idx:04d}.nii.gz").get_data()
    label = nib.load(f"{basedir}/label/label{idx:04d}.nii.gz").get_data()
    n_slices = img.shape[-1]
    # img = np.array([cv2.resize(img[:, :, i], (224, 224), interpolation=cv2.INTER_LANCZOS4) for i in range(n_slices)]).transpose(1, 2, 0)
    # label = np.array([cv2.resize(label[:, :, i], (224, 224), interpolation=cv2.INTER_NEAREST) for i in range(n_slices)]).transpose(1, 2, 0)
    print(
        splitdir,
        i,
        idx,
        img.shape,
        label.shape,
        (img.mean(), img.std(), img.min(), img.max()),
        (label.mean(), label.std(), label.min(), label.max()),
    )
    for slice_idx in range(n_slices):
        # Each sample is the slice plus its 2 neighbors on either side;
        # indices are clamped at the volume border, so edge slices repeat.
        slice_idxs = [
            max(slice_idx - 2, 0),
            max(slice_idx - 1, 0),
            slice_idx,
            min(slice_idx + 1, n_slices - 1),
            min(slice_idx + 2, n_slices - 1),
        ]
        img_slices = img[:, :, slice_idxs]
        label_slices = label[:, :, slice_idxs]
        # Identity affine: original orientation/spacing metadata is dropped.
        img_new = nib.Nifti1Image(img_slices, np.eye(4))
        nib.save(
            img_new,
            ensure_dir(
                f"{outputdir}/images/{splitdir}/img{idx:04d}_{slice_idx:03d}.nii.gz"
            ),
        )
        label_new = nib.Nifti1Image(label_slices, np.eye(4))
        nib.save(
            label_new,
            ensure_dir(
                f"{outputdir}/annotations/{splitdir}/label{idx:04d}_{slice_idx:03d}.nii.gz"
            ),
        )
        json_metadata[splitdir].append(
            {
                "image": f"images/{splitdir}/img{idx:04d}_{slice_idx:03d}.nii.gz",
                "label": f"annotations/{splitdir}/label{idx:04d}_{slice_idx:03d}.nii.gz",
            }
        )
# --- Unlabeled test volumes: images only, no annotations ---
basedir = "RawData/Testing/"
file_idxs = list(range(61, 81))
print("# Data:", len(file_idxs))
json_metadata["test"] = []
for i, idx in enumerate(file_idxs):
    splitdir = "test"
    img = nib.load(f"{basedir}/img/img{idx:04d}.nii.gz").get_data()
    n_slices = img.shape[-1]
    print(splitdir, i, idx, img.shape, (img.mean(), img.std(), img.min(), img.max()))
    for slice_idx in range(n_slices):
        slice_idxs = [
            max(slice_idx - 2, 0),
            max(slice_idx - 1, 0),
            slice_idx,
            min(slice_idx + 1, n_slices - 1),
            min(slice_idx + 2, n_slices - 1),
        ]
        img_slices = img[:, :, slice_idxs]
        img_new = nib.Nifti1Image(img_slices, np.eye(4))
        nib.save(
            img_new,
            ensure_dir(
                f"{outputdir}/images/{splitdir}/img{idx:04d}_{slice_idx:03d}.nii.gz"
            ),
        )
        json_metadata[splitdir].append(
            f"images/{splitdir}/img{idx:04d}_{slice_idx:03d}.nii.gz"
        )
# BCV class map (index -> organ).
json_metadata["labels"] = {
    "0": "background",
    "1": "spleen",
    "2": "rkid",
    "3": "lkid",
    "4": "gall",
    "5": "eso",
    "6": "liver",
    "7": "sto",
    "8": "aorta",
    "9": "IVC",
    "10": "veins",
    "11": "pancreas",
    "12": "rad",
    "13": "lad",
}
json.dump(json_metadata, open(f"{outputdir}/dataset_5slices.json", "w"))
| Python |
3D | yuhui-zh15/TransSeg | src/data/msd/split_data_to_slices_nii.py | .py | 5,024 | 169 | import os
import sys
import nibabel as nib
import numpy as np
import json
import random
from multiprocessing import Pool
from functools import partial
from tqdm import tqdm
random.seed(1234)
def ensure_dir(file_path):
    """Create the directory containing ``file_path`` (no-op if it already
    exists) and hand the path back for inline use."""
    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    return file_path
def process(filename, inputdir):
    """Split one MSD volume into overlapping 5-slice stacks and save them as NIfTI.

    Parameters:
    filename: dict
        Record with 'image', 'label' (None for leaderboard data) and
        'split' (output subfolder) keys; paths are relative to ``inputdir``.
    inputdir: str
        Task folder (e.g. 'Task07_Pancreas') containing the raw volumes.
    """
    split = filename["split"]
    filename_noext = filename["image"].split("/")[-1].replace(".nii.gz", "")
    img = nib.load(f"{inputdir}/{filename['image']}").get_fdata()
    label = (
        nib.load(f"{inputdir}/{filename['label']}").get_fdata()
        if filename["label"] is not None
        else None
    )
    n_dim = len(img.shape)
    assert n_dim in [3, 4]
    # 4D volumes keep slices on the second-to-last axis (last axis is
    # presumably the modality/channel dimension — confirm for Task01/05).
    n_slices = img.shape[-1] if n_dim == 3 else img.shape[-2]
    print(
        split,
        filename,
        img.shape,
        label.shape if label is not None else "-",
        (img.mean(), img.std(), img.min(), img.max()),
        (label.mean(), label.std(), label.min(), label.max())
        if label is not None
        else "-",
    )
    if n_dim == 4:
        # Move the trailing axis to the front so slicing below always
        # happens on the last axis.
        img = img.transpose(3, 0, 1, 2)
    for slice_idx in range(n_slices):
        # Slice plus 2 neighbors each side, clamped at the volume border.
        slice_idxs = [
            max(slice_idx - 2, 0),
            max(slice_idx - 1, 0),
            slice_idx,
            min(slice_idx + 1, n_slices - 1),
            min(slice_idx + 2, n_slices - 1),
        ]
        img_slices = img[..., slice_idxs]
        # Identity affine: orientation/spacing metadata is discarded.
        img_new = nib.Nifti1Image(img_slices, np.eye(4))
        nib.save(
            img_new,
            ensure_dir(
                f"processed/{inputdir}/images/{split}/{filename_noext}_{slice_idx:03d}.nii.gz"
            ),
        )
        if label is not None:
            label_slices = label[..., slice_idxs]
            label_new = nib.Nifti1Image(label_slices, np.eye(4))
            nib.save(
                label_new,
                ensure_dir(
                    f"processed/{inputdir}/annotations/{split}/{filename_noext}_{slice_idx:03d}.nii.gz"
                ),
            )
def process_metadata(filename, inputdir, json_metadata):
    """Append per-slice image/label entries for one volume to ``json_metadata``.

    Mirrors the slice enumeration of ``process()``; the volume is reloaded
    here only to read its slice count (no pixel data is written).

    Parameters:
    filename: dict
        Record with 'image', 'label' and 'split' keys (as in ``process``).
    inputdir: str
        Task folder containing the raw volumes.
    json_metadata: dict
        Split-name -> entry-list mapping, mutated in place.
    """
    split = filename["split"]
    filename_noext = filename["image"].split("/")[-1].replace(".nii.gz", "")
    img = nib.load(f"{inputdir}/{filename['image']}").get_fdata()
    n_dim = len(img.shape)
    assert n_dim in [3, 4]
    n_slices = img.shape[-1] if n_dim == 3 else img.shape[-2]
    for slice_idx in range(n_slices):
        # Labeled splits store image/label pairs; unlabeled ones store the
        # bare image path.
        json_metadata[split].append(
            {
                "image": f"images/{split}/{filename_noext}_{slice_idx:03d}.nii.gz",
                "label": f"annotations/{split}/{filename_noext}_{slice_idx:03d}.nii.gz",
            }
            if filename["label"] is not None
            else f"images/{split}/{filename_noext}_{slice_idx:03d}.nii.gz"
        )
def main():
    """Preprocess all ten MSD tasks: split each task's official training set
    80/15/5 into training/validation/local_test, keep the official test set
    as the leaderboard split, export 5-slice stacks in parallel, then write
    a dataset_5slices.json manifest per task."""
    random.seed(1234)  # fixed seed so the shuffle-based split is reproducible
    for inputdir in sorted(
        [
            "Task01_BrainTumour",
            "Task03_Liver",
            "Task05_Prostate",
            "Task07_Pancreas",
            "Task09_Spleen",
            "Task02_Heart",
            "Task04_Hippocampus",
            "Task06_Lung",
            "Task08_HepaticVessel",
            "Task10_Colon",
        ]
    ):
        dataset = json.load(open(f"{inputdir}/dataset.json"))
        all_filenames = dataset["training"]
        random.shuffle(all_filenames)
        # 80% train / 15% validation / 5% held-out local test.
        train_filenames = all_filenames[: int(0.8 * len(all_filenames))]
        val_filenames = all_filenames[
            int(0.8 * len(all_filenames)) : int(0.95 * len(all_filenames))
        ]
        test_filenames = all_filenames[int(0.95 * len(all_filenames)) :]
        leaderboard_filenames = [
            {"image": image, "label": None} for image in dataset["test"]
        ]
        # add split information
        for filename in train_filenames:
            filename["split"] = "training"
        for filename in val_filenames:
            filename["split"] = "validation"
        for filename in test_filenames:
            filename["split"] = "local_test"
        for filename in leaderboard_filenames:
            filename["split"] = "test"
        print(
            inputdir,
            len(train_filenames),
            len(val_filenames),
            len(test_filenames),
            len(leaderboard_filenames),
        )
        # Heavy I/O is parallelized; NOTE(review): worker count is hard-coded.
        pool = Pool(48)
        pool.map(
            partial(process, inputdir=inputdir),
            train_filenames + val_filenames + test_filenames + leaderboard_filenames,
        )
        pool.close()
        pool.join()
        # Second, serial pass builds the manifest (re-reads headers only).
        json_metadata = {"training": [], "validation": [], "local_test": [], "test": []}
        for filename in (
            train_filenames + val_filenames + test_filenames + leaderboard_filenames
        ):
            print(filename)
            process_metadata(filename, inputdir, json_metadata)
        json_metadata["labels"] = dataset["labels"]
        json.dump(
            json_metadata, open(f"processed/{inputdir}/dataset_5slices.json", "w")
        )
if __name__ == "__main__":
    main()
| Python |
3D | yuhui-zh15/TransSeg | src/data/acdc/split_data_to_slices_nii.py | .py | 5,080 | 171 | import os
import sys
from shutil import copyfile
from PIL import Image
import cv2
import nibabel as nib
import numpy as np
import json
# Training/test case lists come from the official ACDC dataset manifest.
train_filenames = json.load(open("ACDC_dataset.json"))["training"]
train_filenames = [
    name["image"].split("/")[-1].replace("imagesTr", "training")
    for name in train_filenames
]
test_filenames = json.load(open("ACDC_dataset.json"))["test"]
test_filenames = [name.split("/")[-1] for name in test_filenames]
# Leaderboard volumes: every per-frame NIfTI found under testing/.
leaderboard_filenames = []
for dirpath, dirnames, filenames in os.walk("testing"):
    for filename in [f for f in filenames if f.endswith(".nii.gz") and "frame" in f]:
        leaderboard_filenames.append(filename)
print(len(train_filenames), len(test_filenames), len(leaderboard_filenames))
print(test_filenames, leaderboard_filenames)
# Fixed: removed a leftover interactive `input()` pause here that blocked
# non-interactive (batch/SLURM) runs of this preprocessing script.
basedir = "./training/"
outputdir = f"processed"
def ensure_dir(file_path):
    """Ensure the parent folder of ``file_path`` exists (creating it when
    absent) and return ``file_path`` unchanged."""
    folder = os.path.dirname(file_path)
    if not os.path.exists(folder):
        os.makedirs(folder)
    return file_path
all_filenames = train_filenames + test_filenames
json_metadata = {"training": [], "local_test": []}
# --- Training / local-test volumes: resize to 512x512, export 5-slice stacks ---
for i, filename in enumerate(all_filenames):
    if filename in train_filenames:
        splitdir = "training"
    elif filename in test_filenames:
        splitdir = "local_test"
    else:
        # Fixed: a bare `raise` outside an except block is itself a RuntimeError.
        raise ValueError(f"{filename} is in neither the training nor the test list")
    filename_noext = filename.split(".")[0]
    patient_id = filename.split("_")[0]
    # The image lives at <basedir>/<patient>/<frame>.nii.gz and its label adds
    # a _gt suffix. (Fixed: the image path previously contained a corrupted
    # "(unknown)" literal where the frame filename belongs.)
    img = nib.load(f"{basedir}/{patient_id}/{filename}").get_data()
    label = nib.load(
        f'{basedir}/{patient_id}/{filename.replace(".nii.gz", "_gt.nii.gz")}'
    ).get_data()
    n_slices = img.shape[-1]
    # Lanczos for intensities, nearest-neighbor for label masks (no new classes).
    img = np.array(
        [
            cv2.resize(img[:, :, i], (512, 512), interpolation=cv2.INTER_LANCZOS4)
            for i in range(n_slices)
        ]
    ).transpose(1, 2, 0)
    label = np.array(
        [
            cv2.resize(label[:, :, i], (512, 512), interpolation=cv2.INTER_NEAREST)
            for i in range(n_slices)
        ]
    ).transpose(1, 2, 0)
    print(
        splitdir,
        filename,
        img.shape,
        label.shape,
        (img.mean(), img.std(), img.min(), img.max()),
        (label.mean(), label.std(), label.min(), label.max()),
    )
    for slice_idx in range(n_slices):
        # Slice plus 2 neighbors each side, clamped at the volume border.
        slice_idxs = [
            max(slice_idx - 2, 0),
            max(slice_idx - 1, 0),
            slice_idx,
            min(slice_idx + 1, n_slices - 1),
            min(slice_idx + 2, n_slices - 1),
        ]
        img_slices = img[:, :, slice_idxs]
        label_slices = label[:, :, slice_idxs]
        img_new = nib.Nifti1Image(img_slices, np.eye(4))
        nib.save(
            img_new,
            ensure_dir(
                f"{outputdir}/images/{splitdir}/{filename_noext}_{slice_idx:03d}.nii.gz"
            ),
        )
        label_new = nib.Nifti1Image(label_slices, np.eye(4))
        nib.save(
            label_new,
            ensure_dir(
                f"{outputdir}/annotations/{splitdir}/{filename_noext}_{slice_idx:03d}.nii.gz"
            ),
        )
        json_metadata[splitdir].append(
            {
                "image": f'images/{splitdir}/{filename.replace(".nii.gz", "")}_{slice_idx:03d}.nii.gz',
                "label": f"annotations/{splitdir}/{filename_noext}_{slice_idx:03d}.nii.gz",
            }
        )
# --- Leaderboard volumes: images only ---
basedir = "./testing/"
all_filenames = leaderboard_filenames
json_metadata["test"] = []
# Original (pre-resize) volume sizes — presumably needed to undo the 512x512
# resize at submission time; confirm against the inference pipeline.
file2size = {}
for i, filename in enumerate(all_filenames):
    splitdir = "test"
    filename_noext = filename.split(".")[0]
    patient_id = filename.split("_")[0]
    # Fixed: the image path previously contained a corrupted "(unknown)"
    # literal where the frame filename belongs.
    img = nib.load(f"{basedir}/{patient_id}/{filename}").get_data()
    h, w, d = img.shape[0], img.shape[1], img.shape[2]
    file2size[filename_noext] = (h, w, d)
    n_slices = img.shape[-1]
    img = np.array(
        [
            cv2.resize(img[:, :, i], (512, 512), interpolation=cv2.INTER_LANCZOS4)
            for i in range(n_slices)
        ]
    ).transpose(1, 2, 0)
    print(splitdir, filename, img.shape, (img.mean(), img.std(), img.min(), img.max()))
    for slice_idx in range(n_slices):
        slice_idxs = [
            max(slice_idx - 2, 0),
            max(slice_idx - 1, 0),
            slice_idx,
            min(slice_idx + 1, n_slices - 1),
            min(slice_idx + 2, n_slices - 1),
        ]
        img_slices = img[:, :, slice_idxs]
        img_new = nib.Nifti1Image(img_slices, np.eye(4))
        nib.save(
            img_new,
            ensure_dir(
                f"{outputdir}/images/{splitdir}/{filename_noext}_{slice_idx:03d}.nii.gz"
            ),
        )
        json_metadata[splitdir].append(
            f'images/{splitdir}/{filename.replace(".nii.gz", "")}_{slice_idx:03d}.nii.gz'
        )
json.dump(file2size, open("file2size.json", "w"))
# ACDC class map (index -> cardiac structure).
json_metadata["labels"] = {
    "0": "background",
    "1": "the right ventricular cavity",
    "2": "myocardium",
    "3": "the left ventricular cavity",
}
# The held-out local test split doubles as the validation set in the
# exported manifest.
json_metadata["validation"] = json_metadata["local_test"]
del json_metadata["local_test"]
json.dump(json_metadata, open(f"{outputdir}/dataset_5slices.json", "w"))
| Python |
3D | mpes-kit/fuller | fuller/metrics.py | .py | 3,219 | 116 | #! /usr/bin/env python
import inspect
import itertools as it
import numpy as np
from numpy import nan_to_num as n2n
# from sklearn.metrics import pairwise_distances as smp
def dcos(a, b):
    """Cosine similarity between vectors ``a`` and ``b``.

    NOTE(review): despite the name suggesting a distance, this returns the
    cosine *similarity* (1 for parallel vectors, 0 for orthogonal ones),
    which is what ``similarity_matrix`` consumes.
    """
    norm_a = np.linalg.norm(a)
    norm_b = np.linalg.norm(b)
    return np.dot(a, b) / (norm_a * norm_b)
def demean(arr, meanax=1, idx=0, **kwds):
    """Subtract the mean of one axial entry of an array from that entry.

    The array is copied; slice ``idx`` along axis ``meanax`` has its mean
    removed, all other slices are left untouched.

    **Parameters**\n
    arr: list/tuple/numpy array
        Input array (at least 2D).
    meanax: int | 1
        Axis along which to calculate the mean.
    idx: int | 0
        Entry index along ``meanax`` to demean.
    **kwds: keyword arguments
        Additional arguments for ``numpy.mean()``.
    """
    work = np.moveaxis(np.array(arr), meanax, 0)
    work[idx, ...] = work[idx, ...] - np.mean(work[idx, ...], **kwds)
    return np.moveaxis(work, 0, meanax)
def similarity_matrix(feature_mat, axis=0, fmetric=dcos, **kwds):
    """Compute the pairwise similarity matrix of a set of features.

    **Parameters**\n
    feature_mat: list/tuple/numpy array
        Feature matrix; features are stacked along ``axis``.
    axis: int
        Axis along which the features are aligned.
    fmetric: function | dcos
        Pairwise metric, called as ``fmetric(f_i, f_j, **kwds)``.
    **kwds: keyword arguments
        Extra arguments forwarded to ``fmetric``.

    **Return**\n
    smat: 2D numpy array
        Similarity matrix of shape (nfeat, nfeat).

    NOTE(review): the first component of every feature vector is skipped
    (``[1:]``) when evaluating the metric — presumably an index/label
    column; confirm against the data layout.
    """
    if not inspect.isfunction(fmetric):
        raise ValueError("The specified metric should be a function.")
    feats = np.moveaxis(np.array(feature_mat), axis, 0)
    nfeat = feats.shape[0]
    smat = np.zeros((nfeat, nfeat))
    for i, j in it.product(range(nfeat), repeat=2):
        smat[i, j] = fmetric(feats[i, 1:], feats[j, 1:], **kwds)
    return smat
def abserror(result, ref, keys, ofs=None, mask=1, **kwargs):
    """Per-band RMS deviation between reconstructed and reference bands.

    **Parameters**\n
    result: dict
        Reconstruction results keyed by ``keys`` (each a 3D array of bands).
    ref: 3D array
        Reference bands or band structure to compare against.
    keys: list/tuple
        Dictionary keys to evaluate.
    ofs: int | None
        Pixel offset trimmed from each side of the reconstruction first.
    mask: 2D array | 1
        Brillouin-zone mask (NaN outside) applied to the squared differences.
    **kwargs: keyword arguments
        outkeys: keys used in the output dictionary (default: ``keys``).
        ret: 'dict' | 'array' output format.
    """
    out_labels = kwargs.pop("outkeys", keys)
    ret = kwargs.pop("ret", "dict")
    # Normalize by the number of valid (non-NaN) mask pixels.
    nnz = np.sum(~np.isnan(mask))
    abserr = {}
    for key, label in zip(keys, out_labels):
        recon = result[str(key)]
        if ofs is not None:
            trim = int(ofs)
            sqdiff = mask * (recon[:, trim:-trim, trim:-trim] - ref) ** 2
        else:
            sqdiff = mask * (recon - ref) ** 2
        # Masked-out (NaN) pixels contribute zero to the sum.
        abserr[str(label)] = np.sqrt(np.sum(n2n(sqdiff), axis=(1, 2)) / nnz)
    if ret == "dict":
        return abserr
    elif ret == "array":
        return np.asarray(list(abserr.values()))
| Python |
3D | mpes-kit/fuller | fuller/__init__.py | .py | 399 | 25 | #! /usr/bin/env python
import warnings as wn
from . import metrics
from . import utils
# Optional submodules are imported with warnings suppressed — presumably
# because they pull in heavy/chatty dependencies; confirm which warnings
# this is meant to silence.
with wn.catch_warnings():
    wn.simplefilter("ignore")
    # Dummy warnings emitted while the filter is active — presumably to
    # prime the warning registry so identical warnings raised during the
    # imports below stay suppressed; TODO confirm intent.
    wn.warn("deprecated", DeprecationWarning)
    wn.warn("future", FutureWarning)
    try:
        from . import generator
    except:
        # NOTE(review): bare except silently hides *any* import failure
        # (including genuine bugs inside the module) — consider logging.
        pass
    try:
        from . import mrfRec
    except:
        pass
__version__ = "0.9.9"
__author__ = "Vincent Stimper, R. Patrick Xian"
3D | mpes-kit/fuller | fuller/utils.py | .py | 14,321 | 515 | #! /usr/bin/env python
import glob as g
import natsort as nts
import numpy as np
import scipy.io as sio
from h5py import File
from scipy.interpolate import RegularGridInterpolator as RGI
from silx.io import dictdump
from tqdm import tqdm as tqdm_classic
from tqdm import tqdm_notebook
# import tensorflow as tf
# from tensorflow.python.framework import ops
# from tensorflow.python.ops import gen_math_ops
def nonneg_sum_decomposition(absum, a=None, b=None):
    """Split the sum ``absum`` into two parts, capping the given part at the total.

    Parameters:
    absum: numeric
        Sum of the values.
    a, b: numeric/None, numeric/None | None, None
        Proposed value for the first (``a``) or second (``b``) component;
        it is capped at ``absum`` and the remainder goes to the other part.
        ``a`` takes precedence when both are supplied.

    Returns:
    a, b: numeric, numeric
        The two components (summing to ``absum``).

    Raises:
    ValueError
        If neither ``a`` nor ``b`` is given.
    """
    if a is None and b is None:
        raise ValueError("At least one of the components should be a numeric.")
    if a is not None:
        first = min(a, absum)
        return first, absum - first
    second = min(b, absum)
    return absum - second, second
def tqdmenv(env):
    """Choose the tqdm progress-bar flavor for the execution environment.

    Parameter:
    env: str
        Name of the environment, 'classic' for ordinary environment,
        'notebook' for Jupyter notebook.

    Returns:
    tqdm: callable
        The matching tqdm entry point.

    Raises:
    ValueError
        If ``env`` is not recognized (previously this path surfaced as an
        opaque ``UnboundLocalError`` on the return statement).
    """
    if env == "classic":
        return tqdm_classic
    elif env == "notebook":
        return tqdm_notebook
    raise ValueError(
        "Unknown environment '%s'; expected 'classic' or 'notebook'." % env
    )
def to_masked(arr, val=0):
    """Return a copy of ``arr`` with entries equal to ``val`` replaced by NaN.

    NOTE(review): assumes a float dtype — assigning NaN into an integer
    array misbehaves; confirm callers pass float data.
    """
    masked = arr.copy()
    masked[masked == val] = np.nan
    return masked
def valrange(arr):
    """Return the (min, max) value range of an array."""
    lo, hi = arr.min(), arr.max()
    return lo, hi
def interpolate2d(oldx, oldy, vals, nx=None, ny=None, ret="interpolant", **kwargs):
    """Interpolate 2D gridded values onto a new (and/or finer) grid.

    **Parameters**\n
    oldx, oldy: 1D array, 1D array
        Axis values of the existing grid.
    vals: 2D array
        Pixel values defined on (oldx, oldy).
    nx, ny: int, int | None, None
        Number of points along each interpolated axis.
    ret: str | 'interpolant'
        'interpolant' returns (values, interpolator); 'all' additionally
        returns the new mesh grid.
    **kwargs: keyword arguments
        newx, newy: 1D array, 1D array
            Explicit axis values to interpolate onto (must hold nx/ny points).
    """
    newx = kwargs.pop("newx", np.linspace(oldx.min(), oldx.max(), nx, endpoint=True))
    newy = kwargs.pop("newy", np.linspace(oldy.min(), oldy.max(), ny, endpoint=True))
    gridmesh = np.meshgrid(newx, newy, indexing="ij")
    # Flatten the mesh into an (nx*ny, 2) list of sample points.
    sample_pts = np.stack(gridmesh, axis=-1).reshape((nx * ny, 2))
    interpolant = RGI((oldx, oldy), vals)
    interpolated = interpolant(sample_pts).reshape((nx, ny))
    if ret == "interpolant":
        return interpolated, interpolant
    elif ret == "all":
        return interpolated, interpolant, gridmesh
def cut_margins(image, margins, offsetx=0, offsety=0):
    """Trim a 2D image by (top, bottom, left, right) margins.

    ``offsetx``/``offsety`` shift the margin frame: they are subtracted from
    the left/right and top/bottom margins, respectively.
    """
    offsetx, offsety = int(offsetx), int(offsety)
    height, width = image.shape
    top, bottom, left, right = margins
    left, right = left - offsetx, right - offsetx
    top, bottom = top - offsety, bottom - offsety
    return image[top : height - bottom, left : width - right]
def findFiles(fdir, fstring="", ftype="h5", **kwds):
    """
    Retrieve files named in a similar way from a folder.

    Parameters:
    fdir: str
        Folder name where the files are stored.
    fstring: str | ''
        Extra string in the filename.
    ftype: str | 'h5'
        The type of files to retrieve.
    **kwds: keyword arguments
        Extra keywords for `natsorted()`.
    """
    # NOTE(review): the glob pattern is the literal concatenation
    # fdir + fstring + "." + ftype, with no path separator or wildcard
    # inserted — callers must include a trailing "/" in `fdir` and any "*"
    # in `fstring` themselves. Confirm this is intended.
    files = nts.natsorted(g.glob(fdir + fstring + "." + ftype), **kwds)
    return files
def saveHDF(*groups, save_addr="./file.h5", track_order=True, **kwds):
    """Combine dictionaries and save into a hierarchical structure.

    **Parameters**\n
    groups: list/tuple
        Group specified in the following manner that incorporates the name as a string
        and the content and or substructure as a dictionary, ['folder_name', folder_dict].
    save_addr: str | './file.h5'
        File directory for saving the HDF.
    track_order: bool | True
        Preserve the insertion order of groups/datasets in the file.
    **kwds: keyword arguments
        Extra keywords forwarded to ``create_dataset()``.
    """
    # A context manager closes the file on success and on error alike; the
    # previous try/finally raised a masking NameError when File() itself
    # failed (``hdf`` was never bound before ``hdf.close()``).
    with File(save_addr, "w") as hdf:
        for grp_name, grp_dict in groups:
            grp = hdf.create_group(grp_name, track_order=track_order)
            for key, val in grp_dict.items():
                grp.create_dataset(key, data=val, **kwds)
def loadHDF(load_addr, hierarchy="flat", groups="all", track_order=True, dtyp="float", **kwds):
    """Load contents in an HDF.

    **Parameters**\n
    load_addr: str
        Address of the file to load.
    hierarchy: str | 'flat'
        Hierarchy of the file structure to load into: 'flat' collapses every
        dataset into one dictionary, 'nested' mirrors the file layout.
    groups: list/tuple/str
        Name of the groups to read ('all' reads every top-level group;
        only honored in 'flat' mode).
    track_order: bool | True
        Preserve the on-disk insertion order when iterating ('flat' mode).
    dtyp: str | 'float'
        Data type the datasets are cast to ('flat' mode only).
    **kwds: keyword arguments
        See ``h5py.File()``.

    **Return**\n
    outdict: dict
        Dictionary containing the hierarchical contents of the file.
    """
    outdict = {}
    if hierarchy == "nested":
        # silx performs the recursive group -> dict conversion.
        outdict = dictdump.load(load_addr, fmat="h5")
    elif hierarchy == "flat":
        with File(load_addr, track_order=track_order, **kwds) as f:
            if groups == "all":
                groups = list(f)
            for g in groups:
                # Flattening makes dataset names top-level keys, so name
                # collisions across groups overwrite silently.
                for gk, gv in f[g].items():
                    outdict[gk] = np.asarray(gv, dtype=dtyp)
    return outdict
def loadH5Parts(filename, content, outtype="dict", alias=None):
    """
    Load specified content from a single complex HDF5 file.

    **Parameters**\n
    filename: str
        Namestring of the file.
    content: list/tuple
        Collection of names for the content to retrieve.
    outtype: str | 'dict'
        Option to specify the format of output ('dict', 'list', 'vals').
    alias: list/tuple | None
        Collection of aliases to assign to each entry in content in the output dictionary.

    **Return**\n
    Retrieved content as a dict, a list of (key, value) pairs, or a list of values.
    """
    with File(filename) as f:
        if alias is None:
            outdict = {k: np.array(f[k]) for k in content}
        else:
            if len(content) != len(alias):
                raise ValueError("Not every content entry is assigned an alias!")
            # Pair each content entry with its own alias. The previous nested
            # comprehension ({ka: f[k] for k in content for ka in alias}) was a
            # cartesian product that mapped every alias to the LAST content entry.
            outdict = {ka: np.array(f[k]) for k, ka in zip(content, alias)}
    if outtype == "dict":
        return outdict
    elif outtype == "list":
        return list(outdict.items())
    elif outtype == "vals":
        return list(outdict.values())
def load_bandstruct(path, form, varnames=[]):
    """Load band structure information from file.

    **Parameters**\n
    path: str
        File path to load from.
    form: str
        Format of the file to load ('mat', 'h5' or 'hdf5').
    varnames: list | []
        Names of the variables to load (defaults to ['bands', 'kxx', 'kyy']).

    **Return**\n
    List of arrays, one per entry in ``varnames``.
    """
    nvars = len(varnames)
    if nvars == 0:
        varnames = ["bands", "kxx", "kyy"]
    if form == "mat":
        mat = sio.loadmat(path)
        return [mat[vn] for vn in varnames]
    elif form in ("h5", "hdf5"):
        # Read every group flat, then pick the requested dataset names.
        # The previous call passed `group=varnames`, a keyword loadHDF does not
        # define, so it fell through **kwds into h5py.File() and raised.
        dct = loadHDF(path, hierarchy="flat", groups="all")
        return [dct[vn] for vn in varnames]
def load_multiple_bands(folder, ename="", kname="", form="h5", dtyp="float", **kwargs):
    """Custom loader for multiple reconstructed bands.

    **Parameters**\n
    folder: str
        Name of the folder.
    ename, kname: str, str | '', ''
        Name of the energy and momentum variables stored in the files.
    form: str | 'h5'
        Format of the files.
    dtyp: str | 'float'
        Data type to load the files into.
    **kwargs: keyword arguments
        Extra keywords for ``h5py.File()``.

    **Return**\n
    econtents: ndarray
        Energy values gathered from every file.
    kcontents: list
        Momentum datasets read from the last file (assumed shared across files).
    """
    if form in ("h5", "hdf5"):
        files = nts.natsorted(g.glob(f"{folder}/*.h5"))
    else:
        files = nts.natsorted(g.glob(f"{folder}/*.{form}"))
    # Load energy values; each file is closed as soon as it is read.
    # (Previously every File handle stayed open for the whole loop, and the
    # last handle was reused after the loop — a NameError when `files` was empty.)
    econtents = []
    for fpath in files:
        with File(fpath, **kwargs) as f_inst:
            econtents.append(np.array(f_inst[ename], dtype=dtyp))
    econtents = np.asarray(econtents)
    # Load momentum values from the last file only, matching the original
    # behavior of reading kname from the final handle.
    kcontents = []
    if files:
        with File(files[-1], **kwargs) as f_instance:
            for kg in list(f_instance[kname]):
                kcontents.append(np.asarray(f_instance[kname][kg], dtype=dtyp))
    return econtents, kcontents
def load_calculation(path, nkx=120, nky=55, delim=" ", drop_pos=2, drop_axis=1, baxis=None, maxid=None):
    """Read and reshape energy band calculation results.
    **Parameters**\n
    path: str
        File path where the calculation output file is located.
    nkx, nky: int, int
        Number of k points sampled along the kx and ky directions.
    delim: str | ' '
        Delimiter used for reading the calculation output file (default a space string).
    drop_pos, drop_axis: int, int | 2, 1
        The position and axis along which to drop the elements.
    baxis: int | None
        Axis of the energy band index (no axis move when None).
    maxid: int | None
        Maximum limiting index of the read array.
    **Return**\n
    ebands: 3D array
        Collection of energy bands indexed by their energies.
    """
    nkx, nky = int(nkx), int(nky)
    nk = nkx * nky  # total number of sampled k points
    # Text read: `sep=delim` makes fromfile parse the file as delimited text.
    arr = np.fromfile(path, sep=delim)
    # Number of columns (bands + coordinate columns) per k point.
    neb = int(arr.size / nk)
    if maxid is None:
        # Truncate any trailing partial row before reshaping to (k, columns).
        ebands = arr[: nk * neb].reshape((nk, neb))
    else:
        maxid = int(maxid)
        # NOTE(review): this reshape assumes maxid == nk * neb; any other
        # maxid makes arr[:maxid].reshape((nk, neb)) raise — confirm intent.
        ebands = arr[:maxid].reshape((nk, neb))
    if drop_axis is not None:  # Drop the constant column (i.e. the kz axis)
        ebands = np.delete(ebands, drop_pos, axis=drop_axis).reshape((nky, nkx, neb - 1))
    if baxis is not None:
        # Move the band index from the last position to the requested axis.
        baxis = int(baxis)
        ebands = np.moveaxis(ebands, 2, baxis)
    return ebands
def pick_operator(fstring, package="numpy"):
    """Return an operator function from the specified package.

    **Parameters**\n
    fstring: str
        The namestring of the function (dotted attribute paths allowed,
        e.g. 'linalg.norm' with package='numpy').
    package: str | 'numpy'
        The name of the software package to extract the function from
        (dotted module paths allowed, e.g. 'numpy.random').

    **Return**\n
    The resolved callable, or the unmodified ``fstring`` if resolution fails.
    """
    import importlib

    try:
        # importlib + getattr replaces the original exec/eval lookup, which
        # executed caller-controlled strings as code.
        obj = importlib.import_module(package)
        for attr in fstring.split("."):
            obj = getattr(obj, attr)
        return obj
    except Exception:
        # Best-effort fallback mirroring the original behavior: hand the
        # name back unchanged when it cannot be resolved.
        return fstring
def nzbound(arr):
    """Find the index bounds (first, last) of the nonzero elements of a 1D array."""
    nonzero_idx = np.flatnonzero(np.asarray(arr))
    return nonzero_idx[0], nonzero_idx[-1]
def segmod(indices):
    """Shift the intermediate indices up by one while keeping both endpoints fixed."""
    shifted = indices + 1
    # Restore the two endpoints to their original values.
    shifted[0] = indices[0]
    shifted[-1] = indices[-1]
    return shifted
def fexp(ke, length):
    """Decaying exponential exp(-ke * n) evaluated at n = 0, 1, ..., length - 1."""
    return np.exp(-ke * np.arange(length))
def coeffgen(size, amp=1, distribution="uniform", mask=None, modulation=None, seed=None, **kwargs):
    """Generate random sequence from a distribution modulated by an envelope function and a mask.

    **Parameters**\n
    size: list/tuple
        Size of the coefficient array.
    amp: numeric | 1
        Global amplitude scaling of the random sequence.
    distribution: str | 'uniform'
        Type of distribution to draw from.
    mask: ndarray | None
        Amplitude mask array.
    modulation: ndarray/str | None
        Amplitude modulation array (or 'exp' for an exponential envelope).
    seed: numeric | None
        Seed value for the random number generator.
    **kwargs: keyword arguments
        Additional arguments for the specified distribution function.

    **Return**\n
    cfout: ndarray
        Randomly generated, masked and modulated coefficient array.
    """
    op_package = kwargs.pop("package", "numpy.random")
    # Seeding random number generation
    if seed is not None:
        np.random.seed(seed)
    # Envelope modulation. Defaults to all-ones: previously `cfmod` was only
    # bound inside the `modulation is not None` branch, so the default call
    # raised NameError at the final multiplication.
    cfmod = np.ones(size)
    if modulation is not None:
        # Check for an array FIRST, so we never compare an ndarray to "exp".
        if isinstance(modulation, np.ndarray):
            cfmod = modulation
        elif modulation == "exp":
            ke = kwargs.pop("ke", 2e-2)
            length = kwargs.pop("length", size[1])
            cfmod = fexp(ke, length)[None, :]
    # Zero mask. Same fix: default to all-ones so mask=None is valid.
    cfmask = np.ones(size)
    if mask is not None:
        if mask.ndim == 1:
            # Broadcast a 1D mask across rows.
            cfmask = mask[None, :]
        elif isinstance(mask, np.ndarray):
            cfmask = mask
    # Generate basis coefficients from the requested distribution.
    opr = pick_operator(distribution, package=op_package)
    cfout = opr(size=size, **kwargs)
    cfout *= amp * cfmask * cfmod
    return cfout
def binarize(cfs, threshold, vals=[0, 1], absolute=True, eq="geq"):
    """Binarize an array against a threshold.

    **Parameters**\n
    cfs: list/tuple/numpy array
        Numerical object.
    threshold: numeric
        Numerical threshold for binarization.
    vals: list/tuple/numpy array
        Values assigned to the two sides of the threshold.
    absolute: bool | True
        Option to use the absolute value for thresholding.
    eq: str | 'geq'
        Options to treat the values equal to the threshold (`'leq'` for less or equal,
        `'geq'` for greater or equal, `None` for drop the threshold-equalling values).

    **Return**\n
    arr: numpy array
        Binarized array.
    """
    arr = np.array(cfs)
    if absolute:
        arr = np.abs(arr)
    # Comparison pair (below, above) for each equality-handling mode.
    comparisons = {
        "leq": (np.less_equal, np.greater),
        "geq": (np.less, np.greater_equal),
        None: (np.less, np.greater),
    }
    if eq in comparisons:
        cmp_low, cmp_high = comparisons[eq]
        # The two assignments are deliberately sequential (second mask is
        # evaluated on the already-updated array), matching prior behavior.
        arr[cmp_low(arr, threshold)] = vals[0]
        arr[cmp_high(arr, threshold)] = vals[1]
    return arr
def trim_2d_edge(arr, edges, axes=(0, 1)):
    """Trim 2D edges in the first two dimensions of an nD array.

    **Parameters**\n
    arr: numpy array
        Array to trim.
    edges: numeric/list/tuple/numpy array
        The amount of edges to trim. A single value trims both ends of the two
        axes equally; four values are applied as `(start_1, end_1, start_2, end_2)`.
    axes: list/tuple
        Specified axes/dimensions to trim.

    **Return**\n
    trimmed: numpy array
        Axis-trimmed array.
    """
    edges = np.array(edges)
    # Bring the two target axes to the front, trim, then move them back.
    work = np.moveaxis(arr, axes, (0, 1))
    if edges.size == 1:
        cut = edges.item()
        work = work[cut:-cut, cut:-cut, ...]
    elif edges.size == 4:
        start1, end1, start2, end2 = edges
        work = work[start1:-end1, start2:-end2, ...]
    return np.moveaxis(work, (0, 1), axes)
| Python |
3D | mpes-kit/fuller | fuller/reconstruction_mrf2d.py | .py | 3,647 | 120 | #! /usr/bin/env python
import matplotlib.pyplot as plt
import numpy as np
# Reconstruction object
class ReconstructionMRF2d:
    """Markov-random-field (MRF) reconstruction of a 1D band dispersion from a
    2D photoemission intensity map I(E, k).

    The band is stored as an index into the energy axis for every momentum
    point. Iterative updates move each index towards the maximum of a local
    log-likelihood combining the measured intensity with a smoothness prior
    tying each point to its neighbors' energies.
    """

    def __init__(self, k, E, I=None, E0=None, sigma=0.1):
        """
        Initialize object
        :param k: Momentum as numpy vector
        :param E: Energy as numpy vector
        :param I: Measured intensity wrt momentum (rows) and energy (columns), generated if None
        :param E0: Initial guess for band structure energy values, if None mean of E is taken
        :param sigma: Standard deviation of neighboring energies
        """
        self.k = k.copy()
        self.E = E.copy()
        self.kk, self.EE = np.meshgrid(self.k, self.E)
        self.I = I
        self.sigma = sigma
        self.sigmaGenerate = 0.1  # noise level used when synthesizing I
        # Generate I if needed
        if I is None:
            self.generateI()
        # Initialize band structure
        if E0 is None:
            # Start every momentum point at the middle of the energy axis.
            # `np.int` was removed in NumPy >= 1.24; the builtin int is the
            # correct replacement.
            self.indEb = np.ones_like(k, dtype=int) * int(E.size / 2)
        else:
            # Map each initial-guess energy onto its nearest E-grid index.
            EE, EE0 = np.meshgrid(E, E0)
            self.indEb = np.argmin(np.abs(EE - EE0), 1)
        # Initialize change of log likelihood
        self.deltaLogP = np.array([0.0])

    def generateI(self):
        """
        Generate intensity with made up band structure
        """
        # Lorentzian profile centered on the synthetic band plus Gaussian noise.
        self.I = (0.95 * self.kk**2 + 0.05) / (
            1 + ((self.EE - self.bandStructGen(self.kk)) / 0.25) ** 2
        ) + np.random.normal(0, self.sigmaGenerate, size=self.kk.shape)
        # Clip from below so log(I) in iter() stays finite.
        self.I = np.maximum(self.I, 0.1)

    def bandStructGen(self, k):
        """
        Function of band if bands are generated
        :param k: Vector or matrix of momenta
        :return: Energy values for each momentum
        """
        return k**3

    def iter(self, num=1):
        """
        Iterate band structure reconstruction process
        :param num: Number of iterations
        """
        # Do iterations
        deltaLogP = np.zeros(num)
        # Random momentum columns to update (with replacement).
        indList = np.random.choice(self.k.size, num)
        for i, ind in enumerate(indList):
            # Neighbor band energies; edges duplicate their single neighbor.
            if ind == 0:
                Ebm = self.E[self.indEb[1]]
                Ebp = Ebm
            elif ind == (self.k.size - 1):
                Ebm = self.E[self.indEb[ind - 1]]
                Ebp = Ebm
            else:
                Ebm = self.E[self.indEb[ind - 1]]
                Ebp = self.E[self.indEb[ind + 1]]
            # Data term (log intensity) plus Gaussian smoothness prior.
            logP = np.log(self.I[:, ind]) - ((self.E - Ebm) ** 2 + (self.E - Ebp) ** 2) / 2 / self.sigma
            indMax = np.argmax(logP)
            deltaLogP[i] = logP[self.indEb[ind]] - logP[indMax]
            self.indEb[ind] = indMax
        # Update delta log likelihood
        self.deltaLogP = np.append(self.deltaLogP, np.cumsum(deltaLogP) + self.deltaLogP[-1])

    def getEb(self):
        """
        Get energy values of the electronic band
        :return: energy values of the electronic band
        """
        return self.E[self.indEb]

    def plotI(self):
        """
        Plot the intensity against k and E
        """
        fig = plt.figure()
        ax = fig.add_subplot(111, projection="3d")
        ax.plot_wireframe(self.kk, self.EE, self.I)

    def plotBands(self, groundTruth=True):
        """
        Plot reconstructed electronic band structure
        :param groundTruth: Flag whether to plot true band from which data got generated
        """
        # NOTE(review): `groundTruth` is currently unused — the generated band
        # is always plotted; kept for interface compatibility.
        plt.figure()
        plt.plot(self.k, self.getEb(), marker=".")
        plt.plot(self.k, self.bandStructGen(self.k))
        plt.show()

    def plotLogP(self):
        """
        Plot the change of the log likelihood
        """
        plt.figure()
        plt.plot(self.deltaLogP)
| Python |
3D | mpes-kit/fuller | fuller/generator.py | .py | 41,448 | 1,236 | #! /usr/bin/env python
import warnings as wn
import matplotlib.pyplot as plt
import numpy as np
import poppy.zernike as ppz
import scipy.io as sio
import scipy.ndimage as ndi
from scipy import interpolate
from symmetrize import pointops as po
from symmetrize import sym
from . import utils as u
try:
from mpes import analysis as aly
except:
wn.warn("The package mpes is not install, this could disable certain functionalities of the pacakge.")
def hexmask(
    hexdiag=128,
    imside=256,
    image=None,
    padded=False,
    margins=[],
    pad_top=None,
    pad_bottom=None,
    pad_left=None,
    pad_right=None,
    vertical=True,
    outside="nan",
    ret="mask",
    **kwargs,
):
    """Generate a hexagonal mask. To use the function, either the argument ``imside`` or ``image`` should be
    given. The image padding on four sides could be specified with either ``margins`` altogether or separately
    with the individual arguments ``pad_xxx``. For the latter, at least two independent padding values are needed.

    **Parameters**\n
    hexdiag: int | 128
        Number of pixels along the hexagon's diagonal.
    imside: int | 256
        Number of pixels along the side of the (square) reference image.
    image: 2D array | None
        2D reference image to construct the mask for. If the reference (image) is given, each side
        of the generated mask is at least that of the smallest dimension of the reference.
    padded: bool | False
        Option to pad the image (need to set to True to enable the margins).
    margins: list/tuple | []
        Margins of the image [top, bottom, left, right]. Overrides the `pad_xxx` arguments.
    pad_top, pad_bottom, pad_left, pad_right : int, int, int, int | None, None, None, None
        Number of padded pixels on each of the four sides of the image.
    vertical: bool | True
        Option to align the diagonal of the hexagon with the vertical image axis.
    outside: numeric/str | 'nan'
        Pixel value outside the masked region.
    ret: str | 'mask'
        Return option ('mask', 'masked_image', 'all').
    """
    # Zero margins by default. Previously these names were only bound inside
    # the `padded` branch, so ret='all' without padding raised NameError.
    top, bottom, left, right = 0, 0, 0, 0
    if image is not None:
        imshape = image.shape
        # The hexagon spans the smallest dimension of the reference image.
        minside = min(imshape)
        mask = ppz.hexike_basis(nterms=1, npix=minside, vertical=vertical)[0, ...]
    else:
        imshape = kwargs.pop("imshape", (imside, imside))
        mask = ppz.hexike_basis(nterms=1, npix=hexdiag, vertical=vertical)[0, ...]
    # Use a padded version of the original mask
    if padded:
        # Padding image margins on all sides
        if len(margins) == 4:
            top, bottom, left, right = margins
        else:
            # Total padding pixel numbers along horizontal and vertical directions
            padsides = np.abs(np.asarray(imshape) - hexdiag)
            top, bottom = u.nonneg_sum_decomposition(a=pad_top, b=pad_bottom, absum=padsides[0])
            left, right = u.nonneg_sum_decomposition(a=pad_left, b=pad_right, absum=padsides[1])
        mask = np.pad(mask, ((top, bottom), (left, right)), mode="constant", constant_values=np.nan)
    if outside == 0:
        mask = np.nan_to_num(mask)
    if ret == "mask":
        return mask
    elif ret == "masked_image":
        return mask * image
    elif ret == "all":
        margins = [top, bottom, left, right]
        return mask, margins
def decomposition_hex2d(band, bases=None, baxis=0, nterms=100, basis_type="Zernike", ret="coeffs"):
    """Decompose an energy band into orthogonal polynomials defined in a hexagon.

    **Parameters**\n
    band: 2D array
        2D electronic band structure (must be square).
    bases: 3D array | None
        Matrix composed of bases to decompose into (generated if None).
    baxis: int | 0
        Axis of the basis index.
    nterms: int | 100
        Number of basis terms (used only when bases are generated).
    basis_type: str | 'Zernike'
        Type of basis to use.
    ret: str | 'coeffs'
        Options for the return values ('coeffs' or 'all').

    **Return**\n
    coeffs (and bases when ret='all'): least-squares projection coefficients.
    """
    nbr, nbc = band.shape
    if nbr != nbc:
        raise ValueError("Input band surface should be square!")
    if bases is None:
        if basis_type == "Zernike":
            bases = ppz.hexike_basis(nterms=nterms, npix=nbr, vertical=True, outside=0)
        elif basis_type == "Fourier":
            raise NotImplementedError
        else:
            raise NotImplementedError
    else:
        if baxis != 0:
            bases = np.moveaxis(bases, baxis, 0)
    # Unpack at function level so BOTH branches are covered; when this line
    # sat inside the else branch, internally generated bases left the names
    # unbound and the projection below raised NameError.
    nbas, nbasr, nbasc = bases.shape
    band_flat = band.reshape((band.size,))
    # Least-squares projection onto the (flattened) basis via the pseudoinverse.
    coeffs = np.linalg.pinv(bases.reshape((nbas, nbasr * nbasc))).T.dot(band_flat)
    if ret == "coeffs":
        return coeffs
    elif ret == "all":
        return coeffs, bases
def reconstruction_hex2d(coeffs, bases=None, baxis=0, npix=256, basis_type="Zernike", ret="band"):
    """Reconstruct an energy band from orthogonal polynomials and their
    term-wise coefficients.

    **Parameters**\n
    coeffs: 1D array
        Polynomial coefficients to use in reconstruction.
    bases: 3D array | None
        Matrix composed of bases to decompose into (generated if None).
    baxis: int | 0
        Axis of the basis index.
    npix: int | 256
        Number of pixels along one side in the square image (generated bases only).
    basis_type: str | 'Zernike'
        Type of basis to use.
    ret: str | 'band'
        Options for the return values ('band' or 'all').

    **Return**\n
    band_recon (and bases when ret='all'): reconstructed 2D band.
    """
    coeffs = coeffs.ravel()
    nterms = coeffs.size
    if bases is None:
        if basis_type == "Zernike":
            bases = ppz.hexike_basis(nterms=nterms, npix=npix, vertical=True, outside=0)
        elif basis_type == "Fourier":
            raise NotImplementedError
        else:
            raise NotImplementedError
    else:
        if baxis != 0:
            bases = np.moveaxis(bases, baxis, 0)
    # Unpack at function level so generated bases are covered as well (the
    # previous placement inside the else branch left these names unbound for
    # internally generated bases).
    nbas, nbasr, nbasc = bases.shape
    # Weighted sum of basis terms, reshaped back to the 2D band grid.
    band_recon = bases.reshape((nbas, nbasr * nbasc)).T.dot(coeffs).reshape((nbasr, nbasc))
    if ret == "band":
        return band_recon
    elif ret == "all":
        return band_recon, bases
def projectionfilter(data, nterms=None, bases=None, npix=None, basis_type="Zernike", outside="nan", basis_kwds={}):
    """Filter a reconstructed band structure using orthogonal polynomial approximation.

    **Parameters**\n
    data: 2D array
        Band dispersion in 2D to filter.
    nterms: int | None
        Number of terms (required only when bases are generated here).
    bases: 3D array | None
        Bases for decomposition.
    npix: int | None
        Size (number of pixels) in one direction of each basis term.
    basis_type: str | 'Zernike'
        Type of basis to use for filtering.
    outside: numeric/str | 'nan'
        Values to fill for regions outside the Brillouin zone boundary.
    basis_kwds: dictionary | {}
        Keywords for basis generator (see `poppy.zernike.hexike_basis()` if hexagonal Zernike polynomials are used).

    **Return**\n
    recon, coeffs: filtered band and the projection coefficients.
    """
    # Only coerce when provided; the previous unconditional int(nterms)
    # raised TypeError for the default nterms=None even when `bases` was
    # already supplied by the caller.
    if nterms is not None:
        nterms = int(nterms)
    # Generate basis functions
    if bases is None:
        if basis_type == "Zernike":
            bases = ppz.hexike_basis(nterms=nterms, npix=npix, **basis_kwds)
    # Decompose into the given basis
    coeffs = decomposition_hex2d(data, bases=bases, baxis=0, nterms=nterms, basis_type=basis_type, ret="coeffs")
    # Reconstruct the smoothed version of the energy band
    recon = reconstruction_hex2d(coeffs, bases=bases, baxis=0, npix=npix, basis_type=basis_type, ret="band")
    if outside == "nan":
        recon = u.to_masked(recon, val=0)
        return recon, coeffs
    elif outside == 0:
        return recon, coeffs
def polydecompose(trace, deg, ids=None, method="piecewise", polytype="Legendre", **kwds):
    """Decompose the trace into orthogonal polynomials.

    **Parameters**\n
    trace: 1D array
        Line profile to decompose.
    deg: int
        Degree of the fitting polynomial for each segment.
    ids: list/tuple | None
        Segment boundary indices; ``len(ids) - 1`` segments are fitted.
        NOTE(review): required in practice — the default None raises TypeError.
    method: str | 'piecewise'
        Decomposition method ('piecewise' is implemented; 'complete' is not).
    polytype: str | 'Legendre'
        Polynomial family ('Legendre' or 'Chebyshev').
    **kwds: keyword arguments
        Extra keywords passed to the numpy polynomial fitting routine.

    **Return**\n
    res: 2D array (or list if ragged)
        Fitted coefficients, one row per segment.
    """
    nseg = len(ids) - 1
    # Shift the intermediate boundaries so adjacent segments share endpoints.
    altids = u.segmod(ids)
    res = []
    if method == "piecewise":
        for i in range(nseg):
            # Fit each segment on its own index range.
            ida, idb = ids[i], altids[i + 1]
            x = list(range(ida, idb))
            y = trace[ida:idb]
            if polytype == "Legendre":
                res.append(np.polynomial.legendre.legfit(x, y, deg, **kwds))
            elif polytype == "Chebyshev":
                res.append(np.polynomial.chebyshev.chebfit(x, y, deg, **kwds))
    elif method == "complete":
        raise NotImplementedError
    try:
        # Best-effort stacking; kept as a list if the per-segment fits are ragged.
        res = np.asarray(res)
    except:
        pass
    return res
def polyreconstruct(coeffs, ids=None, polytype="Legendre", flatten=True):
    """Reconstruct piecewise line segments from the provided polynomial coefficients.

    **Parameters**\n
    coeffs: 2D array
        Per-segment polynomial coefficients (one row per segment).
    ids: list/tuple | None
        Segment boundary indices.
    polytype: str | 'Legendre'
        Polynomial family ('Legendre' or 'Chebyshev').
    flatten: bool | True
        Option to concatenate the segments into a single array.
    """
    segments = []
    for seg in range(len(ids) - 1):
        # Evaluate the segment polynomial on its own index range.
        start, stop = ids[seg], ids[seg + 1]
        xs = list(range(start, stop))
        segcf = coeffs[seg, :]
        if polytype == "Legendre":
            segments.append(np.polynomial.legendre.legval(xs, segcf))
        elif polytype == "Chebyshev":
            segments.append(np.polynomial.chebyshev.chebval(xs, segcf))
    if flatten:
        segments = np.concatenate(segments, axis=0)
    return segments
def transdeform(imbase, xtrans=0, ytrans=0, interp_order=1, **kwargs):
    """Translate an image using an explicit deformation field.

    **Parameters**\n
    imbase: 2D array
        Base image before translation.
    xtrans, ytrans: numeric, numeric | 0, 0
        Magnitude of translation along the x and y axes.
    interp_order: int | 1
        Interpolation order for the coordinate mapping.
    **kwargs: keyword arguments
        See additional arguments in `scipy.ndimage.map_coordinates()`.
    """
    # Homogeneous pixel-coordinate grid of the base image.
    coords = sym.coordinate_matrix_2D(imbase, coordtype="homogeneous", stackaxis=0)
    # Per-pixel displacements realizing the translation.
    rowdisp, coldisp = sym.translationDF(coords, stackaxis=0, ret="displacement", xtrans=xtrans, ytrans=ytrans)
    rowdef = coords[1, ...] + rowdisp
    coldef = coords[0, ...] + coldisp
    return ndi.map_coordinates(imbase, [rowdef, coldef], order=interp_order, **kwargs)
def rotodeform(imbase, angle, center, interp_order=1, **kwargs):
    """Rotate an image using an explicit deformation field.

    **Parameters**\n
    imbase: 2D array
        Base image before rotation.
    angle: numeric
        Angle of rotation.
    center: list/tuple
        Center pixel coordinates of the image.
    interp_order: int | 1
        Interpolation order for the coordinate mapping.
    **kwargs: keyword arguments
        See additional arguments in `scipy.ndimage.map_coordinates()`.

    **Return**\n
    imshift: 2D array
        Rotated image.
    """
    # Homogeneous pixel-coordinate grid of the base image.
    coords = sym.coordinate_matrix_2D(imbase, coordtype="homogeneous", stackaxis=0)
    # Per-pixel displacements realizing the rotation about `center`.
    rowdisp, coldisp = sym.rotationDF(coords, stackaxis=0, ret="displacement", center=center, angle=angle)
    rowdef = coords[1, ...] + rowdisp
    coldef = coords[0, ...] + coldisp
    return ndi.map_coordinates(imbase, [rowdef, coldef], order=interp_order, **kwargs)
def rotosymmetrize(image, center, rotsym=None, angles=None, outside="nan", **kwargs):
    """Symmetrize the pattern according to rotational symmetry.

    **Parameters**\n
    image: 2D array
        Image to symmetrize.
    center: list/tuple
        Image center pixel position (row, column).
    rotsym: int | None
        Order of rotation symmetry (if regular symmetry is assumed). If ``rotsym``
        is specified, the values from ``angles`` are ignored.
    angles: numeric | None
        Angles of rotation.
    outside: str/numeric | 'nan'
        The values of the symmetrized image outside the masked boundary.
    """
    image = np.nan_to_num(image)
    if rotsym is not None:
        # Regular n-fold symmetry: evenly spaced angles, excluding 360.
        angles = np.linspace(0, 360, int(rotsym), endpoint=False)
    # Average over all rotation-equivalent copies of the image.
    equivalents = [rotodeform(imbase=image, angle=ang, center=center, **kwargs) for ang in angles]
    rotoavg = np.asarray(equivalents).mean(axis=0)
    if outside == "nan":
        return u.to_masked(rotoavg, val=0), angles
    elif outside == 0:
        return rotoavg, angles
def rotosymdetect(image, center, rotrange=list(range(-30, 330, 5)), lookahead=4, pbar=True, pbenv="classic"):
    """Detect the degree of rotational symmetry of an image.

    **Parameters**\n
    image: 2D array
        Image for rotational symmetry detection.
    center: list/tuple
        Image center coordinates.
    rotrange: list/tuple | list(range(-30, 330, 5))
        Rotation values to test.
    lookahead: int | 4
        Number of points ahead taken into consideration in peak detection.
    pbar: bool | True
        Option to show progress bar.
    pbenv: str | 'classic'
        Progress bar environment ('classic' or 'notebook').

    **Return**\n
    nmax: int
        Order of rotational symmetry.
    """
    tqdm = u.tqdmenv(pbenv)
    # Self-similarity score for each test angle (higher = more similar).
    scores = []
    for ang in tqdm(rotrange, disable=not (pbar)):
        rotated = rotodeform(image, ang, center=center)
        scores.append(-np.linalg.norm(rotated - image))
    scores = np.asarray(scores)
    try:
        # Count similarity maxima; each corresponds to a symmetry-equivalent angle.
        peaks = aly.peakdetect1d(scores, x_axis=rotrange, lookahead=lookahead)
        nmax = len(peaks[0])
    except:
        # Deliberate best-effort: `aly` (mpes) is an optional dependency and
        # may be undefined; fall back to no detected symmetry.
        nmax = 0
    return nmax
def hexfilter(
    images,
    center,
    axis=0,
    rotrange=list(range(-30, 330, 5)),
    lookahead=4,
    pbar=True,
    pbenv="classic",
    ret="all",
):
    """Filter out sixfold-symmetric images from a stack.

    **Parameters**\n
    images: 3D array
        Stack of 2D images.
    center: list/tuple/1D array
        Image center pixel coordinates.
    axis: int | 0
        Axis to extract images from stack.
    rotrange: list/tuple | list(range(-30, 330, 5))
        All rotations tested.
    lookahead: int | 4
        Number of points ahead taken into consideration in peak detection.
    pbar: bool | True
        Option to turn on/off progress bar.
    pbenv: str | 'classic'
        Notebook environment.
    ret: str | 'all'
        Option for return ('filtered' returns only filtered images, 'all' returns filtered images and the indices of symmetric images within the stack).
    """
    stack = np.moveaxis(images, axis, 0)
    tqdm = u.tqdmenv(pbenv)
    # Detected rotational symmetry order for every slice.
    symord = []
    for idx in tqdm(range(stack.shape[0]), disable=not (pbar)):
        symord.append(rotosymdetect(stack[idx, ...], center=center, rotrange=rotrange, lookahead=lookahead, pbar=False))
    symord = np.asarray(symord)
    # Keep slices with (approximately) sixfold symmetry.
    seq = np.where((symord > 5) & (symord <= 7))[0]
    hexbase = stack[seq, ...]
    if ret == "filtered":
        return hexbase
    elif ret == "all":
        return hexbase, symord, seq
def reflectodeform(imbase, refangle, center, axis=0, interp_order=1, **kwargs):
    """Reflect the image with respect to the symmetry line across the image center
    using a deformation field.

    **Parameters**\n
    imbase: 2D array
        Base image.
    refangle: numeric
        Reflection angle with respect to the image horizontal axis.
    center: list/tuple
        Center coordinates of the image.
    axis: int | 0
        Axis to reflect along.
    interp_order: int | 1
        Interpolation order for the coordinate mapping.
    """
    imbase = np.nan_to_num(imbase)
    nrows, ncols = imbase.shape
    coords = sym.coordinate_matrix_2D(imbase, coordtype="homogeneous", stackaxis=0)
    # Rotate the symmetry line onto an image axis, mirror, and rotate back.
    rot_fwd = sym.rotation2D(angle=refangle, center=center)
    if axis == 0:
        flip = sym.scaling2D(xscale=1, yscale=-1)
        shift = sym.translation2D(xtrans=0, ytrans=nrows)
    elif axis == 1:
        flip = sym.scaling2D(xscale=-1, yscale=1)
        shift = sym.translation2D(xtrans=ncols, ytrans=0)
    rot_back = sym.rotation2D(angle=-refangle, center=center)
    # Compose the full reflection transform (applied right-to-left).
    M = np.linalg.multi_dot([rot_back, shift, flip, rot_fwd])
    rdef, cdef = sym.compose_deform_field(coords, M, stackaxis=0, ret="deformation", ret_indexing="rc")
    return ndi.map_coordinates(imbase, [rdef, cdef], order=interp_order, **kwargs)
def reflectosymmetrize(image, center, refangles, axis=0, outside="nan"):
    """Symmetrize the pattern according to reflection symmetry.

    **Parameters**\n
    image: 2D array
        Image to symmetrize.
    center: list/tuple
        Image center pixel position.
    refangles: list/tuple
        Reflection-line angles to average over.
    axis: int | 0
        Axis to reflect along.
    outside: str/numeric | 'nan'
        The values of the symmetrized image outside the masked boundary.
    """
    image = np.nan_to_num(image)
    # Average over all reflection-equivalent copies of the image.
    equivalents = [
        reflectodeform(imbase=image, refangle=ang, center=center, axis=axis) for ang in refangles
    ]
    reflectoavg = np.asarray(equivalents).mean(axis=0)
    if outside == "nan":
        return u.to_masked(reflectoavg, val=0)
    elif outside == 0:
        return reflectoavg
def refsym(img, op="nanmax", op_package="numpy", axis=0, pbenv="classic", pbar=True):
    """Symmetrize an image stack by combining the four axis-flipped views of each slice.

    **Parameters**\n
    img: 3D array
        Stack of 2D images.
    op: str | 'nanmax'
        Name of the reduction operator used to combine the four views.
    op_package: str | 'numpy'
        Package the operator is resolved from.
    axis: int | 0
        Stacking axis of the input.
    pbenv: str | 'classic'
        Progress bar environment ('classic' or 'notebook').
    pbar: bool | True
        Option to show the progress bar.

    **Return**\n
    imgsym: 3D array
        Symmetrized stack (stacking axis moved to the front).
    """
    opr = u.pick_operator(op, package=op_package)
    tqdm = u.tqdmenv(pbenv)
    if axis != 0:
        # rollaxis returns a VIEW; copy it so the in-place updates below do
        # not silently mutate the caller's array (the axis == 0 branch
        # already copies, so both branches now behave consistently).
        imgsym = np.rollaxis(img, axis, 0).copy()
    else:
        imgsym = img.copy()
    nimg = imgsym.shape[0]
    for i in tqdm(range(nimg), disable=not (pbar)):
        imcurr = imgsym[i, ...]
        # The four reflection-equivalent views of the current slice.
        transviews = [imcurr, imcurr[::-1, :], imcurr[:, ::-1], imcurr[::-1, ::-1]]
        imgsym[i, ...] = opr(np.asarray(transviews), axis=0)
    return imgsym
def cutedge(image, check_axis=1, boundary="square", ret="cutimage"):
    """Cutting out the region beyond the edge of an image.
    **Parameters**\n
    image: 2D array
        Image (containing nan or 0 outside the region of interest) before cutting.
    check_axis: int | 1
        The long axis for determining the boundary.
    boundary: str | 'square'
        ``'square'`` Square image boundary.\n
        ``'tight'`` Tightest rectangular image boundary.
        The shape of the image boundary.
    ret: str | 'cutimage'
        Option to specify return quantity ('cutimage', 'cutrange', 'all').
    """
    # Move the checking axis to the front; zero out nans so that the sums
    # below only count the region of interest.
    image_alt = np.moveaxis(image, check_axis, 0)
    image_real = np.nan_to_num(image_alt)
    # Calculate the cut range along the row axis
    raxis_sum = image_real.sum(axis=0)
    indr_lower, indr_upper = u.nzbound(raxis_sum)
    edge = indr_upper - indr_lower
    half_edge = edge // 2
    # Calculate the cut range along the column axis
    caxis_sum = image_real.sum(axis=1)
    indc_lower, indc_upper = u.nzbound(caxis_sum)
    midpoint = (indc_upper + indc_lower) // 2
    # Cut edges of an image using the specified boundary condition.
    # NOTE(review): the indices are applied to the ORIGINAL (unmoved) image;
    # presumably this relies on the default check_axis=1 keeping row/column
    # roles aligned — confirm for other check_axis values.
    if boundary == "square":
        # Center a square of side `edge` on the column-axis midpoint.
        indc_lower, indc_upper = midpoint - half_edge, midpoint + half_edge
        image_cut = image[indr_lower : indr_upper + 1, indc_lower : indc_upper + 1]
    elif boundary == "tight":
        image_cut = image[indr_lower : indr_upper + 1, indc_lower : indc_upper + 1]
    # NOTE(review): `image_cut` stays unbound for other `boundary` values,
    # which would raise NameError unless ret == 'cutrange'.
    cutrange = [indr_lower, indr_upper + 1, indc_lower, indc_upper + 1]
    if ret == "cutimage":
        return image_cut
    elif ret == "cutrange":
        return cutrange
    elif ret == "all":
        return image_cut, cutrange
class BrillouinZoner:
    """Class for truncating the band mapping data to the first Brillouin zone."""
    def __init__(self, folder="", bands=[], axis=0, mask=None, kvals=[[], []]):
        """Initialize the zoner.

        **Parameters**\n
        folder: str | ''
            Folder containing the band structure data files.
        bands: ndarray/list | []
            Band structure data; the energy axis is moved to the front when possible.
        axis: int | 0
            Index of the energy axis in ``bands``.
        mask: 2D array | None
            Brillouin-zone mask.
        kvals: list | [[], []]
            Momentum (kx, ky) coordinate grids.
        """
        self.folder = folder
        try:
            self.bands = np.moveaxis(bands, axis, 0)
        except:
            # Deliberate best-effort: `bands` may be an empty placeholder here.
            pass
        self.eaxis = axis
        self.mask = mask
        self.kvals = kvals
    def set_bands(self, bands):
        """Set the energy bands."""
        self.bands = bands
    def set_kvals(self, kvals):
        """Set the k values."""
        self.kvals = kvals
    def set_eaxis(self, axis):
        """Set the index of the energy axis."""
        self.eaxis = axis
    def set_mask(self, mask):
        """Set the mask for the energy band."""
        self.mask = mask
    def summary(self, rettype="dict"):
        """A container of truncated band structure and parameters.
        **Parameters**\n
        rettype: str | 'dict'
            Data type of the returned summary (``'dict'`` or ``'list'``).
        """
        # The 'list' form matches the group layout expected by u.saveHDF().
        if rettype == "dict":
            out = {"axes": {"kx": self.kvals[0], "ky": self.kvals[1]}, "bands": {"bands": self.bandcuts}}
            return out
        elif rettype == "list":
            out = [["axes", {"kx": self.kvals[0], "ky": self.kvals[1]}], ["bands", {"bands": self.bandcuts}]]
            return out
    @property
    def nbands(self):
        """Number of bands."""
        try:
            nbs = self.bands.shape[0]
        except:
            # `bands` may still be unset or without a shape attribute.
            nbs = 0
        return nbs
    def load_data(self, filename, loadfunc=None, ret=False, **kwargs):
        """Load band structure data (energy values and momentum axes).

        **Parameters**\n
        filename: str
            Name of the file (appended to ``self.folder``).
        loadfunc: callable | None
            Loader returning (bands, kx, ky, ...); e.g. ``load_bandstruct``.
        ret: bool | False
            Option to return the loaded values.
        **kwargs: keyword arguments
            Extra keywords passed to ``loadfunc``.
        """
        # Load the energy and momentum values of the electronic bands
        readout = loadfunc(self.folder + filename, **kwargs)
        self.bands = readout[0]
        self.kvals = readout[1:]
        if ret:
            return self.bands, self.kvals
    def select_slice(self, selector, axis=None):
        """Select the image slice for landmark detection.
        **Parameters**\n
        selector: slice object
            A slice object for selection of image stacks for feature detection.
        axis: int | None
            Energy axis override; when given, replaces ``self.eaxis``.
        """
        if axis is not None:
            self.set_eaxis(axis=axis)
        self.bands = np.moveaxis(self.bands, self.eaxis, 0)
        self.slice = self.bands[selector, ...]
        # Collapse a multi-slice selection into a single 2D image.
        if self.slice.ndim == 3:
            self.slice = self.slice.sum(axis=0)
    def set_landmarks(self, landmarks):
        """Set the landmark locations for the image features."""
        self.landmarks = landmarks
    def findlandmarks(self, method="daofind", direction="ccw", center_det="centroidnn", ret=False, **kwargs):
        """Determine the landmark locations, further details see ``mpes.analysis.peakdetect2d()``.
        **Parameters**\n
        method: str | 'daofind'
            Method for detecting landmarks ('daofind' or 'maxfind').
        direction: str | 'ccw'
            Direction to arrange the detected vertices ('cw' for clockwise or 'ccw' for counterclockwise).
        center_det: str | 'centroidnn'
            Method to determine the center position.
        ret: bool | False
            Option to return the outcome.
        **kwargs: keyword arguments
            image: 2D array | ``self.bands[0,...]``
                Image to extract landmark from.
            image_ofs: list/tuple | [0, 0, 0, 0]
                Systematic offsets applied to the detected landmarks.
        """
        img = kwargs.pop("image", self.bands[0, ...])
        imofs = np.array(kwargs.pop("image_ofs", [0, 0, 0, 0]))
        # Crop away margins before detection, then shift the detected points
        # back into original-image coordinates.
        img = u.cut_margins(img, imofs)
        self.landmarks = aly.peakdetect2d(img, method=method, **kwargs)
        self.landmarks += imofs[[0, 2]]
        if center_det is None:
            self.pouter = self.landmarks
            self.pcent = None
        else:
            # Separate the center point from the outer vertices.
            self.pcent, self.pouter = po.pointset_center(self.landmarks, method=center_det, ret="cnc")
            self.pcent = tuple(self.pcent)
        # Order the point landmarks
        self.pouter_ord = po.pointset_order(self.pouter, direction=direction)
        if ret:
            return self.landmarks
    def maskgen(self, ret="all", **kwargs):
        """Generate a mask using given parameters.
        **Parameters**\n
        See ``fuller.generator.hexmask()``.
        """
        imshape = kwargs.pop("imshape", self.slice.shape)
        self.mask, self.margins = hexmask(ret=ret, imshape=imshape, **kwargs)
    def resample(self, kvals, band, nx=None, ny=None, ret="all", **kwargs):
        """Resample the energy band in a finer grid.
        **Parameters**\n
        See ``fuller.utils.interpolate2d()``.
        """
        # kvals are 2D meshes; pass their 1D axes to the interpolator.
        rsband = u.interpolate2d(kvals[0][:, 0], kvals[1][0, :], band, nx=nx, ny=ny, ret=ret, **kwargs)
        return rsband
    def bandcutter(self, nx=None, ny=None, dmean=False, resampled=False, ret=False, **kwargs):
        """Truncate the band within the first Brillouin zone.
        **Parameters**\n
        nx, ny: int, int | None, None
            Pixel numbers of the cut band along the image axes.
        dmean: bool | False
            Option to subtract the mean from the band structure.
        resampled: bool | False
            Option to resample the energy band in another k-grid.
        ret: bool | False
            Specifications for return values.
        **kwargs: keyword arguments
            mask: 2D array | ``self.mask``
                Mask matrix to apply to image.
            margins: list/tuple | ``self.margins``
                Four-sided margins for the truncated band structure.
            selector: list/slice object/None | None
                Selector along the band index axis.
            offsx, offsy: int, int | 0, 0
                Offsets to a square along x and y directions.
        """
        mask = kwargs.pop("mask", self.mask)
        margins = kwargs.pop("margins", self.margins)
        selector = kwargs.pop("selector", slice(0, self.nbands))
        offsx = kwargs.pop("offsx", 0)
        offsy = kwargs.pop("offsy", 0)
        bands = self.bands[selector, :, :]
        nbands = bands.shape[0]
        bandcuts = []
        for i in range(nbands):
            if resampled:
                # Augment the band structure
                band = self.resample(self.kvals, bands[i, ...], nx=nx, ny=ny, ret="all", **kwargs)
            else:
                band = bands[i, ...]
            # Construct the truncated band structure
            bandcut = u.cut_margins(band, margins, offsetx=offsx, offsety=offsy)
            bandcuts.append(bandcut)
        # Construct the truncated mask
        maskcut = u.cut_margins(mask, margins)
        # Mask out the band region outside the first Brillouin zone
        self.bandcuts = np.asarray(bandcuts) * maskcut[None, ...]
        try:  # likewise trim the extents of the k-values
            # NOTE(review): this indexes two scalar elements; a slice
            # `margins[0]:-margins[1]` (and `margins[2]:-margins[3]`) was
            # probably intended — the except clause hides any failure here.
            self.kvals[0] = self.kvals[0][margins[0], -margins[1]]
            self.kvals[1] = self.kvals[1][margins[2], -margins[3]]
        except:
            pass
        if dmean:  # Subtract the mean value from band energies
            self.bandcuts -= np.nanmean(self.bandcuts, axis=(1, 2))[:, None, None]
        if ret == "cutbands":
            return self.bandcuts
        elif ret == "all":
            return self.bandcuts, bands
    def save_data(self, form="h5", save_addr="./bandcuts.h5", **kwargs):
        """Save truncated band structure data.
        **Parameters**\n
        form: str | 'h5'
            Format of the file to save.
        save_addr: str | './bandcuts'
            File-saving address.
        **kwargs: keyword arguments
            Additional arguments for the file-saving functions.
        """
        if form == "mat":
            sio.savemat(save_addr, self.summary(rettype="dict"), **kwargs)
        elif form == "h5":
            u.saveHDF(*self.summary(rettype="list"), save_addr=save_addr)
    def visualize(self, image, figsize=(4, 4), origin="lower", annotated=False, points=None, scatterkws={}, **kwargs):
        """Display (cut) bands.

        **Parameters**\n
        image: 2D array
            Image to display.
        figsize: tuple | (4, 4)
            Figure size.
        origin: str | 'lower'
            Image origin convention passed to ``imshow``.
        annotated: bool | False
            Option to overlay landmark points and indices.
        points: dict | None
            Named point sets to annotate (required when ``annotated`` is True).
        scatterkws: dict | {}
            Keywords for the scatter overlay.
        **kwargs: keyword arguments
            Extra keywords for ``imshow`` (plus 'textshift'/'textsize' when annotating).
        """
        f, ax = plt.subplots(figsize=figsize)
        ax.imshow(image, origin=origin, **kwargs)
        # Add annotation to the figure
        if annotated:
            tsr, tsc = kwargs.pop("textshift", (3, 3))
            txtsize = kwargs.pop("textsize", 12)
            for pk, pvs in points.items():
                try:
                    # Point set: (n, 2) array of (row, col) coordinates.
                    ax.scatter(pvs[:, 1], pvs[:, 0], **scatterkws)
                except:
                    # Single point: (row, col) pair.
                    ax.scatter(pvs[1], pvs[0], **scatterkws)
                if pvs.size > 2:
                    for ipv, pv in enumerate(pvs):
                        ax.text(pv[1] + tsc, pv[0] + tsr, str(ipv), fontsize=txtsize)
class EBandSynthesizer:
    """Class for synthesizing electronic band structure from basis functions."""

    def __init__(self, nbands, **kwargs):
        """Set up the synthesizer.

        **Parameters**\n
        nbands: int
            Number of energy bands to synthesize.
        **kwargs: keyword arguments
            spacing, coeffs, mask | [], [], []
                Optional presets for band spacing, basis coefficients and mask.
        """
        self.nbands = nbands
        self.bands = []
        self.kvals = [[], []]
        self.spacing = kwargs.pop("spacing", [])
        self.coeffs = kwargs.pop("coeffs", [])
        self.mask = kwargs.pop("mask", [])

    def set_kvals(self, kvals):
        """Set momentum values."""
        self.kvals = kvals

    def set_mask(self, mask):
        """Set the mask for the synthesized data."""
        self.mask = mask

    def set_nbands(self, nbands):
        """Set the number of energy bands to synthesize."""
        self.nbands = nbands

    def set_spacing(self, spacing):
        """Set the energy spacing between energy bands."""
        self.spacing = spacing

    def summary(self, rettype="dict"):
        """A container of synthetic band structure and parameters."""
        axes = {"kx": self.kvals[0], "ky": self.kvals[1]}
        bands = {"band": self.bands}
        if rettype == "dict":
            return {"axes": axes, "bands": bands}
        elif rettype == "list":
            return [["axes", axes], ["bands", bands]]

    def basisgen(self, nterms, npix, vertical=True, outside=0, basis_type="Zernike"):
        """Generate polynomial bases for energy band synthesis."""
        if basis_type == "Zernike":
            # Hexagonally-bounded Zernike ("hexike") basis functions
            self.bases = ppz.hexike_basis(nterms=nterms, npix=npix, vertical=vertical, outside=outside)

    def coeffgen(self, nterms, method="rand_gauss", **kwargs):
        """Generate coefficients for energy band synthesis."""
        if method == "rand_gauss":
            # One row of standard-normal coefficients per band
            self.coeffs = np.random.randn(self.nbands, nterms, **kwargs)

    def synthesize(self, basis_type="Zernike", **kwargs):
        """Generate 3D electronic band structure."""
        self.bands = []
        for iband in range(self.nbands):
            band = reconstruction_hex2d(self.coeffs[iband, ...], bases=self.bases, **kwargs)
            self.bands.append(band)
        self.bands = np.asarray(self.bands)

    def save_bands(self, form, save_addr="", **kwargs):
        """Save the synthesized energy bands."""
        if form == "mat":  # Save in mat format
            compression = kwargs.pop("mat_compression", False)
            sio.savemat(save_addr, self.summary(rettype="dict"), do_compression=compression, **kwargs)
        elif form in ("h5", "hdf5"):  # Save in hdf5 format
            u.saveHDF(*self.summary(rettype="list"), save_addr=save_addr)

    def visualize(self, selector=None, indaxis=0, backend="plotly", **kwargs):
        """Plot synthesized band structure."""
        title = kwargs.pop("title", "")
        if backend == "plotly":
            import bandana as bd

            fname = kwargs.pop("fname", "")
            bd.plotter.bandplot3d(self.bands, selector, indaxis=indaxis, title=title, fname=fname)
        elif backend == "matplotlib":
            raise NotImplementedError
class MPESDataGenerator:
    """Class for generating three-dimensional photoemission (band mapping) data
    from band surfaces and a parametrized lineshape function."""
    def __init__(self, bands, lineshape, baxis=0, **kwargs):
        """Initialize with band surfaces and a lineshape.
        **Parameters**\n
        bands: 2D/3D array
            Energy band surface(s); the band index axis is moved to axis 0.
        lineshape: callable
            Lineshape function, evaluated as ``lineshape(feval=True, vardict=...)``.
        baxis: int | 0
            Axis of the band index in ``bands``.
        **kwargs: keyword arguments
            amplitude, srfwidth, linewidth, energy | [], [], [], []
                Optional presets for the lineshape parameters.
        """
        if baxis != 0:
            bands = np.moveaxis(bands, baxis, 0)
        self.all_bands = bands
        self.bands = bands
        self.lineshape = lineshape
        try:  # 3D input: (band, row, col)
            self.nr, self.nc = self.bands[0, ...].shape
        except:  # 2D input: a single band surface
            self.nr, self.nc = self.bands.shape
        self.amplitude = kwargs.pop("amplitude", [])
        self.sigma = kwargs.pop("srfwidth", [])
        self.gamma = kwargs.pop("linewidth", [])
        self.energy = kwargs.pop("energy", [])
        self.kvals = [[], []]
        self.data = []
    @property
    def parameters(self):
        """A dictionary of lineshape parameters."""
        pars = {"amp": self.amplitude, "xvar": self.energy, "sig": self.sigma, "gam": self.gamma, "ctr": self.bands}
        return pars
    @property
    def nbands(self):
        """Number of bands used in the simulation."""
        bnd = self.bands.ndim
        if bnd == 2:
            return 1
        elif bnd > 2:
            return self.bands.shape[0]
    def summary(self, rettype="dict"):
        """A container of synthetic band mapping data and parameters.
        **Parameters**\n
        rettype: str | 'dict'
            Return type ('dict' or 'list').
        """
        if rettype == "dict":
            out = {"axes": {"E": self.energy, "kx": self.kvals[0], "ky": self.kvals[1]}, "binned": {"V": self.data}}
            return out
        elif rettype == "list":
            out = [["axes", {"E": self.energy, "kx": self.kvals[0], "ky": self.kvals[1]}], ["binned", {"V": self.data}]]
            return out
    def set_amplitude(self, amplitude):
        """Set the amplitude of the lineshape function."""
        self.amplitude = amplitude
    def set_bands(self, bands):
        """Set the energy band positions."""
        self.all_bands = bands
    def add_bands(self, bands, edir="lower"):
        """Add an energy band to the existing list."""
        # NOTE(review): 'lower' appends (after the existing bands) and
        # 'higher' prepends -- confirm this matches the intended energetic
        # ordering of ``self.all_bands``.
        if edir == "lower":
            self.all_bands = np.concatenate((self.all_bands, bands))
        elif edir == "higher":
            self.all_bands = np.concatenate((bands, self.all_bands))
    def select_bands(self, selector):
        """Select energy bands by their indices."""
        self.bands = self.all_bands[selector, ...]
    def set_matrix_elements(self, matelems):
        """Set the matrix element intensity modulation in photoemission process."""
        self.matelems = matelems
    def set_kvals(self, kvals):
        """Set the momentum values for the data."""
        self.kvals = kvals
    def set_lineshape(self, lineshape):
        """Set the lineshape function."""
        self.lineshape = lineshape
    def set_energy(self, energy):
        """Set the binding energy of the photoelectrons."""
        self.energy = energy
    def set_srfwidth(self, sigma):
        """Set the width of the system response function (SRF)."""
        self.sigma = sigma
    def set_linewidth(self, gamma):
        """Set the intrinsic linewidth of electronic state."""
        self.gamma = gamma
    def generate_data(self, matrix_elements="off"):
        """Generate photoemission data by summing the lineshape over bands."""
        params = self.parameters.copy()
        # Broadcast the energy axis as (E, 1, 1) against the (row, col) band maps
        params["xvar"] = self.energy[:, None, None]
        # First band keeps a leading axis of length 1 (shape (1, nr, nc))
        params["ctr"] = self.bands[:1, ...]
        # Generate 3D data for at least 1 electronic band
        self.data = self.lineshape(feval=True, vardict=params)
        if self.nbands > 1:
            for b in range(1, self.nbands):
                # NOTE(review): subsequent bands pass 2D centers (nr, nc)
                # while the first passes (1, nr, nc) -- presumably both
                # broadcast identically against xvar; TODO confirm.
                params["ctr"] = self.bands[b, ...]
                self.data += self.lineshape(feval=True, vardict=params)
        if matrix_elements == "on":
            self.data = self.matelems[None, ...] * self.data
    def save_data(self, form="h5", save_addr="", save_items="all", **kwargs):
        """Save generated photoemission data.
        **Parameters**\n
        form: str | 'h5'
            Format of the file to save ('mat', 'h5'/'hdf5' or 'tiff').
        save_addr: str | ''
            File-saving address.
        save_items: str | 'all'
            Selection of items to save (currently unused).
        **kwargs: keyword arguments
            Additional arguments for the file-saving functions.
        """
        dtyp = kwargs.pop("dtyp", "float32")
        if form == "mat":  # Save as mat file (for Matlab)
            compression = kwargs.pop("mat_compression", False)
            sio.savemat(save_addr, self.summary(rettype="dict"), do_compression=compression, **kwargs)
        elif form in ("h5", "hdf5"):  # Save as hdf5 file
            u.saveHDF(*self.summary(rettype="list"), save_addr=save_addr)
        elif form == "tiff":  # Save as tiff stack
            try:
                import tifffile as ti
                ti.imsave(save_addr, data=self.data.astype(dtyp), **kwargs)
            except ImportError:
                raise ImportError("tifffile package is not installed locally!")
    def to_bandstructure(self):
        # Stub: conversion to a band structure object is not implemented yet
        return
def hexpad(img, cvd, edgepad=None, **kwargs):
    """Symmetrically pad an image in directions perpendicular to the hexagonal edges.

    **Parameters**\n
    img: 2d array
        Image to pad.
    cvd: numeric
        Center-vertex distance of the hexagon.
    edgepad: list/tuple
        Number of padded pixels on the edge of the image, ((left, right), (top, bottom)).
    **kwargs: keyword arguments
        op, op_package: str, str | 'nanmax', 'numpy'
            Namestring of the function and package used to merge the original
            image with its shifted copies (package.function is applied).
        mask: 2d array | all-ones
            Mask applied to the unpadded image before merging with the paddings
            (used to suppress potential discontinuities of boundary pixels).
        edgevals: numeric | ``np.nan``
            Edge values outside the boundary of the mask.
    **Return**\n
    padded_view: 2d array
        Rectangular image after padding hexagonally.
    """
    op = kwargs.pop("op", "nanmax")
    op_package = kwargs.pop("op_package", "numpy")
    mask = kwargs.pop("mask", np.ones_like(img))
    edgevals = kwargs.pop("edgevals", np.nan)
    # Optionally enlarge image and mask with constant border values
    if edgepad is not None:
        img = np.pad(img, edgepad, mode="constant", constant_values=edgevals)
        mask = np.pad(mask, edgepad, mode="constant", constant_values=edgevals)
    # Shift distances from the 30-degree hexagon construction
    halfang = np.radians(30)
    cosa, sina = np.cos(halfang), np.sin(halfang)
    xt, yt = (cosa**2) * cvd, (cosa * sina) * cvd
    opr = u.pick_operator(op, package=op_package)
    masked = (img * mask).T
    # Translate the masked image toward each of the six hexagon edges,
    # then merge the shifted copies with the original using the operator
    xyshifts = [(2 * xt, -2 * yt), (2 * xt, 2 * yt), (-2 * xt, 2 * yt), (-2 * xt, -2 * yt), (0, -4 * yt), (0, 4 * yt)]
    views = [transdeform(masked, xtrans=dx, ytrans=dy, cval=np.nan) for (dx, dy) in xyshifts]
    padded_view = opr(np.asarray(views + [masked]), axis=0)
    return padded_view.T
def hextiler(image, final_size, cvd, method="geometric", op="nanmax", op_package="numpy", ret="final"):
    """Tiling the image plane with hexagonal patterns.
    **Parameters**\n
    image: 2D array
        Base image before hexagonal tiling.
    final_size: list/tuple
        Final size of the padded image (row_size, colum_size).
    cvd: numeric
        Center-vertex distance.
    method: str | 'geometric'
        Method for hexagonal tiling.
    op: str | 'nanmax'
        Namestring of the operator.
    op_package: str | 'numpy'
        Namestring of the software package to obtain the operator.
    ret: str | 'final'
        final: Return only the final result.
        all: Return results from all intermediate steps.
    """
    # Symmetric padding (even reflection doubles the image about its origin corner)
    nr, nc = image.shape
    sympad = np.pad(image, ((nr - 1, 0), (nc - 1, 0)), mode="reflect", reflect_type="even")
    # Enlarge by padding nan values beyond the boundary
    spr, spc = sympad.shape
    fin_nr, fin_nc = final_size
    # NOTE(review): integer division means the padded image reaches exactly
    # final_size only when the parities of final_size and sympad.shape
    # match -- TODO confirm.
    augr, augc = (fin_nr - spr) // 2, (fin_nc - spc) // 2
    impad = np.pad(sympad, ((augr, augr), (augc, augc)), mode="constant", constant_values=np.nan)
    if method == "geometric":
        opr = u.pick_operator(op, package=op_package)
        nrp, ncp = impad.shape
        rp, cp = (nrp - 1) // 2, (ncp - 1) // 2
        # Rotation and fill (+/- 60 degrees about the image center)
        impadrot = [rotodeform(impad, angle=i, center=(rp, cp), cval=np.nan) for i in [-60, 60]]
        rotviews = np.asarray(impadrot + [impad])
        rot_merged_view = opr(rotviews, axis=0)
        # Shift distances from the 30-degree hexagon construction
        ag = np.radians(30)
        cosa, sina = np.cos(ag), np.sin(ag)
        xt, yt = (cosa**2) * cvd, (cosa * sina) * cvd
        # Translation and fill (six hexagon-edge directions)
        xyshifts = [
            (2 * xt, -2 * yt),
            (2 * xt, 2 * yt),
            (-2 * xt, 2 * yt),
            (-2 * xt, -2 * yt),
            (0, -4 * yt),
            (0, 4 * yt),
        ]
        transviews = [transdeform(rot_merged_view, xtrans=x, ytrans=y, cval=np.nan) for (x, y) in xyshifts]
        trans_merged_view = opr(np.asarray(transviews + [rot_merged_view]), axis=0)
    # Reiterate previous two steps (if needed)
    if ret == "final":
        return trans_merged_view
    elif ret == "all":
        return [trans_merged_view, rot_merged_view, impad, sympad]
def bandstack(data, baxis=2, nvb=None, ncb=None, gap_id=None, pbar=True, pbenv="classic", **kwargs):
    """Construct a stack of energy bands after symmetrization.
    **Parameters**\n
    data: 3D array
        Patches of band structure data with the axes in any ordering of (kx, ky, band_index).
    baxis: int | 2
        Axis of the band index.
    nvb, ncb: int, int | None, None
        Number of valence and conduction bands to extract; None skips the
        corresponding stack.
    gap_id: int | None
        Index number of the topmost valence band or bottommost conduction band,
        depending on the stacking order in the data variable.
    pbar: bool | True
        Option to turn on/off the progress bar in computation.
    pbenv: str | 'classic'
        Progress bar environment ('classic' or 'notebook').
    **kwargs: keyword arguments
        op, tiler_ret, final_size, cvd and any further arguments forwarded
        to ``hextiler()``.
    **Returns**\n
    vbands, cbands: 3D array, 3D array
        Stacked valence and conduction bands after symmetrization.
    """
    # Cast only the parameters that were actually provided; the previous
    # unconditional int() conversion crashed for the documented None defaults.
    nvb = int(nvb) if nvb is not None else None
    ncb = int(ncb) if ncb is not None else None
    gap_id = int(gap_id) if gap_id is not None else None
    op = kwargs.pop("op", "nanmax")
    tiler_ret = kwargs.pop("tiler_ret", "final")
    final_size = kwargs.pop("final_size", [319, 339])
    cvd = kwargs.pop("cvd", 103.9)
    tqdm = u.tqdmenv(pbenv)
    # Move the band index to the last axis
    data = np.moveaxis(data, baxis, 2)
    vbands, cbands = [], []
    if nvb is not None:  # Process valence band data
        # Valence bands are counted downwards away from the gap
        vbparts = data[..., :gap_id][..., ::-1]
        for ivb in tqdm(range(nvb), disable=not (pbar)):
            vbands.append(hextiler(vbparts[..., ivb], final_size=final_size, cvd=cvd, ret=tiler_ret, op=op, **kwargs))
        vbands = np.asarray(vbands)
    if ncb is not None:  # Process conduction band data
        cbparts = data[..., gap_id:]
        for icb in tqdm(range(ncb), disable=not (pbar)):
            cbands.append(hextiler(cbparts[..., icb], final_size=final_size, cvd=cvd, ret=tiler_ret, op=op, **kwargs))
        cbands = np.asarray(cbands)
    return vbands, cbands
def restore(img, **kwargs):
    """Restore an image with irregularly distributed missing values (as nan's).

    **Parameters**\n
    img: nd array
        Multidimensional image array with missing data (as nan's).
    **kwargs: keyword arguments
        Additional arguments supplied to ``scipy.interpolate.griddata()``.
    """
    missing = np.isnan(img)
    known_coords = np.where(~missing)
    missing_coords = np.where(missing)
    # Interpolate the missing pixels from the valid ones; the input array
    # is left untouched and a filled copy is returned.
    filled = img.copy()
    filled[missing_coords] = interpolate.griddata(
        known_coords, img[known_coords], missing_coords, **kwargs
    )
    return filled
| Python |
3D | mpes-kit/fuller | fuller/mrfRec.py | .py | 36,808 | 972 | #! /usr/bin/env python
import contextlib
import warnings as wn
import h5py
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
from scipy import interpolate
from scipy import io
from scipy import ndimage
from tqdm import tqdm
from .generator import rotosymmetrize
class MrfRec:
"""Class for reconstructing band structure from band mapping data."""
def __init__(
self,
E,
kx=None,
ky=None,
I=None,
E0=None,
eta=0.1,
includeCurv=False,
etaCurv=0.1,
):
"""Initialize the class.
**Parameters**\n
E: 1D array | None
Energy as numpy array.
kx: 1D array | None
Momentum along x axis as numpy array.
ky: 1D array | None
Momentum along y axis as numpy array.
I: 3D array | None
Measured intensity wrt momentum (rows) and energy (columns), generated if None.
E0: numeric | None
Initial guess for band structure energy values, if None mean of E is taken.
eta: numeric | 0.1
Standard deviation of neighbor interaction term
includeCurv: bool | False
Flag, if true curvature term is included during optimization.
etaCurv: numeric | 0.1
Standard deviation of curvature term.
"""
# Check input
if kx is None and ky is None:
raise Exception("Either kx or ky need to be specified!")
elif kx is None:
kx = np.array([0.0])
elif ky is None:
ky = np.array([0.0])
# Store data in object
self.kx = kx.copy()
self.ky = ky.copy()
self.E = E.copy()
self.lengthKx = kx.size
self.lengthKy = ky.size
self.lengthE = E.size
self.I = I
# Shift I because of log
self.I -= np.min(self.I)
self.I += np.min(self.I[self.I > 0])
# Parameter for reconstruction
self.eta = eta
self.includeCurv = includeCurv
self.etaCurv = etaCurv
# Generate I if needed
if I is None:
self.generateI()
# Initialize band structure
if E0 is None:
self.indEb = np.ones((self.lengthKx, self.lengthKx), int) * int(self.lengthE / 2)
else:
EE, EE0 = np.meshgrid(E, E0)
ind1d = np.argmin(np.abs(EE - EE0), 1)
self.indEb = ind1d.reshape(E0.shape)
self.indE0 = self.indEb.copy()
# Initialize change of log likelihood up to constant
self.logP = np.array([self.getLogP()])
self.epochsDone = 0
# Set normalization flag
self.I_normalized = False
@classmethod
def fromFile(cls, fileName, E0=None, eta=0.1):
"""Initialize reconstruction object from h5 file, returns reconstruction object initialized from h5 file.
**Parameters**\n
fileName: str
Path to the file as string
E0: numeric | None
Initial guess for band structure energy values, if None mean of E is taken.
"""
# Read data from file
from mpes import fprocessing as fp
data = fp.readBinnedhdf5(fileName)
kx = data["kx"]
ky = data["ky"]
I = data["V"]
if "E" in data:
E = data["E"]
else:
tof = data["tof"]
E = tof ** (-2)
E -= np.min(E)
E /= np.max(E)
# Construct object
return cls(E, kx, ky, I=I, E0=E0, eta=eta)
@classmethod
def loadBandsMat(cls, path):
"""Load bands from mat file in numpy matrix.
**Parameters**\n
path: str
Path to the mat file.
**Return**\n
Tuple of momentum vectors and energy grid.
"""
# Import data
data = io.loadmat(path)
# Save to numpy variables
if np.abs(np.sum(np.diff(data["kxxsc"][:, 0]))) > np.abs(np.sum(np.diff(data["kxxsc"][0, :]))):
kx = data["kxxsc"][:, 0]
ky = data["kyysc"][0, :]
else:
kx = data["kxxsc"][0, :]
ky = data["kyysc"][:, 0]
evb = data["evb"]
return (kx, ky, evb)
    def initializeBand(
        self,
        kx,
        ky,
        Eb,
        offset=0.0,
        flipKAxes=False,
        kScale=1.0,
        interp_method="linear",
    ):
        """Set E0 according to reference band, e.g. DFT calculation.
        **Parameters**\n
        kx, ky: 1D array, 1D array
            Momentum values for data along x and y directions.
        Eb: 1D array
            Energy values for band mapping data.
        offset: numeric | 0.
            Offset to be added to reference energy values.
        flipKAxes: bool | False
            Flag, if true the momentum axes of the references are interchanged.
        kScale: numeric | 1.
            Scaling factor applied to k axes of reference band (after flipping if done).
        interp_method: str | 'linear'
            Method used to interpolate reference band on grid of measured data, 'linear' and 'nearest' are possible
            choices. Details see ``scipy.interpolate.RegularGridInterpolator()``.
        """
        # Detach data from input vars
        kx_in = kx.copy()
        ky_in = ky.copy()
        Eb_in = Eb.copy()
        # Preprocessing: optionally swap the reference momentum axes
        if flipKAxes:
            kx_in, ky_in = (ky_in, kx_in)
            Eb_in = np.transpose(Eb_in)
        # Scale axis
        self.kscale = kScale
        kx_in *= self.kscale
        ky_in *= self.kscale
        # Interpolation to grid of experimental data (fill_value=None makes
        # the interpolator extrapolate outside the reference grid)
        intFunc = interpolate.RegularGridInterpolator(
            (kx_in, ky_in),
            Eb_in,
            method=interp_method,
            bounds_error=False,
            fill_value=None,
        )
        kxx, kyy = np.meshgrid(self.kx, self.ky, indexing="ij")
        kxx = np.reshape(kxx, (self.lengthKx * self.lengthKy,))
        kyy = np.reshape(kyy, (self.lengthKx * self.lengthKy,))
        Einterp = intFunc(np.column_stack((kxx, kyy)))
        # Add shift to the energy values
        self.offset = offset
        self.E0 = np.reshape(Einterp + self.offset, (self.lengthKx, self.lengthKy))
        # Get indices of interpolated data: nearest discrete level on the E axis
        EE, EE0 = np.meshgrid(self.E, self.E0)
        ind1d = np.argmin(np.abs(EE - EE0), 1)
        self.indEb = ind1d.reshape(self.E0.shape)
        self.indE0 = self.indEb.copy()
        # Reinitialize logP
        self.delHist()
def smoothenI(self, sigma=(1.0, 1.0, 1.0)):
"""Apply a multidimensional Gaussian filter to the band mapping data (intensity values).
**Parameters**\n
sigma: list/tuple | (1, 1, 1)
The vector containing the Gaussian filter standard deviations in pixel space for kx, ky, and E.
"""
self.I = ndimage.gaussian_filter(self.I, sigma=sigma)
# Reinitialize logP
self.delHist()
def normalizeI(
self,
kernel_size=None,
n_bins=128,
clip_limit=0.01,
use_gpu=True,
threshold=1e-6,
):
"""Normalizes the intensity using multidimensional CLAHE (MCLAHE).
**Parameters**\n
kernel_size: list/tuple | None
Tuple of kernel sizes, 1/8 of dimension lengths of x if None.
n_bins: int | 128
Number of bins to be used in the histogram.
clip_limit: numeric | 0.01
Relative intensity limit to be ignored in the histogram equalization.
use_gpu: bool | True
Flag, if true gpu is used for computations if available.
threshold: numeric | 1e-6
Threshold below which intensity values are set to zero.
"""
I_dtype = self.I.dtype
try:
from mclahe import mclahe
self.I = mclahe(
self.I,
kernel_size=kernel_size,
n_bins=n_bins,
clip_limit=clip_limit,
use_gpu=use_gpu,
)
except ImportError:
wn.warn("The package mclahe is not installed, therefore no contrast enhancement is performed.")
self.I = self.I / np.max(self.I)
indSmall = self.I < threshold
self.I[indSmall] = threshold
self.I = self.I.astype(I_dtype)
# Reinitialize logP
self.delHist()
# Update normalization flag
self.I_normalized = True
    def symmetrizeI(self, mirror=True, rotational=True, rotational_order=6):
        """Symmetrize I with respect to reflection along x and y axis.
        **Parameters**\n
        mirror: bool | True
            Flag, if True mirror symmetrization is done wrt planes perpendicular to kx and ky axis.
        rotational: bool | True
            Flag, if True rotational symmetrization is done along axis at kx = ky = 0.
        rotational_order: int | 6
            Order of the rotational symmetry.
        """
        # Mirror symmetrization
        if mirror:
            # Symmetrize wrt plane perpendicular to kx axis: the first index
            # with kx > 0 defines the mirror plane; the averaged window is the
            # largest symmetric range around it.
            indXRef = np.min(np.where(self.kx > 0.0)[0])
            lIndX = np.min([indXRef, self.lengthKx - indXRef])
            indX = np.arange(indXRef - lIndX, indXRef + lIndX)
            self.I[indX, :, :] = (self.I[indX, :, :] + self.I[np.flip(indX, axis=0), :, :]) / 2
            # Symmetrize wrt plane perpendicular to ky axis (same construction)
            indYRef = np.min(np.where(self.ky > 0.0)[0])
            lIndY = np.min([indYRef, self.lengthKy - indYRef])
            indY = np.arange(indYRef - lIndY, indYRef + lIndY)
            self.I[:, indY, :] = (self.I[:, indY, :] + self.I[:, np.flip(indY, axis=0), :]) / 2
        # Rotational symmetrization
        if rotational:
            # Rotate about the k-point closest to (0, 0), one energy slice at a time
            center = (np.argmin(np.abs(self.kx)), np.argmin(np.abs(self.ky)))
            for i in range(self.I.shape[2]):
                self.I[:, :, i], _ = rotosymmetrize(self.I[:, :, i], center, rotsym=rotational_order)
        # Reinitialize logP after modifying the intensity
        self.delHist()
    def generateI(self):
        """Stub for synthetic intensity generation; currently a no-op, so
        ``self.I`` remains ``None`` when no intensity data is supplied."""
        pass
    def iter_seq(self, num_epoch=1, updateLogP=False, disable_tqdm=False):
        """Iterate band structure reconstruction process.
        Sequential ICM-style optimization: visits k-points in random order and
        assigns each the energy index maximizing the local log-probability.
        **Parameters**\n
        num_epoch: int | 1
            Number of iterations.
        updateLogP: bool | False
            Flag, if true logP is updated every half epoch.
        disable_tqdm: bool | False
            Flag, it true no progress bar is shown during optimization.
        """
        # Prepare parameter for iteration
        logI = np.log(self.I)
        # Energies scaled so squared differences directly give the
        # neighbor-interaction penalty
        ENN = self.E / (np.sqrt(2) * self.eta)
        if self.includeCurv:
            ECurv = self.E / (np.sqrt(2) * self.etaCurv)
        # Do iterations over randomly drawn (kx, ky) nodes
        indList = np.random.choice(self.lengthKx * self.lengthKy, self.lengthKx * self.lengthKy * num_epoch)
        for i, ind in enumerate(tqdm(indList, disable=disable_tqdm)):
            indx = ind // self.lengthKy
            indy = ind % self.lengthKy
            # Get logP for given index (accumulated over all neighbor terms)
            logP = 0
            if indx > 0:
                logP -= (ENN - ENN[self.indEb[indx - 1, indy]]) ** 2
                if self.includeCurv:
                    if indx > 1:
                        logP -= (ECurv[self.indEb[indx - 2, indy]] - 2 * ECurv[self.indEb[indx - 1, indy]] + ECurv) ** 2
                    if indx < (self.lengthKx - 1):
                        logP -= (ECurv[self.indEb[indx - 1, indy]] - 2 * ECurv + ECurv[self.indEb[indx + 1, indy]]) ** 2
            if indx < (self.lengthKx - 1):
                logP -= (ENN - ENN[self.indEb[indx + 1, indy]]) ** 2
                if self.includeCurv:
                    if indx < (self.lengthKx - 2):
                        # NOTE(review): duplicates the backward-curvature term
                        # above; a forward term using indEb[indx + 1] and
                        # indEb[indx + 2] looks intended -- TODO confirm.
                        logP -= (ECurv[self.indEb[indx - 2, indy]] - 2 * ECurv[self.indEb[indx - 1, indy]] + ECurv) ** 2
            if indy > 0:
                logP -= (ENN - ENN[self.indEb[indx, indy - 1]]) ** 2
                if self.includeCurv:
                    if indy > 1:
                        logP -= (ECurv[self.indEb[indx, indy - 2]] - 2 * ECurv[self.indEb[indx, indy - 1]] + ECurv) ** 2
                    if indy < (self.lengthKy - 1):
                        logP -= (ECurv[self.indEb[indx, indy - 1]] - 2 * ECurv + ECurv[self.indEb[indx, indy + 1]]) ** 2
            if indy < (self.lengthKy - 1):
                logP -= (ENN - ENN[self.indEb[indx, indy + 1]]) ** 2
                if self.includeCurv:
                    if indy < (self.lengthKy - 2):
                        # NOTE(review): same suspected copy-paste as along kx;
                        # a forward term using indEb[indx, indy + 1] and
                        # indEb[indx, indy + 2] looks intended -- TODO confirm.
                        logP -= (ECurv[self.indEb[indx, indy - 2]] - 2 * ECurv[self.indEb[indx, indy - 1]] + ECurv) ** 2
            # Data (likelihood) term, then pick the best energy index
            logP += logI[indx, indy, :]
            self.indEb[indx, indy] = np.argmax(logP)
            # Record logP twice per epoch (mid-epoch and end-of-epoch)
            if updateLogP and (
                ((i + 1) % (self.lengthKx * self.lengthKy)) == 0
                or ((i + 1) % (self.lengthKx * self.lengthKy)) == (self.lengthKx * self.lengthKy // 2)
            ):
                self.logP = np.append(self.logP, self.getLogP())
        self.epochsDone += num_epoch
    @tf.function
    def compute_logP(self, E1d, E3d, logI, indEb, lengthKx):
        """Compute per-node log-probability volumes for the 2x2 checkerboard
        sub-lattices: data term minus squared scaled-energy differences to
        the four nearest neighbors.
        **Parameters**\n
        E1d: 1D tensor
            Scaled energy axis, ``E / (sqrt(2) * eta)``.
        E3d: 3D tensor
            Same scaled axis shaped (1, 1, nE) for broadcasting.
        logI: nested list of tensors
            Log-intensity for each of the four sub-lattices.
        indEb: nested list of tf.Variable
            Current band-index estimates for each sub-lattice.
        lengthKx: int
            Even-truncated length of the kx axis.
        """
        # Squared scaled-energy difference between each sub-lattice's current
        # band energy and every candidate energy level
        squDiff = [[tf.square(tf.gather(E1d, indEb[i][j]) - E3d) for j in range(2)] for i in range(2)]
        logP = self._initSquMat(2)
        for i in range(2):
            for j in range(2):
                # The (i - 1)/(j - 1) indices wrap around to the opposite
                # sub-lattice; tf.pad realigns the shifted neighbor terms.
                # NOTE(review): the ky-axis slice below also uses lengthKx,
                # which looks safe only for square momentum grids -- TODO
                # confirm.
                logP[i][j] = (
                    logI[i][j]
                    - squDiff[i - 1][j]
                    - squDiff[i][j - 1]
                    - tf.pad(
                        squDiff[i - 1][j][i : (lengthKx // 2 - 1 + i), :, :],
                        [[1 - i, i], [0, 0], [0, 0]],
                    )
                    - tf.pad(
                        squDiff[i][j - 1][:, j : (lengthKx // 2 - 1 + j), :],
                        [[0, 0], [1 - j, j], [0, 0]],
                    )
                )
        return logP
@tf.function
def compute_logPTot(self, logP, logI, indEb):
return (
tf.reduce_sum(tf.gather(logP[0][0], indEb[0][0], batch_dims=2))
+ tf.reduce_sum(tf.gather(logP[1][1], indEb[1][1], batch_dims=2))
+ tf.reduce_sum(tf.gather(logI[0][1], indEb[0][1], batch_dims=2))
+ tf.reduce_sum(tf.gather(logI[1][0], indEb[1][0], batch_dims=2))
)
@tf.function
def compute_updateW(self, logP):
# white Nodes
updateW = [tf.argmax(logP[i][i], axis=2, output_type=tf.int32) for i in range(2)]
return updateW
@tf.function
def compute_updateB(self, logP):
# black Nodes
updateB = [tf.argmax(logP[i][1 - i], axis=2, output_type=tf.int32) for i in range(2)]
return updateB
    def iter_para(
        self,
        num_epoch=1,
        updateLogP=False,
        use_gpu=True,
        disable_tqdm=False,
    ):
        """Iterate band structure reconstruction process (no curvature), computations done in parallel using Tensorflow.
        **Parameters**\n
        num_epoch: int | 1
            Number of iteration epochs.
        updateLogP: bool | False
            Flag, if true logP is updated every half epoch
        use_gpu: bool | True
            Flag, if true gpu is used for computations if available
        disable_tqdm: bool | False
            Flag, it true no progress bar is shown during optimization
        """
        if use_gpu:
            # Enable memory growth so TF does not grab all GPU memory upfront
            physical_devices = tf.config.list_physical_devices("GPU")
            for device in physical_devices:
                tf.config.experimental.set_memory_growth(device, True)
        with contextlib.nullcontext() if use_gpu else tf.device("/CPU:0"):
            if updateLogP:
                self.logP = np.append(self.logP, np.zeros(2 * num_epoch))
            # Truncate to even lengths so the 2x2 checkerboard split is exact
            lengthKx = 2 * (self.lengthKx // 2)
            lengthKy = 2 * (self.lengthKy // 2)
            indX, indY = np.meshgrid(np.arange(lengthKx, step=2), np.arange(lengthKy, step=2), indexing="ij")
            logI = [[tf.constant(np.log(self.I[indX + i, indY + j, :])) for j in range(2)] for i in range(2)]
            indEb = [
                [tf.Variable(np.expand_dims(self.indEb[indX + i, indY + j], 2), dtype=tf.int32) for j in range(2)]
                for i in range(2)
            ]
            E1d = tf.constant(self.E / (np.sqrt(2) * self.eta))
            E3d = tf.constant(self.E / (np.sqrt(2) * self.eta), shape=(1, 1, self.E.shape[0]))
            # NOTE(review): only lengthKx is passed; compute_logP slices the
            # ky axis with it too (assumes a square grid) -- TODO confirm.
            logP = self.compute_logP(E1d, E3d, logI, indEb, lengthKx)
            # NOTE(review): the absolute indices 2*epoch+1 / 2*epoch+2 below
            # are only correct on the first call; repeated calls overwrite
            # earlier history (cf. the negative indexing in iter_para_curv).
            for epoch in tqdm(range(num_epoch), disable=disable_tqdm):
                # white nodes (diagonal sub-lattices) updated first
                updateW = self.compute_updateW(logP)
                for i in range(2):
                    indEb[i][i].assign(tf.expand_dims(updateW[i], 2))
                logP = self.compute_logP(E1d, E3d, logI, indEb, lengthKx)
                if updateLogP:
                    self.logP[2 * epoch + 1] = self.compute_logPTot(logP, logI, indEb).numpy()
                # black nodes (off-diagonal sub-lattices)
                updateB = self.compute_updateB(logP)
                for i in range(2):
                    indEb[i][1 - i].assign(tf.expand_dims(updateB[i], 2))
                logP = self.compute_logP(E1d, E3d, logI, indEb, lengthKx)
                if updateLogP:
                    self.logP[2 * epoch + 2] = self.compute_logPTot(logP, logI, indEb).numpy()
            # Extract results
            indEbOut = [[indEb_val.numpy()[:, :, 0] for indEb_val in indEb_row] for indEb_row in indEb]
        # Store results
        for i in range(2):
            for j in range(2):
                self.indEb[indX + i, indY + j] = indEbOut[i][j]
        self.epochsDone += num_epoch
    def iter_para_curv(
        self,
        num_epoch=1,
        updateLogP=False,
        use_gpu=True,
        disable_tqdm=False,
        graph_reset=True,
        **kwargs,
    ):
        """Iterate band structure reconstruction process (with curvature), computations done in parallel using
        Tensorflow.
        **Parameters**\n
        num_epoch: int | 1
            Number of iteration epochs.
        updateLogP: bool | False
            Flag, if true logP is updated every half epoch.
        use_gpu: bool | True
            Flag, if true gpu is used for computations if available.
        disable_tqdm: bool | False
            Flag, it true no progress bar is shown during optimization.
        graph_reset: bool | True
            Flag, if true the TF1 default graph is reset after optimization.
        **kwargs: keyword arguments
            config: session configuration; defaults to a CPU-only
            ``tf.compat.v1.ConfigProto`` when ``use_gpu`` is False.
        """
        if not self.includeCurv:
            raise (Exception("Curvature is not considered in this MRF object. Please use iter_para instead."))
        # Preprocessing
        if updateLogP:
            self.logP = np.append(self.logP, np.zeros(3 * num_epoch))
        # Truncate the grid to multiples of 3 for the 3x3 sub-lattice split
        nKx = self.lengthKx // 3
        nKy = self.lengthKy // 3
        nE = self.lengthE
        lengthKx = 3 * nKx
        lengthKy = 3 * nKy
        indX, indY = np.meshgrid(np.arange(lengthKx, step=3), np.arange(lengthKy, step=3), indexing="ij")
        # Initialize logI and indEb for each field type
        logI = [[tf.constant(np.log(self.I[indX + i, indY + j, :])) for j in range(3)] for i in range(3)]
        indEb = [
            [tf.Variable(np.expand_dims(self.indEb[indX + i, indY + j], 2), dtype=tf.int32) for j in range(3)]
            for i in range(3)
        ]
        # Scaled energy axes for the neighbor (ENN) and curvature (ECurv) terms
        ENN1d = tf.constant(self.E / (np.sqrt(2) * self.eta))
        ENN3d = tf.constant(self.E / (np.sqrt(2) * self.eta), shape=(1, 1, self.E.shape[0]))
        ECurv1d = tf.constant(self.E / (np.sqrt(2) * self.etaCurv))
        ECurv3d = tf.constant(self.E / (np.sqrt(2) * self.etaCurv), shape=(1, 1, self.E.shape[0]))
        EbCurv = [[tf.gather(ECurv1d, indEb[i][j]) for j in range(3)] for i in range(3)]
        # Calculate square differences
        squDiff = [[tf.square(tf.gather(ENN1d, indEb[i][j]) - ENN3d) for j in range(3)] for i in range(3)]
        # Calculate log(P)
        logP = self._initSquMat(3)
        for i in range(3):
            for j in range(3):
                # pi/pj encode the boundary padding for each sub-lattice offset
                pi = [max(0, 1 - i), max(0, i - 1)]
                pj = [max(0, 1 - j), max(0, j - 1)]
                logP[i][j] = (
                    logI[i][j]
                    - tf.pad(
                        tf.slice(squDiff[i - 2][j], [pi[1], 0, 0], [nKx - pi[1], nKy, nE]),
                        [[0, pi[1]], [0, 0], [0, 0]],
                    )
                    - tf.pad(
                        tf.slice(squDiff[i][j - 2], [0, pj[1], 0], [nKx, nKy - pj[1], nE]),
                        [[0, 0], [0, pj[1]], [0, 0]],
                    )
                    - tf.pad(
                        tf.slice(squDiff[i - 1][j], [0, 0, 0], [nKx - pi[0], nKy, nE]),
                        [[pi[0], 0], [0, 0], [0, 0]],
                    )
                    - tf.pad(
                        tf.slice(squDiff[i][j - 1], [0, 0, 0], [nKx, nKy - pj[0], nE]),
                        [[0, 0], [pj[0], 0], [0, 0]],
                    )
                    - tf.pad(
                        tf.square(
                            tf.slice(
                                tf.roll(EbCurv[i - 2][j], shift=1 - pi[1], axis=0)
                                - 2 * tf.roll(EbCurv[i - 1][j], shift=pi[0], axis=0)
                                + ECurv3d,
                                [1 - pi[1], 0, 0],
                                [nKx - 1 + pi[1], nKy, nE],
                            ),
                        ),
                        [[1 - pi[1], 0], [0, 0], [0, 0]],
                    )
                    - tf.pad(
                        tf.square(
                            tf.slice(
                                tf.roll(EbCurv[i - 1][j], shift=pi[0], axis=0)
                                - 2 * ECurv3d
                                + tf.roll(EbCurv[i - 2][j], shift=-pi[1], axis=0),
                                [pi[0], 0, 0],
                                [nKx - pi[0] - pi[1], nKy, nE],
                            ),
                        ),
                        [[pi[0], pi[1]], [0, 0], [0, 0]],
                    )
                    - tf.pad(
                        tf.square(
                            tf.slice(
                                ECurv3d
                                - 2 * tf.roll(EbCurv[i - 2][j], shift=-pi[1], axis=0)
                                + tf.roll(EbCurv[i - 1][j], shift=pi[0] - 1, axis=0),
                                [0, 0, 0],
                                [nKx - 1 + pi[0], nKy, nE],
                            ),
                        ),
                        [[0, 1 - pi[0]], [0, 0], [0, 0]],
                    )
                    - tf.pad(
                        tf.square(
                            tf.slice(
                                tf.roll(EbCurv[i][j - 2], shift=1 - pj[1], axis=1)
                                - 2 * tf.roll(EbCurv[i][j - 1], shift=pj[0], axis=1)
                                + ECurv3d,
                                [0, 1 - pj[1], 0],
                                [nKx, nKy - 1 + pj[1], nE],
                            ),
                        ),
                        [[0, 0], [1 - pj[1], 0], [0, 0]],
                    )
                    - tf.pad(
                        tf.square(
                            tf.slice(
                                tf.roll(EbCurv[i][j - 1], shift=pj[0], axis=1)
                                - 2 * ECurv3d
                                + tf.roll(EbCurv[i][j - 2], shift=-pj[1], axis=1),
                                [0, pj[0], 0],
                                [nKx, nKy - pj[0] - pj[1], nE],
                            ),
                        ),
                        [[0, 0], [pj[0], pj[1]], [0, 0]],
                    )
                    - tf.pad(
                        tf.square(
                            tf.slice(
                                ECurv3d
                                - 2 * tf.roll(EbCurv[i][j - 2], shift=-pj[1], axis=1)
                                + tf.roll(EbCurv[i][j - 1], shift=pj[0] - 1, axis=1),
                                [0, 0, 0],
                                [nKx, nKy - 1 + pj[0], nE],
                            ),
                        ),
                        [[0, 0], [0, 1 - pj[0]], [0, 0]],
                    )
                )
        if updateLogP:
            # Total log-likelihood gathered over all nine sub-lattices
            logPTot = (
                tf.reduce_sum(tf.compat.v1.batch_gather(logP[0][0], indEb[0][0]))
                + tf.reduce_sum(tf.compat.v1.batch_gather(logP[1][1], indEb[1][1]))
                + tf.reduce_sum(tf.compat.v1.batch_gather(logP[2][2], indEb[2][2]))
                + tf.reduce_sum(tf.compat.v1.batch_gather(logI[0][1], indEb[0][1]))
                + tf.reduce_sum(tf.compat.v1.batch_gather(logI[1][0], indEb[1][0]))
                + tf.reduce_sum(tf.compat.v1.batch_gather(logI[2][0], indEb[2][0]))
                + tf.reduce_sum(tf.compat.v1.batch_gather(logI[0][2], indEb[0][2]))
                + tf.reduce_sum(tf.compat.v1.batch_gather(logI[2][1], indEb[2][1]))
                + tf.reduce_sum(tf.compat.v1.batch_gather(logI[1][2], indEb[1][2]))
            )
        # Do updates
        update = [
            [
                tf.compat.v1.assign(
                    indEb[i][j],
                    tf.expand_dims(tf.argmax(logP[i][j], axis=2, output_type=tf.int32), 2),
                )
                for j in range(3)
            ]
            for i in range(3)
        ]
        # Three interleaved update groups (the 3-coloring of the sub-lattices)
        updateW = [update[i][i] for i in range(3)]
        updateB = [update[(i + 1) % 3][i] for i in range(3)]
        updateO = [update[(i + 2) % 3][i] for i in range(3)]
        # Do optimization (TF1 graph-mode session)
        if use_gpu:
            config = kwargs.pop("config", None)
        else:
            config = kwargs.pop("config", tf.compat.v1.ConfigProto(device_count={"GPU": 0}))
        with tf.compat.v1.Session(config=config) as sess:
            sess.run(tf.compat.v1.global_variables_initializer())
            for i in tqdm(range(num_epoch), disable=disable_tqdm):
                sess.run(updateW)
                if updateLogP:
                    # Negative (from-the-end) indices address the freshly appended slots
                    self.logP[3 * (i - num_epoch)] = sess.run(logPTot)
                sess.run(updateB)
                if updateLogP:
                    self.logP[3 * (i - num_epoch) + 1] = sess.run(logPTot)
                sess.run(updateO)
                if updateLogP:
                    self.logP[3 * (i - num_epoch) + 2] = sess.run(logPTot)
            # Extract results
            indEbOut = sess.run(indEb)
        # Store results
        for i in range(3):
            for j in range(3):
                self.indEb[indX + i, indY + j] = indEbOut[i][j][:, :, 0]
        self.epochsDone += num_epoch
        if graph_reset:
            tf.compat.v1.reset_default_graph()
def getEb(self):
"""Retrieve the energy values of the reconstructed band."""
return self.E[self.indEb].copy()
def getLogP(self):
"""Retrieve the log likelihood of the electronic band structure given the model."""
# Likelihood terms
indKx, indKy = np.meshgrid(np.arange(self.lengthKx), np.arange(self.lengthKy), indexing="ij")
logP = np.sum(np.log(self.I[indKx, indKy, self.indEb]))
# Interaction terms
Eb = self.getEb()
if self.lengthKx > 1:
logP -= np.sum((Eb[0 : (self.lengthKx - 1), :] - Eb[1 : self.lengthKx, :]) ** 2) / (2 * self.eta**2)
if self.lengthKy > 1:
logP -= np.sum((Eb[:, 0 : (self.lengthKy - 1)] - Eb[:, 1 : self.lengthKy]) ** 2) / (2 * self.eta**2)
return logP
    def plotI(
        self,
        kx=None,
        ky=None,
        E=None,
        cmapName="viridis",
        plotBand=False,
        plotBandInit=False,
        bandColor="r",
        initColor="k",
        plotSliceInBand=False,
        figsize=[9, 9],
        equal_axes=False,
    ):
        """Plot a 2D cut of the intensity volume I(kx, ky, E).

        Exactly one of ``kx``, ``ky`` or ``E`` should be given; the slice of
        ``self.I`` nearest to the requested value is drawn as a colormesh.
        On k-E cuts the current and/or initial band can be overlaid, and the
        cut position can additionally be marked on a map of the reconstructed
        band (``plotSliceInBand``).

        **Parameters**\n
        kx, ky: numeric, numeric | None, None
            kx (or ky) value to plot the respective slice at.
        E: numeric | None
            E value to plot the respective slice at.
        cmapName: str | 'viridis'
            Name of the matplotlib colormap for the intensity image.
        plotBand: bool | False
            Flag, if true current electronic band is plotted in image.
        plotBandInit: bool | False
            Flag, if true E0 is plotted in image.
        bandColor: str | 'r'
            Color string for band for matplotlib.pyplot function.
        initColor: str | 'k'
            Color string for initial band for matplotlib.pyplot function.
        plotSliceInBand: bool | False
            Flag, if true plots band as colormesh and corresponding slice in red.
        figsize: list/tuple | [9, 9]
            Size of the figure produced.
        equal_axes: bool | False
            Use same scaling for both axes.
        """
        # Prepare data to plot. Each branch picks the grid slice nearest the
        # requested coordinate. NOTE(review): if kx, ky and E are all None, the
        # plotting below raises NameError (x/y/z are never set); if more than one
        # is given, a later branch silently overrides an earlier one.
        if kx is not None:
            # Cut at fixed kx: intensity over the (ky, E) plane.
            indKx = np.argmin(np.abs(self.kx - kx))
            x, y = np.meshgrid(self.ky, self.E)
            z = np.transpose(self.I[indKx, :, :])
            lab = [r"$k_y (\AA^{-1})$", "$E (eV)$"]
            Eb = self.getEb()
            E0 = self.E[self.indE0].copy()
            bandX = self.ky
            bandY = Eb[indKx, :]
            initY = E0[indKx, :]
        if ky is not None:
            # Cut at fixed ky: intensity over the (kx, E) plane.
            indKy = np.argmin(np.abs(self.ky - ky))
            x, y = np.meshgrid(self.kx, self.E)
            z = np.transpose(self.I[:, indKy, :])
            lab = [r"$k_x (\AA^{-1})$", "$E (eV)$"]
            Eb = self.getEb()
            E0 = self.E[self.indE0].copy()
            bandX = self.kx
            bandY = Eb[:, indKy]
            initY = E0[:, indKy]
        if E is not None:
            # Cut at fixed energy: intensity over the (kx, ky) plane. No band
            # overlay data is prepared for this case (see guard further below).
            indE = np.argmin(np.abs(self.E - E))
            x, y = np.meshgrid(self.kx, self.ky)
            z = np.transpose(self.I[:, :, indE])
            lab = [r"$k_x (\AA^{-1})$", r"$k_y (\AA^{-1})$"]
        # Plot I
        plt.rcParams["figure.figsize"] = figsize
        plt.figure()
        cmap = plt.get_cmap(cmapName)
        plt.pcolormesh(x, y, z, cmap=cmap)
        plt.xticks(fontsize=20)
        plt.yticks(fontsize=20)
        plt.xlabel(lab[0], fontsize=24)
        plt.ylabel(lab[1], fontsize=24)
        cb = plt.colorbar(pad=0.02)
        # Colorbar label reflects whether the stored intensity was normalized.
        if self.I_normalized:
            colorbar_label = "$I/I_{max}$"
        else:
            colorbar_label = "$I (counts)$"
        cb.set_label(label=colorbar_label, fontsize=24)
        cb.ax.tick_params(labelsize=20)
        if equal_axes:
            ax = plt.gca()
            ax.set_aspect("equal", "box")
        # Plot band if requested (only meaningful on k-E cuts, hence E is None).
        if (plotBand or plotBandInit) and (E is None):
            if plotBand:
                plt.plot(bandX, bandY, bandColor, linewidth=2.0)
            if plotBandInit:
                plt.plot(bandX, initY, initColor, linewidth=2.0)
        # Second figure: reconstructed band as a colormesh with the cut position
        # drawn as a red line. NOTE(review): this assumes the cut was along kx or
        # ky — with an E-only cut, indKx/indKy are undefined and this raises.
        if plotSliceInBand:
            x, y = np.meshgrid(self.kx, self.ky, indexing="ij")
            plt.figure()
            plt.pcolormesh(x, y, self.getEb())
            plt.xticks(fontsize=20)
            plt.yticks(fontsize=20)
            plt.xlabel(r"$k_x (\AA^{-1})$", fontsize=24)
            plt.ylabel(r"$k_y (\AA^{-1})$", fontsize=24)
            cb = plt.colorbar(pad=0.02)
            cb.ax.tick_params(labelsize=20)
            cb.set_label(label="$E (eV)$", fontsize=24)
            if equal_axes:
                ax = plt.gca()
                ax.set_aspect("equal", "box")
            if kx is not None:
                # Vertical line at the chosen kx across all ky.
                plt.plot(
                    np.array(self.lengthKy * [self.kx[indKx]]),
                    self.ky,
                    "r",
                    linewidth=2.0,
                )
            else:
                # Horizontal line at the chosen ky across all kx.
                plt.plot(
                    self.kx,
                    np.array(self.lengthKx * [self.ky[indKy]]),
                    "r",
                    linewidth=2.0,
                )
def plotBands(self, surfPlot=False, cmapName="viridis", figsize=[9, 9], equal_axes=False):
"""Plot reconstructed electronic band structure.
**Parameters**\n
surfPlot: bool | False
Flag, if true a surface plot is shown in addition.
cmapName: str | 'viridis'
Name of the colormap.
figsize: list/tuple | [9, 9]
Size of the figure produced.
equal_axes: bool | False
Option to apply the same scaling for both axes.
"""
x, y = np.meshgrid(self.kx, self.ky, indexing="ij")
# Colormesh plot
plt.rcParams["figure.figsize"] = figsize
plt.figure()
cmap = plt.get_cmap(cmapName)
plt.pcolormesh(x, y, self.getEb(), cmap=cmap)
plt.xticks(fontsize=20)
plt.yticks(fontsize=20)
plt.xlabel(r"$k_x (\AA^{-1})$", fontsize=24)
plt.ylabel(r"$k_y (\AA^{-1})$", fontsize=24)
cb = plt.colorbar(pad=0.02)
cb.ax.tick_params(labelsize=20)
cb.set_label(label="$E (eV)$", fontsize=24)
if equal_axes:
ax = plt.gca()
ax.set_aspect("equal", "box")
# Surface plot
if surfPlot:
fig = plt.figure()
ax = fig.gca(projection="3d")
ax.plot_surface(x, y, np.transpose(self.getEb()))
ax.set_xlabel(r"$k_x (\AA^{-1})$", fontsize=24)
ax.set_ylabel(r"$k_y (\AA^{-1})$", fontsize=24)
ax.set_zlabel("$E (eV)$", fontsize=24)
plt.xticks(fontsize=20)
plt.yticks(fontsize=20)
for tick in ax.zaxis.get_major_ticks():
tick.label.set_fontsize(20)
def plotLoss(self):
"""Plot the change of the negative log likelihood."""
epoch = np.linspace(0, self.epochsDone, len(self.logP), endpoint=True)
plt.rcParams["figure.figsize"] = [9, 9]
fig = plt.figure()
ax = fig.gca()
ax.plot(epoch, -self.logP, linewidth=2.0)
ax.set_xlabel("epochs", fontsize=24)
ax.set_ylabel(r"$-\log(p)+$" + "const", fontsize=24)
plt.xticks(fontsize=20)
plt.yticks(fontsize=20)
def delHist(self):
"""Deletes the training history by resetting delta log(p) to its initial value."""
self.logP = np.array([self.getLogP()])
self.epochsDone = 0
def saveBand(self, fileName, hyperparams=True, index=None):
"""Save the reconstructed electronic band and associated optimization parameters to file.
**Parameters**\n
fileName: str
Name of the file to save data to.
hyperparams: bool | True
Option to save hyperparameters.
index: int | None
Energy band index.
"""
with h5py.File(fileName, "w") as file:
file.create_dataset("/axes/kx", data=self.kx)
file.create_dataset("/axes/ky", data=self.ky)
file.create_dataset("/bands/Einit", data=self.E0)
file.create_dataset("/bands/Eb", data=self.getEb())
if hyperparams:
if index is None:
band_index = self.band_index
else:
band_index = index
file.create_dataset("/hyper/band_index", data=band_index)
file.create_dataset("/hyper/k_scale", data=self.kscale)
file.create_dataset("/hyper/E_offset", data=self.offset)
file.create_dataset("/hyper/nn_eta", data=self.eta)
def loadBand(self, Eb=None, fileName=None, use_as_init=True):
"""Load bands in reconstruction object, either using numpy matrix or directly from file.
**Parameters**\n
Eb: numpy array | None
Energy values of an electronic band.
fileName: str | None
Name of h5 file containing band.
use_as_init: bool | True
Flag, if true loaded band is used as initialization of the object
"""
if fileName is not None:
file = h5py.File(fileName, "r")
if self.lengthKx == file["/axes/kx"].shape[0] and self.lengthKy == file["/axes/ky"].shape[0]:
Eb = np.asarray(file["/bands/Eb"])
if Eb is not None:
EE, EEb = np.meshgrid(self.E, Eb)
ind1d = np.argmin(np.abs(EE - EEb), 1)
self.indEb = ind1d.reshape(Eb.shape)
# Set initial band
if use_as_init:
# Set indices of initial band
self.indE0 = self.indEb.copy()
# Reinitialize logP
self.delHist()
def _initSquMat(self, n, el=None):
"""Returns as square matrix of size nxn with el as element in each element.
**Parameters**\n
n: int
Size of the square matrix.
el: numeric | None
Values of each element.
**Return**\n
Square matrix of size nxn with el as element in each element.
"""
return [[el for _ in range(n)] for _ in range(n)]
| Python |
3D | mpes-kit/fuller | figures/Fig2_Theory_vs_Reconstruction.ipynb | .ipynb | 11,153 | 247 | {
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
    "## Comparison between initialization (LDA-DFT) and reconstruction"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import warnings as wn\n",
"wn.filterwarnings(\"ignore\")\n",
"\n",
"import os\n",
"import fuller\n",
"from natsort import natsorted\n",
"import glob as g\n",
"import numpy as np\n",
"import scipy.io as sio\n",
"from mpes import analysis as aly\n",
"import matplotlib.pyplot as plt\n",
"import matplotlib as mpl\n",
"from scipy import interpolate\n",
"%matplotlib inline"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"mpl.rcParams['font.family'] = 'sans-serif'\n",
"mpl.rcParams['font.sans-serif'] = 'Arial'\n",
"mpl.rcParams['axes.linewidth'] = 2\n",
"mpl.rcParams['pdf.fonttype'] = 42\n",
"mpl.rcParams['ps.fonttype'] = 42\n",
"\n",
"colornames = ['#646464', '#666666', '#6a6a6a', '#6f6f6f', '#737373', '#787878', '#7d7d7d', '#828282', '#878787', '#8d8d8d', '#929292', '#989898', '#9e9e9e', '#a4a4a4', '#aaaaaa', '#b0b0b0', '#b6b6b6', '#bcbcbc', '#c2c2c2', '#c9c9c9', '#cfcfcf', '#d6d6d6', '#dcdcdc', '#e3e3e3', '#eaeaea', '#efefee', '#efeee5', '#efeddc', '#efecd3', '#eeebca', '#eeeac0', '#eee9b7', '#eee8ad', '#ede7a4', '#ede69a', '#ede590', '#ede487', '#ece37d', '#ece273', '#ece069', '#ecdf5f', '#ebde55', '#ebdd4b', '#ebdc41', '#ebdb37', '#ebd333', '#ebc933', '#ecbe32', '#ecb432', '#eda931', '#ee9e31', '#ee9330', '#ef8830', '#ef7d2f', '#f0722f', '#f0672e', '#f15c2e', '#f2512d', '#f2462d', '#f33b2c', '#f3302c', '#f4252b', '#f4192b', '#ef182f', '#e81834', '#e21939', '#db1a3e', '#d51a43', '#ce1b48', '#c71b4d', '#c11c52', '#ba1c58', '#b31d5d', '#ac1d62', '#a61e67', '#9f1e6c', '#981f72', '#911f77', '#8a207c', '#842182']\n",
"custom_cmap = mpl.colors.LinearSegmentedColormap.from_list('custom', colornames, N=256)\n",
"\n",
"# Create plot folder if needed\n",
"if not os.path.exists('../results/figures'):\n",
" os.mkdir('../results/figures')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
    "# Load energy bands from reconstruction\n",
"recbands = np.load(r'../data/processed/wse2_recon/postproc_refrotsym_bands_lda.npy')\n",
"reconfile = r'../data/processed/wse2_recon/lda_recon_mrf_rec_00.h5'\n",
"einband = fuller.utils.loadHDF(reconfile, groups=['axes'])\n",
"kx, ky = einband['kx'], einband['ky']\n",
"nkx, nky = kx.size, ky.size\n",
"\n",
"# Load energy bands from theory (used as initialization for the reconstruction)\n",
"theory = sio.loadmat(r'../data/theory/WSe2_LDA_bands.mat')\n",
"bslda = theory['evb']\n",
"kxxth, kyyth = theory['kxxsc'], theory['kyysc']\n",
"kxth, kyth = kxxth[:,0], kyyth[0,:]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Calculate the path for the auxiliary lines on figure\n",
"verts = np.array([[ 80.2399401 , 208.0445881 ],\n",
" [175.36646777, 208.04459675],\n",
" [221.9318386 , 126.24592903],\n",
" [175.36871023, 44.44731539],\n",
" [ 80.23765786, 44.44726939],\n",
" [ 33.69597945, 126.24588838]])\n",
"pks = np.vstack((verts, verts[0,:]))\n",
"kxip = interpolate.interp1d(np.arange(0, 256), kx)\n",
"kyip = interpolate.interp1d(np.arange(0, 256), ky)\n",
"kpks = np.asarray([[kxip(i)-0.04, kyip(j)] for (i, j) in pks])\n",
"msk = aly.circmask(recbands[0,...], 128, 125, 105, sign='xnan', method='algebraic')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Main Figure 2d"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ksc = 1.1 # Hyperparameter used in reconstruction to align the theory and data to same resolution\n",
"kxshift = 0.05 # Corrections for imperfect axis calibration\n",
"f, axs = plt.subplots(7, 2, figsize=(8.3, 30))\n",
"for i, ind in enumerate(range(0, 7)):\n",
" \n",
" intFunc = interpolate.RegularGridInterpolator((kx/ksc, ky/ksc), bslda[2*ind,33:-30, 41:-42],\n",
" bounds_error=False, fill_value=None)\n",
" kxx, kyy = np.meshgrid(kx, ky, indexing='ij')\n",
" kxx = np.reshape(kxx, (nkx * nky,))\n",
" kyy = np.reshape(kyy, (nkx * nky,))\n",
" Ek_scaled = np.reshape(intFunc(np.column_stack((kxx, kyy))), (nkx, nky))\n",
" \n",
" axs[i,0].imshow(Ek_scaled.T*msk, cmap=custom_cmap,\n",
" extent=[kx[0]+kxshift, kx[-1]+kxshift, ky[0], ky[-1]])\n",
" axs[i,0].spines['right'].set_color('none')\n",
" axs[i,0].spines['top'].set_color('none')\n",
" axs[i,0].tick_params(axis='both', length=8, width=2, labelsize=28)\n",
" axs[i,0].set_xlabel('$k_x \\, (\\mathrm{\\AA}^{-1})$', fontsize=20) #, transform=axs[i,0].transAxes\n",
" axs[i,0].xaxis.set_label_coords(0.88, 0.15)\n",
" axs[i,0].set_ylabel('$k_y \\, (\\mathrm{\\AA}^{-1})$', fontsize=20, rotation=0)\n",
" axs[i,0].yaxis.set_label_coords(0.20, 0.90)\n",
" if i < 6:\n",
" axs[i,0].set_xticklabels('')\n",
" axs[i,0].text(0.85, 0.93, '# '+str(ind+1), transform=axs[i,0].transAxes, fontsize=20)\n",
" axs[i,0].axvline(x=0, ls='--', lw=1, color='k', dashes=[5, 4])\n",
" axs[i,0].axhline(y=0, ls='--', lw=1, color='k', dashes=[5, 4])\n",
" \n",
" # Draw hexagon\n",
" axs[i,0].plot(kpks[:,1]-0.02, kpks[:,0], '--k', lw=1, dashes=[5, 4])\n",
" \n",
" # Draw the reconstruction side\n",
" axs[i,1].imshow(recbands[ind,...]*msk, cmap=custom_cmap, extent=[kx[0]+kxshift, kx[-1]+kxshift, ky[0], ky[-1]])\n",
" axs[i,1].spines['right'].set_color('none')\n",
" axs[i,1].spines['top'].set_color('none')\n",
" axs[i,1].tick_params(axis='both', length=8, width=2, labelsize=28)\n",
" axs[i,1].set_xlabel('$k_x \\, (\\mathrm{\\AA}^{-1})$', fontsize=20)\n",
" axs[i,1].xaxis.set_label_coords(0.88, 0.15)\n",
" axs[i,1].set_ylabel('$k_y \\, (\\mathrm{\\AA}^{-1})$', fontsize=20, rotation=0)\n",
" axs[i,1].yaxis.set_label_coords(0.20, 0.90)\n",
" if i < 6:\n",
" axs[i,1].set_xticklabels('')\n",
" axs[i,1].set_yticklabels('')\n",
" axs[i,1].text(0.85, 0.93, '# '+str(ind+1), transform=axs[i,1].transAxes, fontsize=20)\n",
" axs[i,1].axvline(x=0, ls='--', lw=1, color='k', dashes=[5, 4])\n",
" axs[i,1].axhline(y=0, ls='--', lw=1, color='k', dashes=[5, 4])\n",
" \n",
" # Draw hexagon\n",
" axs[i,1].plot(kpks[:,1]-0.01, kpks[:,0], '--k', lw=1, dashes=[5, 4])\n",
" \n",
" \n",
"axs[0,0]. set_title('Theory', fontsize=20, y=1.05)\n",
"axs[0,1]. set_title('Reconstruction', fontsize=20, y=1.05)\n",
"plt.subplots_adjust(wspace=0.08, hspace=0.08)\n",
"plt.savefig('../results/figures/fig_2d1.png', bbox_inches='tight', transparent=False, dpi=300)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"scrolled": false
},
"outputs": [],
"source": [
"ksc = 1.1 # Hyperparameter used in reconstruction to align the theory and data to same resolution\n",
"f, axs = plt.subplots(7, 2, figsize=(8.3, 30))\n",
"for i, ind in enumerate(range(7, 14)):\n",
" \n",
" intFunc = interpolate.RegularGridInterpolator((kx/ksc, ky/ksc), bslda[2*ind,33:-30, 41:-42],\n",
" bounds_error=False, fill_value=None)\n",
" kxx, kyy = np.meshgrid(kx, ky, indexing='ij')\n",
" kxx = np.reshape(kxx, (nkx * nky,))\n",
" kyy = np.reshape(kyy, (nkx * nky,))\n",
" Ek_scaled = np.reshape(intFunc(np.column_stack((kxx, kyy))), (nkx, nky))\n",
" \n",
" axs[i,0].imshow(Ek_scaled.T*msk, cmap=custom_cmap,\n",
" extent=[kx[0]+0.05, kx[-1]+0.05, ky[0], ky[-1]])\n",
" axs[i,0].spines['right'].set_color('none')\n",
" axs[i,0].spines['top'].set_color('none')\n",
" axs[i,0].tick_params(axis='both', length=8, width=2, labelsize=28)\n",
" axs[i,0].set_xlabel('$k_x \\, (\\mathrm{\\AA}^{-1})$', fontsize=20) #, transform=axs[i,0].transAxes\n",
" axs[i,0].xaxis.set_label_coords(0.88, 0.15)\n",
" axs[i,0].set_ylabel('$k_y \\, (\\mathrm{\\AA}^{-1})$', fontsize=20, rotation=0)\n",
" axs[i,0].yaxis.set_label_coords(0.20, 0.90)\n",
" if i < 6:\n",
" axs[i,0].set_xticklabels('')\n",
" axs[i,0].text(0.85, 0.93, '# '+str(ind+1), transform=axs[i,0].transAxes, fontsize=20)\n",
" axs[i,0].axvline(x=0, ls='--', lw=1, color='k', dashes=[5, 4])\n",
" axs[i,0].axhline(y=0, ls='--', lw=1, color='k', dashes=[5, 4])\n",
" \n",
" # Draw hexagon\n",
" axs[i,0].plot(kpks[:,1]-0.02, kpks[:,0], '--k', lw=1, dashes=[5, 4])\n",
" \n",
" # Draw the reconstruction side\n",
" axs[i,1].imshow(recbands[ind,...]*msk, cmap=custom_cmap, extent=[kx[0]+kxshift, kx[-1]+kxshift, ky[0], ky[-1]])\n",
" axs[i,1].spines['right'].set_color('none')\n",
" axs[i,1].spines['top'].set_color('none')\n",
" axs[i,1].tick_params(axis='both', length=8, width=2, labelsize=28)\n",
" axs[i,1].set_xlabel('$k_x \\, (\\mathrm{\\AA}^{-1})$', fontsize=20)\n",
" axs[i,1].xaxis.set_label_coords(0.88, 0.15)\n",
" axs[i,1].set_ylabel('$k_y \\, (\\mathrm{\\AA}^{-1})$', fontsize=20, rotation=0)\n",
" axs[i,1].yaxis.set_label_coords(0.20, 0.90)\n",
" if i < 6:\n",
" axs[i,1].set_xticklabels('')\n",
" axs[i,1].set_yticklabels('')\n",
" axs[i,1].text(0.85, 0.93, '# '+str(ind+1), transform=axs[i,1].transAxes, fontsize=20)\n",
" axs[i,1].axvline(x=0, ls='--', lw=1, color='k', dashes=[5, 4])\n",
" axs[i,1].axhline(y=0, ls='--', lw=1, color='k', dashes=[5, 4])\n",
" \n",
" # Draw hexagon\n",
" axs[i,1].plot(kpks[:,1]-0.01, kpks[:,0], '--k', lw=1, dashes=[5, 4])\n",
" \n",
" \n",
"axs[0,0]. set_title('Theory', fontsize=20, y=1.05)\n",
"axs[0,1]. set_title('Reconstruction', fontsize=20, y=1.05)\n",
"plt.subplots_adjust(wspace=0.08, hspace=0.08)\n",
"plt.savefig('../results/figures/fig_2d2.png', bbox_inches='tight', transparent=False, dpi=300)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.13"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
| Unknown |
3D | mpes-kit/fuller | figures/Fig2_SFig5_Four_DFTs_Reconstruction.ipynb | .ipynb | 12,884 | 312 | {
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Reconstruction with four DFT calculations as initializations"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import warnings as wn\n",
"wn.filterwarnings(\"ignore\")\n",
"\n",
"import os\n",
"import numpy as np\n",
"import fuller\n",
"import matplotlib.pyplot as plt\n",
"import matplotlib as mpl\n",
"from mpl_toolkits.axes_grid1.inset_locator import inset_axes\n",
"from mpes import fprocessing as fp, analysis as aly\n",
"from scipy import io as sio\n",
"%matplotlib inline"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"mpl.rcParams['font.family'] = 'sans-serif'\n",
"mpl.rcParams['font.sans-serif'] = 'Arial'\n",
"mpl.rcParams['axes.linewidth'] = 2\n",
"mpl.rcParams['pdf.fonttype'] = 42\n",
"mpl.rcParams['ps.fonttype'] = 42\n",
"\n",
"# Create plot folder if needed\n",
"if not os.path.exists('../results/figures'):\n",
" os.mkdir('../results/figures')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Load preprocessed photoemission data\n",
"bcsm = np.load(r'../data/processed/hslines/WSe2_vcut.npy')\n",
"Evals = fp.readBinnedhdf5(r'../data/pes/3_smooth.h5')['E']\n",
"ehi, elo = Evals[0], Evals[469]"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Main Figure 2a"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Specify the band shift hyperparameters (including the global shift of 0.65)\n",
"dftbands = sio.loadmat('../data/theory/hslines/initials_DFT_G-M.mat') \n",
"lda_th_shift = dftbands['LDA'][100:125, 0].max()\n",
"pbe_th_shift = dftbands['PBE'][100:125, 0].max()\n",
"pbesol_th_shift = dftbands['PBEsol'][100:125, 0].max()\n",
"hse_th_shift = dftbands['HSE'][100:125, 0].max()\n",
"eshift = 0.65\n",
"\n",
"lda_shift = np.array([0.4, 0.2, 0.3, 0.25, 0.22, 0.3, 0.3, 0.25, 0.2, 0.32, 0.4, 0.4, 0.4, 0.4]) + eshift\n",
"pbe_shift = np.array([0, -0.3, -0.2, -0.25, -0.35, -0.3, -0.25, -0.2, -0.4, -0.4, -0.4, -0.4, -0.37, -0.4]) + eshift\n",
"pbesol_shift = np.array([0, -0.2, -0.1, -0.15, -0.2, -0.05, 0, 0, -0.2, -0.1, -0.1, -0.1, 0, 0]) + eshift\n",
"hse_shift = np.array([-0.15, -0.4, -0.2, -0.2, -0.2, -0.15, 0, 0, 0.1, 0, 0.1, 0.1, 0.2, 0.2]) + eshift"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Plot initializations vs theories\n",
"bseg = bcsm[:, :70]\n",
"\n",
"f, ax = plt.subplots(2, 2, figsize=(4.5, 9))\n",
"ax[0,0].imshow(bseg, cmap='Blues', extent=[0, 69, elo, ehi], aspect=17)\n",
"ax[0,1].imshow(bseg, cmap='Blues', extent=[0, 69, elo, ehi], aspect=17)\n",
"ax[1,0].imshow(bseg, cmap='Blues', extent=[0, 69, elo, ehi], aspect=17)\n",
"ax[1,1].imshow(bseg, cmap='Blues', extent=[0, 69, elo, ehi], aspect=17)\n",
"\n",
"# Plot original theory energy bands after zero adjustment\n",
"ax[0,0].plot(dftbands['LDA'][:69,:14] - lda_th_shift, 'r-')\n",
"ax[0,0].plot(dftbands['LDA'][:69, 13] - lda_th_shift, 'r-', label='Theory')\n",
"ax[0,1].plot(dftbands['PBE'][:69,:14] - pbe_th_shift, 'r-')\n",
"ax[1,0].plot(dftbands['PBEsol'][:69,:14] - pbesol_th_shift, 'r-')\n",
"ax[1,1].plot(dftbands['HSE'][:69,:14] - hse_th_shift, 'r-')\n",
"\n",
"# Plot shift-aligned energy bands\n",
"for ib in range(14):\n",
" ax[0,0].plot(dftbands['LDA'][:69,ib] + lda_shift[ib], '--', color='#F0FF24', dashes=[5, 2])\n",
" ax[0,1].plot(dftbands['PBE'][:69,ib] + pbe_shift[ib], '--', color='#F0FF24', dashes=[5, 2])\n",
" ax[1,0].plot(dftbands['PBEsol'][:69,ib] + pbesol_shift[ib], '--', color='#F0FF24', dashes=[5, 2])\n",
" ax[1,1].plot(dftbands['HSE'][:69,ib] + hse_shift[ib], '--', color='#F0FF24', dashes=[5, 2])\n",
"ax[0,0].plot(dftbands['LDA'][:69,ib] + lda_shift[ib], '--', color='#F0FF24', dashes=[5, 2], label='Initialization')\n",
"\n",
"ax[0,0].set_title('LDA', fontsize=15, x=0.8, y=0.88)\n",
"ax[0,1].set_title('PBE', fontsize=15, x=0.8, y=0.88)\n",
"ax[1,0].set_title('PBEsol', fontsize=15, x=0.75, y=0.88)\n",
"ax[1,1].set_title('HSE06', fontsize=15, x=0.76, y=0.88)\n",
"\n",
"for i in range(2):\n",
" for j in range(2):\n",
" ax[i,j].tick_params(axis='both', length=8, width=2, labelsize=15)\n",
" ax[i,j].set_ylim([elo, ehi])\n",
"\n",
"ax[0,0].set_xticks([])\n",
"ax[0,1].set_xticks([])\n",
"ax[0,1].set_yticks([])\n",
"ax[1,1].set_yticks([])\n",
"ax[0,0].set_yticks(range(-7, 1))\n",
"ax[1,0].set_yticks(range(-7, 1))\n",
"\n",
"ax[0,0].set_ylabel('Energy (eV)', fontsize=20)\n",
"ax[1,0].set_ylabel('Energy (eV)', fontsize=20)\n",
"\n",
"ax[1,0].tick_params(axis='x', width=0, pad=0)\n",
"ax[1,1].tick_params(axis='x', width=0, pad=0)\n",
"\n",
"ax[1,0].set_xticks([0, 69])\n",
"ax[1,0].set_xticklabels(['$\\overline{\\Gamma}$', '$\\overline{\\mathrm{M}}$'])\n",
"ax[1,1].set_xticks([0, 69])\n",
"ax[1,1].set_xticklabels(['$\\overline{\\Gamma}$', '$\\overline{\\mathrm{M}}$'])\n",
"\n",
"lg = ax[0,0].legend(fontsize=15, ncol=2, facecolor='#A9CCE3', edgecolor='#A9CCE3', bbox_to_anchor=(2.21, 1.18))\n",
"\n",
"plt.subplots_adjust(hspace=0.02, wspace=0.15)\n",
"plt.savefig('../results/figures/fig_2a.png', bbox_inches='tight', transparent=True, dpi=300)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Main Figure 2c"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"paths = np.load(r'../data/processed/hslines/WSe2_kpath.npz')\n",
"\n",
"reconbands = {}\n",
"for name in ['LDA', 'PBE', 'PBEsol', 'HSE']:\n",
" bands = np.load(r'../data/processed/wse2_recon/postproc_refrotsym_bands_'+name.lower()+'.npy')\n",
" bdi = aly.bandpath_map(np.moveaxis(bands, 0, 2), pathr=paths['rowInds'], pathc=paths['colInds'], eaxis=2)\n",
" reconbands[name] = bdi.T"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"pos = paths['pathInds']\n",
"pos[-1] -= 1\n",
"\n",
"ff, axa = plt.subplots(1, 1, figsize=(10.5, 8))\n",
"im = axa.imshow(bcsm, cmap='Blues', extent=[0, 185, elo, ehi], aspect=12)\n",
"# axa.plot(dftbands['HSE'][:,:14] - hse_th_shift, 'r--', zorder=2)\n",
"for ib in range(14):\n",
" axa.plot(reconbands['LDA'][:,ib] + 0.65, color='r', zorder=1);\n",
"axa.tick_params(axis='y', length=8, width=2, labelsize=15)\n",
"axa.tick_params(axis='x', length=0, labelsize=15, pad=8)\n",
"axa.set_ylim([elo, ehi])\n",
"axa.set_xticks(pos)\n",
"axa.set_xticklabels(['$\\overline{\\Gamma}$', '$\\overline{\\mathrm{M}}$',\n",
" '$\\overline{\\mathrm{K}}$', '$\\overline{\\Gamma}$']);\n",
"axa.set_ylabel('Energy (eV)', fontsize=20)\n",
"for p in pos[:-1]:\n",
" axa.axvline(x=p, c='k', ls='--', lw=2, dashes=[4, 2])\n",
" \n",
"axa.set_title('Reconstruction', fontsize=15, x=0.8, y=0.9)\n",
"cax = inset_axes(axa, width=\"3%\", height=\"30%\", bbox_to_anchor=(220, 90, 440, 200))\n",
"cb = plt.colorbar(im, cax=cax, ticks=[])\n",
"cb.ax.set_ylabel('Intensity', fontsize=15, rotation=-90, labelpad=17)\n",
"\n",
"plt.savefig('../results/figures/fig_2c.png', bbox_inches='tight', transparent=True, dpi=300)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Supplementary Figure 5"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"dftbands = {}\n",
"for dft in ['LDA','PBE','PBEsol','HSE']:\n",
" dftbands[dft] = sio.loadmat(r'../data/theory/hslines/WSe2_bandlines_'+dft+'_186.mat')['lines']"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"f, axs = plt.subplots(2, 2, figsize=(17, 10))\n",
"axs[0,0].imshow(bcsm, cmap='Blues', extent=[0, 185, elo, ehi], aspect=12)\n",
"axs[0,1].imshow(bcsm, cmap='Blues', extent=[0, 185, elo, ehi], aspect=12)\n",
"axs[1,0].imshow(bcsm, cmap='Blues', extent=[0, 185, elo, ehi], aspect=12)\n",
"axs[1,1].imshow(bcsm, cmap='Blues', extent=[0, 185, elo, ehi], aspect=12)\n",
"\n",
"c_over = '#FFD54F'\n",
"axs[0,0].plot(dftbands['LDA'][:,:14] - lda_th_shift, color=c_over, ls='--', dashes=[3, 1], zorder=2)\n",
"axs[0,1].plot(dftbands['PBE'][:,:14] - pbe_th_shift, color=c_over, ls='--', dashes=[3, 1], zorder=2)\n",
"axs[1,0].plot(dftbands['PBEsol'][:,:14] - pbesol_th_shift, color=c_over, ls='--', dashes=[3, 1], zorder=2)\n",
"axs[1,1].plot(dftbands['HSE'][:,:14] - hse_th_shift, color=c_over, ls='--', dashes=[3, 1], zorder=2)\n",
"\n",
"axs[0,0].set_title('LDA', fontsize=15, x=0.8, y=0.9)\n",
"axs[0,1].set_title('PBE', fontsize=15, x=0.8, y=0.9)\n",
"axs[1,0].set_title('PBEsol', fontsize=15, x=0.8, y=0.9)\n",
"axs[1,1].set_title('HSE06', fontsize=15, x=0.8, y=0.9)\n",
"\n",
"for i in range(2):\n",
" for j in range(2):\n",
" axs[i,j].tick_params(axis='y', length=8, width=2, labelsize=15)\n",
" axs[i,j].tick_params(axis='x', length=0, labelsize=15, pad=8)\n",
" axs[i,j].set_yticks(np.arange(-8, 1))\n",
" axs[i,j].set_ylim([elo, ehi])\n",
" axs[i,j].set_xticks(pos)\n",
" axs[i,j].set_xticklabels(['$\\overline{\\Gamma}$', '$\\overline{\\mathrm{M}}$',\n",
" '$\\overline{\\mathrm{K}}$', '$\\overline{\\Gamma}$'])\n",
" \n",
" for p in pos[:-1]:\n",
" axs[i,j].axvline(x=p, c='k', ls='--', lw=2, dashes=[3, 1.7])\n",
" \n",
"for ib in range(14):\n",
" axs[0,0].plot(reconbands['LDA'][:,ib] + 0.65, '-', color='r', zorder=1)\n",
" axs[0,1].plot(reconbands['PBE'][:,ib] + 0.65, '-', color='r', zorder=1)\n",
" axs[1,0].plot(reconbands['PBEsol'][:,ib] + 0.65, '-', color='r', zorder=1)\n",
" axs[1,1].plot(reconbands['HSE'][:,ib] + 0.65, '-', color='r', zorder=1)\n",
"\n",
"# Add legend\n",
"axs[0,0].plot(dftbands['LDA'][:,0] - lda_th_shift, color=c_over, ls='--', dashes=[3, 1], zorder=2, label='LDA Calc.')\n",
"axs[0,0].plot(reconbands['LDA'][:,ib] + 0.65, '-', color='r', zorder=1, label='LDA Recon.')\n",
"lg = axs[0,0].legend(fontsize=15, ncol=1, facecolor='w', edgecolor='k', framealpha=1, bbox_to_anchor=(0.94, 0.1))\n",
"lg.get_frame().set_linewidth(2)\n",
"\n",
"axs[0,1].plot(dftbands['PBE'][:,0] - pbe_th_shift, color=c_over, ls='--', dashes=[3, 1], zorder=2, label='PBE Calc.')\n",
"axs[0,1].plot(reconbands['PBE'][:,ib] + 0.65, '-', color='r', zorder=1, label='PBE Recon.')\n",
"lg = axs[0,1].legend(fontsize=15, ncol=1, facecolor='w', edgecolor='k', framealpha=1, bbox_to_anchor=(0.94, 0.1))\n",
"lg.get_frame().set_linewidth(2)\n",
"\n",
"axs[1,0].plot(dftbands['PBEsol'][:,0] - pbesol_th_shift, color=c_over, ls='--', dashes=[3, 1], zorder=2, label='PBEsol Calc.')\n",
"axs[1,0].plot(reconbands['PBEsol'][:,ib] + 0.65, '-', color='r', zorder=1, label='PBEsol Recon.')\n",
"lg = axs[1,0].legend(fontsize=15, ncol=1, facecolor='w', edgecolor='k', framealpha=1, bbox_to_anchor=(0.98, 0.1))\n",
"lg.get_frame().set_linewidth(2)\n",
"\n",
"axs[1,1].plot(dftbands['HSE'][:,0] - hse_th_shift, color=c_over, ls='--', dashes=[3, 1], zorder=2, label='HSE06 Calc.')\n",
"axs[1,1].plot(reconbands['HSE'][:,ib] + 0.65, '-', color='r', zorder=1, label='HSE06 Recon.')\n",
"lg = axs[1,1].legend(fontsize=15, ncol=1, facecolor='w', edgecolor='k', framealpha=1, bbox_to_anchor=(0.94, 0.1))\n",
"lg.get_frame().set_linewidth(2)\n",
"\n",
"plt.subplots_adjust(hspace=0.2, wspace=0.1)\n",
"plt.savefig('../results/figures/sfig_5.png', bbox_inches='tight', transparent=True, dpi=300)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.13"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
| Unknown |
3D | mpes-kit/fuller | figures/Fig1_Data_preprocessing.ipynb | .ipynb | 7,620 | 273 | {
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Stages of data preprocessing"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import warnings as wn\n",
"wn.filterwarnings(\"ignore\")\n",
"\n",
"import numpy as np\n",
"from fuller.mrfRec import MrfRec\n",
"from fuller.generator import rotosymmetrize\n",
"from fuller.utils import saveHDF\n",
"from mpes import analysis as aly, fprocessing as fp\n",
"\n",
"import os\n",
"import matplotlib.pyplot as plt\n",
"import matplotlib as mpl\n",
"from matplotlib.ticker import MultipleLocator, FormatStrFormatter\n",
"%matplotlib inline\n",
"\n",
"# mpl.rcParams['font.family'] = 'sans-serif'\n",
"# mpl.rcParams['font.sans-serif'] = 'Arial'\n",
"mpl.rcParams['axes.linewidth'] = 2\n",
"mpl.rcParams['pdf.fonttype'] = 42\n",
"mpl.rcParams['ps.fonttype'] = 42"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
    "Before we start with the preprocessing, we determine the location of the high-symmetry points, which we need for plotting the data. For simplicity and linearity of the code, we do this using the symmetrized data from file, but of course we could also do the preprocessing first before plotting the resulting data."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fdata = fp.readBinnedhdf5('../data/pes/1_sym.h5')\n",
"mc = aly.MomentumCorrector(fdata['V'])\n",
"\n",
"mc.selectSlice2D(selector=slice(30, 32), axis=2)\n",
"mc.featureExtract(mc.slice, method='daofind', sigma=6, fwhm=20, symscores=False)\n",
"\n",
"# False detection filter, if needed\n",
"try:\n",
" mc.pouter_ord = mc.pouter_ord[[0,1,3,5,6,9],:]\n",
"except:\n",
" pass"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"mc.view(image=mc.slice, annotated=True, points=mc.features)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Define high-symmetry points\n",
"G = mc.pcent # Gamma point\n",
"K = mc.pouter_ord[0,:] # K point\n",
"K1 = mc.pouter_ord[1,:] # K' point\n",
"M = (K + K1) / 2 # M point\n",
"\n",
"# Define cutting path\n",
"pathPoints = np.asarray([G, M, K, G])\n",
"nGM, nMK, nKG = 70, 39, 79\n",
"segPoints = [nGM, nMK, nKG]\n",
"rowInds, colInds, pathInds = aly.points2path(pathPoints[:,0], pathPoints[:,1], npoints=segPoints)\n",
"nSegPoints = len(rowInds)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Define plotting function\n",
"\n",
"def plot_path(mrf, vmax, save_path):\n",
" # Normalize data\n",
" imNorm = mrf.I / mrf.I.max()\n",
"\n",
" # Sample the data along high-symmetry lines (k-path) connecting the corresponding high-symmetry points\n",
" pathDiagram = aly.bandpath_map(imNorm, pathr=rowInds, pathc=colInds, eaxis=2)\n",
"\n",
" Evals = mrf.E\n",
" ehi, elo = Evals[0], Evals[449]\n",
"\n",
" f, ax = plt.subplots(figsize=(10, 6))\n",
" plt.imshow(pathDiagram[:450, :], cmap='Blues', aspect=10.9, extent=[0, nSegPoints, elo, ehi], vmin=0, vmax=vmax)\n",
" ax.set_xticks(pathInds)\n",
" ax.set_xticklabels(['$\\overline{\\Gamma}$', '$\\overline{\\mathrm{M}}$',\n",
" '$\\overline{\\mathrm{K}}$', '$\\overline{\\Gamma}$'], fontsize=15)\n",
" for p in pathInds[:-1]:\n",
" ax.axvline(x=p, c='r', ls='--', lw=2, dashes=[4, 2])\n",
" # ax.axhline(y=0, ls='--', color='r', lw=2)\n",
" ax.yaxis.set_major_locator(MultipleLocator(2))\n",
" ax.yaxis.set_minor_locator(MultipleLocator(1))\n",
" ax.yaxis.set_label_position(\"right\")\n",
" ax.yaxis.tick_right()\n",
" ax.set_ylabel('Energy (eV)', fontsize=15, rotation=-90, labelpad=20)\n",
" ax.tick_params(axis='x', length=0, pad=6)\n",
" ax.tick_params(which='both', axis='y', length=8, width=2, labelsize=15)\n",
" \n",
" plt.savefig(save_path, dpi=200)\n",
" plt.show()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Load data\n",
"data = fp.readBinnedhdf5('../data/pes/0_binned.h5')\n",
"I = data['V']\n",
"E = data['E']\n",
"kx = data['kx']\n",
"ky = data['ky']\n",
"\n",
"# Create reconstruction object from data file\n",
"mrf = MrfRec(E=E, kx=kx, ky=ky, I=I)\n",
"\n",
"# Create plot folder if needed\n",
"if not os.path.exists('../results/figures'):\n",
" os.mkdir('../results/figures')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Main Figure 1c: photoemission band mapping data"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"scrolled": false
},
"outputs": [],
"source": [
"plot_path(mrf, 0.5, '../results/figures/fig_1c.png')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Main Figure 1d: Pattern symmetrization in $(k_x, k_y)$ plane (rotation and reflection)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"mrf.symmetrizeI()\n",
"plot_path(mrf, 0.5, '../results/figures/fig_1d.png')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Main Figure 1e: Normalization and contrast enhancement (MCLAHE)\n",
"MCLAHE stands for multidimensional contrast limited adaptive histogram equalization (see publication [here](https://ieeexplore.ieee.org/document/8895993))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"scrolled": false
},
"outputs": [],
"source": [
"mrf.normalizeI(kernel_size=(20, 20, 25), n_bins=256, clip_limit=0.15, use_gpu=True)\n",
"plot_path(mrf, 1, '../results/figures/fig_1e.png')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Main Figure 1f: Multidimensional smoothing using Gaussian filter"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"mrf.smoothenI(sigma=(.8, .8, 1.))\n",
"plot_path(mrf, 1, '../results/figures/fig_1f.png')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"pycharm": {
"name": "#%%\n"
}
},
"outputs": [],
"source": [
"# # Save data to disc if needed\n",
"# data_save = [['axes', {'E': mrf.E, 'kx': mrf.kx, 'ky': mrf.ky}], ['binned', {'V': mrf.I}]]\n",
"# saveHDF(*data_save, save_addr='../data/preprocessed.h5')"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.13"
},
"pycharm": {
"stem_cell": {
"cell_type": "raw",
"metadata": {
"collapsed": false
},
"source": ""
}
}
},
"nbformat": 4,
"nbformat_minor": 2
}
| Unknown |
3D | mpes-kit/fuller | figures/Fig3_Approximations_to_reconstruction.ipynb | .ipynb | 5,405 | 163 | {
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Illustration of approximations to a reconstructed band"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import warnings as wn\n",
"wn.filterwarnings(\"ignore\")\n",
"\n",
"import os\n",
"import numpy as np\n",
"import fuller\n",
"import matplotlib.pyplot as plt\n",
"import matplotlib as mpl\n",
"from tqdm import tqdm_notebook as tqdm\n",
"from matplotlib.ticker import AutoMinorLocator\n",
"%matplotlib inline"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"mpl.rcParams['font.family'] = 'sans-serif'\n",
"mpl.rcParams['font.sans-serif'] = 'Arial'\n",
"mpl.rcParams['axes.linewidth'] = 2\n",
"mpl.rcParams['pdf.fonttype'] = 42\n",
"mpl.rcParams['ps.fonttype'] = 42\n",
"\n",
"# Create plot folder if needed\n",
"if not os.path.exists('../results/figures'):\n",
" os.mkdir('../results/figures')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"bandcuts = np.load(r'../data/processed/wse2_recon_1BZ/postproc_bandcuts_lda.npz')['bandcuts']\n",
"plt.imshow(bandcuts[3,...])"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Approximations using different numbers of basis terms and in different orders\n",
"idx = 3\n",
"recon = np.zeros_like(bandcuts[0,...])\n",
"indarr = list(range(5, 400, 1))\n",
"# errors for summation in default polynomial order (errseq) and in coefficient-ranked order (errmaj)\n",
"errseq, errmaj = [], []\n",
"# The pixel-averaged versions of errseq and errmaj\n",
"errseqavg, errmajavg = [], []\n",
"reconms = []\n",
"bandref = np.nan_to_num(bandcuts[idx,...])\n",
"bcf, bss0 = fuller.generator.decomposition_hex2d(bandref, nterms=400, ret='all')\n",
"npixbz = np.sum(bss0[0,...] == 1) # Number of pixels within the first Brillouin zone\n",
"magind = np.argsort(np.abs(bcf))[::-1]\n",
"\n",
"for nt in tqdm(indarr):\n",
" # Generate mask\n",
" currcf = np.zeros_like(bcf)\n",
" currcfm = np.zeros_like(bcf)\n",
" currcf[:nt] = bcf[:nt]\n",
" currcfm[magind[:nt]] = bcf[magind[:nt]]\n",
" recon = fuller.generator.reconstruction_hex2d(currcf, bss0)\n",
" reconm = fuller.generator.reconstruction_hex2d(currcfm, bss0)\n",
"\n",
" reconms.append(reconm)\n",
" errseq.append(np.linalg.norm(recon - bandref)/np.linalg.norm(bandref))\n",
" errmaj.append(np.linalg.norm(reconm - bandref)/np.linalg.norm(bandref))\n",
" errseqavg.append(np.linalg.norm(recon - bandref)/np.sqrt(npixbz))\n",
" errmajavg.append(np.linalg.norm(reconm - bandref)/np.sqrt(npixbz))\n",
"\n",
"errseq, errmaj, errseqavg, errmajavg = list(map(np.asarray, [errseq, errmaj, errseqavg, errmajavg]))\n",
"reconms = np.asarray(reconms)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Main Figure 3b"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"f, ax = plt.subplots(figsize=(5, 3.5))\n",
"\n",
"ax.plot(indarr, errmajavg*1000, '-', lw=2, c='#0000FF')\n",
"ax.plot(indarr, errseqavg*1000, '-', lw=2, c='#CC6600')\n",
"\n",
"ax.set_xlabel('Number of terms', fontsize=18)\n",
"ax.set_ylabel('Avg. approx. error (meV)', fontsize=18)\n",
"ax.set_xticks(range(0, 181, 20))\n",
"ax.set_ylim([0, 200])\n",
"ax.set_xlim([0, 100])\n",
"\n",
"ax.tick_params(which='major', axis='both', length=8, width=2, labelsize=18)\n",
"ax.tick_params(which='minor', axis='both', length=8, width=1, labelsize=18)\n",
"\n",
"ax.axvline(x=5, ls='--', c='k', dashes=(5, 3))\n",
"ax.axvline(x=15, ls='--', c='k', dashes=(5, 3))\n",
"ax.axvline(x=45, ls='--', c='k', dashes=(5, 3))\n",
"\n",
"ax.xaxis.set_minor_locator(AutoMinorLocator(4))\n",
"ax.yaxis.set_minor_locator(AutoMinorLocator(5))\n",
"ax.set_title('Polynomial\\n approximation\\n to band #4', fontsize=18, x=0.7, y=0.68, transform=ax.transAxes)\n",
"\n",
"ax2 = ax.twinx()\n",
"ax2.set_yticks(np.arange(0, 0.11, 0.02))\n",
"ax2.set_ylim([0, 200*errmaj[0]/(errmajavg[0]*1000)])\n",
"ax2.set_ylabel('Rel. approx. error', fontsize=18, rotation=-90, labelpad=25)\n",
"ax2.tick_params(which='major', axis='both', length=8, width=2, labelsize=18)\n",
"ax2.tick_params(which='minor', axis='both', length=8, width=1, labelsize=18)\n",
"ax2.yaxis.set_minor_locator(AutoMinorLocator(2))\n",
"plt.savefig('../results/figures/fig_3b.png', bbox_inches='tight', transparent=True, dpi=300)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.13"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
| Unknown |
3D | mpes-kit/fuller | figures/SFig6_Tests_on_synthetic_2D_data.ipynb | .ipynb | 10,024 | 331 | {
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Tests of the Markov random field model for reconstructing 2D synthetic data"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import warnings as wn\n",
"wn.filterwarnings(\"ignore\")\n",
"\n",
"import numpy as np\n",
"import fuller\n",
"import matplotlib.pyplot as plt\n",
"from mpes import analysis as aly\n",
"import matplotlib as mpl\n",
"import matplotlib.gridspec as gridspec\n",
"import os\n",
"%matplotlib inline"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"mpl.rcParams['font.family'] = 'sans-serif'\n",
"mpl.rcParams['font.sans-serif'] = 'Arial'\n",
"mpl.rcParams['axes.linewidth'] = 2\n",
"mpl.rcParams['pdf.fonttype'] = 42\n",
"mpl.rcParams['ps.fonttype'] = 42\n",
"\n",
"# Create plot folder if needed\n",
"if not os.path.exists('../results/figures'):\n",
" os.mkdir('../results/figures')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
    "## 2D single sinusoidal band"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Data generation"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Generate synthetic data\n",
"kx = np.arange(-1, 1, 0.01)\n",
"band_sin2d = 3*np.sin(13*kx) + 2*np.cos(12*kx) - 4\n",
"b2d_min, b2d_max = band_sin2d.min(), band_sin2d.max()\n",
"# plt.plot(kx, band_sin2d)\n",
"\n",
"Evals = np.arange(b2d_min-2, b2d_max+2, 0.01)\n",
"pes_data_2d = aly.voigt(feval=True, vardict={'amp':1, 'xvar':Evals[:,None],\n",
" 'ctr':band_sin2d, 'sig':1, 'gam':0.3})\n",
"plt.imshow(pes_data_2d[::-1,:], aspect=0.1, extent=[-1, 1, b2d_min-2, b2d_max+2], cmap='Blues')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Construct initialization\n",
"init = np.zeros_like(kx)\n",
"plt.plot(kx, band_sin2d, c='r', label='ground truth')\n",
"plt.plot(kx, init, c='b', label='initialization')\n",
"plt.legend(loc='lower left', fontsize=12)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Reconstruction"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"I = np.transpose(pes_data_2d)\n",
"I = I / I.max()\n",
"mrf = fuller.mrfRec.MrfRec(E=Evals, kx=kx, ky=np.array([0.]), I=np.reshape(I, (len(kx), 1, len(Evals))),\n",
" eta=1, E0=init[:, None])"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"mrf.iter_seq(200)\n",
"recon = mrf.getEb()[:, 0]"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
    "### Supplementary Figure 6a"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Summary plot\n",
"gs = gridspec.GridSpec(1, 2, width_ratios=[5,5])\n",
"fig = plt.figure()\n",
"axs = []\n",
"for i in range(2):\n",
" axs.append(fig.add_subplot(gs[i]))\n",
" \n",
"im = axs[0].imshow(pes_data_2d[::-1,:], aspect=0.15, extent=[-1, 1, b2d_min-2, b2d_max+2], cmap='Blues')\n",
"axs[0].plot(kx, band_sin2d, 'r')\n",
"axs[0].set_aspect(aspect=0.15)\n",
"axs[0].set_ylabel('Energy (a.u.)', fontsize=15)\n",
"cax = fig.add_axes([0.94, 0.54, 0.03, 0.2])\n",
"cb = fig.colorbar(im, cax=cax, orientation='vertical', ticks=[])\n",
"cb.ax.set_ylabel('Intensity', fontsize=15, rotation=-90, labelpad=18)\n",
"\n",
"axs[1].plot(kx, band_sin2d, 'r', label='ground truth')\n",
"axs[1].plot(kx, init, 'b', label='initialization')\n",
"axs[1].plot(kx, recon, 'g', label='reconstruction')\n",
"axs[1].set_xlim([-1, 1])\n",
"axs[1].set_ylim([b2d_min-2, b2d_max+2])\n",
"axs[1].set_aspect(aspect=0.15)\n",
"axs[1].set_yticks([])\n",
"lg = axs[1].legend(fontsize=15, bbox_to_anchor=(1.04,0.5), frameon=False,\n",
" borderpad=0, labelspacing=0.8, handlelength=1.2, handletextpad=0.5)\n",
"\n",
"for i in range(2):\n",
" axs[i].set_xlabel('$k$ (a.u.)', fontsize=15)\n",
" axs[i].tick_params(axis='both', length=8, width=2, labelsize=15)\n",
"\n",
"plt.subplots_adjust(wspace=0.1)\n",
"plt.savefig('../results/figures/sfig_6a.png', dpi=300, bbox_inches='tight', transparent=True)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## 2D band crossing"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Data generation"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Generate synthetic data\n",
"kx = np.arange(-1, 1, 0.014)\n",
"band_pb2d_up = 5*kx**2 - kx/5 - 5\n",
"band_pb2d_down = -(0.2*kx**2 + kx/4 + 2.5)\n",
"plt.figure(figsize=(5,4))\n",
"b2d_min, b2d_max = band_pb2d_down.min(), band_pb2d_up.max()\n",
"# plt.plot(kx, band_pb2d_up)\n",
"# plt.plot(kx, band_pb2d_down)\n",
"\n",
"Evals = np.arange(b2d_min-4, b2d_max+2, 0.012)\n",
"pes_data_2d_up = aly.voigt(feval=True, vardict={'amp':1.6, 'xvar':Evals[:,None],\n",
" 'ctr':band_pb2d_up, 'sig':0.07, 'gam':0.15})\n",
"pes_data_2d_down = aly.voigt(feval=True, vardict={'amp':1, 'xvar':Evals[:,None],\n",
" 'ctr':band_pb2d_down, 'sig':0.07, 'gam':0.1})\n",
"pes_data_2d = pes_data_2d_up + pes_data_2d_down\n",
"plt.imshow(pes_data_2d[::-1,:], aspect=0.2, extent=[-1, 1, b2d_min-4, b2d_max+2], cmap='Blues')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Construct initialization\n",
"band_init2d_up = 3.5 * kx ** 2 - kx / 20 - 4\n",
"band_init2d_down = -3 * np.ones_like(kx)\n",
"\n",
"plt.plot(kx, band_pb2d_up, c='r')\n",
"plt.plot(kx, band_pb2d_down, c='r', label='ground truth')\n",
"plt.plot(kx, band_init2d_up, c='b')\n",
"plt.plot(kx, band_init2d_down, c='b', label='initialization')\n",
"plt.legend(loc='upper center', fontsize=12)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Reconstruction"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Reconstruct first band\n",
"I = np.transpose(pes_data_2d)\n",
"I = I / I.max()\n",
"mrf = fuller.mrfRec.MrfRec(E=Evals, kx=kx, ky=np.array([0.]), I=np.reshape(I, (len(kx), 1, len(Evals))),\n",
" eta=0.085, E0=band_init2d_down[:, None])\n",
"mrf.iter_seq(500)\n",
"recon_down = mrf.getEb()[:, 0]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Reconstruct second band\n",
"mrf = fuller.mrfRec.MrfRec(E=Evals, kx=kx, ky=np.array([0.]), I=np.reshape(I, (len(kx), 1, len(Evals))),\n",
" eta=0.2, E0=band_init2d_up[:, None])\n",
"mrf.iter_seq(500)\n",
"recon_up = mrf.getEb()[:, 0]"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Supplementary Figure 6b"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Summary plot\n",
"emin, emax = Evals.min(), Evals.max()\n",
"\n",
"gs = gridspec.GridSpec(1, 2, width_ratios=[8,8])\n",
"fig = plt.figure()\n",
"axs = []\n",
"for i in range(2):\n",
" axs.append(fig.add_subplot(gs[i]))\n",
" \n",
"im = axs[0].imshow(pes_data_2d, aspect=0.2, extent=[-1, 1, emin, emax], cmap='Blues', origin='lower', vmax=2)\n",
"axs[0].plot(kx, band_pb2d_up, 'r')\n",
"axs[0].plot(kx, band_pb2d_down, 'r')\n",
"axs[0].set_aspect(aspect=0.2)\n",
"axs[0].set_ylabel('Energy (a.u.)', fontsize=15)\n",
"cax = fig.add_axes([0.94, 0.54, 0.03, 0.2])\n",
"cb = fig.colorbar(im, cax=cax, orientation='vertical', ticks=[])\n",
"cb.ax.set_ylabel('Intensity', fontsize=15, rotation=-90, labelpad=18)\n",
"\n",
"axs[1].plot(kx, band_pb2d_up, 'r')\n",
"axs[1].plot(kx, band_pb2d_down, 'r', label='ground truth')\n",
"axs[1].plot(kx, band_init2d_up, 'b')\n",
"axs[1].plot(kx, band_init2d_down, 'b', label='initialization')\n",
"axs[1].plot(kx, recon_up, 'g')\n",
"axs[1].plot(kx, recon_down, 'g', label='reconstruction')\n",
"axs[1].set_xlim([-1, 1])\n",
"axs[1].set_ylim([emin, emax])\n",
"axs[1].set_aspect(aspect=0.2)\n",
"axs[1].set_yticks([])\n",
"lg = axs[1].legend(fontsize=15, bbox_to_anchor=(1.04,0.5), frameon=False,\n",
" borderpad=0, labelspacing=0.8, handlelength=1.2, handletextpad=0.5)\n",
"\n",
"for i in range(2):\n",
" axs[i].set_xlabel('$k$ (a.u.)', fontsize=15)\n",
" axs[i].tick_params(axis='both', length=8, width=2, labelsize=15)\n",
"\n",
"plt.subplots_adjust(wspace=0.1)\n",
"plt.savefig('../results/figures/sfig_6b.png', dpi=300, bbox_inches='tight', transparent=True)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.13"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
| Unknown |
3D | mpes-kit/fuller | figures/SFig9_Synthetic_data_and_initial_conditions.ipynb | .ipynb | 11,426 | 357 | {
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Generate synthetic multiband photoemission data using DFT calculations"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import warnings as wn\n",
"wn.filterwarnings(\"ignore\")\n",
"\n",
"import os\n",
"import numpy as np\n",
"import fuller\n",
"from mpes import analysis as aly\n",
"import matplotlib.pyplot as plt\n",
"from matplotlib.ticker import (MultipleLocator, FormatStrFormatter,\n",
" AutoMinorLocator)\n",
"from tqdm import tqdm_notebook as tqdm\n",
"import tifffile as ti\n",
"import matplotlib as mpl\n",
"from scipy import interpolate\n",
"%matplotlib inline"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"mpl.rcParams['font.family'] = 'sans-serif'\n",
"mpl.rcParams['font.sans-serif'] = 'Arial'\n",
"mpl.rcParams['axes.linewidth'] = 2\n",
"mpl.rcParams['pdf.fonttype'] = 42\n",
"mpl.rcParams['ps.fonttype'] = 42\n",
"\n",
"# Create plot folder if needed\n",
"if not os.path.exists('../results/figures'):\n",
" os.mkdir('../results/figures')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ncfs = 400\n",
"bases = fuller.generator.ppz.hexike_basis(nterms=ncfs, npix=207, vertical=True, outside=0)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Compute the polynomial decomposition coefficients\n",
"bandout = np.nan_to_num(fuller.utils.loadHDF('../data/theory/bands_1BZ/wse2_lda_bandcuts.h5')['bands'])\n",
"ldashift = 0.86813 # For zeroing the energy at K points\n",
"bcfs = []\n",
"for i in tqdm(range(14)):\n",
" bcfs.append(fuller.generator.decomposition_hex2d(bandout[i,...] + ldashift, bases=bases, baxis=0, ret='coeffs'))\n",
"bcfs = np.array(bcfs)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Generate Brillouin zone mask\n",
"bzmsk = fuller.generator.hexmask(hexdiag=207, imside=207, padded=False, margins=[1, 1, 1, 1])\n",
"bzmsk_tight = fuller.generator.hexmask(hexdiag=201, imside=207, padded=True, margins=[3, 3, 3, 3])"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Generate photoemission data without padding\n",
"nbands = 8\n",
"bshape = (207, 207)\n",
"amps = np.ones(bshape)\n",
"xs = np.linspace(-4.5, 0.5, 285, endpoint=True)\n",
"syndat = np.zeros((285, 207, 207))\n",
"gamss = []\n",
"for i in tqdm(range(nbands)):\n",
" gams = 0.05\n",
" syndat += aly.voigt(feval=True, vardict={'amp':amps, 'xvar':xs[:,None,None], 'ctr':(bandout[i,...] + 0.86813),\n",
" 'sig':0.1, 'gam':gams})"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"hwd = 103.5 # Half width of projected Brillouin zone in pixels"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Generate edge-padded bands\n",
"synfbands = []\n",
"padsize = ((24, 24), (24, 24))\n",
"for i in tqdm(range(nbands)): \n",
" impad = fuller.generator.hexpad(bandout[i,...] + 0.86813, cvd=hwd, mask=bzmsk, edgepad=padsize)\n",
" synfbands.append(fuller.generator.restore(impad, method='cubic'))\n",
"synfbands = np.asarray(synfbands)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Generate edge-padded photoemission data\n",
"bshape = (255, 255)\n",
"amps = np.ones(bshape)\n",
"xs = np.linspace(-4.5, 0.5, 285, endpoint=True)\n",
"synfdat = np.zeros((285, 255, 255))\n",
"gamss = []\n",
"for i in tqdm(range(nbands)):\n",
"# btemp = np.nan_to_num(synbands[i,...])\n",
"# gams = np.abs(synfbands[i,...] - np.nanmean(synfbands[i,...]))/3\n",
" gams = 0.05\n",
"# gamss.append(gams)\n",
" synfdat += aly.voigt(feval=True, vardict={'amp':amps, 'xvar':xs[:,None,None], 'ctr':(synfbands[i,...]),\n",
" 'sig':0.1, 'gam':gams})\n",
"# gamss = np.asarray(gamss)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"xss = np.linspace(-4.5, 0.5, 285, endpoint=True)\n",
"xss[1] - xss[0], xss.size"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"plt.imshow(synfdat[:,80,:], aspect=0.8, origin='lower', cmap='terrain_r')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Generate mask for large coefficients\n",
"cfmask = fuller.utils.binarize(bcfs, threshold=1e-2, vals=[0, 1])\n",
"cfmask[:, 0] = 0 # No rigid shift modulation"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Generate coefficient-scaled data\n",
"synfscaled = {}\n",
"# errs = np.around(np.arange(0.3, 2.01, 0.05), 2)\n",
"errs = [0.8, 1.0, 1.2]\n",
"bscmod = bcfs.copy()\n",
"\n",
"for err in tqdm(errs):\n",
" \n",
" synbands = []\n",
" for i in range(nbands):\n",
" \n",
" bscmod[i, 1:] = err*bcfs[i, 1:] # Scale only the dispersion terms (leave out the first offset term)\n",
" bandmod = fuller.generator.reconstruction_hex2d(bscmod[i, :], bases=bases)\n",
" \n",
" # Sixfold rotational symmetrization\n",
" symmed = fuller.generator.rotosymmetrize(bandmod, center=(hwd, hwd), rotsym=6)[0]\n",
" symmed = fuller.generator.reflectosymmetrize(symmed, center=(hwd, hwd), refangles=[0, 90])\n",
" padded = fuller.generator.hexpad(symmed, cvd=103.5, mask=bzmsk_tight, edgepad=padsize)\n",
" synbands.append(fuller.generator.restore(padded, method='nearest'))\n",
" \n",
" synfscaled[str(err)] = np.asarray(synbands)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"plt.figure(figsize=(6, 6))\n",
"plt.imshow(synbands[0])"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Calibrate momentum axes\n",
"mc = aly.MomentumCorrector(np.asarray(synbands))\n",
"mc.selectSlice2D(selector=slice(0,1), axis=0)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"mc.featureExtract(mc.slice, method='daofind', fwhm=30, sigma=20)\n",
"#mc.view(mc.slice, annotated=True, points=mc.features)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Calculate distances\n",
"dg = 1.27/np.cos(np.radians(30))\n",
"axes = mc.calibrate(mc.slice, mc.pouter_ord[0,:], mc.pcent, dist=dg, equiscale=True, ret='axes')\n",
"dg, axes['axes'][0][0], axes['axes'][0][-1]"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Supplementary Figure 9c"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"kx, ky = axes['axes'][0], axes['axes'][1]\n",
"emin, emax = xs.min(), xs.max()\n",
"kxtight = kx[24:-24]\n",
"kytight = ky[24:-24]\n",
"kxmin, kxmax = kxtight.min(), kxtight.max()\n",
"kymin, kymax = kytight.min(), kytight.max()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"islc = 90 # slice index\n",
"f, axs = plt.subplots(1, 2, figsize=(10, 6))\n",
"bands_tight = bandout + ldashift\n",
"\n",
"cs = ['r']\n",
"labels = [0.8]\n",
"for ni, i in enumerate(labels):\n",
" lbl = str(i)\n",
" reconbands = bzmsk_tight*(synfscaled[lbl][:,24:-24,24:-24])\n",
" for j in range(8):\n",
" axs[0].plot(kxtight, reconbands[j, islc, :], c=cs[ni])\n",
" axs[1].plot(kytight, reconbands[j, :, islc], c=cs[ni])\n",
" if j == 7:\n",
" axs[0].plot(kxtight, reconbands[j, islc, :], c=cs[ni], label='Scaled LDA ('+lbl+r'$\\times$)')\n",
" axs[1].plot(kytight, reconbands[j, :, islc], c=cs[ni], label='Scaled LDA ('+lbl+r'$\\times$)')\n",
"\n",
"gtband = bzmsk_tight*bands_tight\n",
"for j in range(8):\n",
" axs[0].plot(kxtight, gtband[j, islc, :], c='k', lw=2)\n",
" axs[1].plot(kytight, gtband[j, :, islc], c='k', lw=2)\n",
" if j == 7:\n",
" axs[0].plot(kxtight, gtband[j, islc, :], c='k', lw=2, label=r'LDA calc. (1.0$\\times$)')\n",
" axs[1].plot(kytight, gtband[j, :, islc], c='k', lw=2, label=r'LDA calc. (1.0$\\times$)')\n",
"\n",
"cs = ['g']\n",
"labels = [1.2]\n",
"for ni, i in enumerate(labels):\n",
" lbl = str(i)\n",
" reconbands = bzmsk_tight*(synfscaled[lbl][:,24:-24,24:-24])\n",
" for j in range(8):\n",
" axs[0].plot(kxtight, reconbands[j, islc, :], c=cs[ni])\n",
" axs[1].plot(kytight, reconbands[j, :, islc], c=cs[ni])\n",
" if j == 7:\n",
" axs[0].plot(kxtight, reconbands[j, islc, :], c=cs[ni], label='Scaled LDA ('+lbl+r'$\\times$)')\n",
" axs[1].plot(kytight, reconbands[j, :, islc], c=cs[ni], label='Scaled LDA ('+lbl+r'$\\times$)')\n",
"\n",
"for i in range(2):\n",
" axs[i].tick_params(which='major', axis='both', length=8, width=2, labelsize=15)\n",
" axs[i].tick_params(which='minor', axis='both', length=8, width=1)\n",
" axs[i].set_xticks(np.arange(-1., 1.1, 1))\n",
" axs[i].xaxis.set_minor_locator(AutoMinorLocator(2))\n",
" axs[i].legend(loc='upper left', frameon=False, fontsize=15, ncol=1, labelspacing=0.1, borderpad=0, columnspacing=1)\n",
" axs[i].set_yticks(np.arange(-4, 2, 1))\n",
" axs[i].set_ylim([-4.2, 1.3])\n",
" \n",
"axs[0].yaxis.set_minor_locator(AutoMinorLocator(2))\n",
"axs[0].set_xlabel('$k_x$ $(\\mathrm{\\AA}^{-1})$', fontsize=18)\n",
"axs[0].set_ylabel('Energy (eV)', fontsize=18)\n",
"axs[1].set_xlabel('$k_y$ $(\\mathrm{\\AA}^{-1})$', fontsize=18)\n",
"axs[1].set_yticks([])\n",
"plt.subplots_adjust(wspace=0.1)\n",
"plt.savefig('../results/figures/sfig_9c.png', bbox_inches='tight', transparent=True, dpi=300)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.13"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
| Unknown |
3D | mpes-kit/fuller | figures/Fig3_HexagonalZernike.ipynb | .ipynb | 9,534 | 250 | {
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Digitization of reconstructed bands using hexagonal Zernike polynomials"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import warnings as wn\n",
"wn.filterwarnings(\"ignore\")\n",
"\n",
"import os\n",
"import numpy as np\n",
"import fuller\n",
"import matplotlib.pyplot as plt\n",
"import matplotlib as mpl\n",
"from mpl_toolkits.axes_grid1 import make_axes_locatable\n",
"from matplotlib.ticker import (MultipleLocator, FormatStrFormatter,\n",
" AutoMinorLocator)\n",
"import matplotlib.colors as cs\n",
"import itertools as it\n",
"from tqdm import tqdm_notebook as tqdm\n",
"%matplotlib inline"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"mpl.rcParams['font.family'] = 'sans-serif'\n",
"mpl.rcParams['font.sans-serif'] = 'Arial'\n",
"mpl.rcParams['axes.linewidth'] = 2\n",
"mpl.rcParams['pdf.fonttype'] = 42\n",
"mpl.rcParams['ps.fonttype'] = 42\n",
"\n",
"colornames = ['#646464', '#666666', '#6a6a6a', '#6f6f6f', '#737373', '#787878', '#7d7d7d', '#828282', '#878787', '#8d8d8d', '#929292', '#989898', '#9e9e9e', '#a4a4a4', '#aaaaaa', '#b0b0b0', '#b6b6b6', '#bcbcbc', '#c2c2c2', '#c9c9c9', '#cfcfcf', '#d6d6d6', '#dcdcdc', '#e3e3e3', '#eaeaea', '#efefee', '#efeee5', '#efeddc', '#efecd3', '#eeebca', '#eeeac0', '#eee9b7', '#eee8ad', '#ede7a4', '#ede69a', '#ede590', '#ede487', '#ece37d', '#ece273', '#ece069', '#ecdf5f', '#ebde55', '#ebdd4b', '#ebdc41', '#ebdb37', '#ebd333', '#ebc933', '#ecbe32', '#ecb432', '#eda931', '#ee9e31', '#ee9330', '#ef8830', '#ef7d2f', '#f0722f', '#f0672e', '#f15c2e', '#f2512d', '#f2462d', '#f33b2c', '#f3302c', '#f4252b', '#f4192b', '#ef182f', '#e81834', '#e21939', '#db1a3e', '#d51a43', '#ce1b48', '#c71b4d', '#c11c52', '#ba1c58', '#b31d5d', '#ac1d62', '#a61e67', '#9f1e6c', '#981f72', '#911f77', '#8a207c', '#842182']\n",
"custom_cmap = mpl.colors.LinearSegmentedColormap.from_list('custom', colornames, N=256)\n",
"\n",
"# Create plot folder if needed\n",
"if not os.path.exists('../results/figures'):\n",
" os.mkdir('../results/figures')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Main Figure 3a"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Generate hexagonal Zernike basis\n",
"basis = fuller.generator.ppz.hexike_basis(nterms=100, npix=257, vertical=True, outside=0)\n",
"\n",
"# Mask the region beyond the hexagonal boundary\n",
"bmask = fuller.generator.hexmask(hexdiag=257, imside=257, padded=False, margins=[1, 1, 1, 1])"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Plotting a selection of hexagonal Zernike polynomials\n",
"ff, axs = plt.subplots(5, 1, figsize=(3, 10))\n",
"\n",
"for ind, ibs in enumerate([3, 10, 27, 41, 89]):\n",
" im = axs[ind].imshow(basis[ibs,...]*bmask[...], cmap=custom_cmap, vmin=-1.8, vmax=1.8)\n",
" axs[ind].axis('off')\n",
" axs[ind].text(5, 5, str(ind+1), fontsize=15, fontname=\"Arial\")\n",
"\n",
"cax = ff.add_axes([0.36, 0.08, 0.3, 0.02])\n",
"cb = plt.colorbar(im, cax=cax, ticks=[-1.8, 1.8], orientation='horizontal')\n",
"cb.ax.tick_params(axis='both', length=0)\n",
"cb.ax.set_xticklabels(['low', 'high'], fontsize=15) #'{0}'.format(u'\\u2014')\n",
"# cb.ax.set_ylabel('Height', rotation=-90, fontsize=15)\n",
"plt.subplots_adjust(hspace=0.1)\n",
"plt.savefig('../results/figures/fig_3a1.png', bbox_inches='tight', transparent=True, dpi=300)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Calculate the decomposition coefficients for all bands\n",
"bandout = np.nan_to_num(np.load(r'../data/processed/wse2_recon_1BZ/postproc_bandcuts_lda.npz')['bandcuts'])\n",
"bases_recon = fuller.generator.ppz.hexike_basis(nterms=400, npix=175, vertical=True, outside=0)\n",
"cfs_rec_lda = []\n",
"for i in tqdm(range(14)):\n",
" cfs_rec_lda.append(fuller.generator.decomposition_hex2d(bandout[i,...], bases=bases_recon, baxis=0, ret='coeffs'))\n",
"cfs_rec_lda = np.array(cfs_rec_lda)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Collect the large coefficients needed for approximating each energy band\n",
"cfs_large = fuller.utils.binarize(cfs_rec_lda, threshold=1e-2, vals=[0, 1])"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"cl = plt.cm.tab20(np.linspace(0,1,14))\n",
"f, ax = plt.subplots(figsize=(6, 10))\n",
"xs = np.arange(1, 400)\n",
"for i in range(14):\n",
" ax.plot(xs, cfs_rec_lda[i,1:]-i/3, lw=1, color=cl[i])\n",
" ax.axhline(y=-i/3, lw=1, color=cl[i])\n",
" ax.text(320, -i/3+0.08, 'Band #'+str(i+1), fontsize=15, fontname=\"Arial\")\n",
"\n",
"ax.bar(xs, (np.abs(cfs_large[:,1:])).sum(axis=0)/40, bottom=-4.9, width=1, color=(0.3, 0.3, 0.3))\n",
"ax.axhline(y=-4.9, lw=1, color=(0.3, 0.3, 0.3))\n",
"# ax.set_title('HSE06', fontsize=15, y=0.88)\n",
"ax.text(320, -4.9+0.08, 'All bands', fontsize=15, fontname=\"Arial\")\n",
"# ax.text('Counts', transform=ax.transAxes)\n",
"\n",
"ax.set_xticks(list(range(0, 401, 50)))\n",
"ax.set_yticks([])\n",
"ax.set_ylim([-5, 1])\n",
"ax.tick_params(axis='x', length=8, width=2, labelsize=15)\n",
"ax.set_ylabel('Amplitude (a. u.)', fontsize=15)\n",
"ax.set_xlim([0, 400])\n",
"ax.set_xlabel('Coefficient index', fontsize=15)\n",
"ax.spines['left'].set_visible(False)\n",
"ax.spines['right'].set_visible(False)\n",
"ax.spines['top'].set_visible(False)\n",
"ax.text(-22, -4.5, 'Counts', rotation=90, fontsize=15);\n",
"plt.savefig('../results/figures/fig_3a2.png', bbox_inches='tight', transparent=True, dpi=300)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Main Figure 3c"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Calculate the pairwise correlation matrix\n",
"ncfs = 14\n",
"dcm = np.zeros((ncfs, ncfs))\n",
"ids = list(it.product(range(ncfs), repeat=2))\n",
"for ipair, pair in enumerate(ids):\n",
" i, j = pair[0], pair[1]\n",
" dcm[i,j] = fuller.metrics.dcos(cfs_rec_lda[i,1:], cfs_rec_lda[j,1:])"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Construct new colormap 'KRdBu' and 'KRdBu_r' (based on 'RdBu' with black blended into the very end of the red side)\n",
"cmap_rdbu = mpl.cm.get_cmap('RdBu')\n",
"cmap_gr = mpl.cm.get_cmap('Greys_r')\n",
"colors = [cmap_gr(0.1), cmap_rdbu(0.1)]\n",
"nk = 13\n",
"\n",
"KRd = cs.LinearSegmentedColormap.from_list('KRdBu', colors, N=nk)\n",
"KRdvals = KRd(np.linspace(0, 1, nk))\n",
"RdBuvals = cmap_rdbu(np.linspace(0.1, 1, 256-nk))\n",
"KRdBu_vals = np.concatenate((KRdvals, RdBuvals))\n",
"KRdBu_r_vals = np.flipud(KRdBu_vals)\n",
"KRdBu = cs.ListedColormap(KRdBu_vals)\n",
"KRdBu_r = cs.ListedColormap(KRdBu_r_vals)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Plot the pairwise correlation matrix for reconstructed bands\n",
"f, ax = plt.subplots(figsize=(6, 6))\n",
"im = ax.matshow(dcm, cmap=KRdBu_r, extent=[0, 14, 14, 0], origin='upper', vmin=-1, vmax=1)\n",
"tks = list(np.arange(0.5, 14, 1))\n",
"ax.set_xticks(tks)\n",
"ax.set_yticks(tks)\n",
"ax.set_xticklabels(['#' + str(int(i+0.5)) for i in tks], fontsize=15, rotation=90)\n",
"ax.set_yticklabels(['#' + str(int(i+0.5)) for i in tks], fontsize=15, rotation=0)\n",
"ax.tick_params(axis='both', size=8, width=2, labelsize=15)\n",
"ax.tick_params(axis='x', bottom=False)\n",
"ax.tick_params(axis='x', pad=8)\n",
"# ax.set_title('HSE06', fontsize=15, x=0.5, y=1.15)\n",
"divider = make_axes_locatable(ax)\n",
"cax = divider.append_axes(\"right\", size=\"5%\", pad=0.2)\n",
"cax.tick_params(axis='y', size=8)\n",
"cb = plt.colorbar(im, cax=cax, ticks=np.arange(-1, 1.01, 0.2))\n",
"cb.ax.set_ylabel('Cosine similarity', fontsize=15, rotation=-90, labelpad=20)\n",
"cb.ax.tick_params(axis='both', length=8, width=2, labelsize=15)\n",
"# plt.colorbar(im, cax=cax, ticks=[])\n",
"ax.text(-0.18, 1.08, ' Band\\n index', rotation=-45, transform=ax.transAxes, fontsize=15)\n",
"plt.savefig('../results/figures/fig_3c.png', bbox_inches='tight', transparent=True, dpi=300)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.13"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
| Unknown |
3D | mpes-kit/fuller | figures/SFig4_Hyperparameter_tuning.ipynb | .ipynb | 12,515 | 321 | {
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Example visualizations of hyperparameter tuning for reconstruction"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import warnings as wn\n",
"wn.filterwarnings(\"ignore\")\n",
"\n",
"import os\n",
"import numpy as np\n",
"import fuller\n",
"import matplotlib.pyplot as plt\n",
"from matplotlib.figure import figaspect\n",
"from h5py import File\n",
"import natsort as nts\n",
"import glob as g\n",
"from mpes import analysis as aly, visualization as vis\n",
"import matplotlib as mpl\n",
"import scipy.io as sio\n",
"%matplotlib inline"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"mpl.rcParams['font.family'] = 'sans-serif'\n",
"mpl.rcParams['font.sans-serif'] = 'Arial'\n",
"mpl.rcParams['axes.linewidth'] = 2\n",
"mpl.rcParams['pdf.fonttype'] = 42\n",
"mpl.rcParams['ps.fonttype'] = 42\n",
"\n",
"colornames = ['#646464', '#666666', '#6a6a6a', '#6f6f6f', '#737373', '#787878', '#7d7d7d', '#828282', '#878787', '#8d8d8d', '#929292', '#989898', '#9e9e9e', '#a4a4a4', '#aaaaaa', '#b0b0b0', '#b6b6b6', '#bcbcbc', '#c2c2c2', '#c9c9c9', '#cfcfcf', '#d6d6d6', '#dcdcdc', '#e3e3e3', '#eaeaea', '#efefee', '#efeee5', '#efeddc', '#efecd3', '#eeebca', '#eeeac0', '#eee9b7', '#eee8ad', '#ede7a4', '#ede69a', '#ede590', '#ede487', '#ece37d', '#ece273', '#ece069', '#ecdf5f', '#ebde55', '#ebdd4b', '#ebdc41', '#ebdb37', '#ebd333', '#ebc933', '#ecbe32', '#ecb432', '#eda931', '#ee9e31', '#ee9330', '#ef8830', '#ef7d2f', '#f0722f', '#f0672e', '#f15c2e', '#f2512d', '#f2462d', '#f33b2c', '#f3302c', '#f4252b', '#f4192b', '#ef182f', '#e81834', '#e21939', '#db1a3e', '#d51a43', '#ce1b48', '#c71b4d', '#c11c52', '#ba1c58', '#b31d5d', '#ac1d62', '#a61e67', '#9f1e6c', '#981f72', '#911f77', '#8a207c', '#842182']\n",
"custom_cmap = mpl.colors.LinearSegmentedColormap.from_list('custom', colornames, N=256)\n",
"\n",
"# Create plot folder if needed\n",
"if not os.path.exists('../results/figures'):\n",
" os.mkdir('../results/figures')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Load reconstruction data\n",
"fdir = r'../data/hyperparameter/tuning_SFig3'\n",
"flist = fuller.utils.findFiles(fdir, fstring='/*', ftype='h5')\n",
"\n",
"recband = []\n",
"for f in flist:\n",
" with File(f) as file:\n",
" recband.append(file['bands/Eb'][:])\n",
"recband = np.asarray(recband)\n",
"recband.shape"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Reshape the reconstruction data into a grid for plotting\n",
"smaband = recband.reshape((14, 11, 256, 256))[::2, 1::2, ...].reshape((35, 256, 256))\n",
"smaband.shape"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Generate circular mask for the data\n",
"msk = aly.circmask(recband[0,...], 128, 125, 115, sign='xnan', method='algebraic')\n",
"\n",
"# Ranges of hyperparameters\n",
"etas = np.around(np.arange(0.02, 0.29, 0.02), decimals=2)\n",
"shifts = np.around(np.arange(0.0, 0.51, 0.05), decimals=2)\n",
"\n",
"# Obtain axis values of kx, ky\n",
"axes = fuller.utils.loadHDF(flist[0], groups=['axes'])\n",
"kx, ky = axes['kx'], axes['ky']"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Supplementary Figure 4a"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Plot a grid of reconstructed bands\n",
"ims, axs = vis.sliceview3d(smaband*msk[None,...], axis=0, ncol=7, vmin=-1, vmax=0,\n",
" colormap=custom_cmap, axisreturn='nested', numbered=False, figsize=(20, 20*5/7),\n",
" imkwds={'extent':[kx[0]+0.05, kx[-1]+0.05, ky[0]-0.02, ky[-1]-0.02]});\n",
"\n",
"# Annotate the plot-frames of good reconstructions\n",
"pairs = [[1, 4], [1, 5], [1, 6], [2, 0], [2, 1], [2, 2]]\n",
"for ri, ci in pairs:\n",
" [i.set_linewidth(4) for i in axs[ri, ci].spines.values()]\n",
" [i.set_color('#FFA500') for i in axs[ri, ci].spines.values()]\n",
"\n",
"# Annotate figures on the uppermost and bottommost rows\n",
"for j in range(7):\n",
" jj = j*2\n",
" axs[0, j].set_title('$\\eta$ = '+str(\"%1.2f\" %etas[jj])+' eV', fontsize=20, pad=10)\n",
" \n",
" axs[-1, j].get_xaxis().set_visible(True)\n",
" axs[-1, j].set_xticks(np.arange(-1.5, 1.6, 0.5))\n",
" axs[-1, j].set_xticklabels(['', '-1', '', '0', '', '1', ''])\n",
" axs[-1, j].tick_params(axis='x', length=8, width=2, labelsize=20)\n",
" axs[-1, j].set_xlabel('$k_x$ ($\\mathrm{\\AA}^{-1}$)', fontsize=20)\n",
"\n",
"# Annotate figures on leftmost and rightmost columns\n",
"for i in range(5):\n",
" ii = i+1\n",
" axs[i, 0].get_yaxis().set_visible(True)\n",
" axs[i, 0].set_yticks([])\n",
" axs[i, 0].set_ylabel('$\\Delta$E = '+str(\"%1.2f\" %shifts[ii])+' eV', fontsize=20, labelpad=5)\n",
" \n",
" axs[i, -1].get_yaxis().set_visible(True)\n",
" axs[i, -1].yaxis.set_label_position(\"right\")\n",
" axs[i, -1].yaxis.tick_right()\n",
" axs[i, -1].set_yticks(np.arange(-1.5, 1.6, 0.5))\n",
" axs[i, -1].set_yticklabels(['', '-1', '', '0', '', '1', ''])\n",
" axs[i, -1].tick_params(axis='y', length=8, width=2, labelsize=20)\n",
" axs[i, -1].set_ylabel('$k_y$ ($\\mathrm{\\AA}^{-1}$)', fontsize=20, rotation=-90, labelpad=25)\n",
"\n",
"plt.subplots_adjust(\n",
" left=0.05,\n",
" right=0.95,\n",
" bottom=0.05,\n",
" top=0.95,\n",
" wspace=0.05,\n",
" hspace=0.05)\n",
"\n",
"plt.savefig(r'../results/figures/sfig_4a.png', transparent=False, bbox_inches='tight', dpi=300)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Plot the colorbar\n",
"fig = plt.figure(figsize=(8, 3))\n",
"ax = fig.add_axes([0.05, 0.80, 0.04, 0.5])\n",
"norm = mpl.colors.Normalize(vmin=-1, vmax=0)\n",
"cb = mpl.colorbar.ColorbarBase(ax, cmap=custom_cmap,\n",
" norm=norm, ticks=[-1, 0],\n",
" orientation='vertical')\n",
"# cb.ax.set_yticks([-1, 0])\n",
"cb.ax.set_yticklabels(['low', 'high'], fontsize=20)\n",
"cb.ax.set_ylabel('E (eV)', fontsize=20, rotation=-90, labelpad=-10)\n",
"cb.ax.tick_params(axis='y', length=0, pad=5)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Energy coordinates\n",
"elo, ehi = (0.0, -8.252199)\n",
"eshift = 0.65 # global shift\n",
"elo += eshift\n",
"ehi += eshift\n",
"\n",
"# Extract high-symmetry lines from reconstructed bands\n",
"pth = np.load(r'../data/hyperparameter/tuning_SFig3/path_coords.npz')\n",
"# pth = np.load(r'../data/processed/hslines/WSe2_kpath.npz')\n",
"rowInds, colInds = pthr, pthc = pth['pathr'], pth['pathc']\n",
"bcuts = aly.bandpath_map(smaband, pathr=rowInds, pathc=colInds, eaxis=0)\n",
"bcuts = bcuts.reshape((5, 7, 186))\n",
"\n",
"# Photoemission data cut along high-symmetry lines\n",
"vcut = np.load(r'../data/processed/hslines/WSe2_vcut.npy')\n",
"\n",
"# Load LDA-DFT band structure calculation along high-symmetry lines\n",
"bandlines = sio.loadmat(r'../data/theory/hslines/WSe2_bandlines_LDA_186.mat')['lines']\n",
"bandlines.shape"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Supplementary Figure 4b"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
    "# Plot a grid of photoemission data cuts along high-symmetry lines\n",
"ims, axs = vis.sliceview3d(np.tile(vcut[::-1,:], (35, 1, 1)), axis=0, ncol=7, imkwds={'extent':[0, 185, ehi, elo]},\n",
" colormap='Blues', axisreturn='nested', numbered=False, figsize=(20, 10));\n",
"\n",
"# Annotate the plot-frames of good reconstructions\n",
"shifts_small = np.arange(0.10, 0.31, 0.05)\n",
"pairs = [[1, 4], [1, 5], [1, 6], [2, 0], [2, 1], [2, 2]]\n",
"pairs = [[1, 4], [1, 5], [1, 6], [2, 0], [2, 1], [2, 2]]\n",
"for ri, ci in pairs:\n",
" [i.set_linewidth(4) for i in axs[ri, ci].spines.values()]\n",
" [i.set_color('#FFA500') for i in axs[ri, ci].spines.values()]\n",
"\n",
"# Plot reconstructions and initializations\n",
"pos = np.array([ 0, 69, 107, 185]) # High-symmetry point locations\n",
"for j in range(7):\n",
" for i in range(5):\n",
" axs[i, j].plot(bcuts[i, j, :] + 0.65, '-', c='r', lw=1, zorder=2) # Plot reconstruction\n",
" axs[i, j].plot(bandlines[:, 1] + 0.65 + 0.02*i, '-', c='#00FF00', lw=1, zorder=1) # Plot initialization\n",
" \n",
" # Annotate high symmetry positions\n",
" for p in pos[:-1]:\n",
" axs[i, j].axvline(x=p, c='k', ls='--', lw=2, dashes=[4, 3])\n",
"\n",
"# Annotate figures on the uppermost and bottommost rows\n",
"for j in range(7):\n",
" jj = j*2\n",
" axs[0, j].set_title('$\\eta$ = '+str(\"%1.2f\" %etas[jj])+' eV', fontsize=20, pad=10)\n",
" \n",
" axs[-1, j].get_xaxis().set_visible(True)\n",
" axs[-1, j].tick_params(axis='both', length=8, width=2, labelsize=15)\n",
" axs[-1, j].set_ylim([ehi, elo])\n",
" axs[-1, j].set_xticks(pos)\n",
" axs[-1, j].set_xticklabels(['$\\overline{\\Gamma}$', '$\\overline{\\mathrm{M}}$',\n",
" '$\\overline{\\mathrm{K}}$', '$\\overline{\\Gamma}$'])\n",
"\n",
"# Annotate figures on leftmost and rightmost columns\n",
"for i in range(5):\n",
" ii = 1 + i*2\n",
" axs[i, 0].get_yaxis().set_visible(True)\n",
" axs[i, 0].set_yticks([])\n",
" axs[i, 0].set_ylabel('$\\Delta$E = '+str(\"%1.2f\" %shifts[i])+' eV', fontsize=20, labelpad=5)\n",
" \n",
" axs[i, -1].get_yaxis().set_visible(True)\n",
" axs[i, -1].set_yticks(np.arange(-7, 0.1))\n",
" axs[i, -1].set_yticklabels(['', '-6', '', '-4', '', '-2', '', '0'])\n",
" axs[i, -1].set_ylabel('E (eV)', fontsize=20, rotation=-90, labelpad=20)\n",
" axs[i, -1].yaxis.set_label_position(\"right\")\n",
" axs[i, -1].yaxis.tick_right()\n",
" axs[i, -1].tick_params(axis='y', length=8, width=2, labelsize=20)\n",
"\n",
"plt.subplots_adjust(\n",
" left=0.05,\n",
" right=0.95,\n",
" bottom=0.05,\n",
" top=0.95,\n",
" wspace=0.05,\n",
" hspace=0.08)\n",
"\n",
"plt.savefig(r'../results/figures/sfig_4b.png', transparent=True, bbox_inches='tight', pad_inches=0.05, dpi=300)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Plot the figure legend\n",
"fig, ax = plt.subplots(figsize=(8, 3))\n",
"ax.imshow(vcut, cmap='Blues', extent=[0, 185, ehi, elo], aspect=12)\n",
"ax.tick_params(axis='both', length=0)\n",
"ax.set_xticklabels([])\n",
"ax.set_yticklabels([])\n",
"ax.plot(bandlines[:, 1] + 0.65 + 0.05*i, '-', c='#00FF00', lw=1, zorder=1, label='Initialization')\n",
"ax.plot(bcuts[i, j, :] + 0.65, '-', c='r', lw=1, zorder=2, label='Reconstruction')\n",
"ax.legend(loc=9, fontsize=15, frameon=True, bbox_to_anchor=(1.2, 0.2), facecolor='#C0C0C0')"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.13"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
| Unknown |
3D | mpes-kit/fuller | figures/Fig3_SFig13_Similarity_matrix_and_basis_decomposition.ipynb | .ipynb | 16,213 | 418 | {
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Construct similarity matrix between theoretical and reconstructed band structures"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import warnings as wn\n",
"wn.filterwarnings(\"ignore\")\n",
"\n",
"import numpy as np\n",
"import fuller\n",
"from mpes import analysis as aly\n",
"import matplotlib as mpl\n",
"import matplotlib.pyplot as plt\n",
"import matplotlib.colors as cs\n",
"from mpl_toolkits.axes_grid1 import make_axes_locatable\n",
"import itertools as it\n",
"import scipy.spatial.distance as ssd\n",
"from numpy.linalg import norm\n",
"from tqdm import tqdm_notebook as tqdm\n",
"%matplotlib inline"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Create plot folder if needed\n",
"import os\n",
"if not os.path.exists('../results/figures'):\n",
" os.mkdir('../results/figures')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"mpl.rcParams['font.family'] = 'sans-serif'\n",
"mpl.rcParams['font.sans-serif'] = 'Arial'\n",
"mpl.rcParams['axes.linewidth'] = 2\n",
"mpl.rcParams['pdf.fonttype'] = 42\n",
"mpl.rcParams['ps.fonttype'] = 42"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"bases = fuller.generator.ppz.hexike_basis(nterms=400, npix=207, vertical=True, outside=0)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"bandout = np.nan_to_num(fuller.utils.loadHDF(r'../data/theory/bands_1BZ/wse2_pbesol_bandcuts.h5')['bands'])\n",
"cfs_pbesol = []\n",
"for i in tqdm(range(14)):\n",
" cfs_pbesol.append(fuller.generator.decomposition_hex2d(bandout[i,...], bases=bases, baxis=0, ret='coeffs'))\n",
"cfs_pbesol = np.array(cfs_pbesol)\n",
"\n",
"bandout = np.nan_to_num(fuller.utils.loadHDF(r'../data/theory/bands_1BZ/wse2_pbe_bandcuts.h5')['bands'])\n",
"cfs_pbe = []\n",
"for i in tqdm(range(14)):\n",
" cfs_pbe.append(fuller.generator.decomposition_hex2d(bandout[i,...], bases=bases, baxis=0, ret='coeffs'))\n",
"cfs_pbe = np.array(cfs_pbe)\n",
"\n",
"bandout = np.nan_to_num(fuller.utils.loadHDF(r'../data/theory/bands_1BZ/wse2_hse_bandcuts.h5')['bands'])\n",
"cfs_hse = []\n",
"for i in tqdm(range(14)):\n",
" cfs_hse.append(fuller.generator.decomposition_hex2d(bandout[i,...], bases=bases, baxis=0, ret='coeffs'))\n",
"cfs_hse = np.array(cfs_hse)\n",
"\n",
"bandout = np.nan_to_num(fuller.utils.loadHDF(r'../data/theory/bands_1BZ/wse2_lda_bandcuts.h5')['bands'])\n",
"cfs_lda = []\n",
"for i in tqdm(range(14)):\n",
" cfs_lda.append(fuller.generator.decomposition_hex2d(bandout[i,...], bases=bases, baxis=0, ret='coeffs'))\n",
"cfs_lda = np.array(cfs_lda)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"bases_recon = fuller.generator.ppz.hexike_basis(nterms=400, npix=175, vertical=True, outside=0)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"bandout = np.nan_to_num(np.load(r'../data/processed/wse2_recon_1BZ/postproc_bandcuts_pbe.npz')['bandcuts'])\n",
"cfs_rec_pbe_sym = []\n",
"for i in tqdm(range(14)):\n",
" cfs_rec_pbe_sym.append(fuller.generator.decomposition_hex2d(bandout[i,...], bases=bases_recon, baxis=0, ret='coeffs'))\n",
"cfs_rec_pbe_sym = np.array(cfs_rec_pbe_sym)\n",
"\n",
"bandout = np.nan_to_num(np.load(r'../data/processed/wse2_recon_1BZ/postproc_bandcuts_pbesol.npz')['bandcuts'])\n",
"cfs_rec_pbesol_sym = []\n",
"for i in tqdm(range(14)):\n",
" cfs_rec_pbesol_sym.append(fuller.generator.decomposition_hex2d(bandout[i,...], bases=bases_recon, baxis=0, ret='coeffs'))\n",
"cfs_rec_pbesol_sym = np.array(cfs_rec_pbesol_sym)\n",
"\n",
"bandout = np.nan_to_num(np.load(r'../data/processed/wse2_recon_1BZ/postproc_bandcuts_lda.npz')['bandcuts'])\n",
"cfs_rec_lda_sym = []\n",
"for i in tqdm(range(14)):\n",
" cfs_rec_lda_sym.append(fuller.generator.decomposition_hex2d(bandout[i,...], bases=bases_recon, baxis=0, ret='coeffs'))\n",
"cfs_rec_lda_sym = np.array(cfs_rec_lda_sym)\n",
"\n",
"bandout = np.nan_to_num(np.load(r'../data/processed/wse2_recon_1BZ/postproc_bandcuts_hse.npz')['bandcuts'])\n",
"cfs_rec_hse_sym = []\n",
"for i in tqdm(range(14)):\n",
" cfs_rec_hse_sym.append(fuller.generator.decomposition_hex2d(bandout[i,...], bases=bases_recon, baxis=0, ret='coeffs'))\n",
"cfs_rec_hse_sym = np.array(cfs_rec_hse_sym)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Main Figure 3d"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def demean(bscoefs):\n",
" c = np.mean(bscoefs[:,0])\n",
" bscoefsdm = bscoefs.copy()\n",
" bscoefsdm[:, 0] -= c\n",
" return bscoefsdm"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Calculate distance metrics with zeroed DFT band structure\n",
"# Per-band Euclidean distance between band structures\n",
"cfs = [cfs_lda, cfs_pbe, cfs_pbesol, cfs_hse, cfs_rec_lda_sym, cfs_rec_pbe_sym, cfs_rec_pbesol_sym, cfs_rec_hse_sym]\n",
"ncfs = len(cfs)\n",
"dcdcent = np.zeros((ncfs, ncfs))\n",
"ids = list(it.product(range(ncfs), repeat=2))\n",
"for ipair, pair in enumerate(ids):\n",
" i, j = pair[0], pair[1]\n",
" icfsdc, jcfsdc = demean(cfs[i]), demean(cfs[j])\n",
" dnorm = 0\n",
" for ii in range(14):\n",
" dnorm += norm(icfsdc[ii,:] - jcfsdc[ii,:])\n",
" dcdcent[i,j] = dnorm / 14\n",
"\n",
"dcmstdsym = np.zeros((ncfs, ncfs))\n",
"ids = list(it.product(range(ncfs), repeat=2))\n",
"for ipair, pair in enumerate(ids):\n",
" i, j = pair[0], pair[1]\n",
" iest, jest = cfs[i].copy(), cfs[j].copy()\n",
" icfsdc, jcfsdc = demean(iest), demean(jest)\n",
" dnorms = []\n",
" for ii in range(14):\n",
" dnorms.append(norm(icfsdc[ii,:] - jcfsdc[ii,:]))\n",
" dcmstdsym[i,j] = np.std(dnorms)/np.sqrt(14)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"nr, nc = dcdcent.shape\n",
"dcm = dcdcent.copy()\n",
"dcmstd = dcmstdsym.copy()\n",
"\n",
"# Combine two triangular matrix plots\n",
"matnan = np.ones((8, 8))*np.nan\n",
"ut = np.triu(dcm, k=0) + np.tril(matnan, k=-1)\n",
"lt = np.tril(dcmstd, k=-1) + np.triu(matnan, k=0)\n",
"f, ax = plt.subplots(figsize=(6, 6))\n",
"fup = ax.matshow(ut*1000, cmap='viridis', vmin=0, vmax=250)\n",
"flo = ax.matshow(lt*1000, cmap='viridis', vmin=0)\n",
"\n",
"divider = make_axes_locatable(ax)\n",
"caxu = divider.append_axes(\"right\", size=\"5%\", pad=0.2)\n",
"caxu.tick_params(axis='y', size=8, length=8, width=2, labelsize=15)\n",
"caxl = divider.append_axes(\"bottom\", size=\"5%\", pad=0.2)\n",
"caxl.tick_params(axis='x', size=8, length=8, width=2, labelsize=15)\n",
"cbup = f.colorbar(fup, orientation='vertical', cax=caxu, ticks=np.arange(0, 351, 50))\n",
"cblo = f.colorbar(flo, orientation='horizontal', cax=caxl, ticks=np.arange(0, 31, 5))\n",
"cbup.ax.set_yticklabels(np.arange(0, 351, 50))\n",
"cbup.ax.set_ylabel('Band structure distance (meV/band)', fontsize=15, rotation=-90, labelpad=20)\n",
"# cbup.ax.set_ylim([0, 250])\n",
"cblo.ax.set_xlabel('Standard error (meV/band)', fontsize=15, rotation=0, labelpad=5)\n",
"\n",
"meths = ['LDA', 'PBE', 'PBEsol', 'HSE06', 'LDA \\nrecon.', 'PBE \\nrecon.', 'PBEsol \\nrecon.', 'HSE06 \\nrecon.']\n",
"ax.set_xticklabels([''] + meths, fontsize=15, rotation=90)\n",
"ax.set_yticklabels([''] + meths, fontsize=15, rotation=0)\n",
"ax.tick_params(axis='both', size=8, width=2)\n",
"ax.tick_params(axis='x', bottom=False, pad=8)\n",
"ax.tick_params(axis='y', pad=4)\n",
"\n",
"dcm_merged = np.zeros_like(dcm) + np.triu(dcm, k=1) + np.tril(dcmstd, k=-1)\n",
"dcm_merged = np.rint(dcm_merged*1000).astype('int')\n",
"for i in range(nr):\n",
" for j in range(nc):\n",
" if i == j:\n",
" ax.text(j, i, 0, ha='center', va='center', color='w', fontsize=15, fontweight='bold')\n",
" else:\n",
" ax.text(j, i, dcm_merged[i, j], ha='center', va='center', color='#FF4500', fontsize=15, fontweight='bold')\n",
" \n",
"plt.savefig('../results/figures/fig_3d.png', bbox_inches='tight', transparent=True, dpi=300)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Supplementary Figure 13e-h"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Construct new colormap 'KRdBu' and 'KRdBu_r' (based on 'RdBu' with black blended into the very end of the red side)\n",
"cmap_rdbu = mpl.cm.get_cmap('RdBu')\n",
"cmap_gr = mpl.cm.get_cmap('Greys_r')\n",
"colors = [cmap_gr(0.1), cmap_rdbu(0.1)]\n",
"nk = 13\n",
"\n",
"KRd = cs.LinearSegmentedColormap.from_list('KRdBu', colors, N=nk)\n",
"KRdvals = KRd(np.linspace(0, 1, nk))\n",
"RdBuvals = cmap_rdbu(np.linspace(0.1, 1, 256-nk))\n",
"KRdBu_vals = np.concatenate((KRdvals, RdBuvals))\n",
"KRdBu_r_vals = np.flipud(KRdBu_vals)\n",
"KRdBu = cs.ListedColormap(KRdBu_vals)\n",
"KRdBu_r = cs.ListedColormap(KRdBu_r_vals)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def similarity_matrix_plot(cmat, title=''):\n",
" \"\"\" Plot similarity matrix in the manuscript.\n",
" \"\"\"\n",
" \n",
" f, ax = plt.subplots(figsize=(6, 6))\n",
" im = ax.matshow(cmat, cmap=KRdBu_r, extent=[0, 14, 14, 0], origin='upper', vmin=-1, vmax=1)\n",
" tks = list(np.arange(0.5, 14, 1))\n",
" ax.set_xticks(tks)\n",
" ax.set_yticks(tks)\n",
" ax.set_xticklabels(['#' + str(int(i+0.5)) for i in tks], fontsize=15, rotation=90)\n",
" ax.set_yticklabels(['#' + str(int(i+0.5)) for i in tks], fontsize=15, rotation=0)\n",
" ax.tick_params(axis='both', size=8, width=2, labelsize=15)\n",
" ax.tick_params(axis='x', bottom=False)\n",
" ax.tick_params(axis='x', pad=8)\n",
" ax.set_title(title, fontsize=15, y=1.15)\n",
" # ax.set_title('HSE06', fontsize=15, x=0.5, y=1.15)\n",
" divider = make_axes_locatable(ax)\n",
" cax = divider.append_axes(\"right\", size=\"5%\", pad=0.2)\n",
" cax.tick_params(axis='y', size=8)\n",
" cb = plt.colorbar(im, cax=cax, ticks=np.arange(-1, 1.01, 0.2))\n",
" cb.ax.set_ylabel('Cosine similarity', fontsize=15, rotation=-90, labelpad=20)\n",
" cb.ax.tick_params(axis='both', length=8, width=2, labelsize=15)\n",
" # plt.colorbar(im, cax=cax, ticks=[])\n",
" ax.text(-0.18, 1.08, ' Band\\n index', rotation=-45, transform=ax.transAxes, fontsize=15)\n",
" \n",
" return ax"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"scrolled": false
},
"outputs": [],
"source": [
    "# Plot the cosine similarity matrices for each DFT calculation (indicated in figure title)\n",
"dcm_lda = fuller.metrics.similarity_matrix(cfs_lda, fmetric=fuller.metrics.dcos)\n",
"similarity_matrix_plot(dcm_lda, title='LDA')\n",
"plt.savefig('../results/figures/sfig_13e.png', bbox_inches='tight', transparent=True, dpi=300)\n",
"\n",
"dcm_pbe = fuller.metrics.similarity_matrix(cfs_pbe, fmetric=fuller.metrics.dcos)\n",
"similarity_matrix_plot(dcm_pbe, title='PBE')\n",
"plt.savefig('../results/figures/sfig_13f.png', bbox_inches='tight', transparent=True, dpi=300)\n",
"\n",
"dcm_pbesol = fuller.metrics.similarity_matrix(cfs_pbesol, fmetric=fuller.metrics.dcos)\n",
"similarity_matrix_plot(dcm_pbesol, title='PBEsol')\n",
"plt.savefig('../results/figures/sfig_13g.png', bbox_inches='tight', transparent=True, dpi=300)\n",
"\n",
"dcm_hse = fuller.metrics.similarity_matrix(cfs_hse, fmetric=fuller.metrics.dcos)\n",
"similarity_matrix_plot(dcm_hse, title='HSE06');\n",
"plt.savefig('../results/figures/sfig_13h.png', bbox_inches='tight', transparent=True, dpi=300)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Supplementary Figure 13a-d"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def decomposition_plot(coefs, coef_count, title):\n",
" \n",
" cl = plt.cm.tab20(np.linspace(0,1,14))\n",
" f, ax = plt.subplots(figsize=(6, 10))\n",
" xs = np.arange(1, 400)\n",
" for i in range(14):\n",
" ax.plot(xs, coefs[i,1:]-i/3, lw=1, color=cl[i])\n",
" ax.axhline(y=-i/3, lw=1, color=cl[i])\n",
" ax.text(320, -i/3+0.08, 'Band #'+str(i+1), fontsize=15, fontname=\"Arial\")\n",
" ax.bar(xs, coef_count, bottom=-4.9, width=1, color=(0.3, 0.3, 0.3))\n",
" ax.axhline(y=-4.9, lw=1, color=(0.3, 0.3, 0.3))\n",
" ax.set_title(title, fontsize=15, y=0.88)\n",
" ax.text(320, -4.9+0.08, 'All bands', fontsize=15, fontname=\"Arial\")\n",
" # ax.text('Counts', transform=ax.transAxes)\n",
"\n",
" # ax.tick_params(axis='y', length=0)\n",
" ax.set_xticks(list(range(0, 401, 50)))\n",
" ax.set_yticks([])\n",
" ax.set_ylim([-5, 1])\n",
" ax.tick_params(axis='x', length=8, width=2, labelsize=15)\n",
" ax.set_ylabel('Amplitude (a. u.)', fontsize=15)\n",
" ax.set_xlim([0, 400])\n",
" ax.set_xlabel('Coefficient index', fontsize=15)\n",
" ax.spines['left'].set_visible(False)\n",
" ax.spines['right'].set_visible(False)\n",
" ax.spines['top'].set_visible(False)\n",
" \n",
" return ax"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"scrolled": false
},
"outputs": [],
"source": [
"cfs_lda_large = fuller.utils.binarize(cfs_lda, threshold=1e-2)[:,1:].sum(axis=0)/40\n",
"decomposition_plot(cfs_lda, cfs_lda_large, title='LDA')\n",
"plt.savefig('../results/figures/sfig_13a.png', bbox_inches='tight', transparent=True, dpi=300)\n",
"\n",
"cfs_pbe_large = fuller.utils.binarize(cfs_pbe, threshold=1e-2)[:,1:].sum(axis=0)/40\n",
"decomposition_plot(cfs_pbe, cfs_pbe_large, title='PBE')\n",
"plt.savefig('../results/figures/sfig_13b.png', bbox_inches='tight', transparent=True, dpi=300)\n",
"\n",
"cfs_pbesol_large = fuller.utils.binarize(cfs_pbesol, threshold=1e-2)[:,1:].sum(axis=0)/40\n",
"decomposition_plot(cfs_pbesol, cfs_pbesol_large, title='PBEsol')\n",
"plt.savefig('../results/figures/sfig_13c.png', bbox_inches='tight', transparent=True, dpi=300)\n",
"\n",
"cfs_hse_large = fuller.utils.binarize(cfs_hse, threshold=1e-2)[:,1:].sum(axis=0)/40\n",
"decomposition_plot(cfs_hse, cfs_hse_large, title='HSE06');\n",
"plt.savefig('../results/figures/sfig_13d.png', bbox_inches='tight', transparent=True, dpi=300)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.13"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
| Unknown |
3D | mpes-kit/fuller | figures/Fig5_K_and_Mprime.ipynb | .ipynb | 10,254 | 245 | {
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Compare reconstructed and refined band patches around high-symmetry points"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import warnings as wn\n",
"wn.filterwarnings(\"ignore\")\n",
"\n",
"import os\n",
"import numpy as np\n",
"from mpes import analysis as aly\n",
"import matplotlib as mpl\n",
"import matplotlib.pyplot as plt\n",
"from mpl_toolkits.axes_grid1.inset_locator import inset_axes\n",
"%matplotlib inline"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"mpl.rcParams['font.family'] = 'sans-serif'\n",
"mpl.rcParams['font.sans-serif'] = 'Arial'\n",
"mpl.rcParams['axes.linewidth'] = 2\n",
"mpl.rcParams['pdf.fonttype'] = 42\n",
"mpl.rcParams['ps.fonttype'] = 42\n",
"\n",
"colornames = ['#646464', '#666666', '#6a6a6a', '#6f6f6f', '#737373', '#787878', '#7d7d7d', '#828282', '#878787', '#8d8d8d', '#929292', '#989898', '#9e9e9e', '#a4a4a4', '#aaaaaa', '#b0b0b0', '#b6b6b6', '#bcbcbc', '#c2c2c2', '#c9c9c9', '#cfcfcf', '#d6d6d6', '#dcdcdc', '#e3e3e3', '#eaeaea', '#efefee', '#efeee5', '#efeddc', '#efecd3', '#eeebca', '#eeeac0', '#eee9b7', '#eee8ad', '#ede7a4', '#ede69a', '#ede590', '#ede487', '#ece37d', '#ece273', '#ece069', '#ecdf5f', '#ebde55', '#ebdd4b', '#ebdc41', '#ebdb37', '#ebd333', '#ebc933', '#ecbe32', '#ecb432', '#eda931', '#ee9e31', '#ee9330', '#ef8830', '#ef7d2f', '#f0722f', '#f0672e', '#f15c2e', '#f2512d', '#f2462d', '#f33b2c', '#f3302c', '#f4252b', '#f4192b', '#ef182f', '#e81834', '#e21939', '#db1a3e', '#d51a43', '#ce1b48', '#c71b4d', '#c11c52', '#ba1c58', '#b31d5d', '#ac1d62', '#a61e67', '#9f1e6c', '#981f72', '#911f77', '#8a207c', '#842182']\n",
"custom_cmap = mpl.colors.LinearSegmentedColormap.from_list('custom', colornames, N=256)\n",
"\n",
"# Create plot folder if needed\n",
"if not os.path.exists('../results/figures'):\n",
" os.mkdir('../results/figures')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Region around $\\overline{\\text{M}^\\prime}$ point"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Load reconstruction outcome\n",
"recon = np.load(r'../data/processed/wse2_recon/postproc_refrotsym_bands_lda.npy')\n",
"rm, cm = 128, 203\n",
"mofs = 25\n",
"mpatch = recon[:2, rm-mofs:rm+mofs, cm-mofs:cm+mofs]\n",
"\n",
"# Load line (pointwise) fitting results using reconstruction as initialization\n",
"brec = np.load(r'../data/processed/patches/WSe2_Mpoint_recon_.npz')\n",
"bands = brec['bands']\n",
"kx = brec['kx']\n",
"ky = brec['ky']\n",
"kxx, kyy = np.meshgrid(kx+0.05, ky-0.05)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Figure 5c"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Plot comparison between reconstruction and line fitting\n",
"f, ax = plt.subplots(2, 2, figsize=(7, 7))\n",
"ax[0, 0].set_title('Reconstruction', fontsize=15)\n",
"ax[0, 1].set_title('Line fitting', fontsize=15)\n",
"\n",
"ax[0, 0].set_xticks([])\n",
"ax[0, 0].tick_params(axis='both', labelsize=15, length=8, width=2)\n",
"ax[0, 0].set_ylabel('$k_y$ ($\\mathrm{\\AA}^{-1}$)', fontsize=15)\n",
"\n",
"ax[1, 0].tick_params(axis='both', labelsize=15, length=8, width=2)\n",
"ax[1, 0].set_xlabel('$k_x$ ($\\mathrm{\\AA}^{-1}$)', fontsize=15)\n",
"ax[1, 0].set_ylabel('$k_y$ ($\\mathrm{\\AA}^{-1}$)', fontsize=15)\n",
"\n",
"for i in range(1, 2):\n",
" ax[0, i].set_yticks([])\n",
" ax[0, i].set_xticks([])\n",
" ax[1, i].tick_params(axis='both', labelsize=15, length=8, width=2)\n",
" ax[1, i].set_yticks([])\n",
" ax[1, i].set_xlabel('$k_x$ ($\\mathrm{\\AA}^{-1}$)', fontsize=15)\n",
" \n",
"cs00 = ax[0, 0].contourf(kxx, kyy, mpatch[0,...], 20, cmap=custom_cmap, vmin=-1.6, vmax=-1)\n",
"ax[0, 0].contour(cs00, colors='k', linestyles='-', linewidths=0.5)\n",
"ax[0, 0].text(0.65, 0.9, 'Band #1', fontsize=15, transform=ax[0,0].transAxes)\n",
"\n",
"cs10 = ax[1, 0].contourf(kxx, kyy, mpatch[1,:,:], 20, cmap=custom_cmap, vmin=-1.95, vmax=-1.4)\n",
"ax[1, 0].contour(cs10, colors='k', linestyles='-', linewidths=0.5)\n",
"ax[1, 0].text(0.65, 0.9, 'Band #2', fontsize=15, transform=ax[1,0].transAxes)\n",
" \n",
"cs01 = ax[0, 1].contourf(kxx, kyy, bands[0,...], 20, cmap=custom_cmap, vmin=-1.6, vmax=-1)\n",
"ax[0, 1].contour(cs01, colors='k', linestyles='-', linewidths=0.5)\n",
"\n",
"cs11 = ax[1, 1].contourf(kxx, kyy, bands[1,...], 20, cmap=custom_cmap, vmin=-1.95, vmax=-1.4)\n",
"ax[1, 1].contour(cs11, colors='k', linestyles='-', linewidths=0.5)\n",
"\n",
"plt.subplots_adjust(hspace=0.08, wspace=0.08)\n",
"plt.savefig('../results/figures/fig_5c.png', bbox_inches='tight', transparent=True, dpi=300)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Region around $\\overline{\\text{K}}$ point"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Load reconstruction and line (pointwise) fitting result\n",
"rfb = np.load(r'../data/processed/patches/WSe2_Kpoint_linefit.npz')\n",
"rcfb = np.load(r'../data/processed/patches/WSe2_Kpoint_recon.npz')\n",
"fitbs = np.load(r'../data/processed/patches/WSe2_Kpoint_TWfitting.npz')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Figure 5e"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Plot comparison between reconstruction, line fitting, and model fitting\n",
"ofs = 6\n",
"kxx, kyy = np.meshgrid(rfb['kx'][ofs:-ofs]+0.05, rfb['ky'][ofs:-ofs])\n",
"\n",
"minsft = 0.26\n",
"kxext = [rfb['kx'][0], rfb['kx'][-1]]\n",
"kyext = [rfb['ky'][0], rfb['ky'][-1]]\n",
"\n",
"f, ax = plt.subplots(2, 4, figsize=(14, 7))\n",
"ax[0, 0].set_title('Reconstruction', fontsize=15)\n",
"ax[0, 1].set_title('Line fitting (LF)', fontsize=15)\n",
"ax[0, 2].set_title('TW model fitting', fontsize=15)\n",
"ax[0, 3].set_title('Difference (TW$-$LF)', fontsize=15)\n",
"\n",
"ax[0, 0].set_xticks([])\n",
"ax[0, 0].tick_params(axis='both', labelsize=15, length=8, width=2)\n",
"ax[0, 0].set_ylabel('$k_y$ ($\\mathrm{\\AA}^{-1}$)', fontsize=15)\n",
"\n",
"ax[1, 0].tick_params(axis='both', labelsize=15, length=8, width=2)\n",
"ax[1, 0].set_xlabel('$k_x$ ($\\mathrm{\\AA}^{-1}$)', fontsize=15)\n",
"ax[1, 0].set_ylabel('$k_y$ ($\\mathrm{\\AA}^{-1}$)', fontsize=15)\n",
"for i in range(1, 4):\n",
" ax[0, i].set_yticks([])\n",
" ax[0, i].set_xticks([])\n",
" ax[1, i].tick_params(axis='both', labelsize=15, length=8, width=2)\n",
" ax[1, i].set_yticks([])\n",
" ax[1, i].set_xlabel('$k_x$ ($\\mathrm{\\AA}^{-1}$)', fontsize=15)\n",
" \n",
"cs00 = ax[0, 0].contourf(kxx, kyy, rcfb['bands'][0,ofs+1:-ofs+1,ofs:-ofs], 23, cmap=custom_cmap, vmax=-0.7, vmin=-1.2+minsft)\n",
"ax[0, 0].contour(cs00, colors='k', linestyles='-', linewidths=0.5)\n",
"ax[0, 0].text(0.65, 0.9, 'Band #1', fontsize=15, transform=ax[0,0].transAxes)\n",
"\n",
"cs10 = ax[1, 0].contourf(kxx, kyy, rcfb['bands'][1,ofs+1:-ofs+1,ofs:-ofs], 23, cmap=custom_cmap, vmax=-1.15, vmin=-1.55+minsft)\n",
"ax[1, 0].contour(cs10, colors='k', linestyles='-', linewidths=0.5)\n",
"ax[1, 0].text(0.65, 0.9, 'Band #2', fontsize=15, transform=ax[1,0].transAxes)\n",
" \n",
"cs01 = ax[0, 1].contourf(kxx, kyy, rfb['bands'][0,ofs+1:-ofs+1,ofs:-ofs], 23, cmap=custom_cmap, vmax=-0.7, vmin=-1.2+minsft)\n",
"ax[0, 1].contour(cs01, colors='k', linestyles='-', linewidths=0.5)\n",
"\n",
"cs11 = ax[1, 1].contourf(kxx, kyy, rfb['bands'][1,ofs+1:-ofs+1,ofs:-ofs], 23, cmap=custom_cmap, vmax=-1.15, vmin=-1.55+minsft)\n",
"ax[1, 1].contour(cs11, colors='k', linestyles='-', linewidths=0.5)\n",
"\n",
"cs02 = ax[0, 2].contourf(kxx, kyy, fitbs['b1'], 23, cmap=custom_cmap, vmax=-0.7, vmin=-1.2+minsft)\n",
"ax[0, 2].contour(cs02, colors='k', linestyles='-', linewidths=0.5)\n",
"\n",
"cs12 = ax[1, 2].contourf(kxx, kyy, fitbs['b2'], 23, cmap=custom_cmap, vmax=-1.15, vmin=-1.55+minsft)\n",
"ax[1, 2].contour(cs12, colors='k', linestyles='-', linewidths=0.5)\n",
"\n",
"cs03 = ax[0, 3].contourf(kxx, kyy, 1e3*(fitbs['b1'] - rfb['bands'][0,ofs+1:-ofs+1,ofs:-ofs]), 23, cmap='RdBu_r', vmax=12, vmin=-12)\n",
"ax[0, 3].contour(cs03, colors='k', linestyles='-', linewidths=0.5)\n",
"\n",
"cs13 = ax[1, 3].contourf(kxx, kyy, 1e3*(fitbs['b2'] - rfb['bands'][1,ofs+1:-ofs+1,ofs:-ofs]), 23, cmap='RdBu_r', vmax=12, vmin=-12)\n",
"ct = ax[1, 3].contour(cs13, colors='k', linestyles='-', linewidths=0.5)\n",
"# cbar = f.colorbar(ct)\n",
"cax = inset_axes(ax[1,3], width=\"3%\", height=\"30%\", bbox_to_anchor=(585, -210, 350, 400))\n",
"# cb = plt.colorbar(cs13, cax=cax, ticks=np.arange(-15, 16, 5))\n",
"cb = plt.colorbar(cs13, cax=cax, ticks=np.arange(-12, 13, 4))\n",
"cb.ax.tick_params(axis='both', labelsize=15, length=8, width=2)\n",
"cb.ax.set_title('meV', fontsize=15, x=1.2)\n",
"\n",
"plt.subplots_adjust(hspace=0.08, wspace=0.08)\n",
"plt.savefig('../results/figures/fig_5e.png', bbox_inches='tight', transparent=True, dpi=300)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.13"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
| Unknown |
3D | mpes-kit/fuller | figures/SFig6_Tests_on_synthetic_3D_data.ipynb | .ipynb | 16,138 | 438 | {
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Tests of the Markov random field model for reconstructing 3D synthetic data"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"import warnings as wn\n",
"wn.filterwarnings(\"ignore\")\n",
"\n",
"import numpy as np\n",
"import fuller\n",
"import matplotlib.pyplot as plt\n",
"from mpes import analysis as aly\n",
"import matplotlib as mpl\n",
"import matplotlib.gridspec as gridspec\n",
"from mpl_toolkits.mplot3d import Axes3D\n",
"import matplotlib.tri as mtri\n",
"import tifffile as ti\n",
"from scipy import io\n",
"%matplotlib inline"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"mpl.rcParams['font.family'] = 'sans-serif'\n",
"mpl.rcParams['font.sans-serif'] = 'Arial'\n",
"mpl.rcParams['axes.linewidth'] = 2\n",
"mpl.rcParams['pdf.fonttype'] = 42\n",
"mpl.rcParams['ps.fonttype'] = 42\n",
"\n",
"colornames = ['#646464', '#666666', '#6a6a6a', '#6f6f6f', '#737373', '#787878', '#7d7d7d', '#828282', '#878787', '#8d8d8d', '#929292', '#989898', '#9e9e9e', '#a4a4a4', '#aaaaaa', '#b0b0b0', '#b6b6b6', '#bcbcbc', '#c2c2c2', '#c9c9c9', '#cfcfcf', '#d6d6d6', '#dcdcdc', '#e3e3e3', '#eaeaea', '#efefee', '#efeee5', '#efeddc', '#efecd3', '#eeebca', '#eeeac0', '#eee9b7', '#eee8ad', '#ede7a4', '#ede69a', '#ede590', '#ede487', '#ece37d', '#ece273', '#ece069', '#ecdf5f', '#ebde55', '#ebdd4b', '#ebdc41', '#ebdb37', '#ebd333', '#ebc933', '#ecbe32', '#ecb432', '#eda931', '#ee9e31', '#ee9330', '#ef8830', '#ef7d2f', '#f0722f', '#f0672e', '#f15c2e', '#f2512d', '#f2462d', '#f33b2c', '#f3302c', '#f4252b', '#f4192b', '#ef182f', '#e81834', '#e21939', '#db1a3e', '#d51a43', '#ce1b48', '#c71b4d', '#c11c52', '#ba1c58', '#b31d5d', '#ac1d62', '#a61e67', '#9f1e6c', '#981f72', '#911f77', '#8a207c', '#842182']\n",
"custom_cmap = mpl.colors.LinearSegmentedColormap.from_list('custom', colornames, N=256)\n",
"\n",
"# Create plot folder if needed\n",
"if not os.path.exists('../results/figures'):\n",
" os.mkdir('../results/figures')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## 3D single band: Second-order Griewank function"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Data generation"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"kx = np.arange(-6, 6, 0.04)\n",
"ky = np.arange(-6, 6, 0.04)\n",
"kyy, kxx = np.meshgrid(kx, ky)\n",
"\n",
"band_griewank = 1/4000*sum((kxx/2)**2 + (kyy/2)**2) - np.cos(2*kxx)*np.cos(2*(np.sqrt(2)/2)*kyy) - 1.5\n",
"b3d_min, b3d_max = band_griewank.min(), band_griewank.max()\n",
"\n",
"Evals = np.linspace(b3d_min-2, b3d_max+2, 400)\n",
"pes_data_3d = aly.voigt(feval=True, vardict={'amp':1, 'xvar':Evals[:, None, None],\n",
" 'ctr':band_griewank, 'sig':1, 'gam':0.3})\n",
"\n",
"plt.imshow(pes_data_3d[:, :, 150], cmap='Blues')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Reconstruction"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Create model\n",
"I = np.transpose(pes_data_3d, (1, 2, 0))\n",
"I = I / I.max()\n",
"E0 = np.zeros_like(band_griewank)\n",
"\n",
"mrf = fuller.mrfRec.MrfRec(E=Evals, kx=kx, ky=ky, I=I, E0=E0, eta=1)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Perform reconstruction\n",
"mrf.iter_para(200)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
    "### Supplementary Figure 6d"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Summary plot\n",
"\n",
"recon3d = mrf.getEb()\n",
"\n",
"gs = gridspec.GridSpec(1, 3, width_ratios=[5,5,5])\n",
"fig = plt.figure(figsize=(10, 4))\n",
"axs = []\n",
"for i in range(3):\n",
" axs.append(fig.add_subplot(gs[i]))\n",
" \n",
"im = axs[0].imshow(band_griewank, aspect=1, extent=[-6, 6, -6, 6], vmin=-2.2, vmax=0, cmap=custom_cmap)\n",
"axs[0].set_yticks(range(-6, 7, 2))\n",
"axs[0].set_ylabel('$k_y$ (a.u.)', fontsize=15)\n",
"axs[0].set_title('Ground truth', fontsize=15)\n",
"cax = fig.add_axes([0.93, 0.2, 0.02, 0.2])\n",
"cb = fig.colorbar(im, cax=cax, orientation='vertical', ticks=np.arange(-2, 0.1, 1))\n",
"cb.ax.set_title('Energy\\n(a.u.)', fontsize=15, pad=10)\n",
"cb.ax.tick_params(axis='both', length=8, width=2, labelsize=15)\n",
"\n",
"axs[1].imshow(E0, cmap=custom_cmap, extent=[-6, 6, -6, 6], vmin=-2.2, vmax=0)\n",
"axs[1].set_title('Initialization', fontsize=15)\n",
"axs[1].tick_params(axis='y', length=0)\n",
"axs[1].set_yticks([])\n",
"\n",
"axs[2].imshow(recon3d, aspect=1, extent=[-6, 6, -6, 6], vmin=-2.2, vmax=0, cmap=custom_cmap)\n",
"axs[2].set_yticks([])\n",
"axs[2].set_title('Reconstruction', fontsize=15)\n",
"\n",
"for i in [0,1,2]:\n",
" axs[i].set_xticks(range(-6, 7, 2))\n",
" axs[i].set_xlabel('$k_x$ (a.u.)', fontsize=15)\n",
" axs[i].tick_params(axis='both', length=8, width=2, labelsize=15)\n",
"\n",
"plt.subplots_adjust(wspace=0.15)\n",
"plt.savefig('../results/figures/sfig_6d1.png', dpi=300, bbox_inches='tight', transparent=True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"f, ax = plt.subplots(figsize=(4, 3))\n",
"im = ax.imshow(recon3d - band_griewank, cmap='RdBu_r', vmax=0.1, vmin=-0.1, extent=[-6, 6, -6, 6])\n",
"ax.tick_params(axis='both', length=8, width=2, labelsize=15)\n",
"ax.set_xticks(range(-6, 7, 2))\n",
"ax.set_xlabel('$k_x$ (a.u.)', fontsize=15)\n",
"ax.set_yticks(range(-6, 7, 2))\n",
"ax.set_ylabel('$k_y$ (a.u.)', fontsize=15, rotation=-90, labelpad=20)\n",
"ax.yaxis.set_label_position(\"right\")\n",
"ax.yaxis.tick_right()\n",
"ax.set_title('Difference', fontsize=15)\n",
"cax = f.add_axes([-0.02, 0.53, 0.05, 0.25])\n",
"cb = plt.colorbar(im, cax=cax, orientation='vertical')\n",
"cb.ax.tick_params(axis='both', length=8, width=2, labelsize=15)\n",
"cb.ax.set_title('Energy\\n(a.u.)', fontsize=15, pad=10)\n",
"plt.savefig('../results/figures/sfig_6d2.png', dpi=300, bbox_inches='tight', transparent=True)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
    "## 3D band near-crossing: graphene band structure near the Fermi level"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Data generation"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"kx = np.arange(-1, 1, 0.01)\n",
"ky = np.arange(-1, 1, 0.01)\n",
"kyy, kxx = np.meshgrid(kx, ky)\n",
"\n",
"sq3 = np.sqrt(3)\n",
"t, a = 1, 2*np.pi / (sq3)\n",
"band_graphene = 1 + 4 * (np.cos(sq3 * kyy * a / 2) ** 2) + 4 * np.cos(sq3 * kyy * a / 2) * np.cos(3 * kxx * a / 2)\n",
"band_graphene[band_graphene < 0] = 1.e-10\n",
"band_graphene_upper = t*np.sqrt(band_graphene)\n",
"band_graphene_lower = - t*np.sqrt(band_graphene)\n",
"b3d_max, b3d_min = band_graphene_upper.max(), band_graphene_lower.min()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"f = plt.figure(figsize=(6, 5))\n",
"ax = f.add_subplot(111, projection='3d')\n",
"\n",
"tri = mtri.Triangulation(kyy.flatten(), kxx.flatten())\n",
"ax.plot_trisurf(kxx.flatten(), kyy.flatten(), band_graphene_upper.flatten(),\n",
" triangles=tri.triangles, cmap=custom_cmap, antialiased=False)\n",
"ax.plot_trisurf(kxx.flatten(), kyy.flatten(), band_graphene_lower.flatten(),\n",
" triangles=tri.triangles, cmap=custom_cmap, antialiased=False)\n",
"\n",
"ax.set_xlabel('$k_x$', labelpad=15)\n",
"ax.set_ylabel('$k_y$', labelpad=15)\n",
"ax.set_zlabel('Energy', labelpad=15);"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"Evals = np.linspace(b3d_min-2, b3d_max+2, 400)\n",
"pes_data_3d_upper = aly.voigt(feval=True, vardict={'amp':1, 'xvar':Evals[:, None, None],\n",
" 'ctr':band_graphene_upper, 'sig':0.2, 'gam':0.3})\n",
"pes_data_3d_lower = aly.voigt(feval=True, vardict={'amp':1, 'xvar':Evals[:, None, None],\n",
" 'ctr':band_graphene_lower, 'sig':0.2, 'gam':0.3})\n",
"pes_data_3d = pes_data_3d_upper + pes_data_3d_lower\n",
"\n",
"plt.imshow(pes_data_3d[:, 90, :], aspect=0.15, extent=[-1, 1, b3d_min-2, b3d_max+2], cmap='Blues')\n",
"plt.xlabel('$k_x$', fontsize=15)\n",
"plt.ylabel('Energy', fontsize=15)\n",
"plt.tick_params(axis='both', length=8, width=2, labelsize=15)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Reconstruction"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"I = np.transpose(pes_data_3d, (1, 2, 0))\n",
"I = I / I.max()\n",
"\n",
"results = np.zeros((2,) + band_graphene.shape)\n",
"E0 = np.ones((2,) + band_graphene.shape) * 4\n",
"E0[1, :, :] *= -1\n",
"\n",
"for i in range(2):\n",
" mrf = fuller.mrfRec.MrfRec(E=Evals, kx=kx, ky=ky, I=I, E0=E0[i,...], eta=0.3)\n",
" mrf.iter_para(200)\n",
" results[i,...] = mrf.getEb()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
    "### Supplementary Figure 6f"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Summary plot\n",
"recon3d_upper = results[0,...]\n",
"recon3d_lower = results[1,...]\n",
"\n",
"init_upper = E0[0,...]\n",
"init_lower = E0[1,...]\n",
"\n",
"gs = gridspec.GridSpec(2, 3)\n",
"fig = plt.figure(figsize=(9.8, 6.5))\n",
"axs = []\n",
"for i in range(6):\n",
" axs.append(fig.add_subplot(gs[i]))\n",
" \n",
"axs[0].imshow(band_graphene_upper, aspect=1, extent=[-1, 1, -1, 1], vmin=0, vmax=3, cmap=custom_cmap)\n",
"axs[0].set_yticks(np.arange(-1, 1.1, 0.5))\n",
"axs[0].set_ylabel('$k_y$ $(\\mathrm{\\AA^{-1}})$', fontsize=15)\n",
"axs[0].set_title('Ground truth', fontsize=15)\n",
"axs[0].tick_params(axis='both', length=8, width=2, labelsize=15)\n",
"axs[0].text(0.15, 0.9, 'Conduction Band', fontsize=15, transform=axs[0].transAxes)\n",
"\n",
"axs[1].imshow(init_upper, cmap=custom_cmap, aspect=1, extent=[-1, 1, -1, 1], vmin=0, vmax=3)\n",
"axs[1].set_title('Initialization', fontsize=15)\n",
"axs[1].tick_params(axis='y', length=0)\n",
"axs[1].set_yticks([])\n",
"\n",
"imu = axs[2].imshow(recon3d_upper, aspect=1, extent=[-1, 1, -1, 1], vmin=0, vmax=3, cmap=custom_cmap)\n",
"axs[2].set_yticks([])\n",
"axs[2].set_title('Reconstruction', fontsize=15)\n",
"axs[2].yaxis.set_label_position(\"right\")\n",
"\n",
"# Upper band colorbar\n",
"caxu = fig.add_axes([0.94, 0.5, 0.02, 0.12])\n",
"cbu = fig.colorbar(imu, cax=caxu, orientation='vertical', ticks=np.arange(0, 3.1, 1))\n",
"cbu.ax.set_title('Energy\\n(eV)', fontsize=15, pad=10)\n",
"cbu.ax.tick_params(axis='both', length=8, width=2, labelsize=15)\n",
"\n",
"iml = axs[3].imshow(band_graphene_lower, aspect=1, extent=[-1, 1, -1, 1], vmin=-3, vmax=0, cmap=custom_cmap)\n",
"axs[3].set_yticks(np.arange(-1, 1.1, 0.5))\n",
"axs[3].set_ylabel('$k_y$ $(\\mathrm{\\AA^{-1}})$', fontsize=15)\n",
"axs[3].text(0.3, 0.9, 'Valence Band', fontsize=15, transform=axs[3].transAxes)\n",
"\n",
"axs[4].imshow(init_lower, cmap=custom_cmap, aspect=1, extent=[-1, 1, -1, 1], vmin=-3, vmax=0)\n",
"axs[4].tick_params(axis='y', length=0)\n",
"axs[4].set_yticks([])\n",
"\n",
"axs[5].imshow(recon3d_lower, aspect=1, extent=[-1, 1, -1, 1], vmin=-3, vmax=0, cmap=custom_cmap)\n",
"axs[5].set_yticks([])\n",
"axs[5].yaxis.set_label_position(\"right\")\n",
"\n",
"# Lower band colorbar\n",
"caxl = fig.add_axes([0.94, 0.03, 0.02, 0.12])\n",
"cbl = fig.colorbar(iml, cax=caxl, orientation='vertical', ticks=np.arange(-3, 0.1, 1))\n",
"cbl.ax.set_title('Energy\\n(eV)', fontsize=15, pad=10)\n",
"cbl.ax.tick_params(axis='both', length=8, width=2, labelsize=15)\n",
"\n",
"for i in [0, 1, 2]:\n",
" axs[i].set_xticks([])\n",
" \n",
"for i in [3, 4, 5]:\n",
" axs[i].set_xticks(np.arange(-1, 1.1, 0.5))\n",
" axs[i].set_xlabel('$k_x$ $(\\mathrm{\\AA^{-1}})$', fontsize=15)\n",
" axs[i].tick_params(axis='both', length=8, width=2, labelsize=15)\n",
" \n",
"plt.subplots_adjust(hspace=0.18, wspace=0.1)\n",
"plt.savefig('../results/figures/sfig_6f1.png', dpi=300, bbox_inches='tight', transparent=True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"f, ax = plt.subplots(figsize=(4, 3))\n",
"im = ax.imshow(recon3d_upper - band_graphene_upper, cmap='RdBu_r', vmax=0.1, vmin=-0.1, extent=[-1, 1, -1, 1])\n",
"ax.tick_params(axis='both', length=8, width=2, labelsize=15)\n",
"ax.set_xticks(np.arange(-1, 1.1, 0.5))\n",
"ax.set_xlabel('$k_x$ $(\\mathrm{\\AA^{-1}})$', fontsize=15)\n",
"ax.set_yticks(np.arange(-1, 1.1, 0.5))\n",
"ax.set_ylabel('$k_y$ $(\\mathrm{\\AA^{-1}})$', fontsize=15, rotation=-90, labelpad=20)\n",
"ax.yaxis.set_label_position(\"right\")\n",
"ax.yaxis.tick_right()\n",
"ax.set_title('Difference', fontsize=15)\n",
"cax = f.add_axes([-0.02, 0.53, 0.05, 0.25])\n",
"cb = plt.colorbar(im, cax=cax, orientation='vertical')\n",
"cb.ax.tick_params(axis='both', length=8, width=2, labelsize=15)\n",
"cb.ax.set_title('Energy\\n(eV)', fontsize=15, pad=10)\n",
"plt.savefig('../results/figures/sfig_6f2.png', dpi=300, bbox_inches='tight', transparent=True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"f, ax = plt.subplots(figsize=(4, 3))\n",
"im = ax.imshow(recon3d_lower - band_graphene_lower, cmap='RdBu_r', vmax=0.1, vmin=-0.1, extent=[-1, 1, -1, 1])\n",
"ax.tick_params(axis='both', length=8, width=2, labelsize=15)\n",
"ax.set_xticks(np.arange(-1, 1.1, 0.5))\n",
"ax.set_xlabel('$k_x$ $(\\mathrm{\\AA^{-1}})$', fontsize=15)\n",
"ax.set_yticks(np.arange(-1, 1.1, 0.5))\n",
"ax.set_ylabel('$k_y$ $(\\mathrm{\\AA^{-1}})$', fontsize=15, rotation=-90, labelpad=20)\n",
"ax.yaxis.set_label_position(\"right\")\n",
"ax.yaxis.tick_right()\n",
"ax.set_title('Difference', fontsize=15)\n",
"cax = f.add_axes([-0.02, 0.53, 0.05, 0.25])\n",
"cb = plt.colorbar(im, cax=cax, orientation='vertical')\n",
"cb.ax.tick_params(axis='both', length=8, width=2, labelsize=15)\n",
"cb.ax.set_title('Energy\\n(eV)', fontsize=15, pad=10)\n",
"plt.savefig('../results/figures/sfig_6f3.png', dpi=300, bbox_inches='tight', transparent=True)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.13"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
| Unknown |
3D | mpes-kit/fuller | figures/SFig9_Reconstruction_with_scaled_theory.ipynb | .ipynb | 9,468 | 247 | {
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Reconstruction for synthetic data with scaled theoretical band structure (LDA-DFT) as initialization"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import warnings as wn\n",
"wn.filterwarnings(\"ignore\")\n",
"\n",
"import os\n",
"import numpy as np\n",
"from numpy import nan_to_num as n2n\n",
"import fuller\n",
"from mpes import fprocessing as fp, analysis as aly\n",
"import matplotlib.pyplot as plt\n",
"import matplotlib as mpl\n",
"from natsort import natsorted\n",
"import glob as g\n",
"from mpl_toolkits.axes_grid1 import make_axes_locatable\n",
"from mpl_toolkits.axes_grid1.inset_locator import inset_axes\n",
"from matplotlib.ticker import AutoMinorLocator\n",
"%matplotlib inline"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"mpl.rcParams['font.family'] = 'sans-serif'\n",
"mpl.rcParams['font.sans-serif'] = 'Arial'\n",
"mpl.rcParams['axes.linewidth'] = 2\n",
"mpl.rcParams['pdf.fonttype'] = 42\n",
"mpl.rcParams['ps.fonttype'] = 42\n",
"\n",
"# Create plot folder if needed\n",
"if not os.path.exists('../results/figures'):\n",
" os.mkdir('../results/figures')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Load synthetic data and ground truth band structure (gt)\n",
"data = fuller.utils.loadHDF(r'../data/synthetic/synth_data_WSe2_LDA_top8.h5')\n",
"gtbands = data['bands_padded']\n",
"kxvals, kyvals = data['kx'], data['ky']\n",
"msk = data['mask_tight']"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Supplementary Figure 9d"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Load reconstructions and corresponding initializations\n",
"scales = ['0.8', '1.2']\n",
"\n",
"recons, inits = {}, {}\n",
"for isc, sc in enumerate(scales):\n",
" scalestr = str(sc)\n",
" files = fuller.utils.findFiles(r'../data/synthetic/sc='+scalestr+'_lda', fstring=r'/*')\n",
" recon = []\n",
" for f in files:\n",
" recon.append(fuller.utils.loadH5Parts(f, ['bands/Eb'], outtype='vals'))\n",
" \n",
" recons[scalestr] = np.squeeze(np.array(recon))\n",
" inits[scalestr] = data[scalestr]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Calculate errors in initialization (einit) and reconstruction (erec)\n",
"bands_tight = fuller.utils.trim_2d_edge(gtbands, edges=24, axes=(1, 2))\n",
"erec = fuller.metrics.abserror(recons, bands_tight, [0.8, 1.2], ofs=24, mask=msk, outkeys=[0.8, 1.2], ret='dict')\n",
"einit = fuller.metrics.abserror(inits, bands_tight, [0.8, 1.2], ofs=24, mask=msk, outkeys=[0.8, 1.2], ret='dict')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Plot comparison between reconstruction using scaled DFT calculations\n",
"dt=0.08 # horizontal jitter amplitude in visualization (to separate overlapping points)\n",
"f, ax = plt.subplots(figsize=(8.5, 5.5))\n",
"for i in range(8):\n",
" ax.axvline(x=i+1, ls='--', lw=1, c='g', zorder=0)\n",
" \n",
" if i < 7:\n",
" ax.scatter(i+1-dt, einit['0.8'][i]*1000, s=100, facecolors='b', edgecolors='b', lw=2, zorder=1)\n",
" ax.scatter(i+1-dt, erec['0.8'][i]*1000, s=100, facecolors='w', edgecolors='b', lw=2, zorder=1)\n",
"\n",
" ax.scatter(i+1+dt, einit['1.2'][i]*1000, s=100, facecolors='k', edgecolors='k', lw=2, zorder=1)\n",
" ax.scatter(i+1+dt, erec['1.2'][i]*1000, s=100, facecolors='w', edgecolors='k', lw=2, zorder=1)\n",
" \n",
" if i == 7:\n",
" ax.scatter(i+1-dt, einit['0.8'][i]*1000, s=100, facecolors='b', edgecolors='b', lw=2, zorder=1,\n",
" label=r'Scaled LDA (0.8$\\times$)')\n",
" ax.scatter(i+1-dt, erec['0.8'][i]*1000, s=100, facecolors='w', edgecolors='b', lw=2, zorder=1,\n",
" label=r'Recon. with 0.8$\\times$')\n",
"\n",
" ax.scatter(i+1+dt, einit['1.2'][i]*1000, s=100, facecolors='k', edgecolors='k', lw=2, zorder=1,\n",
" label=r'Scaled LDA (1.2$\\times$)')\n",
" ax.scatter(i+1+dt, erec['1.2'][i]*1000, s=100, facecolors='w', edgecolors='k', lw=2, zorder=1,\n",
" label=r'Recon. with 1.2$\\times$')\n",
" \n",
"ax.set_ylabel('Average error $\\eta_{\\mathrm{avg}}$ wrt ground truth (meV)', fontsize=18)\n",
"ax.set_yticks(range(0, 181, 20))\n",
"ax.tick_params(axis='both', length=8, width=2, labelsize=15)\n",
"ax.set_xticks(range(1, 9))\n",
"ax.set_xlabel('Band index', fontsize=18)\n",
"ax.set_ylim([0, 180])\n",
"ax.set_title('Reconstruction from scaled LDA calculations', fontsize=18)\n",
"lg = ax.legend(bbox_transform=ax.transAxes, bbox_to_anchor=(0.45, 0.93), frameon=True, fontsize=15,\n",
" facecolor='w', labelspacing=0.2, handletextpad=0.3)\n",
"frame = lg.get_frame()\n",
"frame.set_facecolor('w')\n",
"frame.set_edgecolor('k')\n",
"frame.set_linewidth(2)\n",
"plt.savefig(r'../results/figures/sfig_9d.png', bbox_inches='tight', transparent=True, dpi=300)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Supplementary Figure 9f"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
    "# Using known positions (see notebooks in /code/extra/ for ways to obtain these)\n",
"G = np.array([127.0, 127.27828129766911])\n",
"K = np.array([ 23.83002655, 127. ])\n",
"M = np.array([ 49.38033047, 171.8133136 ])\n",
"\n",
"pathPoints = np.asarray([G, M, K, G])\n",
"nGM, nMK, nKG = 70, 39, 79\n",
"segPoints = [nGM, nMK, nKG]\n",
"rowInds, colInds, pathInds = aly.points2path(pathPoints[:,0], pathPoints[:,1], npoints=segPoints)\n",
"nSegPoints = len(rowInds)\n",
"\n",
"pdGT = aly.bandpath_map(np.moveaxis(gtbands, 0, 2), pathr=rowInds, pathc=colInds, eaxis=2)\n",
"pdInit = aly.bandpath_map(np.moveaxis(inits['0.8'], 0, 2), pathr=rowInds, pathc=colInds, eaxis=2)\n",
"pdMPES = aly.bandpath_map(np.moveaxis(data['mpes_padded'], 0, 2), pathr=rowInds, pathc=colInds, eaxis=2)\n",
"Emin, Emax = data['E'].min(), data['E'].max()\n",
"\n",
"# Symmetrize the reconstructed bands\n",
"symrecbands = []\n",
"for i in range(8):\n",
" symmed = fuller.generator.rotosymmetrize(recons['0.8'][i,...], (127.5, 127.5), rotsym=6)[0]\n",
" symrecbands.append(fuller.generator.refsym(symmed[None,...], op='nanmean', pbar=False)[0,...])\n",
"symrecbands = np.asarray(symrecbands)\n",
"pdRecon = aly.bandpath_map(np.moveaxis(symrecbands, 0, 2), pathr=rowInds, pathc=colInds, eaxis=2)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Plot comparison between initialization, reconstruction and ground truth along high-symmetry lines\n",
"xaxis = np.array(range(rowInds.size))\n",
"pos = pathInds.copy()\n",
"pos[-1] -= 1\n",
"\n",
"f, ax = plt.subplots(figsize=(8.3, 6))\n",
"imax = ax.imshow(pdMPES, cmap='Blues', origin='lower', extent=[0, nSegPoints, Emin, Emax], aspect=22, vmax=7, zorder=0)\n",
"ax.plot(pdGT.T, c='k', zorder=2)\n",
"ax.plot(pdRecon.T, c='r', zorder=3)\n",
"ax.plot(pdInit.T + 0.12, '--', c='g', zorder=1)\n",
"\n",
"ax.plot(xaxis, pdGT[-1, :], c='k', zorder=2, label='Ground truth (LDA)')\n",
"ax.plot(xaxis, pdInit[-1, :] + 0.12, '--', c='g', zorder=1, label=r'Initial. (0.8$\\times$)')\n",
"ax.plot(xaxis, pdRecon[-1, :], c='r', zorder=3, label='Reconstruction')\n",
"\n",
"ax.tick_params(axis='y', length=8, width=2, labelsize=15)\n",
"ax.tick_params(axis='x', length=0, width=0, labelsize=15, pad=8)\n",
"ax.set_xlim([pos[0], pos[-1]])\n",
"ax.set_xticks(pos)\n",
"ax.set_xticklabels(['$\\overline{\\Gamma}$', '$\\overline{\\mathrm{M}}$',\n",
" '$\\overline{\\mathrm{K}}$', '$\\overline{\\Gamma}$'])\n",
"ax.set_ylabel('Energy (eV)', fontsize=18)\n",
"# ax.set_ylim([])\n",
"for p in pos[:-1]:\n",
" ax.axvline(x=p, c='k', ls='--', lw=2, dashes=[4, 2])\n",
"ax.legend(loc='lower left', frameon=False, fontsize=15,\n",
" facecolor='None', labelspacing=0.2, handletextpad=0.3, borderpad=0)\n",
"plt.savefig('../results/figures/sfig_9f.png', bbox_inches='tight', transparent=True, dpi=300)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.13"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
| Unknown |
3D | mpes-kit/fuller | figures/SFig14_Approximation_along_high-symmetry_lines.ipynb | .ipynb | 10,595 | 331 | {
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Approximation of reconstructed bands viewed from high-symmetry lines"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import warnings as wn\n",
"wn.filterwarnings(\"ignore\")\n",
"\n",
"import os\n",
"import numpy as np\n",
"import fuller\n",
"from mpes import analysis as aly\n",
"import matplotlib.pyplot as plt\n",
"import matplotlib as mpl\n",
"from tqdm import tqdm_notebook as tqdm\n",
"%matplotlib inline"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# mpl.rcParams['font.family'] = 'sans-serif'\n",
"# mpl.rcParams['font.sans-serif'] = 'Arial'\n",
"mpl.rcParams['axes.linewidth'] = 2\n",
"mpl.rcParams['pdf.fonttype'] = 42\n",
"mpl.rcParams['ps.fonttype'] = 42\n",
"\n",
"# Create plot folder if needed\n",
"if not os.path.exists('../results/figures'):\n",
" os.mkdir('../results/figures')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Generate hexagonal Zernike basis\n",
"bss = fuller.generator.ppz.hexike_basis(nterms=400, npix=175, vertical=True, outside=0)\n",
"\n",
"with np.load('../data/processed/LDARecon_Approx_Polynomials.npz') as fl:\n",
" bandcuts = fl['bands']"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"center = np.array([88.04458508, 85.93048041])\n",
"corners = np.array([[ 43.96685519, 162.03029721],\n",
" [132.1174288 , 162.02202934],\n",
" [175.73285949, 85.93552008],\n",
" [132.11926251, 9.83338561],\n",
" [ 43.96347185, 9.85004975],\n",
" [ 0.36015304, 85.93011921]])"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Define high-symmetry points\n",
"G = center\n",
"K = corners[0,:]\n",
"K1 = corners[1,:]\n",
"M = (K + K1) / 2\n",
"\n",
"# Define high-symmetry lines (k-path)\n",
"pathPoints = np.asarray([G, M, K, G])\n",
"nGM, nMK, nKG = 70, 39, 79\n",
"segPoints = [nGM, nMK, nKG]\n",
"rowInds, colInds, pathInds = aly.points2path(pathPoints[:,0], pathPoints[:,1], npoints=segPoints)\n",
"nSegPoints = len(rowInds)\n",
"bandDiagramRecon = aly.bandpath_map(np.moveaxis(bandcuts, 0, 2), pathr=rowInds, pathc=colInds, eaxis=2)\n",
"\n",
"lda_shift = -0.86813 + 0.15 # Global energy shift"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Test plot\n",
"plt.plot(bandDiagramRecon.T)\n",
"plt.axis('off');"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
    "### Approximation of all 14 energy bands by polynomials in default order"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
    "# Approximation by polynomials in default (term) order: truncate the coefficient vector as is\n",
"allbcf = []\n",
"allapprox, approx = {}, []\n",
"nterms = [5, 45, 150]\n",
"for iband in tqdm(range(14)):\n",
" brec = np.nan_to_num(bandcuts[iband,...])\n",
" bcf = fuller.generator.decomposition_hex2d(brec, nterms=400, bases=bss, ret='coeffs')\n",
" \n",
" allbcf.append(bcf)\n",
" for nt in nterms:\n",
" currcf = bcf.copy()\n",
" currcf[nt:] = 0\n",
" recon = fuller.generator.reconstruction_hex2d(currcf, bss)\n",
" try:\n",
" allapprox[str(nt)].append(recon)\n",
" except:\n",
" allapprox[str(nt)] = []\n",
" allapprox[str(nt)].append(recon)\n",
"\n",
"allbcf = np.asarray(allbcf)\n",
"for k, v in allapprox.items():\n",
" allapprox[k] = np.asarray(v)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"bci_approx = {}\n",
"for k in allapprox.keys():\n",
" bci_approx[k] = aly.bandpath_map(np.moveaxis(allapprox[k], 0, 2), pathr=rowInds, pathc=colInds, eaxis=2)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Supplementary Figure 14i"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"f, axs = plt.subplots(3, 1, figsize=(7.5, 14))\n",
"\n",
"for i, nt in enumerate(nterms):\n",
" axs[i].plot(bandDiagramRecon.T - lda_shift, c='b')\n",
" axs[i].plot(bandDiagramRecon[-1,:] - lda_shift, c='b', label='Reconstruction')\n",
" axs[i].plot(bci_approx[str(nt)].T - lda_shift, c='r')\n",
" axs[i].plot(bci_approx[str(nt)][-1,:] - lda_shift, c='r', label='Approximation')\n",
" axs[i].set_xlim([0, nSegPoints])\n",
" axs[i].tick_params(axis='y', length=8, width=2, labelsize=15)\n",
" axs[i].set_yticks(np.arange(-8, 0.1, 1))\n",
" axs[i].set_ylim([-7.5, 0.8])\n",
" axs[i].set_ylabel('Energy (eV)', fontsize=15)\n",
" axs[i].set_title(str(nt)+' terms', x=0.76, y=0.9, fontsize=15)\n",
" axs[i].legend(fontsize=15, frameon=False, borderpad=0, bbox_to_anchor=(0.5, 0.2))\n",
"# axs[i].axhline(y=0, lw=2, c='k', ls='dashed')\n",
" if i < 2:\n",
" axs[i].tick_params(axis='x', length=0, labelsize=0)\n",
" \n",
" for p in pathInds[:-1]:\n",
" axs[i].axvline(x=p, c='k', ls='--', lw=2, dashes=[4, 2])\n",
"\n",
"axs[-1].set_xticks(pathInds)\n",
"axs[-1].set_xticklabels(['$\\overline{\\Gamma}$', '$\\overline{\\mathrm{M}}$',\n",
" '$\\overline{\\mathrm{K}}$', '$\\overline{\\Gamma}$'])\n",
"axs[-1].tick_params(axis='x', length=8, width=2, labelsize=15)\n",
"plt.suptitle('Approximation by polynomials in default order', y=0.9, fontsize=15)\n",
"plt.subplots_adjust(hspace=0.08, wspace=0)\n",
"\n",
"plt.savefig('../results/figures/sfig_14i.png', dpi=300, bbox_inches='tight', transparent=True)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
    "### Approximation of all 14 energy bands by polynomials in coefficient order"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
    "# Approximation by polynomials in coefficient order: terms sorted by descending coefficient magnitude before truncation\n",
"allbcf = []\n",
"allapproxx, approx = {}, []\n",
"nterms = [5, 45, 150]\n",
"for iband in tqdm(range(14)):\n",
" brec = np.nan_to_num(bandcuts[iband,...])\n",
" bcf = fuller.generator.decomposition_hex2d(brec, nterms=400, bases=bss, ret='coeffs')\n",
" \n",
" order = np.argsort(np.abs(bcf))[::-1]\n",
" ordcf = bcf[order]\n",
" ordbss = bss[order,...]\n",
" \n",
" allbcf.append(bcf)\n",
" for nt in nterms:\n",
" currcf = ordcf.copy()\n",
" currcf[nt:] = 0\n",
" recon = fuller.generator.reconstruction_hex2d(currcf, ordbss)\n",
" try:\n",
" allapproxx[str(nt)].append(recon)\n",
" except:\n",
" allapproxx[str(nt)] = []\n",
" allapproxx[str(nt)].append(recon)\n",
"\n",
"allbcf = np.asarray(allbcf)\n",
"for k, v in allapproxx.items():\n",
" allapproxx[k] = np.asarray(v)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"bci_approxx = {}\n",
"for k in allapproxx.keys():\n",
" bci_approxx[k] = aly.bandpath_map(np.moveaxis(allapproxx[k], 0, 2), pathr=rowInds, pathc=colInds, eaxis=2)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Supplementary Figure 14j"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"f, axs = plt.subplots(3, 1, figsize=(7.5, 14))\n",
"\n",
"for i, nt in enumerate(nterms):\n",
" axs[i].plot(bandDiagramRecon.T - lda_shift, c='b')\n",
" axs[i].plot(bandDiagramRecon[-1,:] - lda_shift, c='b', label='Reconstruction')\n",
" axs[i].plot(bci_approxx[str(nt)].T - lda_shift, c='r')\n",
" axs[i].plot(bci_approxx[str(nt)][-1,:] - lda_shift, c='r', label='Approximation')\n",
" axs[i].set_xlim([0, nSegPoints])\n",
" axs[i].tick_params(axis='y', length=8, width=2, labelsize=15)\n",
" axs[i].set_yticks(np.arange(-8, 0.1, 1))\n",
" axs[i].set_ylim([-7.5, 0.8])\n",
" axs[i].set_ylabel('Energy (eV)', fontsize=15)\n",
" axs[i].set_title(str(nt)+' terms', x=0.76, y=0.9, fontsize=15)\n",
" axs[i].legend(fontsize=15, frameon=False, borderpad=0, bbox_to_anchor=(0.5, 0.2))\n",
"# axs[i].axhline(y=0, lw=2, c='k', ls='dashed')\n",
" if i < 2:\n",
" axs[i].tick_params(axis='x', length=0, labelsize=0)\n",
" \n",
" for p in pathInds[:-1]:\n",
" axs[i].axvline(x=p, c='k', ls='--', lw=2, dashes=[4, 2])\n",
"\n",
"axs[-1].set_xticks(pathInds)\n",
"axs[-1].set_xticklabels(['$\\overline{\\Gamma}$', '$\\overline{\\mathrm{M}}$',\n",
" '$\\overline{\\mathrm{K}}$', '$\\overline{\\Gamma}$'])\n",
"axs[-1].tick_params(axis='x', length=8, width=2, labelsize=15)\n",
"plt.suptitle('Approximation by polynomials in coefficient order', y=0.9, fontsize=15)\n",
"plt.subplots_adjust(hspace=0.08, wspace=0)\n",
"\n",
"plt.savefig('../results/figures/sfig_14j.png', dpi=300, bbox_inches='tight', transparent=True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# np.savez('LDARecon_approx_bands.npz', allapprox=allapprox, allapproxord=allapproxx,\n",
"# linecut=bci_approx, linecutord=bci_approxx)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.13"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
| Unknown |
3D | mpes-kit/fuller | figures/SFig9_Reconstruction_with_different_theories.ipynb | .ipynb | 18,997 | 429 | {
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Reconstruction for synthetic data with different DFT band structures (PBE, PBEsol, HSE06) as initialization "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import warnings as wn\n",
"wn.filterwarnings(\"ignore\")\n",
"\n",
"import os\n",
"import numpy as np\n",
"from numpy import nan_to_num as n2n\n",
"import fuller\n",
"from mpes import fprocessing as fp, analysis as aly\n",
"import matplotlib.pyplot as plt\n",
"import matplotlib as mpl\n",
"from natsort import natsorted\n",
"import glob as g\n",
"from mpl_toolkits.axes_grid1 import make_axes_locatable\n",
"from mpl_toolkits.axes_grid1.inset_locator import inset_axes\n",
"from matplotlib.ticker import AutoMinorLocator\n",
"%matplotlib inline"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"mpl.rcParams['font.family'] = 'sans-serif'\n",
"mpl.rcParams['font.sans-serif'] = 'Arial'\n",
"mpl.rcParams['axes.linewidth'] = 2\n",
"mpl.rcParams['pdf.fonttype'] = 42\n",
"mpl.rcParams['ps.fonttype'] = 42\n",
"\n",
"colornames = ['#646464', '#666666', '#6a6a6a', '#6f6f6f', '#737373', '#787878', '#7d7d7d', '#828282', '#878787', '#8d8d8d', '#929292', '#989898', '#9e9e9e', '#a4a4a4', '#aaaaaa', '#b0b0b0', '#b6b6b6', '#bcbcbc', '#c2c2c2', '#c9c9c9', '#cfcfcf', '#d6d6d6', '#dcdcdc', '#e3e3e3', '#eaeaea', '#efefee', '#efeee5', '#efeddc', '#efecd3', '#eeebca', '#eeeac0', '#eee9b7', '#eee8ad', '#ede7a4', '#ede69a', '#ede590', '#ede487', '#ece37d', '#ece273', '#ece069', '#ecdf5f', '#ebde55', '#ebdd4b', '#ebdc41', '#ebdb37', '#ebd333', '#ebc933', '#ecbe32', '#ecb432', '#eda931', '#ee9e31', '#ee9330', '#ef8830', '#ef7d2f', '#f0722f', '#f0672e', '#f15c2e', '#f2512d', '#f2462d', '#f33b2c', '#f3302c', '#f4252b', '#f4192b', '#ef182f', '#e81834', '#e21939', '#db1a3e', '#d51a43', '#ce1b48', '#c71b4d', '#c11c52', '#ba1c58', '#b31d5d', '#ac1d62', '#a61e67', '#9f1e6c', '#981f72', '#911f77', '#8a207c', '#842182']\n",
"custom_cmap = mpl.colors.LinearSegmentedColormap.from_list('custom', colornames, N=256)\n",
"\n",
"# Create plot folder if needed\n",
"if not os.path.exists('../results/figures'):\n",
" os.mkdir('../results/figures')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Load synthetic data and ground truth band structure (gt)\n",
"data = fuller.utils.loadHDF(r'../data/synthetic/synth_data_WSe2_LDA_top8.h5')\n",
"gtbands = data['bands_padded']\n",
"kxvals, kyvals = data['kx'], data['ky']\n",
"msk = data['mask_tight']"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Supplementary Figure 9e"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Load initializations\n",
"xcfs = ['pbe', 'pbesol', 'hse'] # Names of the exchange-correlation functionals (XCF) used in the DFT calculations\n",
"eshifts = {'pbe':0.33063, 'pbesol':0.49865, 'hse':0.10955} # Energy values to zero the band structure at K points\n",
"\n",
"inits = {}\n",
"for xc in xcfs:\n",
" inits[xc] = fuller.utils.loadHDF(r'../data/theory/bands_padded/wse2_'+xc+'_bands_padded.h5')['bands_padded'] + eshifts[xc]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Calculate errors in initialization (einit) and reconstruction (erec)\n",
"reconbands, erec, einit = {}, {}, {}\n",
"nk = np.sum(~np.isnan(msk))\n",
"\n",
"for xc in xcfs:\n",
" \n",
" folderstr = xc + '_lda'\n",
" recons = {}\n",
" bandidx = list(range(0, 8))\n",
" \n",
" for istp, stp in enumerate(bandidx):\n",
" stepstr = str(istp).zfill(2)\n",
" files = fuller.utils.findFiles(r'../data/synthetic' + '//' + folderstr + r'/mrf_rec_band='+stepstr, fstring=r'*')\n",
" recon = []\n",
" recon.append(fuller.utils.loadH5Parts(files[0], ['bands/Eb'], outtype='vals'))\n",
" recons[stepstr] = np.squeeze(np.array(recon))\n",
"\n",
" brecons = [v for k, v in recons.items() if k != 'init']\n",
" brecons = np.asarray(brecons)\n",
" reconbands[xc] = brecons\n",
"\n",
" errinit, errrecon = [], []\n",
" for i in range(8):\n",
" ediff = (inits[xc][i,...] - gtbands[i,...])**2\n",
" ediffrec = (recons[str(i).zfill(2)] - gtbands[i,...])**2\n",
" ediff = fuller.utils.trim_2d_edge(ediff, edges=24)\n",
" ediffrec = fuller.utils.trim_2d_edge(ediffrec, edges=24)\n",
" errinit.append(np.sqrt(np.sum(n2n(msk*ediff) / nk)))\n",
" errrecon.append(np.sqrt(np.sum(n2n(msk*ediffrec) / nk)))\n",
"\n",
" einit[xc] = np.array(errinit)\n",
" erec[xc] = np.array(errrecon)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Plot comparison between reconstruction using different DFT theories\n",
"dt=0.12 # horizontal jitter amplitude in visualization (to separate overlapping points)\n",
"f, ax = plt.subplots(figsize=(7, 10))\n",
"for i in range(8):\n",
" ax.axvline(x=i+1, ls='--', lw=1, c='g', zorder=0)\n",
" \n",
" if i < 7:\n",
" ax.scatter(i+1, einit['pbe'][i]*1000, s=100, facecolors='b', edgecolors='b', lw=2, zorder=1)\n",
" ax.scatter(i+1-dt, erec['pbe'][i]*1000, s=100, facecolors='w', edgecolors='b', lw=2, zorder=1)\n",
"\n",
" ax.scatter(i+1, einit['pbesol'][i]*1000, s=100, facecolors='k', edgecolors='k', lw=2, zorder=1)\n",
" ax.scatter(i+1, erec['pbesol'][i]*1000, s=100, facecolors='w', edgecolors='k', lw=2, zorder=1)\n",
"\n",
" ax.scatter(i+1, einit['hse'][i]*1000, s=100, facecolors='m', edgecolors='m', lw=2, zorder=1)\n",
" ax.scatter(i+1+dt, erec['hse'][i]*1000, s=100, facecolors='w', edgecolors='m', lw=2, zorder=1)\n",
" \n",
" if i == 7:\n",
" ax.scatter(i+1, einit['pbe'][i]*1000, s=100, facecolors='b', edgecolors='b', lw=2, zorder=1, label='PBE calc.')\n",
" ax.scatter(i+1-dt, erec['pbe'][i]*1000, s=100, facecolors='w', edgecolors='b', lw=2, zorder=1, label='PBE recon.')\n",
"\n",
" ax.scatter(i+1, einit['pbesol'][i]*1000, s=100, facecolors='k', edgecolors='k', lw=2, zorder=1, label='PBEsol calc.')\n",
" ax.scatter(i+1, erec['pbesol'][i]*1000, s=100, facecolors='w', edgecolors='k', lw=2, zorder=1, label='PBEsol recon.')\n",
"\n",
" ax.scatter(i+1, einit['hse'][i]*1000, s=100, facecolors='m', edgecolors='m', lw=2, zorder=1, label='HSE06 calc.')\n",
" ax.scatter(i+1+dt, erec['hse'][i]*1000, s=100, facecolors='w', edgecolors='m', lw=2, zorder=1, label='HSE06 recon.')\n",
" \n",
"ax.set_ylabel('Average error $\\eta_{\\mathrm{avg}}$ wrt ground truth (meV)', fontsize=18)\n",
"ax.set_yticks(range(0, 551, 50))\n",
"ax.tick_params(axis='both', length=8, width=2, labelsize=15)\n",
"ax.set_xticks(range(1, 9))\n",
"ax.set_xlabel('Band index', fontsize=18)\n",
"# ax.set_ylim([0, 200])\n",
"ax.set_title('Reconstruction from other DFT calculations', fontsize=18)\n",
"lg = ax.legend(loc='best', bbox_to_anchor=(0.56, 0.42), bbox_transform=ax.transAxes, frameon=True, fontsize=15,\n",
" facecolor='w', labelspacing=0.2, handletextpad=0.3)\n",
"frame = lg.get_frame()\n",
"frame.set_facecolor('w')\n",
"frame.set_edgecolor('k')\n",
"frame.set_linewidth(2)\n",
"plt.savefig(r'../results/figures/sfig_9e.png', bbox_inches='tight', transparent=True, dpi=300)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Supplementary Figure 9g"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
    "# Using known positions (for ways to obtain these, see notebooks in /code/extra/)\n",
"G = np.array([127.0, 127.27828129766911])\n",
"K = np.array([ 23.83002655, 127. ])\n",
"M = np.array([ 49.38033047, 171.8133136 ])\n",
"\n",
"pathPoints = np.asarray([G, M, K, G])\n",
"nGM, nMK, nKG = 70, 39, 79\n",
"segPoints = [nGM, nMK, nKG]\n",
"rowInds, colInds, pathInds = aly.points2path(pathPoints[:,0], pathPoints[:,1], npoints=segPoints)\n",
"nSegPoints = len(rowInds)\n",
"\n",
"pdGT = aly.bandpath_map(np.moveaxis(gtbands, 0, 2), pathr=rowInds, pathc=colInds, eaxis=2)\n",
"pdInit = aly.bandpath_map(np.moveaxis(inits['pbe'], 0, 2), pathr=rowInds, pathc=colInds, eaxis=2)\n",
"pdMPES = aly.bandpath_map(np.moveaxis(data['mpes_padded'], 0, 2), pathr=rowInds, pathc=colInds, eaxis=2)\n",
"Emin, Emax = data['E'].min(), data['E'].max()\n",
"\n",
"# Symmetrize the reconstructed bands\n",
"symrecbands = []\n",
"for i in range(8):\n",
" symmed = fuller.generator.rotosymmetrize(reconbands['pbe'][i,...], (127.5, 127.5), rotsym=6)[0]\n",
" symrecbands.append(fuller.generator.refsym(symmed[None,...], op='nanmean', pbar=False)[0,...])\n",
"symrecbands = np.asarray(symrecbands)\n",
"pdRecon = aly.bandpath_map(np.moveaxis(symrecbands, 0, 2), pathr=rowInds, pathc=colInds, eaxis=2)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Plot comparison between initialization, reconstruction and ground truth along high-symmetry lines\n",
"xaxis = np.array(range(rowInds.size))\n",
"pos = pathInds.copy()\n",
"pos[-1] -= 1\n",
"\n",
"f, ax = plt.subplots(figsize=(8.3, 6))\n",
"imax = ax.imshow(pdMPES, cmap='Blues', origin='lower', extent=[0, nSegPoints, Emin, Emax], aspect=22, vmax=7, zorder=0)\n",
"ax.plot(pdGT.T, c='k', zorder=2)\n",
"ax.plot(pdRecon.T, c='r', zorder=3)\n",
"ax.plot(pdInit.T - eshifts['pbe'], '--', c='g', zorder=1)\n",
"\n",
"ax.plot(xaxis, pdGT[-1, :], c='k', zorder=2, label='Ground truth (LDA)')\n",
"ax.plot(xaxis, pdInit[-1, :] - eshifts['pbe'], '--', c='g', zorder=1, label='Initial. (PBE)')\n",
"ax.plot(xaxis, pdRecon[-1, :], c='r', zorder=3, label='Reconstruction')\n",
"\n",
"ax.tick_params(which='both', axis='y', length=8, width=2, labelsize=15)\n",
"ax.tick_params(axis='x', length=0, width=0, labelsize=15, pad=8)\n",
"ax.set_xlim([pos[0], pos[-1]])\n",
"ax.set_xticks(pos)\n",
"ax.set_xticklabels(['$\\overline{\\Gamma}$', '$\\overline{\\mathrm{M}}$',\n",
" '$\\overline{\\mathrm{K}}$', '$\\overline{\\Gamma}$'])\n",
"ax.set_ylabel('Energy (eV)', fontsize=18)\n",
"ax.set_yticks(np.arange(-4, 1, 1))\n",
"ax.yaxis.set_minor_locator(AutoMinorLocator(2))\n",
"# ax.set_yticks(np.arange(-4.5, 0.6, 0.5))\n",
"# ax.set_ylim([])\n",
"for p in pos[:-1]:\n",
" ax.axvline(x=p, c='k', ls='--', lw=2, dashes=[4, 2])\n",
"ax.legend(loc='lower left', frameon=False, fontsize=15, borderpad=0,\n",
" facecolor='None', labelspacing=0.2, handletextpad=0.3)\n",
"cax = inset_axes(ax, width=\"3%\", height=\"30%\", bbox_to_anchor=(70, -30, 440, 200))\n",
"cb = plt.colorbar(imax, cax=cax, ticks=[])\n",
"cb.ax.set_ylabel('Intensity', fontsize=15, rotation=-90, labelpad=17)\n",
"plt.savefig(r'../results/figures/sfig_9g.png', bbox_inches='tight', transparent=True, dpi=300)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Supplementary Figure 9h"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"cmsk = aly.circmask(np.ones((255, 255)), 127.5, 127.5, 115, sign='xnan', method='algebraic')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
    "# Retrieve reconstructions with PBE-DFT as initialization\n",
"recons = {}\n",
"bandidx = list(range(0, 8))\n",
"for istp, stp in enumerate(bandidx):\n",
" stepstr = str(istp).zfill(2)\n",
" files = fuller.utils.findFiles(r'../data/synthetic/pbe_lda/mrf_rec_band='+stepstr, fstring=r'*')\n",
" recon = []\n",
" recon.append(fuller.utils.loadH5Parts(files[0], ['bands/Eb'], outtype='vals'))\n",
" recons[stepstr] = np.squeeze(np.array(recon))\n",
" \n",
"brecons = [v for k, v in recons.items() if k != 'init']\n",
"brecons = np.asarray(brecons)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"f, ax = plt.subplots(8, 2, figsize=(3, 25))\n",
"kxminl, kxmaxl, kxminr, kxmaxr = kxvals[0], kxvals[127], kxvals[128], kxvals[-1]\n",
"kyminl, kymaxl, kyminr, kymaxr = kyvals[0], kyvals[-1], kyvals[0], kyvals[-1]\n",
"\n",
"for i in range(8):\n",
" \n",
" band_gt = gtbands[i,:,:]*cmsk\n",
" band_rec = brecons[i,:,:]*cmsk\n",
" band_init = inits['pbe'][i,:,:]*cmsk\n",
" band_diff = band_rec - band_gt\n",
" vmin_gt, vmax_gt = gtbands[i,:,:].min(), gtbands[i,:,:].max()\n",
" vmin_init, vmax_init = band_init.min(), band_init.max()\n",
" vmin = min([vmin_gt, vmin_init]) - 0.1\n",
" vmax = max([vmax_gt, vmax_init]) + 0.1\n",
" \n",
" ax[i, 0].imshow(band_gt[:, :127], cmap=custom_cmap, extent=[kxminl, kxmaxl, kyminl, kymaxl],\n",
" aspect=1, vmin=vmin, vmax=vmax)\n",
" ax[i, 1].imshow(band_rec[:, 128:], cmap=custom_cmap, extent=[kxminr, kxmaxr, kyminr, kymaxr],\n",
" aspect=1, vmin=vmin, vmax=vmax)\n",
" ax[i, 0].set_xticks(np.arange(-1.5, 0, 0.5))\n",
" ax[i, 0].set_xticklabels(['', '-1', ''])\n",
" ax[i, 1].set_xticks(np.arange(0.5, 1.6, 0.5))\n",
" ax[i, 1].set_xticklabels(['', '1', ''])\n",
" ax[i, 0].set_yticks(np.arange(-1, 1.1, 1))\n",
" ax[i, 0].yaxis.set_minor_locator(AutoMinorLocator(2))\n",
" ax[i, 1].set_yticks([])\n",
" ax[i, 0].tick_params(axis='both', which='both', length=8, width=2, labelsize=18)\n",
" ax[i, 1].tick_params(axis='both', length=8, width=2, labelsize=18)\n",
" ax[i, 0].set_ylabel('$k_y$ $(\\mathrm{\\AA}^{-1})$', fontsize=18)\n",
" ax[i, 0].text(0.1, 0.9, '#'+str(i+1), fontsize=15, transform=ax[i,0].transAxes)\n",
" \n",
" if i < 7:\n",
" ax[i, 0].set_xticks([])\n",
" ax[i, 1].set_xticks([])\n",
"\n",
"ax[0, 0].set_title('Ground\\ntruth (LDA)', fontsize=18)\n",
"ax[0, 1].set_title('Recon-\\nstruction', fontsize=18)\n",
"ax[-1, 0].set_xlabel('$k_x$ $(\\mathrm{\\AA}^{-1})$', fontsize=18, x=1)\n",
"plt.subplots_adjust(wspace=0, hspace=0.1)\n",
"plt.savefig(r'../results/figures/sfig_9h1.png', bbox_inches='tight', transparent=True, dpi=300)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"f, ax = plt.subplots(8, 2, figsize=(3, 25))\n",
"kxminl, kxmaxl, kxminr, kxmaxr = kxvals[0], kxvals[127], kxvals[128], kxvals[-1]\n",
"kyminl, kymaxl, kyminr, kymaxr = kyvals[0], kyvals[-1], kyvals[0], kyvals[-1]\n",
"\n",
"for i in range(8):\n",
" \n",
" band_gt = gtbands[i,:,:]*cmsk\n",
" band_rec = brecons[i,:,:]*cmsk\n",
" band_init = inits['pbe'][i,:,:]*cmsk\n",
" init_diff_all = (inits['pbe'] - gtbands)*cmsk\n",
" init_diff = init_diff_all[i,...]\n",
" band_diff_all = (brecons - gtbands)*cmsk\n",
" band_diff = band_diff_all[i,...]\n",
" \n",
" vmin_gt, vmax_gt = gtbands[i,:,:].min(), gtbands[i,:,:].max()\n",
" vmin_rec, vmax_rec = np.nanmin(band_diff_all), np.nanmax(band_diff_all)\n",
" vmin_init, vmax_init = np.nanmin(init_diff_all), np.nanmax(init_diff_all)\n",
" \n",
" vmin = np.nanmin([vmin_init, vmin_rec]) - 0.01\n",
" vmax = np.nanmax([vmax_init, vmax_rec]) + 0.01\n",
" imaxl = ax[i, 0].imshow(init_diff[:, :127], cmap='terrain_r', extent=[kxminl, kxmaxl, kyminl, kymaxl],\n",
" aspect=1, vmin=vmin_init, vmax=vmax_init)\n",
" imax = ax[i, 1].imshow(band_diff[:, 128:], cmap='RdBu_r', extent=[kxminr, kxmaxr, kyminr, kymaxr],\n",
" aspect=1, vmin=-0.2, vmax=0.2)\n",
"\n",
" ax[i, 0].set_xticks(np.arange(-1.5, 0, 0.5))\n",
" ax[i, 0].set_xticklabels(['', '-1', ''])\n",
" ax[i, 1].set_xticks(np.arange(0.5, 1.6, 0.5))\n",
" ax[i, 1].set_xticklabels(['', '1', ''])\n",
" ax[i, 0].set_yticks([])\n",
" ax[i, 1].set_yticks([])\n",
" ax[i, 0].tick_params(axis='both', length=8, width=2, labelsize=18)\n",
" ax[i, 1].tick_params(axis='both', length=8, width=2, labelsize=18)\n",
" ax[i, 0].text(0.1, 0.9, '#'+str(i+1), fontsize=15, transform=ax[i,0].transAxes)\n",
" \n",
" if i < 7:\n",
" ax[i, 0].set_xticks([])\n",
" ax[i, 1].set_xticks([])\n",
" \n",
"ax[0, 0].set_title('\\ninitial.-g.t.', fontsize=18)\n",
"ax[0, 1].set_title('\\nrecon.-g.t.', fontsize=18)\n",
"plt.suptitle('Energy difference', y=0.902, fontsize=18)\n",
"ax[-1, 0].set_xlabel('$k_x$ $(\\mathrm{\\AA}^{-1})$', fontsize=18, x=1)\n",
"\n",
"caxl = inset_axes(ax[6,0], width=\"3%\", height=\"30%\", bbox_to_anchor=(-130, 122, 350, 400))\n",
"cbl = plt.colorbar(imaxl, cax=caxl, ticks=np.arange(0.0, 0.61, 0.1))\n",
"cbl.ax.set_title('eV', fontsize=15)\n",
"cbl.ax.tick_params(axis='y', width=2, length=6, labelsize=15)\n",
"\n",
"cax = inset_axes(ax[7,0], width=\"3%\", height=\"30%\", bbox_to_anchor=(-130, -50, 350, 400))\n",
"cb = plt.colorbar(imax, cax=cax, ticks=np.arange(-0.2, 0.21, 0.1))\n",
"# cb.ax.set_ylabel('Intensity', fontsize=15, rotation=-90, labelpad=17)\n",
"cb.ax.set_title('eV', fontsize=15)\n",
"cb.ax.tick_params(axis='y', width=2, length=6, labelsize=15)\n",
"\n",
"plt.subplots_adjust(wspace=0, hspace=0.1)\n",
"plt.savefig(r'../results/figures/sfig_9h2.png', bbox_inches='tight', transparent=True, dpi=300)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.13"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
| Unknown |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.