{ "cells": [ { "cell_type": "code", "execution_count": 65, "metadata": {}, "outputs": [], "source": [ "import torch\n", "\n", "t_c = torch.tensor([0.5, 14.0, 15.0, 28.0, 11.0, 8.0, 3.0, -4.0, 6.0, 13.0, 21.0])\n", "# t_u = torch.tensor([32.9000, 57.2000, 59.0000, 82.4000, 51.8000, 46.4000, 37.4000, 24.8000, 42.8000, 55.4000, 69.8000])\n", "# t_u = torch.tensor([30.3057, 56.7230, 57.9168, 79.4823, 52.3995, 47.3290, 37.7892, 25.9138, 43.6609, 56.5834, 71.3901])\n", "t_u = torch.tensor([35.7, 55.9, 58.2, 81.9, 56.3, 48.9, 33.9, 21.8, 48.4, 60.4, 68.4])" ] }, { "cell_type": "code", "execution_count": 66, "metadata": {}, "outputs": [], "source": [ "def model(t_u, w, b):\n", " return w * t_u + b" ] }, { "cell_type": "code", "execution_count": 67, "metadata": {}, "outputs": [], "source": [ "def loss_fn(t_p, t_c):\n", " squared_diffs = (t_p - t_c)**2\n", " return squared_diffs.mean()" ] }, { "cell_type": "code", "execution_count": 68, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "tensor([35.7000, 55.9000, 58.2000, 81.9000, 56.3000, 48.9000, 33.9000, 21.8000,\n", " 48.4000, 60.4000, 68.4000])" ] }, "execution_count": 68, "metadata": {}, "output_type": "execute_result" } ], "source": [ "w = torch.ones(1)\n", "b = torch.zeros(1)\n", "\n", "t_p = model(t_u, w, b)\n", "t_p" ] }, { "cell_type": "code", "execution_count": 69, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "tensor(1763.8846)" ] }, "execution_count": 69, "metadata": {}, "output_type": "execute_result" } ], "source": [ "loss = loss_fn(t_p, t_c)\n", "loss" ] }, { "cell_type": "code", "execution_count": 70, "metadata": {}, "outputs": [], "source": [ "delta = 0.1\n", "\n", "loss_rate_of_change_w = (loss_fn(model(t_u, w + delta, b), t_c) - loss_fn(model(t_u, w - delta, b), t_c)) / (2.0 * delta)" ] }, { "cell_type": "code", "execution_count": 71, "metadata": {}, "outputs": [], "source": [ "learning_rate = 1e-2\n", "\n", "w = w - learning_rate * loss_rate_of_change_w" ] }, { "cell_type": "code", "execution_count": 72, "metadata": {}, "outputs": [], "source": [ "loss_rate_of_change_b = (loss_fn(model(t_u, w, b + delta), t_c) - loss_fn(model(t_u, w, b - delta), t_c)) / (2.0 * delta)\n", "\n", "b = b - learning_rate * loss_rate_of_change_b" ] }, { "cell_type": "code", "execution_count": 73, "metadata": {}, "outputs": [], "source": [ "def dloss_fn(t_p, t_c):\n", " dsq_diffs = 2 * (t_p - t_c)\n", " return dsq_diffs" ] }, { "cell_type": "code", "execution_count": 74, "metadata": {}, "outputs": [], "source": [ "def model(t_u, w, b):\n", " return w * t_u + b" ] }, { "cell_type": "code", "execution_count": 75, "metadata": {}, "outputs": [], "source": [ "def dmodel_dw(t_u, w, b):\n", " return t_u" ] }, { "cell_type": "code", "execution_count": 76, "metadata": {}, "outputs": [], "source": [ "def dmodel_db(t_u, w, b):\n", " return 1.0" ] }, { "cell_type": "code", "execution_count": 77, "metadata": {}, "outputs": [], "source": [ "def grad_fn(t_u, t_c, t_p, w, b):\n", " dloss_dw = dloss_fn(t_p, t_c) * dmodel_dw(t_u, w, b)\n", " dloss_db = dloss_fn(t_p, t_c) * dmodel_db(t_u, w, b)\n", " return torch.stack([dloss_dw.mean(), dloss_db.mean()])" ] }, { "cell_type": "code", "execution_count": 78, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Epoch 0, Loss 1763.884644\n", "Params: tensor([1., 0.])\n", "Grad: tensor([4517.2964, 82.6000])\n", "Epoch 1, Loss 5802484.500000\n", "Params: tensor([-44.1730, -0.8260])\n", "Grad: tensor([-261257.4062, -4598.9707])\n", "Epoch 2, Loss 19408031744.000000\n", "Params: 
tensor([2568.4011, 45.1637])\n", "Grad: tensor([15109615.0000, 266155.7188])\n", "Epoch 3, Loss 64915909902336.000000\n", "Params: tensor([-148527.7344, -2616.3933])\n", "Grad: tensor([-873852544., -15392727.])\n", "Epoch 4, Loss 217130439561707520.000000\n", "Params: tensor([8589997.0000, 151310.8750])\n", "Grad: tensor([50538569728., 890226304.])\n", "Epoch 5, Loss 726257020202974707712.000000\n", "Params: tensor([-496795712., -8750952.])\n", "Grad: tensor([-2922858414080., -51485540352.])\n", "Epoch 6, Loss 2429181687085405986357248.000000\n", "Params: tensor([28731787264., 506104448.])\n", "Grad: tensor([169041222172672., 2977625538560.])\n", "Epoch 7, Loss 8125117236949438203699396608.000000\n", "Params: tensor([-1661680353280., -29270151168.])\n", "Grad: tensor([-9776366694170624., -172208609558528.])\n", "Epoch 8, Loss 27176865195881116022129584766976.000000\n", "Params: tensor([96101982011392., 1692815851520.])\n", "Grad: tensor([565408461421019136., 9959550270570496.])\n", "Epoch 9, Loss 90901075478458130961171361977860096.000000\n", "Params: tensor([-5557982423154688., -97902688993280.])\n", "Grad: tensor([-32699952998104694784., -576002771350716416.])\n", "Epoch 10, Loss inf\n", "Params: tensor([321441537185546240., 5662124642664448.])\n", "Grad: tensor([1891176184585880862720., 33312666847799148544.])\n", "Epoch 11, Loss inf\n", "Params: tensor([-18590318302064541696., -327464524443549696.])\n", "Grad: tensor([-109374681759098253344768., -1926611913828915478528.])\n", "Epoch 12, Loss inf\n", "Params: tensor([1075156512834901245952., 18938652381837000704.])\n", "Grad: tensor([6325598374588843695276032., 111424089805529471254528.])\n", "Epoch 13, Loss inf\n", "Params: tensor([-62180826256335366520832., -1095302239868036317184.])\n", "Grad: tensor([-365835976043760417324924928., -6444125046330702902067200.])\n", "Epoch 14, Loss inf\n", "Params: tensor([3596178922111621805375488., 63345948012332760170496.])\n", "Grad: tensor([21157835782059083454602018816., 372690823035039034124533760.])\n", "Epoch 15, Loss inf\n", "Params: tensor([-207982187937383808858324992., -3663562284139497432023040.])\n", "Grad: tensor([-1223646909320076514166573105152., -21554280809479231605114404864.])\n", "Epoch 16, Loss inf\n", "Params: tensor([12028486061878242282806706176., 211879247090395688732721152.])\n", "Grad: tensor([70768650898774906899853796704256., 1246574867509841489693786505216.])\n", "Epoch 17, Loss inf\n", "Params: tensor([-695658002336352961404078129152., -12253869266414541122509471744.])\n", "Grad: tensor([-4092848885264420701125161829531648.,\n", " -72094682944683965973455410561024.])\n", "Epoch 18, Loss inf\n", "Params: tensor([40232831252275689071711740755968., 708692942754892796823053795328.])\n", "Grad: tensor([236706693175123219458221128344928256.,\n", " 4169539270698150009155162575208448.])\n", "Epoch 19, Loss inf\n", "Params: tensor([-2326833954750859692770806240116736.,\n", " -40986700468425897220250066223104.])\n", "Grad: tensor([-13689745465297718916730354409333063680.,\n", " -241141984747874879907129710614675456.])\n", "Epoch 20, Loss inf\n", "Params: tensor([134570618593728262689386270524702720.,\n", " 2370433119736956411345012858552320.])\n", "Grad: tensor([ inf, 1.3946e+37])\n", "Epoch 21, Loss inf\n", "Params: tensor([ -inf,\n", " -137092134093907239497638400288096256.0000])\n", "Grad: tensor([-inf, -inf])\n", "Epoch 22, Loss nan\n", "Params: tensor([nan, inf])\n", "Grad: tensor([nan, nan])\n", "Epoch 23, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, 
nan])\n", "Epoch 24, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 25, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 26, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 27, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 28, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 29, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 30, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 31, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 32, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 33, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 34, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 35, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 36, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 37, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 38, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 39, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 40, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 41, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 42, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 43, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 44, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 45, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 46, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 47, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 48, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 49, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 50, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 51, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 52, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 53, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 54, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 55, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 56, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 57, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 58, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 59, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 60, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 61, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 62, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 63, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 64, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 65, Loss 
nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 66, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 67, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 68, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 69, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 70, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 71, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 72, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 73, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 74, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 75, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 76, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 77, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 78, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 79, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 80, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 81, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 82, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 83, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 84, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 85, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 86, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 87, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 88, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 89, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 90, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 91, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 92, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 93, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 94, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 95, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 96, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 97, Loss nan\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 98, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n", "Epoch 99, Loss nan\n", "Params: tensor([nan, nan])\n", "Grad: tensor([nan, nan])\n" ] }, { "data": { "text/plain": [ "tensor([nan, nan])" ] }, "execution_count": 78, "metadata": {}, "output_type": "execute_result" } ], "source": [ "params = torch.tensor([1.0, 0.0])\n", "\n", "nepochs = 100\n", "\n", "learning_rate = 1e-2\n", "\n", "for epoch in range(nepochs):\n", " # forward pass\n", " w, b = params\n", " t_p = model(t_u, w, b)\n", "\n", " loss = loss_fn(t_p, t_c)\n", " print('Epoch %d, Loss %f' % (epoch, float(loss)))\n", " \n", " # backward pass\n", " grad = grad_fn(t_u, t_c, 
t_p, w, b)\n", "\n", " print('Params:', params)\n", " print('Grad:', grad)\n", " \n", " params = params - learning_rate * grad\n", " \n", "params" ] }, { "cell_type": "code", "execution_count": 79, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Epoch 0, Loss 1763.884644\n", "Params: tensor([1., 0.])\n", "Grad: tensor([4517.2964, 82.6000])\n", "Epoch 1, Loss 323.090546\n", "Params: tensor([ 0.5483, -0.0083])\n", "Grad: tensor([1859.5493, 35.7843])\n", "Epoch 2, Loss 78.929634\n", "Params: tensor([ 0.3623, -0.0118])\n", "Grad: tensor([765.4666, 16.5122])\n", "Epoch 3, Loss 37.552845\n", "Params: tensor([ 0.2858, -0.0135])\n", "Grad: tensor([315.0790, 8.5787])\n", "Epoch 4, Loss 30.540285\n", "Params: tensor([ 0.2543, -0.0143])\n", "Grad: tensor([129.6733, 5.3127])\n", "Epoch 5, Loss 29.351152\n", "Params: tensor([ 0.2413, -0.0149])\n", "Grad: tensor([53.3496, 3.9682])\n", "Epoch 6, Loss 29.148882\n", "Params: tensor([ 0.2360, -0.0153])\n", "Grad: tensor([21.9304, 3.4148])\n", "Epoch 7, Loss 29.113848\n", "Params: tensor([ 0.2338, -0.0156])\n", "Grad: tensor([8.9964, 3.1869])\n", "Epoch 8, Loss 29.107145\n", "Params: tensor([ 0.2329, -0.0159])\n", "Grad: tensor([3.6721, 3.0930])\n", "Epoch 9, Loss 29.105242\n", "Params: tensor([ 0.2325, -0.0162])\n", "Grad: tensor([1.4803, 3.0544])\n", "Epoch 10, Loss 29.104168\n", "Params: tensor([ 0.2324, -0.0166])\n", "Grad: tensor([0.5780, 3.0384])\n", "Epoch 11, Loss 29.103222\n", "Params: tensor([ 0.2323, -0.0169])\n", "Grad: tensor([0.2066, 3.0318])\n", "Epoch 12, Loss 29.102297\n", "Params: tensor([ 0.2323, -0.0172])\n", "Grad: tensor([0.0537, 3.0291])\n", "Epoch 13, Loss 29.101379\n", "Params: tensor([ 0.2323, -0.0175])\n", "Grad: tensor([-0.0093, 3.0279])\n", "Epoch 14, Loss 29.100470\n", "Params: tensor([ 0.2323, -0.0178])\n", "Grad: tensor([-0.0353, 3.0274])\n", "Epoch 15, Loss 29.099548\n", "Params: tensor([ 0.2323, -0.0181])\n", "Grad: tensor([-0.0459, 3.0272])\n", "Epoch 16, Loss 29.098631\n", "Params: tensor([ 0.2323, -0.0184])\n", "Grad: tensor([-0.0502, 3.0270])\n", "Epoch 17, Loss 29.097715\n", "Params: tensor([ 0.2323, -0.0187])\n", "Grad: tensor([-0.0520, 3.0270])\n", "Epoch 18, Loss 29.096796\n", "Params: tensor([ 0.2323, -0.0190])\n", "Grad: tensor([-0.0528, 3.0269])\n", "Epoch 19, Loss 29.095884\n", "Params: tensor([ 0.2323, -0.0193])\n", "Grad: tensor([-0.0531, 3.0268])\n", "Epoch 20, Loss 29.094959\n", "Params: tensor([ 0.2323, -0.0196])\n", "Grad: tensor([-0.0533, 3.0268])\n", "Epoch 21, Loss 29.094049\n", "Params: tensor([ 0.2323, -0.0199])\n", "Grad: tensor([-0.0533, 3.0267])\n", "Epoch 22, Loss 29.093134\n", "Params: tensor([ 0.2323, -0.0202])\n", "Grad: tensor([-0.0533, 3.0267])\n", "Epoch 23, Loss 29.092213\n", "Params: tensor([ 0.2323, -0.0205])\n", "Grad: tensor([-0.0533, 3.0266])\n", "Epoch 24, Loss 29.091297\n", "Params: tensor([ 0.2323, -0.0208])\n", "Grad: tensor([-0.0533, 3.0266])\n", "Epoch 25, Loss 29.090382\n", "Params: tensor([ 0.2323, -0.0211])\n", "Grad: tensor([-0.0533, 3.0265])\n", "Epoch 26, Loss 29.089460\n", "Params: tensor([ 0.2323, -0.0214])\n", "Grad: tensor([-0.0533, 3.0265])\n", "Epoch 27, Loss 29.088549\n", "Params: tensor([ 0.2323, -0.0217])\n", "Grad: tensor([-0.0532, 3.0264])\n", "Epoch 28, Loss 29.087635\n", "Params: tensor([ 0.2323, -0.0220])\n", "Grad: tensor([-0.0533, 3.0264])\n", "Epoch 29, Loss 29.086718\n", "Params: tensor([ 0.2323, -0.0223])\n", "Grad: tensor([-0.0533, 3.0263])\n", "Epoch 30, Loss 29.085808\n", "Params: tensor([ 0.2323, -0.0226])\n", 
"Grad: tensor([-0.0532, 3.0262])\n", "Epoch 31, Loss 29.084888\n", "Params: tensor([ 0.2324, -0.0229])\n", "Grad: tensor([-0.0533, 3.0262])\n", "Epoch 32, Loss 29.083965\n", "Params: tensor([ 0.2324, -0.0232])\n", "Grad: tensor([-0.0533, 3.0261])\n", "Epoch 33, Loss 29.083057\n", "Params: tensor([ 0.2324, -0.0235])\n", "Grad: tensor([-0.0533, 3.0261])\n", "Epoch 34, Loss 29.082142\n", "Params: tensor([ 0.2324, -0.0238])\n", "Grad: tensor([-0.0532, 3.0260])\n", "Epoch 35, Loss 29.081219\n", "Params: tensor([ 0.2324, -0.0241])\n", "Grad: tensor([-0.0533, 3.0260])\n", "Epoch 36, Loss 29.080309\n", "Params: tensor([ 0.2324, -0.0244])\n", "Grad: tensor([-0.0533, 3.0259])\n", "Epoch 37, Loss 29.079393\n", "Params: tensor([ 0.2324, -0.0247])\n", "Grad: tensor([-0.0532, 3.0259])\n", "Epoch 38, Loss 29.078474\n", "Params: tensor([ 0.2324, -0.0250])\n", "Grad: tensor([-0.0533, 3.0258])\n", "Epoch 39, Loss 29.077559\n", "Params: tensor([ 0.2324, -0.0253])\n", "Grad: tensor([-0.0533, 3.0258])\n", "Epoch 40, Loss 29.076653\n", "Params: tensor([ 0.2324, -0.0256])\n", "Grad: tensor([-0.0533, 3.0257])\n", "Epoch 41, Loss 29.075731\n", "Params: tensor([ 0.2324, -0.0259])\n", "Grad: tensor([-0.0532, 3.0257])\n", "Epoch 42, Loss 29.074812\n", "Params: tensor([ 0.2324, -0.0262])\n", "Grad: tensor([-0.0533, 3.0256])\n", "Epoch 43, Loss 29.073896\n", "Params: tensor([ 0.2324, -0.0265])\n", "Grad: tensor([-0.0533, 3.0256])\n", "Epoch 44, Loss 29.072985\n", "Params: tensor([ 0.2324, -0.0268])\n", "Grad: tensor([-0.0533, 3.0255])\n", "Epoch 45, Loss 29.072069\n", "Params: tensor([ 0.2324, -0.0271])\n", "Grad: tensor([-0.0533, 3.0254])\n", "Epoch 46, Loss 29.071148\n", "Params: tensor([ 0.2324, -0.0274])\n", "Grad: tensor([-0.0533, 3.0254])\n", "Epoch 47, Loss 29.070234\n", "Params: tensor([ 0.2324, -0.0277])\n", "Grad: tensor([-0.0533, 3.0253])\n", "Epoch 48, Loss 29.069323\n", "Params: tensor([ 0.2324, -0.0281])\n", "Grad: tensor([-0.0533, 3.0253])\n", "Epoch 49, Loss 29.068401\n", "Params: tensor([ 0.2325, -0.0284])\n", "Grad: tensor([-0.0532, 3.0252])\n", "Epoch 50, Loss 29.067486\n", "Params: tensor([ 0.2325, -0.0287])\n", "Grad: tensor([-0.0533, 3.0252])\n", "Epoch 51, Loss 29.066570\n", "Params: tensor([ 0.2325, -0.0290])\n", "Grad: tensor([-0.0533, 3.0251])\n", "Epoch 52, Loss 29.065655\n", "Params: tensor([ 0.2325, -0.0293])\n", "Grad: tensor([-0.0533, 3.0251])\n", "Epoch 53, Loss 29.064739\n", "Params: tensor([ 0.2325, -0.0296])\n", "Grad: tensor([-0.0533, 3.0250])\n", "Epoch 54, Loss 29.063829\n", "Params: tensor([ 0.2325, -0.0299])\n", "Grad: tensor([-0.0532, 3.0250])\n", "Epoch 55, Loss 29.062910\n", "Params: tensor([ 0.2325, -0.0302])\n", "Grad: tensor([-0.0533, 3.0249])\n", "Epoch 56, Loss 29.061989\n", "Params: tensor([ 0.2325, -0.0305])\n", "Grad: tensor([-0.0532, 3.0249])\n", "Epoch 57, Loss 29.061079\n", "Params: tensor([ 0.2325, -0.0308])\n", "Grad: tensor([-0.0533, 3.0248])\n", "Epoch 58, Loss 29.060169\n", "Params: tensor([ 0.2325, -0.0311])\n", "Grad: tensor([-0.0533, 3.0248])\n", "Epoch 59, Loss 29.059252\n", "Params: tensor([ 0.2325, -0.0314])\n", "Grad: tensor([-0.0533, 3.0247])\n", "Epoch 60, Loss 29.058331\n", "Params: tensor([ 0.2325, -0.0317])\n", "Grad: tensor([-0.0532, 3.0247])\n", "Epoch 61, Loss 29.057417\n", "Params: tensor([ 0.2325, -0.0320])\n", "Grad: tensor([-0.0533, 3.0246])\n", "Epoch 62, Loss 29.056507\n", "Params: tensor([ 0.2325, -0.0323])\n", "Grad: tensor([-0.0533, 3.0245])\n", "Epoch 63, Loss 29.055586\n", "Params: tensor([ 0.2325, -0.0326])\n", "Grad: tensor([-0.0532, 
3.0245])\n", "Epoch 64, Loss 29.054670\n", "Params: tensor([ 0.2325, -0.0329])\n", "Grad: tensor([-0.0533, 3.0244])\n", "Epoch 65, Loss 29.053761\n", "Params: tensor([ 0.2325, -0.0332])\n", "Grad: tensor([-0.0533, 3.0244])\n", "Epoch 66, Loss 29.052843\n", "Params: tensor([ 0.2325, -0.0335])\n", "Grad: tensor([-0.0533, 3.0243])\n", "Epoch 67, Loss 29.051929\n", "Params: tensor([ 0.2325, -0.0338])\n", "Grad: tensor([-0.0533, 3.0243])\n", "Epoch 68, Loss 29.051014\n", "Params: tensor([ 0.2326, -0.0341])\n", "Grad: tensor([-0.0533, 3.0242])\n", "Epoch 69, Loss 29.050098\n", "Params: tensor([ 0.2326, -0.0344])\n", "Grad: tensor([-0.0532, 3.0242])\n", "Epoch 70, Loss 29.049183\n", "Params: tensor([ 0.2326, -0.0347])\n", "Grad: tensor([-0.0533, 3.0241])\n", "Epoch 71, Loss 29.048271\n", "Params: tensor([ 0.2326, -0.0350])\n", "Grad: tensor([-0.0533, 3.0241])\n", "Epoch 72, Loss 29.047346\n", "Params: tensor([ 0.2326, -0.0353])\n", "Grad: tensor([-0.0532, 3.0240])\n", "Epoch 73, Loss 29.046442\n", "Params: tensor([ 0.2326, -0.0356])\n", "Grad: tensor([-0.0533, 3.0240])\n", "Epoch 74, Loss 29.045530\n", "Params: tensor([ 0.2326, -0.0359])\n", "Grad: tensor([-0.0533, 3.0239])\n", "Epoch 75, Loss 29.044611\n", "Params: tensor([ 0.2326, -0.0362])\n", "Grad: tensor([-0.0533, 3.0239])\n", "Epoch 76, Loss 29.043699\n", "Params: tensor([ 0.2326, -0.0365])\n", "Grad: tensor([-0.0533, 3.0238])\n", "Epoch 77, Loss 29.042780\n", "Params: tensor([ 0.2326, -0.0368])\n", "Grad: tensor([-0.0533, 3.0238])\n", "Epoch 78, Loss 29.041870\n", "Params: tensor([ 0.2326, -0.0371])\n", "Grad: tensor([-0.0533, 3.0237])\n", "Epoch 79, Loss 29.040955\n", "Params: tensor([ 0.2326, -0.0374])\n", "Grad: tensor([-0.0532, 3.0236])\n", "Epoch 80, Loss 29.040039\n", "Params: tensor([ 0.2326, -0.0377])\n", "Grad: tensor([-0.0534, 3.0236])\n", "Epoch 81, Loss 29.039122\n", "Params: tensor([ 0.2326, -0.0380])\n", "Grad: tensor([-0.0533, 3.0235])\n", "Epoch 82, Loss 29.038214\n", "Params: tensor([ 0.2326, -0.0383])\n", "Grad: tensor([-0.0532, 3.0235])\n", "Epoch 83, Loss 29.037292\n", "Params: tensor([ 0.2326, -0.0386])\n", "Grad: tensor([-0.0533, 3.0234])\n", "Epoch 84, Loss 29.036379\n", "Params: tensor([ 0.2326, -0.0389])\n", "Grad: tensor([-0.0532, 3.0234])\n", "Epoch 85, Loss 29.035463\n", "Params: tensor([ 0.2326, -0.0392])\n", "Grad: tensor([-0.0532, 3.0233])\n", "Epoch 86, Loss 29.034557\n", "Params: tensor([ 0.2326, -0.0395])\n", "Grad: tensor([-0.0533, 3.0233])\n", "Epoch 87, Loss 29.033638\n", "Params: tensor([ 0.2327, -0.0398])\n", "Grad: tensor([-0.0532, 3.0232])\n", "Epoch 88, Loss 29.032721\n", "Params: tensor([ 0.2327, -0.0401])\n", "Grad: tensor([-0.0533, 3.0232])\n", "Epoch 89, Loss 29.031805\n", "Params: tensor([ 0.2327, -0.0405])\n", "Grad: tensor([-0.0533, 3.0231])\n", "Epoch 90, Loss 29.030895\n", "Params: tensor([ 0.2327, -0.0408])\n", "Grad: tensor([-0.0532, 3.0231])\n", "Epoch 91, Loss 29.029976\n", "Params: tensor([ 0.2327, -0.0411])\n", "Grad: tensor([-0.0532, 3.0230])\n", "Epoch 92, Loss 29.029066\n", "Params: tensor([ 0.2327, -0.0414])\n", "Grad: tensor([-0.0533, 3.0230])\n", "Epoch 93, Loss 29.028151\n", "Params: tensor([ 0.2327, -0.0417])\n", "Grad: tensor([-0.0532, 3.0229])\n", "Epoch 94, Loss 29.027235\n", "Params: tensor([ 0.2327, -0.0420])\n", "Grad: tensor([-0.0532, 3.0229])\n", "Epoch 95, Loss 29.026323\n", "Params: tensor([ 0.2327, -0.0423])\n", "Grad: tensor([-0.0533, 3.0228])\n", "Epoch 96, Loss 29.025410\n", "Params: tensor([ 0.2327, -0.0426])\n", "Grad: tensor([-0.0533, 3.0227])\n", "Epoch 97, 
Loss 29.024494\n", "Params: tensor([ 0.2327, -0.0429])\n", "Grad: tensor([-0.0533, 3.0227])\n", "Epoch 98, Loss 29.023582\n", "Params: tensor([ 0.2327, -0.0432])\n", "Grad: tensor([-0.0533, 3.0226])\n", "Epoch 99, Loss 29.022669\n", "Params: tensor([ 0.2327, -0.0435])\n", "Grad: tensor([-0.0532, 3.0226])\n" ] }, { "data": { "text/plain": [ "tensor([ 0.2327, -0.0438])" ] }, "execution_count": 79, "metadata": {}, "output_type": "execute_result" } ], "source": [ "params = torch.tensor([1.0, 0.0])\n", "\n", "nepochs = 100\n", "\n", "learning_rate = 1e-4\n", "\n", "for epoch in range(nepochs):\n", " # forward pass\n", " w, b = params\n", " t_p = model(t_u, w, b)\n", "\n", " loss = loss_fn(t_p, t_c)\n", " print('Epoch %d, Loss %f' % (epoch, float(loss)))\n", " \n", " # backward pass\n", " grad = grad_fn(t_u, t_c, t_p, w, b)\n", "\n", " print('Params:', params)\n", " print('Grad:', grad)\n", " \n", " params = params - learning_rate * grad\n", " \n", "params" ] }, { "cell_type": "code", "execution_count": 62, "metadata": {}, "outputs": [], "source": [ "t_un = 0.1 * t_u" ] }, { "cell_type": "code", "execution_count": 83, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Epoch 0, Loss 80.364342\n", "Params: tensor([1., 0.])\n", "Grad: tensor([-77.6140, -10.6400])\n", "Epoch 1, Loss 37.574917\n", "Params: tensor([1.7761, 0.1064])\n", "Grad: tensor([-30.8623, -2.3864])\n", "Epoch 2, Loss 30.871077\n", "Params: tensor([2.0848, 0.1303])\n", "Grad: tensor([-12.4631, 0.8587])\n", "Epoch 3, Loss 29.756193\n", "Params: tensor([2.2094, 0.1217])\n", "Grad: tensor([-5.2218, 2.1327])\n", "Epoch 4, Loss 29.507149\n", "Params: tensor([2.2616, 0.1004])\n", "Grad: tensor([-2.3715, 2.6310])\n", "Epoch 5, Loss 29.392458\n", "Params: tensor([2.2853, 0.0740])\n", "Grad: tensor([-1.2492, 2.8241])\n", "Epoch 6, Loss 29.298828\n", "Params: tensor([2.2978, 0.0458])\n", "Grad: tensor([-0.8071, 2.8970])\n", "Epoch 7, Loss 29.208717\n", "Params: tensor([2.3059, 0.0168])\n", "Grad: tensor([-0.6325, 2.9227])\n", "Epoch 8, Loss 29.119417\n", "Params: tensor([ 2.3122, -0.0124])\n", "Grad: tensor([-0.5633, 2.9298])\n", "Epoch 9, Loss 29.030487\n", "Params: tensor([ 2.3178, -0.0417])\n", "Grad: tensor([-0.5355, 2.9295])\n", "Epoch 10, Loss 28.941875\n", "Params: tensor([ 2.3232, -0.0710])\n", "Grad: tensor([-0.5240, 2.9264])\n", "Epoch 11, Loss 28.853565\n", "Params: tensor([ 2.3284, -0.1003])\n", "Grad: tensor([-0.5190, 2.9222])\n", "Epoch 12, Loss 28.765556\n", "Params: tensor([ 2.3336, -0.1295])\n", "Grad: tensor([-0.5165, 2.9175])\n", "Epoch 13, Loss 28.677851\n", "Params: tensor([ 2.3388, -0.1587])\n", "Grad: tensor([-0.5150, 2.9126])\n", "Epoch 14, Loss 28.590431\n", "Params: tensor([ 2.3439, -0.1878])\n", "Grad: tensor([-0.5138, 2.9077])\n", "Epoch 15, Loss 28.503321\n", "Params: tensor([ 2.3491, -0.2169])\n", "Grad: tensor([-0.5129, 2.9028])\n", "Epoch 16, Loss 28.416496\n", "Params: tensor([ 2.3542, -0.2459])\n", "Grad: tensor([-0.5120, 2.8979])\n", "Epoch 17, Loss 28.329975\n", "Params: tensor([ 2.3593, -0.2749])\n", "Grad: tensor([-0.5111, 2.8930])\n", "Epoch 18, Loss 28.243738\n", "Params: tensor([ 2.3644, -0.3038])\n", "Grad: tensor([-0.5102, 2.8881])\n", "Epoch 19, Loss 28.157801\n", "Params: tensor([ 2.3695, -0.3327])\n", "Grad: tensor([-0.5093, 2.8832])\n", "Epoch 20, Loss 28.072151\n", "Params: tensor([ 2.3746, -0.3615])\n", "Grad: tensor([-0.5084, 2.8783])\n", "Epoch 21, Loss 27.986799\n", "Params: tensor([ 2.3797, -0.3903])\n", "Grad: tensor([-0.5076, 2.8734])\n", "Epoch 22, 
Loss 27.901731\n", "Params: tensor([ 2.3848, -0.4190])\n", "Grad: tensor([-0.5067, 2.8685])\n", "Epoch 23, Loss 27.816956\n", "Params: tensor([ 2.3899, -0.4477])\n", "Grad: tensor([-0.5059, 2.8636])\n", "Epoch 24, Loss 27.732466\n", "Params: tensor([ 2.3949, -0.4763])\n", "Grad: tensor([-0.5050, 2.8588])\n", "Epoch 25, Loss 27.648256\n", "Params: tensor([ 2.4000, -0.5049])\n", "Grad: tensor([-0.5042, 2.8539])\n", "Epoch 26, Loss 27.564342\n", "Params: tensor([ 2.4050, -0.5335])\n", "Grad: tensor([-0.5033, 2.8490])\n", "Epoch 27, Loss 27.480711\n", "Params: tensor([ 2.4101, -0.5620])\n", "Grad: tensor([-0.5024, 2.8442])\n", "Epoch 28, Loss 27.397358\n", "Params: tensor([ 2.4151, -0.5904])\n", "Grad: tensor([-0.5016, 2.8394])\n", "Epoch 29, Loss 27.314295\n", "Params: tensor([ 2.4201, -0.6188])\n", "Grad: tensor([-0.5007, 2.8346])\n", "Epoch 30, Loss 27.231512\n", "Params: tensor([ 2.4251, -0.6471])\n", "Grad: tensor([-0.4999, 2.8297])\n", "Epoch 31, Loss 27.149006\n", "Params: tensor([ 2.4301, -0.6754])\n", "Grad: tensor([-0.4990, 2.8249])\n", "Epoch 32, Loss 27.066790\n", "Params: tensor([ 2.4351, -0.7037])\n", "Grad: tensor([-0.4982, 2.8201])\n", "Epoch 33, Loss 26.984844\n", "Params: tensor([ 2.4401, -0.7319])\n", "Grad: tensor([-0.4973, 2.8153])\n", "Epoch 34, Loss 26.903173\n", "Params: tensor([ 2.4450, -0.7600])\n", "Grad: tensor([-0.4965, 2.8106])\n", "Epoch 35, Loss 26.821791\n", "Params: tensor([ 2.4500, -0.7881])\n", "Grad: tensor([-0.4957, 2.8058])\n", "Epoch 36, Loss 26.740675\n", "Params: tensor([ 2.4550, -0.8162])\n", "Grad: tensor([-0.4948, 2.8010])\n", "Epoch 37, Loss 26.659838\n", "Params: tensor([ 2.4599, -0.8442])\n", "Grad: tensor([-0.4940, 2.7963])\n", "Epoch 38, Loss 26.579279\n", "Params: tensor([ 2.4649, -0.8722])\n", "Grad: tensor([-0.4931, 2.7915])\n", "Epoch 39, Loss 26.498987\n", "Params: tensor([ 2.4698, -0.9001])\n", "Grad: tensor([-0.4923, 2.7868])\n", "Epoch 40, Loss 26.418974\n", "Params: tensor([ 2.4747, -0.9280])\n", "Grad: tensor([-0.4915, 2.7820])\n", "Epoch 41, Loss 26.339228\n", "Params: tensor([ 2.4796, -0.9558])\n", "Grad: tensor([-0.4906, 2.7773])\n", "Epoch 42, Loss 26.259752\n", "Params: tensor([ 2.4845, -0.9836])\n", "Grad: tensor([-0.4898, 2.7726])\n", "Epoch 43, Loss 26.180548\n", "Params: tensor([ 2.4894, -1.0113])\n", "Grad: tensor([-0.4890, 2.7679])\n", "Epoch 44, Loss 26.101616\n", "Params: tensor([ 2.4943, -1.0390])\n", "Grad: tensor([-0.4881, 2.7632])\n", "Epoch 45, Loss 26.022949\n", "Params: tensor([ 2.4992, -1.0666])\n", "Grad: tensor([-0.4873, 2.7585])\n", "Epoch 46, Loss 25.944542\n", "Params: tensor([ 2.5041, -1.0942])\n", "Grad: tensor([-0.4865, 2.7538])\n", "Epoch 47, Loss 25.866417\n", "Params: tensor([ 2.5089, -1.1217])\n", "Grad: tensor([-0.4856, 2.7491])\n", "Epoch 48, Loss 25.788546\n", "Params: tensor([ 2.5138, -1.1492])\n", "Grad: tensor([-0.4848, 2.7444])\n", "Epoch 49, Loss 25.710936\n", "Params: tensor([ 2.5186, -1.1766])\n", "Grad: tensor([-0.4840, 2.7398])\n", "Epoch 50, Loss 25.633600\n", "Params: tensor([ 2.5235, -1.2040])\n", "Grad: tensor([-0.4832, 2.7351])\n", "Epoch 51, Loss 25.556524\n", "Params: tensor([ 2.5283, -1.2314])\n", "Grad: tensor([-0.4823, 2.7305])\n", "Epoch 52, Loss 25.479700\n", "Params: tensor([ 2.5331, -1.2587])\n", "Grad: tensor([-0.4815, 2.7258])\n", "Epoch 53, Loss 25.403149\n", "Params: tensor([ 2.5379, -1.2860])\n", "Grad: tensor([-0.4807, 2.7212])\n", "Epoch 54, Loss 25.326851\n", "Params: tensor([ 2.5428, -1.3132])\n", "Grad: tensor([-0.4799, 2.7166])\n", "Epoch 55, Loss 25.250811\n", 
"Params: tensor([ 2.5476, -1.3403])\n", "Grad: tensor([-0.4791, 2.7120])\n", "Epoch 56, Loss 25.175035\n", "Params: tensor([ 2.5523, -1.3675])\n", "Grad: tensor([-0.4783, 2.7074])\n", "Epoch 57, Loss 25.099510\n", "Params: tensor([ 2.5571, -1.3945])\n", "Grad: tensor([-0.4775, 2.7028])\n", "Epoch 58, Loss 25.024248\n", "Params: tensor([ 2.5619, -1.4216])\n", "Grad: tensor([-0.4766, 2.6982])\n", "Epoch 59, Loss 24.949238\n", "Params: tensor([ 2.5667, -1.4485])\n", "Grad: tensor([-0.4758, 2.6936])\n", "Epoch 60, Loss 24.874483\n", "Params: tensor([ 2.5714, -1.4755])\n", "Grad: tensor([-0.4750, 2.6890])\n", "Epoch 61, Loss 24.799980\n", "Params: tensor([ 2.5762, -1.5024])\n", "Grad: tensor([-0.4742, 2.6845])\n", "Epoch 62, Loss 24.725737\n", "Params: tensor([ 2.5809, -1.5292])\n", "Grad: tensor([-0.4734, 2.6799])\n", "Epoch 63, Loss 24.651735\n", "Params: tensor([ 2.5857, -1.5560])\n", "Grad: tensor([-0.4726, 2.6753])\n", "Epoch 64, Loss 24.577986\n", "Params: tensor([ 2.5904, -1.5828])\n", "Grad: tensor([-0.4718, 2.6708])\n", "Epoch 65, Loss 24.504494\n", "Params: tensor([ 2.5951, -1.6095])\n", "Grad: tensor([-0.4710, 2.6663])\n", "Epoch 66, Loss 24.431250\n", "Params: tensor([ 2.5998, -1.6361])\n", "Grad: tensor([-0.4702, 2.6617])\n", "Epoch 67, Loss 24.358257\n", "Params: tensor([ 2.6045, -1.6628])\n", "Grad: tensor([-0.4694, 2.6572])\n", "Epoch 68, Loss 24.285505\n", "Params: tensor([ 2.6092, -1.6893])\n", "Grad: tensor([-0.4686, 2.6527])\n", "Epoch 69, Loss 24.212996\n", "Params: tensor([ 2.6139, -1.7159])\n", "Grad: tensor([-0.4678, 2.6482])\n", "Epoch 70, Loss 24.140741\n", "Params: tensor([ 2.6186, -1.7423])\n", "Grad: tensor([-0.4670, 2.6437])\n", "Epoch 71, Loss 24.068733\n", "Params: tensor([ 2.6232, -1.7688])\n", "Grad: tensor([-0.4662, 2.6392])\n", "Epoch 72, Loss 23.996967\n", "Params: tensor([ 2.6279, -1.7952])\n", "Grad: tensor([-0.4654, 2.6347])\n", "Epoch 73, Loss 23.925446\n", "Params: tensor([ 2.6326, -1.8215])\n", "Grad: tensor([-0.4646, 2.6302])\n", "Epoch 74, Loss 23.854168\n", "Params: tensor([ 2.6372, -1.8478])\n", "Grad: tensor([-0.4638, 2.6258])\n", "Epoch 75, Loss 23.783125\n", "Params: tensor([ 2.6418, -1.8741])\n", "Grad: tensor([-0.4631, 2.6213])\n", "Epoch 76, Loss 23.712328\n", "Params: tensor([ 2.6465, -1.9003])\n", "Grad: tensor([-0.4623, 2.6169])\n", "Epoch 77, Loss 23.641771\n", "Params: tensor([ 2.6511, -1.9265])\n", "Grad: tensor([-0.4615, 2.6124])\n", "Epoch 78, Loss 23.571455\n", "Params: tensor([ 2.6557, -1.9526])\n", "Grad: tensor([-0.4607, 2.6080])\n", "Epoch 79, Loss 23.501379\n", "Params: tensor([ 2.6603, -1.9787])\n", "Grad: tensor([-0.4599, 2.6035])\n", "Epoch 80, Loss 23.431538\n", "Params: tensor([ 2.6649, -2.0047])\n", "Grad: tensor([-0.4591, 2.5991])\n", "Epoch 81, Loss 23.361938\n", "Params: tensor([ 2.6695, -2.0307])\n", "Grad: tensor([-0.4584, 2.5947])\n", "Epoch 82, Loss 23.292570\n", "Params: tensor([ 2.6741, -2.0566])\n", "Grad: tensor([-0.4576, 2.5903])\n", "Epoch 83, Loss 23.223436\n", "Params: tensor([ 2.6787, -2.0825])\n", "Grad: tensor([-0.4568, 2.5859])\n", "Epoch 84, Loss 23.154539\n", "Params: tensor([ 2.6832, -2.1084])\n", "Grad: tensor([-0.4560, 2.5815])\n", "Epoch 85, Loss 23.085882\n", "Params: tensor([ 2.6878, -2.1342])\n", "Grad: tensor([-0.4553, 2.5771])\n", "Epoch 86, Loss 23.017447\n", "Params: tensor([ 2.6923, -2.1600])\n", "Grad: tensor([-0.4545, 2.5727])\n", "Epoch 87, Loss 22.949251\n", "Params: tensor([ 2.6969, -2.1857])\n", "Grad: tensor([-0.4537, 2.5684])\n", "Epoch 88, Loss 22.881283\n", "Params: tensor([ 
2.7014, -2.2114])\n", "Grad: tensor([-0.4529, 2.5640])\n", "Epoch 89, Loss 22.813547\n", "Params: tensor([ 2.7060, -2.2370])\n", "Grad: tensor([-0.4522, 2.5597])\n", "Epoch 90, Loss 22.746044\n", "Params: tensor([ 2.7105, -2.2626])\n", "Grad: tensor([-0.4514, 2.5553])\n", "Epoch 91, Loss 22.678770\n", "Params: tensor([ 2.7150, -2.2882])\n", "Grad: tensor([-0.4506, 2.5510])\n", "Epoch 92, Loss 22.611717\n", "Params: tensor([ 2.7195, -2.3137])\n", "Grad: tensor([-0.4499, 2.5466])\n", "Epoch 93, Loss 22.544899\n", "Params: tensor([ 2.7240, -2.3392])\n", "Grad: tensor([-0.4491, 2.5423])\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 94, Loss 22.478304\n", "Params: tensor([ 2.7285, -2.3646])\n", "Grad: tensor([-0.4483, 2.5380])\n", "Epoch 95, Loss 22.411938\n", "Params: tensor([ 2.7330, -2.3900])\n", "Grad: tensor([-0.4476, 2.5337])\n", "Epoch 96, Loss 22.345795\n", "Params: tensor([ 2.7374, -2.4153])\n", "Grad: tensor([-0.4468, 2.5294])\n", "Epoch 97, Loss 22.279875\n", "Params: tensor([ 2.7419, -2.4406])\n", "Grad: tensor([-0.4461, 2.5251])\n", "Epoch 98, Loss 22.214186\n", "Params: tensor([ 2.7464, -2.4658])\n", "Grad: tensor([-0.4453, 2.5208])\n", "Epoch 99, Loss 22.148710\n", "Params: tensor([ 2.7508, -2.4910])\n", "Grad: tensor([-0.4445, 2.5165])\n" ] }, { "data": { "text/plain": [ "tensor([ 2.7553, -2.5162])" ] }, "execution_count": 83, "metadata": {}, "output_type": "execute_result" } ], "source": [ "params = torch.tensor([1.0, 0.0])\n", "\n", "nepochs = 100\n", "\n", "learning_rate = 1e-2\n", "\n", "for epoch in range(nepochs):\n", " # forward pass\n", " w, b = params\n", " t_p = model(t_un, w, b)\n", "\n", " loss = loss_fn(t_p, t_c)\n", " print('Epoch %d, Loss %f' % (epoch, float(loss)))\n", " \n", " # backward pass\n", " grad = grad_fn(t_un, t_c, t_p, w, b)\n", "\n", " print('Params:', params)\n", " print('Grad:', grad)\n", " \n", " params = params - learning_rate * grad\n", " \n", "params" ] }, { "cell_type": "code", "execution_count": 84, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Epoch 0, Loss 80.364342\n", "Epoch 1, Loss 37.574917\n", "Epoch 2, Loss 30.871077\n", "Epoch 3, Loss 29.756193\n", "Epoch 4, Loss 29.507149\n", "Epoch 5, Loss 29.392458\n", "Epoch 6, Loss 29.298828\n", "Epoch 7, Loss 29.208717\n", "Epoch 8, Loss 29.119417\n", "Epoch 9, Loss 29.030487\n", "Epoch 10, Loss 28.941875\n", "Epoch 11, Loss 28.853565\n", "Epoch 12, Loss 28.765556\n", "Epoch 13, Loss 28.677851\n", "Epoch 14, Loss 28.590431\n", "Epoch 15, Loss 28.503321\n", "Epoch 16, Loss 28.416496\n", "Epoch 17, Loss 28.329975\n", "Epoch 18, Loss 28.243738\n", "Epoch 19, Loss 28.157801\n", "Epoch 20, Loss 28.072151\n", "Epoch 21, Loss 27.986799\n", "Epoch 22, Loss 27.901731\n", "Epoch 23, Loss 27.816956\n", "Epoch 24, Loss 27.732466\n", "Epoch 25, Loss 27.648256\n", "Epoch 26, Loss 27.564342\n", "Epoch 27, Loss 27.480711\n", "Epoch 28, Loss 27.397358\n", "Epoch 29, Loss 27.314295\n", "Epoch 30, Loss 27.231512\n", "Epoch 31, Loss 27.149006\n", "Epoch 32, Loss 27.066790\n", "Epoch 33, Loss 26.984844\n", "Epoch 34, Loss 26.903173\n", "Epoch 35, Loss 26.821791\n", "Epoch 36, Loss 26.740675\n", "Epoch 37, Loss 26.659838\n", "Epoch 38, Loss 26.579279\n", "Epoch 39, Loss 26.498987\n", "Epoch 40, Loss 26.418974\n", "Epoch 41, Loss 26.339228\n", "Epoch 42, Loss 26.259752\n", "Epoch 43, Loss 26.180548\n", "Epoch 44, Loss 26.101616\n", "Epoch 45, Loss 26.022949\n", "Epoch 46, Loss 25.944542\n", "Epoch 47, Loss 25.866417\n", "Epoch 48, Loss 25.788546\n", "Epoch 
49, Loss 25.710936\n", "Epoch 50, Loss 25.633600\n", "Epoch 51, Loss 25.556524\n", "Epoch 52, Loss 25.479700\n", "Epoch 53, Loss 25.403149\n", "Epoch 54, Loss 25.326851\n", "Epoch 55, Loss 25.250811\n", "Epoch 56, Loss 25.175035\n", "Epoch 57, Loss 25.099510\n", "Epoch 58, Loss 25.024248\n", "Epoch 59, Loss 24.949238\n", "Epoch 60, Loss 24.874483\n", "Epoch 61, Loss 24.799980\n", "Epoch 62, Loss 24.725737\n", "Epoch 63, Loss 24.651735\n", "Epoch 64, Loss 24.577986\n", "Epoch 65, Loss 24.504494\n", "Epoch 66, Loss 24.431250\n", "Epoch 67, Loss 24.358257\n", "Epoch 68, Loss 24.285505\n", "Epoch 69, Loss 24.212996\n", "Epoch 70, Loss 24.140741\n", "Epoch 71, Loss 24.068733\n", "Epoch 72, Loss 23.996967\n", "Epoch 73, Loss 23.925446\n", "Epoch 74, Loss 23.854168\n", "Epoch 75, Loss 23.783125\n", "Epoch 76, Loss 23.712328\n", "Epoch 77, Loss 23.641771\n", "Epoch 78, Loss 23.571455\n", "Epoch 79, Loss 23.501379\n", "Epoch 80, Loss 23.431538\n", "Epoch 81, Loss 23.361938\n", "Epoch 82, Loss 23.292570\n", "Epoch 83, Loss 23.223436\n", "Epoch 84, Loss 23.154539\n", "Epoch 85, Loss 23.085882\n", "Epoch 86, Loss 23.017447\n", "Epoch 87, Loss 22.949251\n", "Epoch 88, Loss 22.881283\n", "Epoch 89, Loss 22.813547\n", "Epoch 90, Loss 22.746044\n", "Epoch 91, Loss 22.678770\n", "Epoch 92, Loss 22.611717\n", "Epoch 93, Loss 22.544899\n", "Epoch 94, Loss 22.478304\n", "Epoch 95, Loss 22.411938\n", "Epoch 96, Loss 22.345795\n", "Epoch 97, Loss 22.279875\n", "Epoch 98, Loss 22.214186\n", "Epoch 99, Loss 22.148710\n", "Epoch 100, Loss 22.083464\n", "Epoch 101, Loss 22.018436\n", "Epoch 102, Loss 21.953630\n", "Epoch 103, Loss 21.889046\n", "Epoch 104, Loss 21.824677\n", "Epoch 105, Loss 21.760530\n", "Epoch 106, Loss 21.696600\n", "Epoch 107, Loss 21.632881\n", "Epoch 108, Loss 21.569389\n", "Epoch 109, Loss 21.506104\n", "Epoch 110, Loss 21.443037\n", "Epoch 111, Loss 21.380190\n", "Epoch 112, Loss 21.317547\n", "Epoch 113, Loss 21.255119\n", "Epoch 114, Loss 21.192904\n", "Epoch 115, Loss 21.130901\n", "Epoch 116, Loss 21.069105\n", "Epoch 117, Loss 21.007528\n", "Epoch 118, Loss 20.946152\n", "Epoch 119, Loss 20.884983\n", "Epoch 120, Loss 20.824028\n", "Epoch 121, Loss 20.763273\n", "Epoch 122, Loss 20.702726\n", "Epoch 123, Loss 20.642384\n", "Epoch 124, Loss 20.582251\n", "Epoch 125, Loss 20.522322\n", "Epoch 126, Loss 20.462593\n", "Epoch 127, Loss 20.403069\n", "Epoch 128, Loss 20.343746\n", "Epoch 129, Loss 20.284622\n", "Epoch 130, Loss 20.225704\n", "Epoch 131, Loss 20.166983\n", "Epoch 132, Loss 20.108461\n", "Epoch 133, Loss 20.050135\n", "Epoch 134, Loss 19.992012\n", "Epoch 135, Loss 19.934088\n", "Epoch 136, Loss 19.876352\n", "Epoch 137, Loss 19.818825\n", "Epoch 138, Loss 19.761480\n", "Epoch 139, Loss 19.704338\n", "Epoch 140, Loss 19.647383\n", "Epoch 141, Loss 19.590630\n", "Epoch 142, Loss 19.534063\n", "Epoch 143, Loss 19.477690\n", "Epoch 144, Loss 19.421507\n", "Epoch 145, Loss 19.365517\n", "Epoch 146, Loss 19.309715\n", "Epoch 147, Loss 19.254107\n", "Epoch 148, Loss 19.198685\n", "Epoch 149, Loss 19.143446\n", "Epoch 150, Loss 19.088400\n", "Epoch 151, Loss 19.033545\n", "Epoch 152, Loss 18.978868\n", "Epoch 153, Loss 18.924377\n", "Epoch 154, Loss 18.870081\n", "Epoch 155, Loss 18.815960\n", "Epoch 156, Loss 18.762022\n", "Epoch 157, Loss 18.708269\n", "Epoch 158, Loss 18.654703\n", "Epoch 159, Loss 18.601313\n", "Epoch 160, Loss 18.548111\n", "Epoch 161, Loss 18.495083\n", "Epoch 162, Loss 18.442238\n", "Epoch 163, Loss 18.389570\n", "Epoch 164, Loss 18.337080\n", "Epoch 165, Loss 
18.284777\n", "Epoch 166, Loss 18.232643\n", "Epoch 167, Loss 18.180687\n", "Epoch 168, Loss 18.128904\n", "Epoch 169, Loss 18.077299\n", "Epoch 170, Loss 18.025879\n", "Epoch 171, Loss 17.974623\n", "Epoch 172, Loss 17.923546\n", "Epoch 173, Loss 17.872641\n", "Epoch 174, Loss 17.821907\n", "Epoch 175, Loss 17.771343\n", "Epoch 176, Loss 17.720955\n", "Epoch 177, Loss 17.670738\n", "Epoch 178, Loss 17.620691\n", "Epoch 179, Loss 17.570814\n", "Epoch 180, Loss 17.521105\n", "Epoch 181, Loss 17.471563\n", "Epoch 182, Loss 17.422194\n", "Epoch 183, Loss 17.372992\n", "Epoch 184, Loss 17.323954\n", "Epoch 185, Loss 17.275085\n", "Epoch 186, Loss 17.226379\n", "Epoch 187, Loss 17.177839\n", "Epoch 188, Loss 17.129467\n", "Epoch 189, Loss 17.081255\n", "Epoch 190, Loss 17.033207\n", "Epoch 191, Loss 16.985327\n", "Epoch 192, Loss 16.937605\n", "Epoch 193, Loss 16.890047\n", "Epoch 194, Loss 16.842649\n", "Epoch 195, Loss 16.795412\n", "Epoch 196, Loss 16.748339\n", "Epoch 197, Loss 16.701424\n", "Epoch 198, Loss 16.654661\n", "Epoch 199, Loss 16.608065\n", "Epoch 200, Loss 16.561625\n", "Epoch 201, Loss 16.515343\n", "Epoch 202, Loss 16.469219\n", "Epoch 203, Loss 16.423250\n", "Epoch 204, Loss 16.377434\n", "Epoch 205, Loss 16.331776\n", "Epoch 206, Loss 16.286276\n", "Epoch 207, Loss 16.240925\n", "Epoch 208, Loss 16.195734\n", "Epoch 209, Loss 16.150694\n", "Epoch 210, Loss 16.105806\n", "Epoch 211, Loss 16.061071\n", "Epoch 212, Loss 16.016487\n", "Epoch 213, Loss 15.972058\n", "Epoch 214, Loss 15.927777\n", "Epoch 215, Loss 15.883645\n", "Epoch 216, Loss 15.839664\n", "Epoch 217, Loss 15.795832\n", "Epoch 218, Loss 15.752152\n", "Epoch 219, Loss 15.708612\n", "Epoch 220, Loss 15.665228\n", "Epoch 221, Loss 15.621990\n", "Epoch 222, Loss 15.578897\n", "Epoch 223, Loss 15.535950\n", "Epoch 224, Loss 15.493152\n", "Epoch 225, Loss 15.450497\n", "Epoch 226, Loss 15.407981\n", "Epoch 227, Loss 15.365615\n", "Epoch 228, Loss 15.323395\n", "Epoch 229, Loss 15.281318\n", "Epoch 230, Loss 15.239380\n", "Epoch 231, Loss 15.197586\n", "Epoch 232, Loss 15.155931\n", "Epoch 233, Loss 15.114425\n", "Epoch 234, Loss 15.073053\n", "Epoch 235, Loss 15.031823\n", "Epoch 236, Loss 14.990737\n", "Epoch 237, Loss 14.949784\n", "Epoch 238, Loss 14.908973\n", "Epoch 239, Loss 14.868304\n", "Epoch 240, Loss 14.827767\n", "Epoch 241, Loss 14.787370\n", "Epoch 242, Loss 14.747110\n", "Epoch 243, Loss 14.706989\n", "Epoch 244, Loss 14.667002\n", "Epoch 245, Loss 14.627149\n", "Epoch 246, Loss 14.587436\n", "Epoch 247, Loss 14.547854\n", "Epoch 248, Loss 14.508408\n", "Epoch 249, Loss 14.469095\n", "Epoch 250, Loss 14.429919\n", "Epoch 251, Loss 14.390872\n", "Epoch 252, Loss 14.351956\n", "Epoch 253, Loss 14.313177\n", "Epoch 254, Loss 14.274525\n", "Epoch 255, Loss 14.236008\n", "Epoch 256, Loss 14.197620\n", "Epoch 257, Loss 14.159363\n", "Epoch 258, Loss 14.121234\n", "Epoch 259, Loss 14.083237\n", "Epoch 260, Loss 14.045368\n", "Epoch 261, Loss 14.007627\n", "Epoch 262, Loss 13.970016\n", "Epoch 263, Loss 13.932532\n", "Epoch 264, Loss 13.895172\n", "Epoch 265, Loss 13.857942\n", "Epoch 266, Loss 13.820837\n", "Epoch 267, Loss 13.783858\n", "Epoch 268, Loss 13.747006\n", "Epoch 269, Loss 13.710278\n", "Epoch 270, Loss 13.673676\n", "Epoch 271, Loss 13.637196\n", "Epoch 272, Loss 13.600842\n", "Epoch 273, Loss 13.564609\n", "Epoch 274, Loss 13.528501\n", "Epoch 275, Loss 13.492515\n", "Epoch 276, Loss 13.456651\n", "Epoch 277, Loss 13.420910\n", "Epoch 278, Loss 13.385287\n", "Epoch 279, Loss 13.349787\n", "Epoch 
280, Loss 13.314410\n", "Epoch 281, Loss 13.279148\n", "Epoch 282, Loss 13.244009\n", "Epoch 283, Loss 13.208993\n", "Epoch 284, Loss 13.174088\n", "Epoch 285, Loss 13.139307\n", "Epoch 286, Loss 13.104638\n", "Epoch 287, Loss 13.070093\n", "Epoch 288, Loss 13.035663\n", "Epoch 289, Loss 13.001349\n", "Epoch 290, Loss 12.967154\n", "Epoch 291, Loss 12.933074\n", "Epoch 292, Loss 12.899109\n", "Epoch 293, Loss 12.865259\n", "Epoch 294, Loss 12.831525\n", "Epoch 295, Loss 12.797904\n", "Epoch 296, Loss 12.764399\n", "Epoch 297, Loss 12.731007\n", "Epoch 298, Loss 12.697727\n", "Epoch 299, Loss 12.664560\n", "Epoch 300, Loss 12.631507\n", "Epoch 301, Loss 12.598566\n", "Epoch 302, Loss 12.565738\n", "Epoch 303, Loss 12.533021\n", "Epoch 304, Loss 12.500415\n", "Epoch 305, Loss 12.467919\n", "Epoch 306, Loss 12.435533\n", "Epoch 307, Loss 12.403255\n", "Epoch 308, Loss 12.371088\n", "Epoch 309, Loss 12.339031\n", "Epoch 310, Loss 12.307083\n", "Epoch 311, Loss 12.275247\n", "Epoch 312, Loss 12.243509\n", "Epoch 313, Loss 12.211887\n", "Epoch 314, Loss 12.180370\n", "Epoch 315, Loss 12.148962\n", "Epoch 316, Loss 12.117655\n", "Epoch 317, Loss 12.086463\n", "Epoch 318, Loss 12.055373\n", "Epoch 319, Loss 12.024384\n", "Epoch 320, Loss 11.993508\n", "Epoch 321, Loss 11.962732\n", "Epoch 322, Loss 11.932056\n", "Epoch 323, Loss 11.901492\n", "Epoch 324, Loss 11.871029\n", "Epoch 325, Loss 11.840671\n", "Epoch 326, Loss 11.810413\n", "Epoch 327, Loss 11.780257\n", "Epoch 328, Loss 11.750208\n", "Epoch 329, Loss 11.720258\n", "Epoch 330, Loss 11.690412\n", "Epoch 331, Loss 11.660664\n", "Epoch 332, Loss 11.631016\n", "Epoch 333, Loss 11.601473\n", "Epoch 334, Loss 11.572030\n", "Epoch 335, Loss 11.542686\n", "Epoch 336, Loss 11.513440\n", "Epoch 337, Loss 11.484293\n", "Epoch 338, Loss 11.455247\n", "Epoch 339, Loss 11.426300\n", "Epoch 340, Loss 11.397448\n", "Epoch 341, Loss 11.368697\n", "Epoch 342, Loss 11.340043\n", "Epoch 343, Loss 11.311487\n", "Epoch 344, Loss 11.283028\n", "Epoch 345, Loss 11.254662\n", "Epoch 346, Loss 11.226396\n", "Epoch 347, Loss 11.198221\n", "Epoch 348, Loss 11.170149\n", "Epoch 349, Loss 11.142170\n", "Epoch 350, Loss 11.114283\n", "Epoch 351, Loss 11.086493\n", "Epoch 352, Loss 11.058796\n", "Epoch 353, Loss 11.031192\n", "Epoch 354, Loss 11.003686\n", "Epoch 355, Loss 10.976271\n", "Epoch 356, Loss 10.948948\n", "Epoch 357, Loss 10.921718\n", "Epoch 358, Loss 10.894581\n", "Epoch 359, Loss 10.867537\n", "Epoch 360, Loss 10.840583\n", "Epoch 361, Loss 10.813720\n", "Epoch 362, Loss 10.786951\n", "Epoch 363, Loss 10.760270\n", "Epoch 364, Loss 10.733681\n", "Epoch 365, Loss 10.707183\n", "Epoch 366, Loss 10.680775\n", "Epoch 367, Loss 10.654453\n", "Epoch 368, Loss 10.628225\n", "Epoch 369, Loss 10.602084\n", "Epoch 370, Loss 10.576032\n", "Epoch 371, Loss 10.550071\n", "Epoch 372, Loss 10.524195\n", "Epoch 373, Loss 10.498408\n", "Epoch 374, Loss 10.472707\n", "Epoch 375, Loss 10.447094\n", "Epoch 376, Loss 10.421568\n", "Epoch 377, Loss 10.396132\n", "Epoch 378, Loss 10.370778\n", "Epoch 379, Loss 10.345510\n", "Epoch 380, Loss 10.320329\n", "Epoch 381, Loss 10.295236\n", "Epoch 382, Loss 10.270224\n", "Epoch 383, Loss 10.245296\n", "Epoch 384, Loss 10.220456\n", "Epoch 385, Loss 10.195701\n", "Epoch 386, Loss 10.171027\n", "Epoch 387, Loss 10.146436\n", "Epoch 388, Loss 10.121934\n", "Epoch 389, Loss 10.097512\n", "Epoch 390, Loss 10.073174\n", "Epoch 391, Loss 10.048919\n", "Epoch 392, Loss 10.024742\n", "Epoch 393, Loss 10.000652\n", "Epoch 394, Loss 
9.976640\n", "Epoch 395, Loss 9.952712\n", "Epoch 396, Loss 9.928863\n", "Epoch 397, Loss 9.905092\n", "Epoch 398, Loss 9.881409\n", "Epoch 399, Loss 9.857802\n", "Epoch 400, Loss 9.834277\n", "Epoch 401, Loss 9.810832\n", "Epoch 402, Loss 9.787466\n", "Epoch 403, Loss 9.764176\n", "Epoch 404, Loss 9.740971\n", "Epoch 405, Loss 9.717843\n", "Epoch 406, Loss 9.694793\n", "Epoch 407, Loss 9.671823\n", "Epoch 408, Loss 9.648926\n", "Epoch 409, Loss 9.626110\n", "Epoch 410, Loss 9.603373\n", "Epoch 411, Loss 9.580710\n", "Epoch 412, Loss 9.558124\n", "Epoch 413, Loss 9.535618\n", "Epoch 414, Loss 9.513185\n", "Epoch 415, Loss 9.490829\n", "Epoch 416, Loss 9.468551\n", "Epoch 417, Loss 9.446347\n", "Epoch 418, Loss 9.424216\n", "Epoch 419, Loss 9.402163\n", "Epoch 420, Loss 9.380185\n", "Epoch 421, Loss 9.358281\n", "Epoch 422, Loss 9.336448\n", "Epoch 423, Loss 9.314696\n", "Epoch 424, Loss 9.293013\n", "Epoch 425, Loss 9.271402\n", "Epoch 426, Loss 9.249870\n", "Epoch 427, Loss 9.228409\n", "Epoch 428, Loss 9.207021\n", "Epoch 429, Loss 9.185704\n", "Epoch 430, Loss 9.164462\n", "Epoch 431, Loss 9.143288\n", "Epoch 432, Loss 9.122189\n", "Epoch 433, Loss 9.101160\n", "Epoch 434, Loss 9.080204\n", "Epoch 435, Loss 9.059317\n", "Epoch 436, Loss 9.038502\n", "Epoch 437, Loss 9.017757\n", "Epoch 438, Loss 8.997085\n", "Epoch 439, Loss 8.976479\n", "Epoch 440, Loss 8.955945\n", "Epoch 441, Loss 8.935481\n", "Epoch 442, Loss 8.915089\n", "Epoch 443, Loss 8.894763\n", "Epoch 444, Loss 8.874508\n", "Epoch 445, Loss 8.854318\n", "Epoch 446, Loss 8.834197\n", "Epoch 447, Loss 8.814149\n", "Epoch 448, Loss 8.794162\n", "Epoch 449, Loss 8.774252\n", "Epoch 450, Loss 8.754406\n", "Epoch 451, Loss 8.734625\n", "Epoch 452, Loss 8.714911\n", "Epoch 453, Loss 8.695266\n", "Epoch 454, Loss 8.675689\n", "Epoch 455, Loss 8.656174\n", "Epoch 456, Loss 8.636728\n", "Epoch 457, Loss 8.617346\n", "Epoch 458, Loss 8.598029\n", "Epoch 459, Loss 8.578781\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 460, Loss 8.559597\n", "Epoch 461, Loss 8.540478\n", "Epoch 462, Loss 8.521426\n", "Epoch 463, Loss 8.502438\n", "Epoch 464, Loss 8.483516\n", "Epoch 465, Loss 8.464652\n", "Epoch 466, Loss 8.445858\n", "Epoch 467, Loss 8.427128\n", "Epoch 468, Loss 8.408456\n", "Epoch 469, Loss 8.389848\n", "Epoch 470, Loss 8.371305\n", "Epoch 471, Loss 8.352828\n", "Epoch 472, Loss 8.334408\n", "Epoch 473, Loss 8.316055\n", "Epoch 474, Loss 8.297764\n", "Epoch 475, Loss 8.279534\n", "Epoch 476, Loss 8.261369\n", "Epoch 477, Loss 8.243261\n", "Epoch 478, Loss 8.225213\n", "Epoch 479, Loss 8.207232\n", "Epoch 480, Loss 8.189310\n", "Epoch 481, Loss 8.171450\n", "Epoch 482, Loss 8.153648\n", "Epoch 483, Loss 8.135907\n", "Epoch 484, Loss 8.118226\n", "Epoch 485, Loss 8.100607\n", "Epoch 486, Loss 8.083045\n", "Epoch 487, Loss 8.065548\n", "Epoch 488, Loss 8.048104\n", "Epoch 489, Loss 8.030723\n", "Epoch 490, Loss 8.013400\n", "Epoch 491, Loss 7.996135\n", "Epoch 492, Loss 7.978929\n", "Epoch 493, Loss 7.961784\n", "Epoch 494, Loss 7.944690\n", "Epoch 495, Loss 7.927662\n", "Epoch 496, Loss 7.910690\n", "Epoch 497, Loss 7.893775\n", "Epoch 498, Loss 7.876915\n", "Epoch 499, Loss 7.860116\n", "Epoch 500, Loss 7.843370\n", "Epoch 501, Loss 7.826681\n", "Epoch 502, Loss 7.810053\n", "Epoch 503, Loss 7.793480\n", "Epoch 504, Loss 7.776962\n", "Epoch 505, Loss 7.760498\n", "Epoch 506, Loss 7.744092\n", "Epoch 507, Loss 7.727745\n", "Epoch 508, Loss 7.711447\n", "Epoch 509, Loss 7.695212\n", "Epoch 510, Loss 7.679024\n", 
"Epoch 511, Loss 7.662895\n", "Epoch 512, Loss 7.646819\n", "Epoch 513, Loss 7.630803\n", "Epoch 514, Loss 7.614836\n", "Epoch 515, Loss 7.598925\n", "Epoch 516, Loss 7.583069\n", "Epoch 517, Loss 7.567266\n", "Epoch 518, Loss 7.551516\n", "Epoch 519, Loss 7.535819\n", "Epoch 520, Loss 7.520176\n", "Epoch 521, Loss 7.504588\n", "Epoch 522, Loss 7.489048\n", "Epoch 523, Loss 7.473566\n", "Epoch 524, Loss 7.458135\n", "Epoch 525, Loss 7.442751\n", "Epoch 526, Loss 7.427426\n", "Epoch 527, Loss 7.412152\n", "Epoch 528, Loss 7.396928\n", "Epoch 529, Loss 7.381756\n", "Epoch 530, Loss 7.366636\n", "Epoch 531, Loss 7.351566\n", "Epoch 532, Loss 7.336550\n", "Epoch 533, Loss 7.321585\n", "Epoch 534, Loss 7.306670\n", "Epoch 535, Loss 7.291803\n", "Epoch 536, Loss 7.276989\n", "Epoch 537, Loss 7.262227\n", "Epoch 538, Loss 7.247512\n", "Epoch 539, Loss 7.232846\n", "Epoch 540, Loss 7.218231\n", "Epoch 541, Loss 7.203666\n", "Epoch 542, Loss 7.189151\n", "Epoch 543, Loss 7.174683\n", "Epoch 544, Loss 7.160267\n", "Epoch 545, Loss 7.145897\n", "Epoch 546, Loss 7.131578\n", "Epoch 547, Loss 7.117305\n", "Epoch 548, Loss 7.103083\n", "Epoch 549, Loss 7.088911\n", "Epoch 550, Loss 7.074785\n", "Epoch 551, Loss 7.060707\n", "Epoch 552, Loss 7.046677\n", "Epoch 553, Loss 7.032695\n", "Epoch 554, Loss 7.018756\n", "Epoch 555, Loss 7.004869\n", "Epoch 556, Loss 6.991029\n", "Epoch 557, Loss 6.977232\n", "Epoch 558, Loss 6.963488\n", "Epoch 559, Loss 6.949786\n", "Epoch 560, Loss 6.936135\n", "Epoch 561, Loss 6.922528\n", "Epoch 562, Loss 6.908967\n", "Epoch 563, Loss 6.895452\n", "Epoch 564, Loss 6.881980\n", "Epoch 565, Loss 6.868558\n", "Epoch 566, Loss 6.855180\n", "Epoch 567, Loss 6.841848\n", "Epoch 568, Loss 6.828561\n", "Epoch 569, Loss 6.815319\n", "Epoch 570, Loss 6.802118\n", "Epoch 571, Loss 6.788968\n", "Epoch 572, Loss 6.775864\n", "Epoch 573, Loss 6.762798\n", "Epoch 574, Loss 6.749779\n", "Epoch 575, Loss 6.736803\n", "Epoch 576, Loss 6.723875\n", "Epoch 577, Loss 6.710986\n", "Epoch 578, Loss 6.698142\n", "Epoch 579, Loss 6.685344\n", "Epoch 580, Loss 6.672589\n", "Epoch 581, Loss 6.659874\n", "Epoch 582, Loss 6.647207\n", "Epoch 583, Loss 6.634577\n", "Epoch 584, Loss 6.621995\n", "Epoch 585, Loss 6.609454\n", "Epoch 586, Loss 6.596954\n", "Epoch 587, Loss 6.584500\n", "Epoch 588, Loss 6.572087\n", "Epoch 589, Loss 6.559712\n", "Epoch 590, Loss 6.547384\n", "Epoch 591, Loss 6.535097\n", "Epoch 592, Loss 6.522851\n", "Epoch 593, Loss 6.510646\n", "Epoch 594, Loss 6.498481\n", "Epoch 595, Loss 6.486362\n", "Epoch 596, Loss 6.474282\n", "Epoch 597, Loss 6.462242\n", "Epoch 598, Loss 6.450243\n", "Epoch 599, Loss 6.438284\n", "Epoch 600, Loss 6.426367\n", "Epoch 601, Loss 6.414490\n", "Epoch 602, Loss 6.402655\n", "Epoch 603, Loss 6.390859\n", "Epoch 604, Loss 6.379102\n", "Epoch 605, Loss 6.367384\n", "Epoch 606, Loss 6.355706\n", "Epoch 607, Loss 6.344071\n", "Epoch 608, Loss 6.332472\n", "Epoch 609, Loss 6.320912\n", "Epoch 610, Loss 6.309395\n", "Epoch 611, Loss 6.297915\n", "Epoch 612, Loss 6.286473\n", "Epoch 613, Loss 6.275074\n", "Epoch 614, Loss 6.263707\n", "Epoch 615, Loss 6.252382\n", "Epoch 616, Loss 6.241098\n", "Epoch 617, Loss 6.229849\n", "Epoch 618, Loss 6.218639\n", "Epoch 619, Loss 6.207471\n", "Epoch 620, Loss 6.196334\n", "Epoch 621, Loss 6.185240\n", "Epoch 622, Loss 6.174181\n", "Epoch 623, Loss 6.163159\n", "Epoch 624, Loss 6.152177\n", "Epoch 625, Loss 6.141229\n", "Epoch 626, Loss 6.130321\n", "Epoch 627, Loss 6.119448\n", "Epoch 628, Loss 6.108614\n", "Epoch 629, 
Loss 6.097815\n", "Epoch 630, Loss 6.087054\n", "Epoch 631, Loss 6.076329\n", "Epoch 632, Loss 6.065643\n", "Epoch 633, Loss 6.054988\n", "Epoch 634, Loss 6.044372\n", "Epoch 635, Loss 6.033794\n", "Epoch 636, Loss 6.023247\n", "Epoch 637, Loss 6.012738\n", "Epoch 638, Loss 6.002264\n", "Epoch 639, Loss 5.991829\n", "Epoch 640, Loss 5.981426\n", "Epoch 641, Loss 5.971057\n", "Epoch 642, Loss 5.960727\n", "Epoch 643, Loss 5.950432\n", "Epoch 644, Loss 5.940171\n", "Epoch 645, Loss 5.929944\n", "Epoch 646, Loss 5.919752\n", "Epoch 647, Loss 5.909597\n", "Epoch 648, Loss 5.899473\n", "Epoch 649, Loss 5.889384\n", "Epoch 650, Loss 5.879326\n", "Epoch 651, Loss 5.869310\n", "Epoch 652, Loss 5.859322\n", "Epoch 653, Loss 5.849374\n", "Epoch 654, Loss 5.839453\n", "Epoch 655, Loss 5.829570\n", "Epoch 656, Loss 5.819718\n", "Epoch 657, Loss 5.809900\n", "Epoch 658, Loss 5.800117\n", "Epoch 659, Loss 5.790367\n", "Epoch 660, Loss 5.780647\n", "Epoch 661, Loss 5.770962\n", "Epoch 662, Loss 5.761312\n", "Epoch 663, Loss 5.751693\n", "Epoch 664, Loss 5.742105\n", "Epoch 665, Loss 5.732550\n", "Epoch 666, Loss 5.723031\n", "Epoch 667, Loss 5.713539\n", "Epoch 668, Loss 5.704084\n", "Epoch 669, Loss 5.694658\n", "Epoch 670, Loss 5.685265\n", "Epoch 671, Loss 5.675904\n", "Epoch 672, Loss 5.666573\n", "Epoch 673, Loss 5.657277\n", "Epoch 674, Loss 5.648010\n", "Epoch 675, Loss 5.638776\n", "Epoch 676, Loss 5.629575\n", "Epoch 677, Loss 5.620402\n", "Epoch 678, Loss 5.611260\n", "Epoch 679, Loss 5.602148\n", "Epoch 680, Loss 5.593071\n", "Epoch 681, Loss 5.584022\n", "Epoch 682, Loss 5.575005\n", "Epoch 683, Loss 5.566019\n", "Epoch 684, Loss 5.557063\n", "Epoch 685, Loss 5.548136\n", "Epoch 686, Loss 5.539241\n", "Epoch 687, Loss 5.530376\n", "Epoch 688, Loss 5.521540\n", "Epoch 689, Loss 5.512733\n", "Epoch 690, Loss 5.503958\n", "Epoch 691, Loss 5.495212\n", "Epoch 692, Loss 5.486496\n", "Epoch 693, Loss 5.477808\n", "Epoch 694, Loss 5.469152\n", "Epoch 695, Loss 5.460525\n", "Epoch 696, Loss 5.451928\n", "Epoch 697, Loss 5.443359\n", "Epoch 698, Loss 5.434820\n", "Epoch 699, Loss 5.426310\n", "Epoch 700, Loss 5.417827\n", "Epoch 701, Loss 5.409373\n", "Epoch 702, Loss 5.400949\n", "Epoch 703, Loss 5.392551\n", "Epoch 704, Loss 5.384184\n", "Epoch 705, Loss 5.375845\n", "Epoch 706, Loss 5.367537\n", "Epoch 707, Loss 5.359253\n", "Epoch 708, Loss 5.350998\n", "Epoch 709, Loss 5.342771\n", "Epoch 710, Loss 5.334575\n", "Epoch 711, Loss 5.326403\n", "Epoch 712, Loss 5.318259\n", "Epoch 713, Loss 5.310144\n", "Epoch 714, Loss 5.302055\n", "Epoch 715, Loss 5.293994\n", "Epoch 716, Loss 5.285964\n", "Epoch 717, Loss 5.277958\n", "Epoch 718, Loss 5.269979\n", "Epoch 719, Loss 5.262026\n", "Epoch 720, Loss 5.254103\n", "Epoch 721, Loss 5.246205\n", "Epoch 722, Loss 5.238335\n", "Epoch 723, Loss 5.230491\n", "Epoch 724, Loss 5.222673\n", "Epoch 725, Loss 5.214881\n", "Epoch 726, Loss 5.207120\n", "Epoch 727, Loss 5.199381\n", "Epoch 728, Loss 5.191670\n", "Epoch 729, Loss 5.183984\n", "Epoch 730, Loss 5.176324\n", "Epoch 731, Loss 5.168688\n", "Epoch 732, Loss 5.161084\n", "Epoch 733, Loss 5.153500\n", "Epoch 734, Loss 5.145943\n", "Epoch 735, Loss 5.138412\n", "Epoch 736, Loss 5.130910\n", "Epoch 737, Loss 5.123428\n", "Epoch 738, Loss 5.115977\n", "Epoch 739, Loss 5.108547\n", "Epoch 740, Loss 5.101144\n", "Epoch 741, Loss 5.093765\n", "Epoch 742, Loss 5.086413\n", "Epoch 743, Loss 5.079085\n", "Epoch 744, Loss 5.071782\n", "Epoch 745, Loss 5.064505\n", "Epoch 746, Loss 5.057247\n", "Epoch 747, Loss 
5.050022\n", "Epoch 748, Loss 5.042817\n", "Epoch 749, Loss 5.035636\n", "Epoch 750, Loss 5.028476\n", "Epoch 751, Loss 5.021346\n", "Epoch 752, Loss 5.014239\n", "Epoch 753, Loss 5.007157\n", "Epoch 754, Loss 5.000099\n", "Epoch 755, Loss 4.993064\n", "Epoch 756, Loss 4.986051\n", "Epoch 757, Loss 4.979065\n", "Epoch 758, Loss 4.972100\n", "Epoch 759, Loss 4.965159\n", "Epoch 760, Loss 4.958245\n", "Epoch 761, Loss 4.951350\n", "Epoch 762, Loss 4.944479\n", "Epoch 763, Loss 4.937633\n", "Epoch 764, Loss 4.930812\n", "Epoch 765, Loss 4.924009\n", "Epoch 766, Loss 4.917234\n", "Epoch 767, Loss 4.910480\n", "Epoch 768, Loss 4.903749\n", "Epoch 769, Loss 4.897040\n", "Epoch 770, Loss 4.890356\n", "Epoch 771, Loss 4.883691\n", "Epoch 772, Loss 4.877052\n", "Epoch 773, Loss 4.870436\n", "Epoch 774, Loss 4.863839\n", "Epoch 775, Loss 4.857267\n", "Epoch 776, Loss 4.850717\n", "Epoch 777, Loss 4.844189\n", "Epoch 778, Loss 4.837683\n", "Epoch 779, Loss 4.831196\n", "Epoch 780, Loss 4.824737\n", "Epoch 781, Loss 4.818298\n", "Epoch 782, Loss 4.811880\n", "Epoch 783, Loss 4.805481\n", "Epoch 784, Loss 4.799106\n", "Epoch 785, Loss 4.792755\n", "Epoch 786, Loss 4.786422\n", "Epoch 787, Loss 4.780112\n", "Epoch 788, Loss 4.773824\n", "Epoch 789, Loss 4.767559\n", "Epoch 790, Loss 4.761311\n", "Epoch 791, Loss 4.755087\n", "Epoch 792, Loss 4.748885\n", "Epoch 793, Loss 4.742701\n", "Epoch 794, Loss 4.736537\n", "Epoch 795, Loss 4.730397\n", "Epoch 796, Loss 4.724279\n", "Epoch 797, Loss 4.718181\n", "Epoch 798, Loss 4.712101\n", "Epoch 799, Loss 4.706046\n", "Epoch 800, Loss 4.700009\n", "Epoch 801, Loss 4.693989\n", "Epoch 802, Loss 4.687995\n", "Epoch 803, Loss 4.682020\n", "Epoch 804, Loss 4.676063\n", "Epoch 805, Loss 4.670130\n", "Epoch 806, Loss 4.664214\n", "Epoch 807, Loss 4.658320\n", "Epoch 808, Loss 4.652445\n", "Epoch 809, Loss 4.646592\n", "Epoch 810, Loss 4.640753\n", "Epoch 811, Loss 4.634938\n", "Epoch 812, Loss 4.629142\n", "Epoch 813, Loss 4.623368\n", "Epoch 814, Loss 4.617611\n", "Epoch 815, Loss 4.611873\n", "Epoch 816, Loss 4.606156\n", "Epoch 817, Loss 4.600458\n", "Epoch 818, Loss 4.594780\n", "Epoch 819, Loss 4.589119\n", "Epoch 820, Loss 4.583479\n", "Epoch 821, Loss 4.577857\n", "Epoch 822, Loss 4.572256\n", "Epoch 823, Loss 4.566675\n", "Epoch 824, Loss 4.561109\n", "Epoch 825, Loss 4.555565\n", "Epoch 826, Loss 4.550039\n", "Epoch 827, Loss 4.544533\n", "Epoch 828, Loss 4.539044\n", "Epoch 829, Loss 4.533575\n", "Epoch 830, Loss 4.528122\n", "Epoch 831, Loss 4.522691\n", "Epoch 832, Loss 4.517276\n", "Epoch 833, Loss 4.511879\n", "Epoch 834, Loss 4.506504\n", "Epoch 835, Loss 4.501141\n", "Epoch 836, Loss 4.495801\n", "Epoch 837, Loss 4.490474\n", "Epoch 838, Loss 4.485170\n", "Epoch 839, Loss 4.479884\n", "Epoch 840, Loss 4.474614\n", "Epoch 841, Loss 4.469364\n", "Epoch 842, Loss 4.464129\n", "Epoch 843, Loss 4.458913\n", "Epoch 844, Loss 4.453716\n", "Epoch 845, Loss 4.448534\n", "Epoch 846, Loss 4.443372\n", "Epoch 847, Loss 4.438227\n", "Epoch 848, Loss 4.433099\n", "Epoch 849, Loss 4.427989\n", "Epoch 850, Loss 4.422897\n", "Epoch 851, Loss 4.417819\n", "Epoch 852, Loss 4.412762\n", "Epoch 853, Loss 4.407720\n", "Epoch 854, Loss 4.402697\n", "Epoch 855, Loss 4.397688\n", "Epoch 856, Loss 4.392697\n", "Epoch 857, Loss 4.387725\n", "Epoch 858, Loss 4.382769\n", "Epoch 859, Loss 4.377828\n", "Epoch 860, Loss 4.372905\n", "Epoch 861, Loss 4.368000\n", "Epoch 862, Loss 4.363111\n", "Epoch 863, Loss 4.358238\n", "Epoch 864, Loss 4.353383\n", "Epoch 865, Loss 4.348542\n", 
"Epoch 866, Loss 4.343716\n", "Epoch 867, Loss 4.338911\n", "Epoch 868, Loss 4.334121\n", "Epoch 869, Loss 4.329345\n", "Epoch 870, Loss 4.324588\n", "Epoch 871, Loss 4.319845\n", "Epoch 872, Loss 4.315118\n", "Epoch 873, Loss 4.310409\n", "Epoch 874, Loss 4.305714\n", "Epoch 875, Loss 4.301035\n", "Epoch 876, Loss 4.296376\n", "Epoch 877, Loss 4.291727\n", "Epoch 878, Loss 4.287097\n", "Epoch 879, Loss 4.282482\n", "Epoch 880, Loss 4.277882\n", "Epoch 881, Loss 4.273299\n", "Epoch 882, Loss 4.268732\n", "Epoch 883, Loss 4.264178\n", "Epoch 884, Loss 4.259643\n", "Epoch 885, Loss 4.255120\n", "Epoch 886, Loss 4.250613\n", "Epoch 887, Loss 4.246124\n", "Epoch 888, Loss 4.241648\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 889, Loss 4.237185\n", "Epoch 890, Loss 4.232740\n", "Epoch 891, Loss 4.228308\n", "Epoch 892, Loss 4.223895\n", "Epoch 893, Loss 4.219494\n", "Epoch 894, Loss 4.215109\n", "Epoch 895, Loss 4.210737\n", "Epoch 896, Loss 4.206383\n", "Epoch 897, Loss 4.202042\n", "Epoch 898, Loss 4.197715\n", "Epoch 899, Loss 4.193405\n", "Epoch 900, Loss 4.189108\n", "Epoch 901, Loss 4.184825\n", "Epoch 902, Loss 4.180559\n", "Epoch 903, Loss 4.176305\n", "Epoch 904, Loss 4.172065\n", "Epoch 905, Loss 4.167842\n", "Epoch 906, Loss 4.163631\n", "Epoch 907, Loss 4.159436\n", "Epoch 908, Loss 4.155253\n", "Epoch 909, Loss 4.151086\n", "Epoch 910, Loss 4.146934\n", "Epoch 911, Loss 4.142795\n", "Epoch 912, Loss 4.138669\n", "Epoch 913, Loss 4.134559\n", "Epoch 914, Loss 4.130464\n", "Epoch 915, Loss 4.126378\n", "Epoch 916, Loss 4.122310\n", "Epoch 917, Loss 4.118254\n", "Epoch 918, Loss 4.114213\n", "Epoch 919, Loss 4.110184\n", "Epoch 920, Loss 4.106169\n", "Epoch 921, Loss 4.102170\n", "Epoch 922, Loss 4.098181\n", "Epoch 923, Loss 4.094210\n", "Epoch 924, Loss 4.090249\n", "Epoch 925, Loss 4.086300\n", "Epoch 926, Loss 4.082366\n", "Epoch 927, Loss 4.078448\n", "Epoch 928, Loss 4.074541\n", "Epoch 929, Loss 4.070649\n", "Epoch 930, Loss 4.066768\n", "Epoch 931, Loss 4.062900\n", "Epoch 932, Loss 4.059047\n", "Epoch 933, Loss 4.055204\n", "Epoch 934, Loss 4.051379\n", "Epoch 935, Loss 4.047564\n", "Epoch 936, Loss 4.043762\n", "Epoch 937, Loss 4.039972\n", "Epoch 938, Loss 4.036197\n", "Epoch 939, Loss 4.032434\n", "Epoch 940, Loss 4.028686\n", "Epoch 941, Loss 4.024947\n", "Epoch 942, Loss 4.021224\n", "Epoch 943, Loss 4.017509\n", "Epoch 944, Loss 4.013810\n", "Epoch 945, Loss 4.010122\n", "Epoch 946, Loss 4.006450\n", "Epoch 947, Loss 4.002785\n", "Epoch 948, Loss 3.999137\n", "Epoch 949, Loss 3.995498\n", "Epoch 950, Loss 3.991874\n", "Epoch 951, Loss 3.988262\n", "Epoch 952, Loss 3.984661\n", "Epoch 953, Loss 3.981072\n", "Epoch 954, Loss 3.977496\n", "Epoch 955, Loss 3.973933\n", "Epoch 956, Loss 3.970380\n", "Epoch 957, Loss 3.966843\n", "Epoch 958, Loss 3.963314\n", "Epoch 959, Loss 3.959798\n", "Epoch 960, Loss 3.956294\n", "Epoch 961, Loss 3.952804\n", "Epoch 962, Loss 3.949322\n", "Epoch 963, Loss 3.945856\n", "Epoch 964, Loss 3.942399\n", "Epoch 965, Loss 3.938953\n", "Epoch 966, Loss 3.935520\n", "Epoch 967, Loss 3.932097\n", "Epoch 968, Loss 3.928689\n", "Epoch 969, Loss 3.925292\n", "Epoch 970, Loss 3.921905\n", "Epoch 971, Loss 3.918529\n", "Epoch 972, Loss 3.915166\n", "Epoch 973, Loss 3.911817\n", "Epoch 974, Loss 3.908473\n", "Epoch 975, Loss 3.905145\n", "Epoch 976, Loss 3.901826\n", "Epoch 977, Loss 3.898518\n", "Epoch 978, Loss 3.895221\n", "Epoch 979, Loss 3.891936\n", "Epoch 980, Loss 3.888664\n", "Epoch 981, Loss 3.885402\n", "Epoch 982, 
Loss 3.882150\n", "Epoch 983, Loss 3.878911\n", "Epoch 984, Loss 3.875682\n", "Epoch 985, Loss 3.872465\n", "Epoch 986, Loss 3.869256\n", "Epoch 987, Loss 3.866060\n", "Epoch 988, Loss 3.862874\n", "Epoch 989, Loss 3.859699\n", "Epoch 990, Loss 3.856537\n", "Epoch 991, Loss 3.853381\n", "Epoch 992, Loss 3.850239\n", "Epoch 993, Loss 3.847108\n", "Epoch 994, Loss 3.843986\n", "Epoch 995, Loss 3.840876\n", "Epoch 996, Loss 3.837775\n", "Epoch 997, Loss 3.834686\n", "Epoch 998, Loss 3.831606\n", "Epoch 999, Loss 3.828538\n", "Epoch 1000, Loss 3.825484\n", "Epoch 1001, Loss 3.822433\n", "Epoch 1002, Loss 3.819398\n", "Epoch 1003, Loss 3.816369\n", "Epoch 1004, Loss 3.813350\n", "Epoch 1005, Loss 3.810344\n", "Epoch 1006, Loss 3.807348\n", "Epoch 1007, Loss 3.804360\n", "Epoch 1008, Loss 3.801384\n", "Epoch 1009, Loss 3.798421\n", "Epoch 1010, Loss 3.795465\n", "Epoch 1011, Loss 3.792518\n", "Epoch 1012, Loss 3.789584\n", "Epoch 1013, Loss 3.786658\n", "Epoch 1014, Loss 3.783740\n", "Epoch 1015, Loss 3.780832\n", "Epoch 1016, Loss 3.777939\n", "Epoch 1017, Loss 3.775053\n", "Epoch 1018, Loss 3.772173\n", "Epoch 1019, Loss 3.769310\n", "Epoch 1020, Loss 3.766451\n", "Epoch 1021, Loss 3.763602\n", "Epoch 1022, Loss 3.760766\n", "Epoch 1023, Loss 3.757936\n", "Epoch 1024, Loss 3.755118\n", "Epoch 1025, Loss 3.752309\n", "Epoch 1026, Loss 3.749511\n", "Epoch 1027, Loss 3.746722\n", "Epoch 1028, Loss 3.743940\n", "Epoch 1029, Loss 3.741169\n", "Epoch 1030, Loss 3.738407\n", "Epoch 1031, Loss 3.735656\n", "Epoch 1032, Loss 3.732914\n", "Epoch 1033, Loss 3.730181\n", "Epoch 1034, Loss 3.727456\n", "Epoch 1035, Loss 3.724741\n", "Epoch 1036, Loss 3.722034\n", "Epoch 1037, Loss 3.719337\n", "Epoch 1038, Loss 3.716650\n", "Epoch 1039, Loss 3.713972\n", "Epoch 1040, Loss 3.711302\n", "Epoch 1041, Loss 3.708643\n", "Epoch 1042, Loss 3.705990\n", "Epoch 1043, Loss 3.703351\n", "Epoch 1044, Loss 3.700716\n", "Epoch 1045, Loss 3.698092\n", "Epoch 1046, Loss 3.695476\n", "Epoch 1047, Loss 3.692869\n", "Epoch 1048, Loss 3.690273\n", "Epoch 1049, Loss 3.687683\n", "Epoch 1050, Loss 3.685103\n", "Epoch 1051, Loss 3.682532\n", "Epoch 1052, Loss 3.679969\n", "Epoch 1053, Loss 3.677417\n", "Epoch 1054, Loss 3.674871\n", "Epoch 1055, Loss 3.672334\n", "Epoch 1056, Loss 3.669805\n", "Epoch 1057, Loss 3.667287\n", "Epoch 1058, Loss 3.664775\n", "Epoch 1059, Loss 3.662273\n", "Epoch 1060, Loss 3.659778\n", "Epoch 1061, Loss 3.657295\n", "Epoch 1062, Loss 3.654816\n", "Epoch 1063, Loss 3.652350\n", "Epoch 1064, Loss 3.649889\n", "Epoch 1065, Loss 3.647437\n", "Epoch 1066, Loss 3.644991\n", "Epoch 1067, Loss 3.642559\n", "Epoch 1068, Loss 3.640131\n", "Epoch 1069, Loss 3.637711\n", "Epoch 1070, Loss 3.635302\n", "Epoch 1071, Loss 3.632902\n", "Epoch 1072, Loss 3.630508\n", "Epoch 1073, Loss 3.628119\n", "Epoch 1074, Loss 3.625741\n", "Epoch 1075, Loss 3.623374\n", "Epoch 1076, Loss 3.621010\n", "Epoch 1077, Loss 3.618659\n", "Epoch 1078, Loss 3.616311\n", "Epoch 1079, Loss 3.613973\n", "Epoch 1080, Loss 3.611643\n", "Epoch 1081, Loss 3.609322\n", "Epoch 1082, Loss 3.607007\n", "Epoch 1083, Loss 3.604701\n", "Epoch 1084, Loss 3.602404\n", "Epoch 1085, Loss 3.600114\n", "Epoch 1086, Loss 3.597830\n", "Epoch 1087, Loss 3.595553\n", "Epoch 1088, Loss 3.593287\n", "Epoch 1089, Loss 3.591030\n", "Epoch 1090, Loss 3.588776\n", "Epoch 1091, Loss 3.586534\n", "Epoch 1092, Loss 3.584295\n", "Epoch 1093, Loss 3.582067\n", "Epoch 1094, Loss 3.579846\n", "Epoch 1095, Loss 3.577631\n", "Epoch 1096, Loss 3.575424\n", "Epoch 1097, Loss 
3.573225\n", "Epoch 1098, Loss 3.571034\n", "Epoch 1099, Loss 3.568848\n", "Epoch 1100, Loss 3.566673\n", "Epoch 1101, Loss 3.564506\n", "Epoch 1102, Loss 3.562340\n", "Epoch 1103, Loss 3.560185\n", "Epoch 1104, Loss 3.558040\n", "Epoch 1105, Loss 3.555901\n", "Epoch 1106, Loss 3.553767\n", "Epoch 1107, Loss 3.551641\n", "Epoch 1108, Loss 3.549524\n", "Epoch 1109, Loss 3.547411\n", "Epoch 1110, Loss 3.545309\n", "Epoch 1111, Loss 3.543211\n", "Epoch 1112, Loss 3.541124\n", "Epoch 1113, Loss 3.539041\n", "Epoch 1114, Loss 3.536966\n", "Epoch 1115, Loss 3.534897\n", "Epoch 1116, Loss 3.532835\n", "Epoch 1117, Loss 3.530781\n", "Epoch 1118, Loss 3.528734\n", "Epoch 1119, Loss 3.526694\n", "Epoch 1120, Loss 3.524662\n", "Epoch 1121, Loss 3.522633\n", "Epoch 1122, Loss 3.520614\n", "Epoch 1123, Loss 3.518601\n", "Epoch 1124, Loss 3.516594\n", "Epoch 1125, Loss 3.514594\n", "Epoch 1126, Loss 3.512602\n", "Epoch 1127, Loss 3.510619\n", "Epoch 1128, Loss 3.508637\n", "Epoch 1129, Loss 3.506665\n", "Epoch 1130, Loss 3.504700\n", "Epoch 1131, Loss 3.502740\n", "Epoch 1132, Loss 3.500789\n", "Epoch 1133, Loss 3.498843\n", "Epoch 1134, Loss 3.496905\n", "Epoch 1135, Loss 3.494972\n", "Epoch 1136, Loss 3.493046\n", "Epoch 1137, Loss 3.491127\n", "Epoch 1138, Loss 3.489215\n", "Epoch 1139, Loss 3.487308\n", "Epoch 1140, Loss 3.485410\n", "Epoch 1141, Loss 3.483515\n", "Epoch 1142, Loss 3.481627\n", "Epoch 1143, Loss 3.479746\n", "Epoch 1144, Loss 3.477872\n", "Epoch 1145, Loss 3.476005\n", "Epoch 1146, Loss 3.474143\n", "Epoch 1147, Loss 3.472288\n", "Epoch 1148, Loss 3.470441\n", "Epoch 1149, Loss 3.468597\n", "Epoch 1150, Loss 3.466762\n", "Epoch 1151, Loss 3.464930\n", "Epoch 1152, Loss 3.463105\n", "Epoch 1153, Loss 3.461289\n", "Epoch 1154, Loss 3.459477\n", "Epoch 1155, Loss 3.457672\n", "Epoch 1156, Loss 3.455873\n", "Epoch 1157, Loss 3.454080\n", "Epoch 1158, Loss 3.452293\n", "Epoch 1159, Loss 3.450512\n", "Epoch 1160, Loss 3.448736\n", "Epoch 1161, Loss 3.446968\n", "Epoch 1162, Loss 3.445203\n", "Epoch 1163, Loss 3.443449\n", "Epoch 1164, Loss 3.441696\n", "Epoch 1165, Loss 3.439952\n", "Epoch 1166, Loss 3.438210\n", "Epoch 1167, Loss 3.436478\n", "Epoch 1168, Loss 3.434753\n", "Epoch 1169, Loss 3.433029\n", "Epoch 1170, Loss 3.431314\n", "Epoch 1171, Loss 3.429608\n", "Epoch 1172, Loss 3.427903\n", "Epoch 1173, Loss 3.426204\n", "Epoch 1174, Loss 3.424509\n", "Epoch 1175, Loss 3.422824\n", "Epoch 1176, Loss 3.421144\n", "Epoch 1177, Loss 3.419468\n", "Epoch 1178, Loss 3.417798\n", "Epoch 1179, Loss 3.416134\n", "Epoch 1180, Loss 3.414477\n", "Epoch 1181, Loss 3.412824\n", "Epoch 1182, Loss 3.411176\n", "Epoch 1183, Loss 3.409534\n", "Epoch 1184, Loss 3.407899\n", "Epoch 1185, Loss 3.406272\n", "Epoch 1186, Loss 3.404645\n", "Epoch 1187, Loss 3.403024\n", "Epoch 1188, Loss 3.401413\n", "Epoch 1189, Loss 3.399802\n", "Epoch 1190, Loss 3.398200\n", "Epoch 1191, Loss 3.396602\n", "Epoch 1192, Loss 3.395011\n", "Epoch 1193, Loss 3.393425\n", "Epoch 1194, Loss 3.391845\n", "Epoch 1195, Loss 3.390267\n", "Epoch 1196, Loss 3.388697\n", "Epoch 1197, Loss 3.387132\n", "Epoch 1198, Loss 3.385571\n", "Epoch 1199, Loss 3.384017\n", "Epoch 1200, Loss 3.382467\n", "Epoch 1201, Loss 3.380925\n", "Epoch 1202, Loss 3.379386\n", "Epoch 1203, Loss 3.377852\n", "Epoch 1204, Loss 3.376323\n", "Epoch 1205, Loss 3.374800\n", "Epoch 1206, Loss 3.373284\n", "Epoch 1207, Loss 3.371769\n", "Epoch 1208, Loss 3.370261\n", "Epoch 1209, Loss 3.368759\n", "Epoch 1210, Loss 3.367262\n", "Epoch 1211, Loss 3.365771\n", "Epoch 
1212, Loss 3.364282\n", "Epoch 1213, Loss 3.362800\n", "Epoch 1214, Loss 3.361325\n", "Epoch 1215, Loss 3.359851\n", "Epoch 1216, Loss 3.358383\n", "Epoch 1217, Loss 3.356921\n", "Epoch 1218, Loss 3.355464\n", "Epoch 1219, Loss 3.354013\n", "Epoch 1220, Loss 3.352564\n", "Epoch 1221, Loss 3.351122\n", "Epoch 1222, Loss 3.349685\n", "Epoch 1223, Loss 3.348251\n", "Epoch 1224, Loss 3.346825\n", "Epoch 1225, Loss 3.345403\n", "Epoch 1226, Loss 3.343982\n", "Epoch 1227, Loss 3.342571\n", "Epoch 1228, Loss 3.341161\n", "Epoch 1229, Loss 3.339758\n", "Epoch 1230, Loss 3.338359\n", "Epoch 1231, Loss 3.336965\n", "Epoch 1232, Loss 3.335577\n", "Epoch 1233, Loss 3.334191\n", "Epoch 1234, Loss 3.332811\n", "Epoch 1235, Loss 3.331435\n", "Epoch 1236, Loss 3.330065\n", "Epoch 1237, Loss 3.328699\n", "Epoch 1238, Loss 3.327338\n", "Epoch 1239, Loss 3.325980\n", "Epoch 1240, Loss 3.324628\n", "Epoch 1241, Loss 3.323280\n", "Epoch 1242, Loss 3.321935\n", "Epoch 1243, Loss 3.320599\n", "Epoch 1244, Loss 3.319264\n", "Epoch 1245, Loss 3.317935\n", "Epoch 1246, Loss 3.316610\n", "Epoch 1247, Loss 3.315289\n", "Epoch 1248, Loss 3.313974\n", "Epoch 1249, Loss 3.312663\n", "Epoch 1250, Loss 3.311353\n", "Epoch 1251, Loss 3.310053\n", "Epoch 1252, Loss 3.308756\n", "Epoch 1253, Loss 3.307462\n", "Epoch 1254, Loss 3.306170\n", "Epoch 1255, Loss 3.304887\n", "Epoch 1256, Loss 3.303605\n", "Epoch 1257, Loss 3.302329\n", "Epoch 1258, Loss 3.301058\n", "Epoch 1259, Loss 3.299791\n", "Epoch 1260, Loss 3.298527\n", "Epoch 1261, Loss 3.297266\n", "Epoch 1262, Loss 3.296014\n", "Epoch 1263, Loss 3.294762\n", "Epoch 1264, Loss 3.293517\n", "Epoch 1265, Loss 3.292275\n", "Epoch 1266, Loss 3.291036\n", "Epoch 1267, Loss 3.289804\n", "Epoch 1268, Loss 3.288573\n", "Epoch 1269, Loss 3.287347\n", "Epoch 1270, Loss 3.286129\n", "Epoch 1271, Loss 3.284911\n", "Epoch 1272, Loss 3.283698\n", "Epoch 1273, Loss 3.282488\n", "Epoch 1274, Loss 3.281284\n", "Epoch 1275, Loss 3.280086\n", "Epoch 1276, Loss 3.278888\n", "Epoch 1277, Loss 3.277696\n", "Epoch 1278, Loss 3.276506\n", "Epoch 1279, Loss 3.275322\n", "Epoch 1280, Loss 3.274142\n", "Epoch 1281, Loss 3.272967\n", "Epoch 1282, Loss 3.271793\n", "Epoch 1283, Loss 3.270625\n", "Epoch 1284, Loss 3.269460\n", "Epoch 1285, Loss 3.268301\n", "Epoch 1286, Loss 3.267143\n", "Epoch 1287, Loss 3.265991\n", "Epoch 1288, Loss 3.264842\n", "Epoch 1289, Loss 3.263700\n", "Epoch 1290, Loss 3.262556\n", "Epoch 1291, Loss 3.261421\n", "Epoch 1292, Loss 3.260288\n", "Epoch 1293, Loss 3.259161\n", "Epoch 1294, Loss 3.258033\n", "Epoch 1295, Loss 3.256912\n", "Epoch 1296, Loss 3.255795\n", "Epoch 1297, Loss 3.254681\n", "Epoch 1298, Loss 3.253569\n", "Epoch 1299, Loss 3.252462\n", "Epoch 1300, Loss 3.251362\n", "Epoch 1301, Loss 3.250264\n", "Epoch 1302, Loss 3.249168\n", "Epoch 1303, Loss 3.248077\n", "Epoch 1304, Loss 3.246989\n", "Epoch 1305, Loss 3.245904\n", "Epoch 1306, Loss 3.244824\n", "Epoch 1307, Loss 3.243747\n", "Epoch 1308, Loss 3.242674\n", "Epoch 1309, Loss 3.241606\n", "Epoch 1310, Loss 3.240538\n", "Epoch 1311, Loss 3.239475\n", "Epoch 1312, Loss 3.238420\n", "Epoch 1313, Loss 3.237364\n", "Epoch 1314, Loss 3.236314\n", "Epoch 1315, Loss 3.235264\n", "Epoch 1316, Loss 3.234218\n", "Epoch 1317, Loss 3.233179\n", "Epoch 1318, Loss 3.232143\n", "Epoch 1319, Loss 3.231108\n", "Epoch 1320, Loss 3.230078\n", "Epoch 1321, Loss 3.229051\n", "Epoch 1322, Loss 3.228027\n", "Epoch 1323, Loss 3.227010\n", "Epoch 1324, Loss 3.225993\n", "Epoch 1325, Loss 3.224979\n", "Epoch 1326, Loss 
3.223971\n", "Epoch 1327, Loss 3.222965\n", "Epoch 1328, Loss 3.221961\n", "Epoch 1329, Loss 3.220962\n", "Epoch 1330, Loss 3.219967\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1331, Loss 3.218975\n", "Epoch 1332, Loss 3.217986\n", "Epoch 1333, Loss 3.217000\n", "Epoch 1334, Loss 3.216017\n", "Epoch 1335, Loss 3.215039\n", "Epoch 1336, Loss 3.214062\n", "Epoch 1337, Loss 3.213092\n", "Epoch 1338, Loss 3.212122\n", "Epoch 1339, Loss 3.211157\n", "Epoch 1340, Loss 3.210193\n", "Epoch 1341, Loss 3.209235\n", "Epoch 1342, Loss 3.208279\n", "Epoch 1343, Loss 3.207326\n", "Epoch 1344, Loss 3.206376\n", "Epoch 1345, Loss 3.205430\n", "Epoch 1346, Loss 3.204488\n", "Epoch 1347, Loss 3.203547\n", "Epoch 1348, Loss 3.202611\n", "Epoch 1349, Loss 3.201678\n", "Epoch 1350, Loss 3.200747\n", "Epoch 1351, Loss 3.199820\n", "Epoch 1352, Loss 3.198897\n", "Epoch 1353, Loss 3.197976\n", "Epoch 1354, Loss 3.197060\n", "Epoch 1355, Loss 3.196144\n", "Epoch 1356, Loss 3.195231\n", "Epoch 1357, Loss 3.194324\n", "Epoch 1358, Loss 3.193419\n", "Epoch 1359, Loss 3.192517\n", "Epoch 1360, Loss 3.191616\n", "Epoch 1361, Loss 3.190720\n", "Epoch 1362, Loss 3.189829\n", "Epoch 1363, Loss 3.188937\n", "Epoch 1364, Loss 3.188051\n", "Epoch 1365, Loss 3.187166\n", "Epoch 1366, Loss 3.186288\n", "Epoch 1367, Loss 3.185409\n", "Epoch 1368, Loss 3.184535\n", "Epoch 1369, Loss 3.183662\n", "Epoch 1370, Loss 3.182791\n", "Epoch 1371, Loss 3.181925\n", "Epoch 1372, Loss 3.181063\n", "Epoch 1373, Loss 3.180201\n", "Epoch 1374, Loss 3.179347\n", "Epoch 1375, Loss 3.178490\n", "Epoch 1376, Loss 3.177638\n", "Epoch 1377, Loss 3.176789\n", "Epoch 1378, Loss 3.175945\n", "Epoch 1379, Loss 3.175101\n", "Epoch 1380, Loss 3.174262\n", "Epoch 1381, Loss 3.173424\n", "Epoch 1382, Loss 3.172590\n", "Epoch 1383, Loss 3.171759\n", "Epoch 1384, Loss 3.170929\n", "Epoch 1385, Loss 3.170103\n", "Epoch 1386, Loss 3.169280\n", "Epoch 1387, Loss 3.168462\n", "Epoch 1388, Loss 3.167644\n", "Epoch 1389, Loss 3.166827\n", "Epoch 1390, Loss 3.166017\n", "Epoch 1391, Loss 3.165206\n", "Epoch 1392, Loss 3.164401\n", "Epoch 1393, Loss 3.163594\n", "Epoch 1394, Loss 3.162795\n", "Epoch 1395, Loss 3.161996\n", "Epoch 1396, Loss 3.161201\n", "Epoch 1397, Loss 3.160410\n", "Epoch 1398, Loss 3.159618\n", "Epoch 1399, Loss 3.158831\n", "Epoch 1400, Loss 3.158046\n", "Epoch 1401, Loss 3.157263\n", "Epoch 1402, Loss 3.156484\n", "Epoch 1403, Loss 3.155708\n", "Epoch 1404, Loss 3.154933\n", "Epoch 1405, Loss 3.154162\n", "Epoch 1406, Loss 3.153393\n", "Epoch 1407, Loss 3.152627\n", "Epoch 1408, Loss 3.151864\n", "Epoch 1409, Loss 3.151101\n", "Epoch 1410, Loss 3.150343\n", "Epoch 1411, Loss 3.149587\n", "Epoch 1412, Loss 3.148833\n", "Epoch 1413, Loss 3.148082\n", "Epoch 1414, Loss 3.147335\n", "Epoch 1415, Loss 3.146588\n", "Epoch 1416, Loss 3.145845\n", "Epoch 1417, Loss 3.145105\n", "Epoch 1418, Loss 3.144367\n", "Epoch 1419, Loss 3.143630\n", "Epoch 1420, Loss 3.142899\n", "Epoch 1421, Loss 3.142166\n", "Epoch 1422, Loss 3.141439\n", "Epoch 1423, Loss 3.140712\n", "Epoch 1424, Loss 3.139989\n", "Epoch 1425, Loss 3.139271\n", "Epoch 1426, Loss 3.138551\n", "Epoch 1427, Loss 3.137834\n", "Epoch 1428, Loss 3.137121\n", "Epoch 1429, Loss 3.136410\n", "Epoch 1430, Loss 3.135702\n", "Epoch 1431, Loss 3.134995\n", "Epoch 1432, Loss 3.134292\n", "Epoch 1433, Loss 3.133590\n", "Epoch 1434, Loss 3.132889\n", "Epoch 1435, Loss 3.132194\n", "Epoch 1436, Loss 3.131500\n", "Epoch 1437, Loss 3.130810\n", "Epoch 1438, Loss 3.130119\n", "Epoch 
1439, Loss 3.129432\n", "Epoch 1440, Loss 3.128746\n", "Epoch 1441, Loss 3.128064\n", "Epoch 1442, Loss 3.127381\n", "Epoch 1443, Loss 3.126705\n", "Epoch 1444, Loss 3.126031\n", "Epoch 1445, Loss 3.125356\n", "Epoch 1446, Loss 3.124683\n", "Epoch 1447, Loss 3.124017\n", "Epoch 1448, Loss 3.123348\n", "Epoch 1449, Loss 3.122685\n", "Epoch 1450, Loss 3.122022\n", "Epoch 1451, Loss 3.121362\n", "Epoch 1452, Loss 3.120706\n", "Epoch 1453, Loss 3.120049\n", "Epoch 1454, Loss 3.119396\n", "Epoch 1455, Loss 3.118746\n", "Epoch 1456, Loss 3.118098\n", "Epoch 1457, Loss 3.117452\n", "Epoch 1458, Loss 3.116805\n", "Epoch 1459, Loss 3.116164\n", "Epoch 1460, Loss 3.115525\n", "Epoch 1461, Loss 3.114886\n", "Epoch 1462, Loss 3.114250\n", "Epoch 1463, Loss 3.113617\n", "Epoch 1464, Loss 3.112984\n", "Epoch 1465, Loss 3.112357\n", "Epoch 1466, Loss 3.111731\n", "Epoch 1467, Loss 3.111103\n", "Epoch 1468, Loss 3.110484\n", "Epoch 1469, Loss 3.109860\n", "Epoch 1470, Loss 3.109242\n", "Epoch 1471, Loss 3.108627\n", "Epoch 1472, Loss 3.108011\n", "Epoch 1473, Loss 3.107401\n", "Epoch 1474, Loss 3.106791\n", "Epoch 1475, Loss 3.106180\n", "Epoch 1476, Loss 3.105575\n", "Epoch 1477, Loss 3.104972\n", "Epoch 1478, Loss 3.104370\n", "Epoch 1479, Loss 3.103770\n", "Epoch 1480, Loss 3.103172\n", "Epoch 1481, Loss 3.102576\n", "Epoch 1482, Loss 3.101982\n", "Epoch 1483, Loss 3.101390\n", "Epoch 1484, Loss 3.100802\n", "Epoch 1485, Loss 3.100213\n", "Epoch 1486, Loss 3.099627\n", "Epoch 1487, Loss 3.099044\n", "Epoch 1488, Loss 3.098462\n", "Epoch 1489, Loss 3.097883\n", "Epoch 1490, Loss 3.097302\n", "Epoch 1491, Loss 3.096727\n", "Epoch 1492, Loss 3.096153\n", "Epoch 1493, Loss 3.095583\n", "Epoch 1494, Loss 3.095011\n", "Epoch 1495, Loss 3.094444\n", "Epoch 1496, Loss 3.093876\n", "Epoch 1497, Loss 3.093314\n", "Epoch 1498, Loss 3.092751\n", "Epoch 1499, Loss 3.092191\n", "Epoch 1500, Loss 3.091631\n", "Epoch 1501, Loss 3.091074\n", "Epoch 1502, Loss 3.090520\n", "Epoch 1503, Loss 3.089969\n", "Epoch 1504, Loss 3.089417\n", "Epoch 1505, Loss 3.088867\n", "Epoch 1506, Loss 3.088320\n", "Epoch 1507, Loss 3.087775\n", "Epoch 1508, Loss 3.087232\n", "Epoch 1509, Loss 3.086690\n", "Epoch 1510, Loss 3.086150\n", "Epoch 1511, Loss 3.085612\n", "Epoch 1512, Loss 3.085075\n", "Epoch 1513, Loss 3.084542\n", "Epoch 1514, Loss 3.084009\n", "Epoch 1515, Loss 3.083478\n", "Epoch 1516, Loss 3.082948\n", "Epoch 1517, Loss 3.082422\n", "Epoch 1518, Loss 3.081897\n", "Epoch 1519, Loss 3.081373\n", "Epoch 1520, Loss 3.080850\n", "Epoch 1521, Loss 3.080331\n", "Epoch 1522, Loss 3.079811\n", "Epoch 1523, Loss 3.079297\n", "Epoch 1524, Loss 3.078781\n", "Epoch 1525, Loss 3.078268\n", "Epoch 1526, Loss 3.077757\n", "Epoch 1527, Loss 3.077247\n", "Epoch 1528, Loss 3.076739\n", "Epoch 1529, Loss 3.076232\n", "Epoch 1530, Loss 3.075729\n", "Epoch 1531, Loss 3.075225\n", "Epoch 1532, Loss 3.074724\n", "Epoch 1533, Loss 3.074227\n", "Epoch 1534, Loss 3.073726\n", "Epoch 1535, Loss 3.073232\n", "Epoch 1536, Loss 3.072739\n", "Epoch 1537, Loss 3.072245\n", "Epoch 1538, Loss 3.071753\n", "Epoch 1539, Loss 3.071265\n", "Epoch 1540, Loss 3.070778\n", "Epoch 1541, Loss 3.070293\n", "Epoch 1542, Loss 3.069808\n", "Epoch 1543, Loss 3.069326\n", "Epoch 1544, Loss 3.068845\n", "Epoch 1545, Loss 3.068366\n", "Epoch 1546, Loss 3.067887\n", "Epoch 1547, Loss 3.067412\n", "Epoch 1548, Loss 3.066937\n", "Epoch 1549, Loss 3.066464\n", "Epoch 1550, Loss 3.065993\n", "Epoch 1551, Loss 3.065524\n", "Epoch 1552, Loss 3.065055\n", "Epoch 1553, Loss 
3.064588\n", "Epoch 1554, Loss 3.064123\n", "Epoch 1555, Loss 3.063660\n", "Epoch 1556, Loss 3.063199\n", "Epoch 1557, Loss 3.062738\n", "Epoch 1558, Loss 3.062280\n", "Epoch 1559, Loss 3.061822\n", "Epoch 1560, Loss 3.061368\n", "Epoch 1561, Loss 3.060913\n", "Epoch 1562, Loss 3.060461\n", "Epoch 1563, Loss 3.060011\n", "Epoch 1564, Loss 3.059561\n", "Epoch 1565, Loss 3.059114\n", "Epoch 1566, Loss 3.058668\n", "Epoch 1567, Loss 3.058221\n", "Epoch 1568, Loss 3.057780\n", "Epoch 1569, Loss 3.057338\n", "Epoch 1570, Loss 3.056898\n", "Epoch 1571, Loss 3.056458\n", "Epoch 1572, Loss 3.056019\n", "Epoch 1573, Loss 3.055585\n", "Epoch 1574, Loss 3.055151\n", "Epoch 1575, Loss 3.054717\n", "Epoch 1576, Loss 3.054286\n", "Epoch 1577, Loss 3.053857\n", "Epoch 1578, Loss 3.053428\n", "Epoch 1579, Loss 3.053001\n", "Epoch 1580, Loss 3.052576\n", "Epoch 1581, Loss 3.052152\n", "Epoch 1582, Loss 3.051730\n", "Epoch 1583, Loss 3.051307\n", "Epoch 1584, Loss 3.050888\n", "Epoch 1585, Loss 3.050471\n", "Epoch 1586, Loss 3.050052\n", "Epoch 1587, Loss 3.049639\n", "Epoch 1588, Loss 3.049223\n", "Epoch 1589, Loss 3.048811\n", "Epoch 1590, Loss 3.048398\n", "Epoch 1591, Loss 3.047991\n", "Epoch 1592, Loss 3.047581\n", "Epoch 1593, Loss 3.047173\n", "Epoch 1594, Loss 3.046768\n", "Epoch 1595, Loss 3.046363\n", "Epoch 1596, Loss 3.045960\n", "Epoch 1597, Loss 3.045559\n", "Epoch 1598, Loss 3.045160\n", "Epoch 1599, Loss 3.044759\n", "Epoch 1600, Loss 3.044361\n", "Epoch 1601, Loss 3.043966\n", "Epoch 1602, Loss 3.043571\n", "Epoch 1603, Loss 3.043176\n", "Epoch 1604, Loss 3.042785\n", "Epoch 1605, Loss 3.042395\n", "Epoch 1606, Loss 3.042004\n", "Epoch 1607, Loss 3.041615\n", "Epoch 1608, Loss 3.041230\n", "Epoch 1609, Loss 3.040844\n", "Epoch 1610, Loss 3.040460\n", "Epoch 1611, Loss 3.040077\n", "Epoch 1612, Loss 3.039695\n", "Epoch 1613, Loss 3.039314\n", "Epoch 1614, Loss 3.038934\n", "Epoch 1615, Loss 3.038557\n", "Epoch 1616, Loss 3.038182\n", "Epoch 1617, Loss 3.037806\n", "Epoch 1618, Loss 3.037431\n", "Epoch 1619, Loss 3.037059\n", "Epoch 1620, Loss 3.036689\n", "Epoch 1621, Loss 3.036319\n", "Epoch 1622, Loss 3.035949\n", "Epoch 1623, Loss 3.035583\n", "Epoch 1624, Loss 3.035215\n", "Epoch 1625, Loss 3.034849\n", "Epoch 1626, Loss 3.034485\n", "Epoch 1627, Loss 3.034122\n", "Epoch 1628, Loss 3.033762\n", "Epoch 1629, Loss 3.033402\n", "Epoch 1630, Loss 3.033042\n", "Epoch 1631, Loss 3.032685\n", "Epoch 1632, Loss 3.032329\n", "Epoch 1633, Loss 3.031973\n", "Epoch 1634, Loss 3.031619\n", "Epoch 1635, Loss 3.031265\n", "Epoch 1636, Loss 3.030913\n", "Epoch 1637, Loss 3.030564\n", "Epoch 1638, Loss 3.030215\n", "Epoch 1639, Loss 3.029866\n", "Epoch 1640, Loss 3.029518\n", "Epoch 1641, Loss 3.029172\n", "Epoch 1642, Loss 3.028829\n", "Epoch 1643, Loss 3.028486\n", "Epoch 1644, Loss 3.028142\n", "Epoch 1645, Loss 3.027802\n", "Epoch 1646, Loss 3.027462\n", "Epoch 1647, Loss 3.027122\n", "Epoch 1648, Loss 3.026784\n", "Epoch 1649, Loss 3.026447\n", "Epoch 1650, Loss 3.026111\n", "Epoch 1651, Loss 3.025780\n", "Epoch 1652, Loss 3.025446\n", "Epoch 1653, Loss 3.025114\n", "Epoch 1654, Loss 3.024782\n", "Epoch 1655, Loss 3.024452\n", "Epoch 1656, Loss 3.024125\n", "Epoch 1657, Loss 3.023797\n", "Epoch 1658, Loss 3.023471\n", "Epoch 1659, Loss 3.023146\n", "Epoch 1660, Loss 3.022821\n", "Epoch 1661, Loss 3.022498\n", "Epoch 1662, Loss 3.022177\n", "Epoch 1663, Loss 3.021855\n", "Epoch 1664, Loss 3.021534\n", "Epoch 1665, Loss 3.021217\n", "Epoch 1666, Loss 3.020898\n", "Epoch 1667, Loss 3.020582\n", "Epoch 
1668, Loss 3.020266\n", "Epoch 1669, Loss 3.019952\n", "Epoch 1670, Loss 3.019639\n", "Epoch 1671, Loss 3.019325\n", "Epoch 1672, Loss 3.019016\n", "Epoch 1673, Loss 3.018706\n", "Epoch 1674, Loss 3.018395\n", "Epoch 1675, Loss 3.018089\n", "Epoch 1676, Loss 3.017780\n", "Epoch 1677, Loss 3.017475\n", "Epoch 1678, Loss 3.017170\n", "Epoch 1679, Loss 3.016867\n", "Epoch 1680, Loss 3.016564\n", "Epoch 1681, Loss 3.016262\n", "Epoch 1682, Loss 3.015959\n", "Epoch 1683, Loss 3.015661\n", "Epoch 1684, Loss 3.015362\n", "Epoch 1685, Loss 3.015064\n", "Epoch 1686, Loss 3.014768\n", "Epoch 1687, Loss 3.014472\n", "Epoch 1688, Loss 3.014179\n", "Epoch 1689, Loss 3.013884\n", "Epoch 1690, Loss 3.013591\n", "Epoch 1691, Loss 3.013299\n", "Epoch 1692, Loss 3.013008\n", "Epoch 1693, Loss 3.012719\n", "Epoch 1694, Loss 3.012431\n", "Epoch 1695, Loss 3.012141\n", "Epoch 1696, Loss 3.011855\n", "Epoch 1697, Loss 3.011570\n", "Epoch 1698, Loss 3.011284\n", "Epoch 1699, Loss 3.011001\n", "Epoch 1700, Loss 3.010718\n", "Epoch 1701, Loss 3.010436\n", "Epoch 1702, Loss 3.010156\n", "Epoch 1703, Loss 3.009876\n", "Epoch 1704, Loss 3.009595\n", "Epoch 1705, Loss 3.009319\n", "Epoch 1706, Loss 3.009040\n", "Epoch 1707, Loss 3.008763\n", "Epoch 1708, Loss 3.008487\n", "Epoch 1709, Loss 3.008214\n", "Epoch 1710, Loss 3.007941\n", "Epoch 1711, Loss 3.007668\n", "Epoch 1712, Loss 3.007396\n", "Epoch 1713, Loss 3.007126\n", "Epoch 1714, Loss 3.006856\n", "Epoch 1715, Loss 3.006587\n", "Epoch 1716, Loss 3.006318\n", "Epoch 1717, Loss 3.006052\n", "Epoch 1718, Loss 3.005785\n", "Epoch 1719, Loss 3.005520\n", "Epoch 1720, Loss 3.005256\n", "Epoch 1721, Loss 3.004993\n", "Epoch 1722, Loss 3.004729\n", "Epoch 1723, Loss 3.004467\n", "Epoch 1724, Loss 3.004207\n", "Epoch 1725, Loss 3.003947\n", "Epoch 1726, Loss 3.003690\n", "Epoch 1727, Loss 3.003430\n", "Epoch 1728, Loss 3.003174\n", "Epoch 1729, Loss 3.002918\n", "Epoch 1730, Loss 3.002661\n", "Epoch 1731, Loss 3.002406\n", "Epoch 1732, Loss 3.002152\n", "Epoch 1733, Loss 3.001901\n", "Epoch 1734, Loss 3.001649\n", "Epoch 1735, Loss 3.001395\n", "Epoch 1736, Loss 3.001145\n", "Epoch 1737, Loss 3.000898\n", "Epoch 1738, Loss 3.000648\n", "Epoch 1739, Loss 3.000400\n", "Epoch 1740, Loss 3.000154\n", "Epoch 1741, Loss 2.999907\n", "Epoch 1742, Loss 2.999662\n", "Epoch 1743, Loss 2.999417\n", "Epoch 1744, Loss 2.999174\n", "Epoch 1745, Loss 2.998930\n", "Epoch 1746, Loss 2.998688\n", "Epoch 1747, Loss 2.998448\n", "Epoch 1748, Loss 2.998208\n", "Epoch 1749, Loss 2.997968\n", "Epoch 1750, Loss 2.997730\n", "Epoch 1751, Loss 2.997490\n", "Epoch 1752, Loss 2.997254\n", "Epoch 1753, Loss 2.997018\n", "Epoch 1754, Loss 2.996783\n", "Epoch 1755, Loss 2.996548\n", "Epoch 1756, Loss 2.996313\n", "Epoch 1757, Loss 2.996081\n", "Epoch 1758, Loss 2.995847\n", "Epoch 1759, Loss 2.995615\n", "Epoch 1760, Loss 2.995387\n", "Epoch 1761, Loss 2.995156\n", "Epoch 1762, Loss 2.994929\n", "Epoch 1763, Loss 2.994699\n", "Epoch 1764, Loss 2.994472\n", "Epoch 1765, Loss 2.994245\n", "Epoch 1766, Loss 2.994019\n", "Epoch 1767, Loss 2.993794\n", "Epoch 1768, Loss 2.993569\n", "Epoch 1769, Loss 2.993344\n", "Epoch 1770, Loss 2.993122\n", "Epoch 1771, Loss 2.992900\n", "Epoch 1772, Loss 2.992678\n", "Epoch 1773, Loss 2.992457\n", "Epoch 1774, Loss 2.992238\n", "Epoch 1775, Loss 2.992017\n", "Epoch 1776, Loss 2.991798\n", "Epoch 1777, Loss 2.991583\n", "Epoch 1778, Loss 2.991366\n", "Epoch 1779, Loss 2.991146\n", "Epoch 1780, Loss 2.990932\n", "Epoch 1781, Loss 2.990719\n", "Epoch 1782, Loss 
2.990503\n", "Epoch 1783, Loss 2.990289\n", "Epoch 1784, Loss 2.990078\n", "Epoch 1785, Loss 2.989866\n", "Epoch 1786, Loss 2.989654\n", "Epoch 1787, Loss 2.989443\n", "Epoch 1788, Loss 2.989233\n", "Epoch 1789, Loss 2.989025\n", "Epoch 1790, Loss 2.988817\n", "Epoch 1791, Loss 2.988609\n", "Epoch 1792, Loss 2.988401\n", "Epoch 1793, Loss 2.988195\n", "Epoch 1794, Loss 2.987989\n", "Epoch 1795, Loss 2.987784\n", "Epoch 1796, Loss 2.987581\n", "Epoch 1797, Loss 2.987377\n", "Epoch 1798, Loss 2.987174\n", "Epoch 1799, Loss 2.986974\n", "Epoch 1800, Loss 2.986771\n", "Epoch 1801, Loss 2.986570\n", "Epoch 1802, Loss 2.986371\n", "Epoch 1803, Loss 2.986171\n", "Epoch 1804, Loss 2.985972\n", "Epoch 1805, Loss 2.985774\n", "Epoch 1806, Loss 2.985578\n", "Epoch 1807, Loss 2.985381\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1808, Loss 2.985184\n", "Epoch 1809, Loss 2.984989\n", "Epoch 1810, Loss 2.984793\n", "Epoch 1811, Loss 2.984601\n", "Epoch 1812, Loss 2.984406\n", "Epoch 1813, Loss 2.984215\n", "Epoch 1814, Loss 2.984022\n", "Epoch 1815, Loss 2.983830\n", "Epoch 1816, Loss 2.983639\n", "Epoch 1817, Loss 2.983449\n", "Epoch 1818, Loss 2.983260\n", "Epoch 1819, Loss 2.983073\n", "Epoch 1820, Loss 2.982884\n", "Epoch 1821, Loss 2.982697\n", "Epoch 1822, Loss 2.982510\n", "Epoch 1823, Loss 2.982322\n", "Epoch 1824, Loss 2.982137\n", "Epoch 1825, Loss 2.981953\n", "Epoch 1826, Loss 2.981769\n", "Epoch 1827, Loss 2.981586\n", "Epoch 1828, Loss 2.981402\n", "Epoch 1829, Loss 2.981219\n", "Epoch 1830, Loss 2.981037\n", "Epoch 1831, Loss 2.980856\n", "Epoch 1832, Loss 2.980676\n", "Epoch 1833, Loss 2.980495\n", "Epoch 1834, Loss 2.980316\n", "Epoch 1835, Loss 2.980137\n", "Epoch 1836, Loss 2.979958\n", "Epoch 1837, Loss 2.979782\n", "Epoch 1838, Loss 2.979604\n", "Epoch 1839, Loss 2.979428\n", "Epoch 1840, Loss 2.979253\n", "Epoch 1841, Loss 2.979078\n", "Epoch 1842, Loss 2.978902\n", "Epoch 1843, Loss 2.978729\n", "Epoch 1844, Loss 2.978555\n", "Epoch 1845, Loss 2.978382\n", "Epoch 1846, Loss 2.978211\n", "Epoch 1847, Loss 2.978039\n", "Epoch 1848, Loss 2.977867\n", "Epoch 1849, Loss 2.977696\n", "Epoch 1850, Loss 2.977527\n", "Epoch 1851, Loss 2.977357\n", "Epoch 1852, Loss 2.977188\n", "Epoch 1853, Loss 2.977021\n", "Epoch 1854, Loss 2.976853\n", "Epoch 1855, Loss 2.976687\n", "Epoch 1856, Loss 2.976520\n", "Epoch 1857, Loss 2.976354\n", "Epoch 1858, Loss 2.976189\n", "Epoch 1859, Loss 2.976023\n", "Epoch 1860, Loss 2.975860\n", "Epoch 1861, Loss 2.975697\n", "Epoch 1862, Loss 2.975532\n", "Epoch 1863, Loss 2.975369\n", "Epoch 1864, Loss 2.975208\n", "Epoch 1865, Loss 2.975046\n", "Epoch 1866, Loss 2.974886\n", "Epoch 1867, Loss 2.974725\n", "Epoch 1868, Loss 2.974565\n", "Epoch 1869, Loss 2.974406\n", "Epoch 1870, Loss 2.974248\n", "Epoch 1871, Loss 2.974088\n", "Epoch 1872, Loss 2.973930\n", "Epoch 1873, Loss 2.973776\n", "Epoch 1874, Loss 2.973618\n", "Epoch 1875, Loss 2.973463\n", "Epoch 1876, Loss 2.973307\n", "Epoch 1877, Loss 2.973150\n", "Epoch 1878, Loss 2.972996\n", "Epoch 1879, Loss 2.972844\n", "Epoch 1880, Loss 2.972690\n", "Epoch 1881, Loss 2.972536\n", "Epoch 1882, Loss 2.972383\n", "Epoch 1883, Loss 2.972232\n", "Epoch 1884, Loss 2.972081\n", "Epoch 1885, Loss 2.971931\n", "Epoch 1886, Loss 2.971780\n", "Epoch 1887, Loss 2.971629\n", "Epoch 1888, Loss 2.971481\n", "Epoch 1889, Loss 2.971332\n", "Epoch 1890, Loss 2.971185\n", "Epoch 1891, Loss 2.971035\n", "Epoch 1892, Loss 2.970888\n", "Epoch 1893, Loss 2.970741\n", "Epoch 1894, Loss 2.970596\n", "Epoch 
1895, Loss 2.970449\n", "Epoch 1896, Loss 2.970304\n", "Epoch 1897, Loss 2.970159\n", "Epoch 1898, Loss 2.970015\n", "Epoch 1899, Loss 2.969871\n", "Epoch 1900, Loss 2.969727\n", "Epoch 1901, Loss 2.969586\n", "Epoch 1902, Loss 2.969443\n", "Epoch 1903, Loss 2.969302\n", "Epoch 1904, Loss 2.969160\n", "Epoch 1905, Loss 2.969017\n", "Epoch 1906, Loss 2.968879\n", "Epoch 1907, Loss 2.968739\n", "Epoch 1908, Loss 2.968599\n", "Epoch 1909, Loss 2.968460\n", "Epoch 1910, Loss 2.968322\n", "Epoch 1911, Loss 2.968184\n", "Epoch 1912, Loss 2.968046\n", "Epoch 1913, Loss 2.967908\n", "Epoch 1914, Loss 2.967772\n", "Epoch 1915, Loss 2.967636\n", "Epoch 1916, Loss 2.967499\n", "Epoch 1917, Loss 2.967365\n", "Epoch 1918, Loss 2.967230\n", "Epoch 1919, Loss 2.967095\n", "Epoch 1920, Loss 2.966961\n", "Epoch 1921, Loss 2.966827\n", "Epoch 1922, Loss 2.966693\n", "Epoch 1923, Loss 2.966561\n", "Epoch 1924, Loss 2.966429\n", "Epoch 1925, Loss 2.966297\n", "Epoch 1926, Loss 2.966167\n", "Epoch 1927, Loss 2.966036\n", "Epoch 1928, Loss 2.965904\n", "Epoch 1929, Loss 2.965776\n", "Epoch 1930, Loss 2.965646\n", "Epoch 1931, Loss 2.965517\n", "Epoch 1932, Loss 2.965387\n", "Epoch 1933, Loss 2.965261\n", "Epoch 1934, Loss 2.965131\n", "Epoch 1935, Loss 2.965005\n", "Epoch 1936, Loss 2.964878\n", "Epoch 1937, Loss 2.964751\n", "Epoch 1938, Loss 2.964625\n", "Epoch 1939, Loss 2.964500\n", "Epoch 1940, Loss 2.964375\n", "Epoch 1941, Loss 2.964250\n", "Epoch 1942, Loss 2.964126\n", "Epoch 1943, Loss 2.964001\n", "Epoch 1944, Loss 2.963879\n", "Epoch 1945, Loss 2.963756\n", "Epoch 1946, Loss 2.963632\n", "Epoch 1947, Loss 2.963511\n", "Epoch 1948, Loss 2.963388\n", "Epoch 1949, Loss 2.963266\n", "Epoch 1950, Loss 2.963149\n", "Epoch 1951, Loss 2.963026\n", "Epoch 1952, Loss 2.962907\n", "Epoch 1953, Loss 2.962788\n", "Epoch 1954, Loss 2.962666\n", "Epoch 1955, Loss 2.962547\n", "Epoch 1956, Loss 2.962429\n", "Epoch 1957, Loss 2.962312\n", "Epoch 1958, Loss 2.962195\n", "Epoch 1959, Loss 2.962078\n", "Epoch 1960, Loss 2.961959\n", "Epoch 1961, Loss 2.961843\n", "Epoch 1962, Loss 2.961728\n", "Epoch 1963, Loss 2.961611\n", "Epoch 1964, Loss 2.961497\n", "Epoch 1965, Loss 2.961382\n", "Epoch 1966, Loss 2.961268\n", "Epoch 1967, Loss 2.961153\n", "Epoch 1968, Loss 2.961038\n", "Epoch 1969, Loss 2.960926\n", "Epoch 1970, Loss 2.960814\n", "Epoch 1971, Loss 2.960699\n", "Epoch 1972, Loss 2.960587\n", "Epoch 1973, Loss 2.960475\n", "Epoch 1974, Loss 2.960365\n", "Epoch 1975, Loss 2.960254\n", "Epoch 1976, Loss 2.960143\n", "Epoch 1977, Loss 2.960033\n", "Epoch 1978, Loss 2.959922\n", "Epoch 1979, Loss 2.959813\n", "Epoch 1980, Loss 2.959703\n", "Epoch 1981, Loss 2.959594\n", "Epoch 1982, Loss 2.959486\n", "Epoch 1983, Loss 2.959378\n", "Epoch 1984, Loss 2.959271\n", "Epoch 1985, Loss 2.959163\n", "Epoch 1986, Loss 2.959055\n", "Epoch 1987, Loss 2.958950\n", "Epoch 1988, Loss 2.958842\n", "Epoch 1989, Loss 2.958738\n", "Epoch 1990, Loss 2.958632\n", "Epoch 1991, Loss 2.958526\n", "Epoch 1992, Loss 2.958421\n", "Epoch 1993, Loss 2.958317\n", "Epoch 1994, Loss 2.958212\n", "Epoch 1995, Loss 2.958109\n", "Epoch 1996, Loss 2.958006\n", "Epoch 1997, Loss 2.957903\n", "Epoch 1998, Loss 2.957801\n", "Epoch 1999, Loss 2.957697\n", "Epoch 2000, Loss 2.957596\n", "Epoch 2001, Loss 2.957494\n", "Epoch 2002, Loss 2.957393\n", "Epoch 2003, Loss 2.957293\n", "Epoch 2004, Loss 2.957193\n", "Epoch 2005, Loss 2.957091\n", "Epoch 2006, Loss 2.956991\n", "Epoch 2007, Loss 2.956892\n", "Epoch 2008, Loss 2.956792\n", "Epoch 2009, Loss 
2.956694\n", "Epoch 2010, Loss 2.956595\n", "Epoch 2011, Loss 2.956496\n", "Epoch 2012, Loss 2.956397\n", "Epoch 2013, Loss 2.956300\n", "Epoch 2014, Loss 2.956204\n", "Epoch 2015, Loss 2.956108\n", "Epoch 2016, Loss 2.956010\n", "Epoch 2017, Loss 2.955914\n", "Epoch 2018, Loss 2.955817\n", "Epoch 2019, Loss 2.955722\n", "Epoch 2020, Loss 2.955627\n", "Epoch 2021, Loss 2.955533\n", "Epoch 2022, Loss 2.955436\n", "Epoch 2023, Loss 2.955343\n", "Epoch 2024, Loss 2.955250\n", "Epoch 2025, Loss 2.955154\n", "Epoch 2026, Loss 2.955062\n", "Epoch 2027, Loss 2.954968\n", "Epoch 2028, Loss 2.954875\n", "Epoch 2029, Loss 2.954783\n", "Epoch 2030, Loss 2.954691\n", "Epoch 2031, Loss 2.954600\n", "Epoch 2032, Loss 2.954507\n", "Epoch 2033, Loss 2.954417\n", "Epoch 2034, Loss 2.954326\n", "Epoch 2035, Loss 2.954235\n", "Epoch 2036, Loss 2.954145\n", "Epoch 2037, Loss 2.954055\n", "Epoch 2038, Loss 2.953966\n", "Epoch 2039, Loss 2.953876\n", "Epoch 2040, Loss 2.953787\n", "Epoch 2041, Loss 2.953698\n", "Epoch 2042, Loss 2.953610\n", "Epoch 2043, Loss 2.953521\n", "Epoch 2044, Loss 2.953434\n", "Epoch 2045, Loss 2.953346\n", "Epoch 2046, Loss 2.953259\n", "Epoch 2047, Loss 2.953172\n", "Epoch 2048, Loss 2.953085\n", "Epoch 2049, Loss 2.953000\n", "Epoch 2050, Loss 2.952913\n", "Epoch 2051, Loss 2.952828\n", "Epoch 2052, Loss 2.952742\n", "Epoch 2053, Loss 2.952657\n", "Epoch 2054, Loss 2.952571\n", "Epoch 2055, Loss 2.952487\n", "Epoch 2056, Loss 2.952403\n", "Epoch 2057, Loss 2.952318\n", "Epoch 2058, Loss 2.952235\n", "Epoch 2059, Loss 2.952152\n", "Epoch 2060, Loss 2.952068\n", "Epoch 2061, Loss 2.951985\n", "Epoch 2062, Loss 2.951903\n", "Epoch 2063, Loss 2.951820\n", "Epoch 2064, Loss 2.951738\n", "Epoch 2065, Loss 2.951656\n", "Epoch 2066, Loss 2.951575\n", "Epoch 2067, Loss 2.951494\n", "Epoch 2068, Loss 2.951413\n", "Epoch 2069, Loss 2.951333\n", "Epoch 2070, Loss 2.951252\n", "Epoch 2071, Loss 2.951172\n", "Epoch 2072, Loss 2.951093\n", "Epoch 2073, Loss 2.951012\n", "Epoch 2074, Loss 2.950932\n", "Epoch 2075, Loss 2.950853\n", "Epoch 2076, Loss 2.950774\n", "Epoch 2077, Loss 2.950698\n", "Epoch 2078, Loss 2.950618\n", "Epoch 2079, Loss 2.950540\n", "Epoch 2080, Loss 2.950463\n", "Epoch 2081, Loss 2.950385\n", "Epoch 2082, Loss 2.950308\n", "Epoch 2083, Loss 2.950231\n", "Epoch 2084, Loss 2.950155\n", "Epoch 2085, Loss 2.950079\n", "Epoch 2086, Loss 2.950003\n", "Epoch 2087, Loss 2.949925\n", "Epoch 2088, Loss 2.949850\n", "Epoch 2089, Loss 2.949776\n", "Epoch 2090, Loss 2.949699\n", "Epoch 2091, Loss 2.949626\n", "Epoch 2092, Loss 2.949551\n", "Epoch 2093, Loss 2.949476\n", "Epoch 2094, Loss 2.949401\n", "Epoch 2095, Loss 2.949328\n", "Epoch 2096, Loss 2.949254\n", "Epoch 2097, Loss 2.949182\n", "Epoch 2098, Loss 2.949108\n", "Epoch 2099, Loss 2.949036\n", "Epoch 2100, Loss 2.948962\n", "Epoch 2101, Loss 2.948890\n", "Epoch 2102, Loss 2.948818\n", "Epoch 2103, Loss 2.948746\n", "Epoch 2104, Loss 2.948675\n", "Epoch 2105, Loss 2.948602\n", "Epoch 2106, Loss 2.948532\n", "Epoch 2107, Loss 2.948462\n", "Epoch 2108, Loss 2.948391\n", "Epoch 2109, Loss 2.948321\n", "Epoch 2110, Loss 2.948250\n", "Epoch 2111, Loss 2.948180\n", "Epoch 2112, Loss 2.948109\n", "Epoch 2113, Loss 2.948040\n", "Epoch 2114, Loss 2.947971\n", "Epoch 2115, Loss 2.947902\n", "Epoch 2116, Loss 2.947833\n", "Epoch 2117, Loss 2.947765\n", "Epoch 2118, Loss 2.947696\n", "Epoch 2119, Loss 2.947628\n", "Epoch 2120, Loss 2.947560\n", "Epoch 2121, Loss 2.947494\n", "Epoch 2122, Loss 2.947426\n", "Epoch 2123, Loss 2.947358\n", "Epoch 
2124, Loss 2.947294\n", "Epoch 2125, Loss 2.947226\n", "Epoch 2126, Loss 2.947158\n", "Epoch 2127, Loss 2.947091\n", "Epoch 2128, Loss 2.947026\n", "Epoch 2129, Loss 2.946960\n", "Epoch 2130, Loss 2.946895\n", "Epoch 2131, Loss 2.946830\n", "Epoch 2132, Loss 2.946764\n", "Epoch 2133, Loss 2.946700\n", "Epoch 2134, Loss 2.946635\n", "Epoch 2135, Loss 2.946571\n", "Epoch 2136, Loss 2.946507\n", "Epoch 2137, Loss 2.946442\n", "Epoch 2138, Loss 2.946378\n", "Epoch 2139, Loss 2.946314\n", "Epoch 2140, Loss 2.946251\n", "Epoch 2141, Loss 2.946189\n", "Epoch 2142, Loss 2.946125\n", "Epoch 2143, Loss 2.946063\n", "Epoch 2144, Loss 2.946001\n", "Epoch 2145, Loss 2.945937\n", "Epoch 2146, Loss 2.945876\n", "Epoch 2147, Loss 2.945815\n", "Epoch 2148, Loss 2.945753\n", "Epoch 2149, Loss 2.945690\n", "Epoch 2150, Loss 2.945630\n", "Epoch 2151, Loss 2.945567\n", "Epoch 2152, Loss 2.945509\n", "Epoch 2153, Loss 2.945448\n", "Epoch 2154, Loss 2.945386\n", "Epoch 2155, Loss 2.945326\n", "Epoch 2156, Loss 2.945267\n", "Epoch 2157, Loss 2.945207\n", "Epoch 2158, Loss 2.945146\n", "Epoch 2159, Loss 2.945088\n", "Epoch 2160, Loss 2.945028\n", "Epoch 2161, Loss 2.944969\n", "Epoch 2162, Loss 2.944911\n", "Epoch 2163, Loss 2.944852\n", "Epoch 2164, Loss 2.944792\n", "Epoch 2165, Loss 2.944736\n", "Epoch 2166, Loss 2.944678\n", "Epoch 2167, Loss 2.944619\n", "Epoch 2168, Loss 2.944562\n", "Epoch 2169, Loss 2.944504\n", "Epoch 2170, Loss 2.944447\n", "Epoch 2171, Loss 2.944391\n", "Epoch 2172, Loss 2.944332\n", "Epoch 2173, Loss 2.944276\n", "Epoch 2174, Loss 2.944220\n", "Epoch 2175, Loss 2.944164\n", "Epoch 2176, Loss 2.944108\n", "Epoch 2177, Loss 2.944052\n", "Epoch 2178, Loss 2.943996\n", "Epoch 2179, Loss 2.943941\n", "Epoch 2180, Loss 2.943886\n", "Epoch 2181, Loss 2.943831\n", "Epoch 2182, Loss 2.943775\n", "Epoch 2183, Loss 2.943721\n", "Epoch 2184, Loss 2.943666\n", "Epoch 2185, Loss 2.943613\n", "Epoch 2186, Loss 2.943558\n", "Epoch 2187, Loss 2.943504\n", "Epoch 2188, Loss 2.943451\n", "Epoch 2189, Loss 2.943395\n", "Epoch 2190, Loss 2.943343\n", "Epoch 2191, Loss 2.943290\n", "Epoch 2192, Loss 2.943235\n", "Epoch 2193, Loss 2.943183\n", "Epoch 2194, Loss 2.943130\n", "Epoch 2195, Loss 2.943079\n", "Epoch 2196, Loss 2.943026\n", "Epoch 2197, Loss 2.942974\n", "Epoch 2198, Loss 2.942922\n", "Epoch 2199, Loss 2.942870\n", "Epoch 2200, Loss 2.942818\n", "Epoch 2201, Loss 2.942765\n", "Epoch 2202, Loss 2.942714\n", "Epoch 2203, Loss 2.942664\n", "Epoch 2204, Loss 2.942612\n", "Epoch 2205, Loss 2.942563\n", "Epoch 2206, Loss 2.942510\n", "Epoch 2207, Loss 2.942461\n", "Epoch 2208, Loss 2.942411\n", "Epoch 2209, Loss 2.942361\n", "Epoch 2210, Loss 2.942310\n", "Epoch 2211, Loss 2.942261\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 2212, Loss 2.942211\n", "Epoch 2213, Loss 2.942162\n", "Epoch 2214, Loss 2.942112\n", "Epoch 2215, Loss 2.942062\n", "Epoch 2216, Loss 2.942014\n", "Epoch 2217, Loss 2.941965\n", "Epoch 2218, Loss 2.941918\n", "Epoch 2219, Loss 2.941868\n", "Epoch 2220, Loss 2.941821\n", "Epoch 2221, Loss 2.941773\n", "Epoch 2222, Loss 2.941725\n", "Epoch 2223, Loss 2.941677\n", "Epoch 2224, Loss 2.941629\n", "Epoch 2225, Loss 2.941582\n", "Epoch 2226, Loss 2.941534\n", "Epoch 2227, Loss 2.941488\n", "Epoch 2228, Loss 2.941440\n", "Epoch 2229, Loss 2.941393\n", "Epoch 2230, Loss 2.941346\n", "Epoch 2231, Loss 2.941299\n", "Epoch 2232, Loss 2.941252\n", "Epoch 2233, Loss 2.941206\n", "Epoch 2234, Loss 2.941163\n", "Epoch 2235, Loss 2.941115\n", "Epoch 2236, Loss 2.941070\n", 
"Epoch 2237, Loss 2.941025\n", "Epoch 2238, Loss 2.940979\n", "Epoch 2239, Loss 2.940933\n", "Epoch 2240, Loss 2.940890\n", "Epoch 2241, Loss 2.940844\n", "Epoch 2242, Loss 2.940798\n", "Epoch 2243, Loss 2.940753\n", "Epoch 2244, Loss 2.940711\n", "Epoch 2245, Loss 2.940666\n", "Epoch 2246, Loss 2.940621\n", "Epoch 2247, Loss 2.940576\n", "Epoch 2248, Loss 2.940533\n", "Epoch 2249, Loss 2.940489\n", "Epoch 2250, Loss 2.940446\n", "Epoch 2251, Loss 2.940403\n", "Epoch 2252, Loss 2.940358\n", "Epoch 2253, Loss 2.940316\n", "Epoch 2254, Loss 2.940274\n", "Epoch 2255, Loss 2.940229\n", "Epoch 2256, Loss 2.940187\n", "Epoch 2257, Loss 2.940144\n", "Epoch 2258, Loss 2.940102\n", "Epoch 2259, Loss 2.940060\n", "Epoch 2260, Loss 2.940018\n", "Epoch 2261, Loss 2.939977\n", "Epoch 2262, Loss 2.939934\n", "Epoch 2263, Loss 2.939892\n", "Epoch 2264, Loss 2.939851\n", "Epoch 2265, Loss 2.939809\n", "Epoch 2266, Loss 2.939769\n", "Epoch 2267, Loss 2.939727\n", "Epoch 2268, Loss 2.939686\n", "Epoch 2269, Loss 2.939646\n", "Epoch 2270, Loss 2.939605\n", "Epoch 2271, Loss 2.939565\n", "Epoch 2272, Loss 2.939522\n", "Epoch 2273, Loss 2.939483\n", "Epoch 2274, Loss 2.939443\n", "Epoch 2275, Loss 2.939403\n", "Epoch 2276, Loss 2.939361\n", "Epoch 2277, Loss 2.939323\n", "Epoch 2278, Loss 2.939282\n", "Epoch 2279, Loss 2.939243\n", "Epoch 2280, Loss 2.939205\n", "Epoch 2281, Loss 2.939165\n", "Epoch 2282, Loss 2.939127\n", "Epoch 2283, Loss 2.939087\n", "Epoch 2284, Loss 2.939049\n", "Epoch 2285, Loss 2.939011\n", "Epoch 2286, Loss 2.938971\n", "Epoch 2287, Loss 2.938933\n", "Epoch 2288, Loss 2.938893\n", "Epoch 2289, Loss 2.938857\n", "Epoch 2290, Loss 2.938820\n", "Epoch 2291, Loss 2.938779\n", "Epoch 2292, Loss 2.938743\n", "Epoch 2293, Loss 2.938705\n", "Epoch 2294, Loss 2.938667\n", "Epoch 2295, Loss 2.938629\n", "Epoch 2296, Loss 2.938593\n", "Epoch 2297, Loss 2.938555\n", "Epoch 2298, Loss 2.938519\n", "Epoch 2299, Loss 2.938481\n", "Epoch 2300, Loss 2.938444\n", "Epoch 2301, Loss 2.938408\n", "Epoch 2302, Loss 2.938371\n", "Epoch 2303, Loss 2.938335\n", "Epoch 2304, Loss 2.938299\n", "Epoch 2305, Loss 2.938262\n", "Epoch 2306, Loss 2.938227\n", "Epoch 2307, Loss 2.938191\n", "Epoch 2308, Loss 2.938155\n", "Epoch 2309, Loss 2.938118\n", "Epoch 2310, Loss 2.938084\n", "Epoch 2311, Loss 2.938049\n", "Epoch 2312, Loss 2.938014\n", "Epoch 2313, Loss 2.937977\n", "Epoch 2314, Loss 2.937943\n", "Epoch 2315, Loss 2.937908\n", "Epoch 2316, Loss 2.937872\n", "Epoch 2317, Loss 2.937839\n", "Epoch 2318, Loss 2.937804\n", "Epoch 2319, Loss 2.937768\n", "Epoch 2320, Loss 2.937734\n", "Epoch 2321, Loss 2.937700\n", "Epoch 2322, Loss 2.937665\n", "Epoch 2323, Loss 2.937632\n", "Epoch 2324, Loss 2.937598\n", "Epoch 2325, Loss 2.937565\n", "Epoch 2326, Loss 2.937531\n", "Epoch 2327, Loss 2.937499\n", "Epoch 2328, Loss 2.937464\n", "Epoch 2329, Loss 2.937430\n", "Epoch 2330, Loss 2.937398\n", "Epoch 2331, Loss 2.937364\n", "Epoch 2332, Loss 2.937332\n", "Epoch 2333, Loss 2.937299\n", "Epoch 2334, Loss 2.937265\n", "Epoch 2335, Loss 2.937232\n", "Epoch 2336, Loss 2.937201\n", "Epoch 2337, Loss 2.937168\n", "Epoch 2338, Loss 2.937134\n", "Epoch 2339, Loss 2.937104\n", "Epoch 2340, Loss 2.937071\n", "Epoch 2341, Loss 2.937039\n", "Epoch 2342, Loss 2.937008\n", "Epoch 2343, Loss 2.936976\n", "Epoch 2344, Loss 2.936945\n", "Epoch 2345, Loss 2.936912\n", "Epoch 2346, Loss 2.936882\n", "Epoch 2347, Loss 2.936851\n", "Epoch 2348, Loss 2.936819\n", "Epoch 2349, Loss 2.936788\n", "Epoch 2350, Loss 2.936757\n", "Epoch 2351, Loss 
2.936725\n", "Epoch 2352, Loss 2.936694\n", "Epoch 2353, Loss 2.936665\n", "Epoch 2354, Loss 2.936633\n", "Epoch 2355, Loss 2.936602\n", "Epoch 2356, Loss 2.936572\n", "Epoch 2357, Loss 2.936542\n", "Epoch 2358, Loss 2.936511\n", "Epoch 2359, Loss 2.936482\n", "Epoch 2360, Loss 2.936451\n", "Epoch 2361, Loss 2.936421\n", "Epoch 2362, Loss 2.936392\n", "Epoch 2363, Loss 2.936362\n", "Epoch 2364, Loss 2.936332\n", "Epoch 2365, Loss 2.936304\n", "Epoch 2366, Loss 2.936274\n", "Epoch 2367, Loss 2.936244\n", "Epoch 2368, Loss 2.936215\n", "Epoch 2369, Loss 2.936188\n", "Epoch 2370, Loss 2.936156\n", "Epoch 2371, Loss 2.936128\n", "Epoch 2372, Loss 2.936100\n", "Epoch 2373, Loss 2.936071\n", "Epoch 2374, Loss 2.936043\n", "Epoch 2375, Loss 2.936014\n", "Epoch 2376, Loss 2.935986\n", "Epoch 2377, Loss 2.935957\n", "Epoch 2378, Loss 2.935928\n", "Epoch 2379, Loss 2.935901\n", "Epoch 2380, Loss 2.935873\n", "Epoch 2381, Loss 2.935845\n", "Epoch 2382, Loss 2.935817\n", "Epoch 2383, Loss 2.935789\n", "Epoch 2384, Loss 2.935761\n", "Epoch 2385, Loss 2.935734\n", "Epoch 2386, Loss 2.935707\n", "Epoch 2387, Loss 2.935679\n", "Epoch 2388, Loss 2.935650\n", "Epoch 2389, Loss 2.935626\n", "Epoch 2390, Loss 2.935596\n", "Epoch 2391, Loss 2.935571\n", "Epoch 2392, Loss 2.935544\n", "Epoch 2393, Loss 2.935516\n", "Epoch 2394, Loss 2.935489\n", "Epoch 2395, Loss 2.935464\n", "Epoch 2396, Loss 2.935436\n", "Epoch 2397, Loss 2.935412\n", "Epoch 2398, Loss 2.935385\n", "Epoch 2399, Loss 2.935357\n", "Epoch 2400, Loss 2.935332\n", "Epoch 2401, Loss 2.935304\n", "Epoch 2402, Loss 2.935281\n", "Epoch 2403, Loss 2.935252\n", "Epoch 2404, Loss 2.935229\n", "Epoch 2405, Loss 2.935203\n", "Epoch 2406, Loss 2.935177\n", "Epoch 2407, Loss 2.935152\n", "Epoch 2408, Loss 2.935126\n", "Epoch 2409, Loss 2.935099\n", "Epoch 2410, Loss 2.935075\n", "Epoch 2411, Loss 2.935049\n", "Epoch 2412, Loss 2.935024\n", "Epoch 2413, Loss 2.935001\n", "Epoch 2414, Loss 2.934973\n", "Epoch 2415, Loss 2.934949\n", "Epoch 2416, Loss 2.934925\n", "Epoch 2417, Loss 2.934899\n", "Epoch 2418, Loss 2.934876\n", "Epoch 2419, Loss 2.934852\n", "Epoch 2420, Loss 2.934827\n", "Epoch 2421, Loss 2.934802\n", "Epoch 2422, Loss 2.934777\n", "Epoch 2423, Loss 2.934753\n", "Epoch 2424, Loss 2.934730\n", "Epoch 2425, Loss 2.934705\n", "Epoch 2426, Loss 2.934681\n", "Epoch 2427, Loss 2.934658\n", "Epoch 2428, Loss 2.934635\n", "Epoch 2429, Loss 2.934609\n", "Epoch 2430, Loss 2.934585\n", "Epoch 2431, Loss 2.934564\n", "Epoch 2432, Loss 2.934541\n", "Epoch 2433, Loss 2.934516\n", "Epoch 2434, Loss 2.934493\n", "Epoch 2435, Loss 2.934469\n", "Epoch 2436, Loss 2.934446\n", "Epoch 2437, Loss 2.934423\n", "Epoch 2438, Loss 2.934400\n", "Epoch 2439, Loss 2.934377\n", "Epoch 2440, Loss 2.934355\n", "Epoch 2441, Loss 2.934331\n", "Epoch 2442, Loss 2.934309\n", "Epoch 2443, Loss 2.934287\n", "Epoch 2444, Loss 2.934264\n", "Epoch 2445, Loss 2.934242\n", "Epoch 2446, Loss 2.934219\n", "Epoch 2447, Loss 2.934198\n", "Epoch 2448, Loss 2.934175\n", "Epoch 2449, Loss 2.934151\n", "Epoch 2450, Loss 2.934129\n", "Epoch 2451, Loss 2.934108\n", "Epoch 2452, Loss 2.934084\n", "Epoch 2453, Loss 2.934064\n", "Epoch 2454, Loss 2.934043\n", "Epoch 2455, Loss 2.934020\n", "Epoch 2456, Loss 2.934000\n", "Epoch 2457, Loss 2.933978\n", "Epoch 2458, Loss 2.933956\n", "Epoch 2459, Loss 2.933935\n", "Epoch 2460, Loss 2.933913\n", "Epoch 2461, Loss 2.933893\n", "Epoch 2462, Loss 2.933871\n", "Epoch 2463, Loss 2.933849\n", "Epoch 2464, Loss 2.933828\n", "Epoch 2465, Loss 2.933807\n", "Epoch 
2466, Loss 2.933787\n", "Epoch 2467, Loss 2.933766\n", "Epoch 2468, Loss 2.933745\n", "Epoch 2469, Loss 2.933723\n", "Epoch 2470, Loss 2.933704\n", "Epoch 2471, Loss 2.933682\n", "Epoch 2472, Loss 2.933662\n", "Epoch 2473, Loss 2.933643\n", "Epoch 2474, Loss 2.933622\n", "Epoch 2475, Loss 2.933602\n", "Epoch 2476, Loss 2.933583\n", "Epoch 2477, Loss 2.933561\n", "Epoch 2478, Loss 2.933541\n", "Epoch 2479, Loss 2.933521\n", "Epoch 2480, Loss 2.933501\n", "Epoch 2481, Loss 2.933480\n", "Epoch 2482, Loss 2.933463\n", "Epoch 2483, Loss 2.933442\n", "Epoch 2484, Loss 2.933423\n", "Epoch 2485, Loss 2.933403\n", "Epoch 2486, Loss 2.933382\n", "Epoch 2487, Loss 2.933365\n", "Epoch 2488, Loss 2.933345\n", "Epoch 2489, Loss 2.933325\n", "Epoch 2490, Loss 2.933306\n", "Epoch 2491, Loss 2.933287\n", "Epoch 2492, Loss 2.933266\n", "Epoch 2493, Loss 2.933249\n", "Epoch 2494, Loss 2.933228\n", "Epoch 2495, Loss 2.933209\n", "Epoch 2496, Loss 2.933190\n", "Epoch 2497, Loss 2.933172\n", "Epoch 2498, Loss 2.933154\n", "Epoch 2499, Loss 2.933134\n", "Epoch 2500, Loss 2.933116\n", "Epoch 2501, Loss 2.933097\n", "Epoch 2502, Loss 2.933079\n", "Epoch 2503, Loss 2.933060\n", "Epoch 2504, Loss 2.933043\n", "Epoch 2505, Loss 2.933025\n", "Epoch 2506, Loss 2.933007\n", "Epoch 2507, Loss 2.932988\n", "Epoch 2508, Loss 2.932970\n", "Epoch 2509, Loss 2.932953\n", "Epoch 2510, Loss 2.932932\n", "Epoch 2511, Loss 2.932915\n", "Epoch 2512, Loss 2.932898\n", "Epoch 2513, Loss 2.932880\n", "Epoch 2514, Loss 2.932862\n", "Epoch 2515, Loss 2.932846\n", "Epoch 2516, Loss 2.932826\n", "Epoch 2517, Loss 2.932810\n", "Epoch 2518, Loss 2.932791\n", "Epoch 2519, Loss 2.932774\n", "Epoch 2520, Loss 2.932758\n", "Epoch 2521, Loss 2.932739\n", "Epoch 2522, Loss 2.932723\n", "Epoch 2523, Loss 2.932706\n", "Epoch 2524, Loss 2.932689\n", "Epoch 2525, Loss 2.932671\n", "Epoch 2526, Loss 2.932654\n", "Epoch 2527, Loss 2.932637\n", "Epoch 2528, Loss 2.932619\n", "Epoch 2529, Loss 2.932603\n", "Epoch 2530, Loss 2.932585\n", "Epoch 2531, Loss 2.932569\n", "Epoch 2532, Loss 2.932553\n", "Epoch 2533, Loss 2.932535\n", "Epoch 2534, Loss 2.932520\n", "Epoch 2535, Loss 2.932502\n", "Epoch 2536, Loss 2.932487\n", "Epoch 2537, Loss 2.932469\n", "Epoch 2538, Loss 2.932455\n", "Epoch 2539, Loss 2.932438\n", "Epoch 2540, Loss 2.932421\n", "Epoch 2541, Loss 2.932404\n", "Epoch 2542, Loss 2.932387\n", "Epoch 2543, Loss 2.932370\n", "Epoch 2544, Loss 2.932358\n", "Epoch 2545, Loss 2.932340\n", "Epoch 2546, Loss 2.932324\n", "Epoch 2547, Loss 2.932310\n", "Epoch 2548, Loss 2.932293\n", "Epoch 2549, Loss 2.932278\n", "Epoch 2550, Loss 2.932261\n", "Epoch 2551, Loss 2.932246\n", "Epoch 2552, Loss 2.932229\n", "Epoch 2553, Loss 2.932215\n", "Epoch 2554, Loss 2.932198\n", "Epoch 2555, Loss 2.932184\n", "Epoch 2556, Loss 2.932168\n", "Epoch 2557, Loss 2.932153\n", "Epoch 2558, Loss 2.932137\n", "Epoch 2559, Loss 2.932122\n", "Epoch 2560, Loss 2.932107\n", "Epoch 2561, Loss 2.932092\n", "Epoch 2562, Loss 2.932076\n", "Epoch 2563, Loss 2.932061\n", "Epoch 2564, Loss 2.932047\n", "Epoch 2565, Loss 2.932031\n", "Epoch 2566, Loss 2.932017\n", "Epoch 2567, Loss 2.932002\n", "Epoch 2568, Loss 2.931986\n", "Epoch 2569, Loss 2.931972\n", "Epoch 2570, Loss 2.931957\n", "Epoch 2571, Loss 2.931941\n", "Epoch 2572, Loss 2.931929\n", "Epoch 2573, Loss 2.931914\n", "Epoch 2574, Loss 2.931900\n", "Epoch 2575, Loss 2.931885\n", "Epoch 2576, Loss 2.931870\n", "Epoch 2577, Loss 2.931855\n", "Epoch 2578, Loss 2.931843\n", "Epoch 2579, Loss 2.931828\n", "Epoch 2580, Loss 
2.931813\n", "Epoch 2581, Loss 2.931799\n", "Epoch 2582, Loss 2.931786\n", "Epoch 2583, Loss 2.931771\n", "Epoch 2584, Loss 2.931759\n", "Epoch 2585, Loss 2.931742\n", "Epoch 2586, Loss 2.931729\n", "Epoch 2587, Loss 2.931717\n", "Epoch 2588, Loss 2.931701\n", "Epoch 2589, Loss 2.931687\n", "Epoch 2590, Loss 2.931674\n", "Epoch 2591, Loss 2.931660\n", "Epoch 2592, Loss 2.931647\n", "Epoch 2593, Loss 2.931632\n", "Epoch 2594, Loss 2.931619\n", "Epoch 2595, Loss 2.931606\n", "Epoch 2596, Loss 2.931594\n", "Epoch 2597, Loss 2.931580\n", "Epoch 2598, Loss 2.931566\n", "Epoch 2599, Loss 2.931554\n", "Epoch 2600, Loss 2.931539\n", "Epoch 2601, Loss 2.931526\n", "Epoch 2602, Loss 2.931512\n", "Epoch 2603, Loss 2.931500\n", "Epoch 2604, Loss 2.931488\n", "Epoch 2605, Loss 2.931474\n", "Epoch 2606, Loss 2.931462\n", "Epoch 2607, Loss 2.931448\n", "Epoch 2608, Loss 2.931436\n", "Epoch 2609, Loss 2.931422\n", "Epoch 2610, Loss 2.931411\n", "Epoch 2611, Loss 2.931398\n", "Epoch 2612, Loss 2.931384\n", "Epoch 2613, Loss 2.931370\n", "Epoch 2614, Loss 2.931358\n", "Epoch 2615, Loss 2.931346\n", "Epoch 2616, Loss 2.931334\n", "Epoch 2617, Loss 2.931322\n", "Epoch 2618, Loss 2.931309\n", "Epoch 2619, Loss 2.931296\n", "Epoch 2620, Loss 2.931282\n", "Epoch 2621, Loss 2.931272\n", "Epoch 2622, Loss 2.931258\n", "Epoch 2623, Loss 2.931245\n", "Epoch 2624, Loss 2.931235\n", "Epoch 2625, Loss 2.931222\n", "Epoch 2626, Loss 2.931211\n", "Epoch 2627, Loss 2.931196\n", "Epoch 2628, Loss 2.931185\n", "Epoch 2629, Loss 2.931173\n", "Epoch 2630, Loss 2.931162\n", "Epoch 2631, Loss 2.931149\n", "Epoch 2632, Loss 2.931139\n", "Epoch 2633, Loss 2.931126\n", "Epoch 2634, Loss 2.931114\n", "Epoch 2635, Loss 2.931101\n", "Epoch 2636, Loss 2.931090\n", "Epoch 2637, Loss 2.931079\n", "Epoch 2638, Loss 2.931067\n", "Epoch 2639, Loss 2.931054\n", "Epoch 2640, Loss 2.931044\n", "Epoch 2641, Loss 2.931034\n", "Epoch 2642, Loss 2.931021\n", "Epoch 2643, Loss 2.931010\n", "Epoch 2644, Loss 2.930999\n", "Epoch 2645, Loss 2.930987\n", "Epoch 2646, Loss 2.930976\n", "Epoch 2647, Loss 2.930964\n", "Epoch 2648, Loss 2.930953\n", "Epoch 2649, Loss 2.930941\n", "Epoch 2650, Loss 2.930932\n", "Epoch 2651, Loss 2.930920\n", "Epoch 2652, Loss 2.930908\n", "Epoch 2653, Loss 2.930899\n", "Epoch 2654, Loss 2.930885\n", "Epoch 2655, Loss 2.930876\n", "Epoch 2656, Loss 2.930864\n", "Epoch 2657, Loss 2.930854\n", "Epoch 2658, Loss 2.930841\n", "Epoch 2659, Loss 2.930833\n", "Epoch 2660, Loss 2.930821\n", "Epoch 2661, Loss 2.930811\n", "Epoch 2662, Loss 2.930801\n", "Epoch 2663, Loss 2.930788\n", "Epoch 2664, Loss 2.930778\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 2665, Loss 2.930766\n", "Epoch 2666, Loss 2.930757\n", "Epoch 2667, Loss 2.930746\n", "Epoch 2668, Loss 2.930735\n", "Epoch 2669, Loss 2.930724\n", "Epoch 2670, Loss 2.930715\n", "Epoch 2671, Loss 2.930704\n", "Epoch 2672, Loss 2.930694\n", "Epoch 2673, Loss 2.930685\n", "Epoch 2674, Loss 2.930674\n", "Epoch 2675, Loss 2.930663\n", "Epoch 2676, Loss 2.930654\n", "Epoch 2677, Loss 2.930644\n", "Epoch 2678, Loss 2.930631\n", "Epoch 2679, Loss 2.930622\n", "Epoch 2680, Loss 2.930614\n", "Epoch 2681, Loss 2.930603\n", "Epoch 2682, Loss 2.930592\n", "Epoch 2683, Loss 2.930582\n", "Epoch 2684, Loss 2.930572\n", "Epoch 2685, Loss 2.930562\n", "Epoch 2686, Loss 2.930552\n", "Epoch 2687, Loss 2.930543\n", "Epoch 2688, Loss 2.930534\n", "Epoch 2689, Loss 2.930524\n", "Epoch 2690, Loss 2.930514\n", "Epoch 2691, Loss 2.930502\n", "Epoch 2692, Loss 2.930493\n", "Epoch 
2693, Loss 2.930482\n", "Epoch 2694, Loss 2.930474\n", "Epoch 2695, Loss 2.930465\n", "Epoch 2696, Loss 2.930454\n", "Epoch 2697, Loss 2.930446\n", "Epoch 2698, Loss 2.930436\n", "Epoch 2699, Loss 2.930426\n", "Epoch 2700, Loss 2.930417\n", "Epoch 2701, Loss 2.930408\n", "Epoch 2702, Loss 2.930398\n", "Epoch 2703, Loss 2.930387\n", "Epoch 2704, Loss 2.930380\n", "Epoch 2705, Loss 2.930370\n", "Epoch 2706, Loss 2.930360\n", "Epoch 2707, Loss 2.930352\n", "Epoch 2708, Loss 2.930342\n", "Epoch 2709, Loss 2.930334\n", "Epoch 2710, Loss 2.930325\n", "Epoch 2711, Loss 2.930315\n", "Epoch 2712, Loss 2.930306\n", "Epoch 2713, Loss 2.930298\n", "Epoch 2714, Loss 2.930288\n", "Epoch 2715, Loss 2.930279\n", "Epoch 2716, Loss 2.930270\n", "Epoch 2717, Loss 2.930262\n", "Epoch 2718, Loss 2.930254\n", "Epoch 2719, Loss 2.930244\n", "Epoch 2720, Loss 2.930235\n", "Epoch 2721, Loss 2.930226\n", "Epoch 2722, Loss 2.930218\n", "Epoch 2723, Loss 2.930209\n", "Epoch 2724, Loss 2.930201\n", "Epoch 2725, Loss 2.930190\n", "Epoch 2726, Loss 2.930182\n", "Epoch 2727, Loss 2.930173\n", "Epoch 2728, Loss 2.930167\n", "Epoch 2729, Loss 2.930156\n", "Epoch 2730, Loss 2.930149\n", "Epoch 2731, Loss 2.930139\n", "Epoch 2732, Loss 2.930131\n", "Epoch 2733, Loss 2.930123\n", "Epoch 2734, Loss 2.930113\n", "Epoch 2735, Loss 2.930107\n", "Epoch 2736, Loss 2.930099\n", "Epoch 2737, Loss 2.930090\n", "Epoch 2738, Loss 2.930081\n", "Epoch 2739, Loss 2.930073\n", "Epoch 2740, Loss 2.930064\n", "Epoch 2741, Loss 2.930056\n", "Epoch 2742, Loss 2.930048\n", "Epoch 2743, Loss 2.930041\n", "Epoch 2744, Loss 2.930032\n", "Epoch 2745, Loss 2.930022\n", "Epoch 2746, Loss 2.930016\n", "Epoch 2747, Loss 2.930008\n", "Epoch 2748, Loss 2.930000\n", "Epoch 2749, Loss 2.929992\n", "Epoch 2750, Loss 2.929983\n", "Epoch 2751, Loss 2.929975\n", "Epoch 2752, Loss 2.929968\n", "Epoch 2753, Loss 2.929960\n", "Epoch 2754, Loss 2.929953\n", "Epoch 2755, Loss 2.929945\n", "Epoch 2756, Loss 2.929937\n", "Epoch 2757, Loss 2.929929\n", "Epoch 2758, Loss 2.929921\n", "Epoch 2759, Loss 2.929914\n", "Epoch 2760, Loss 2.929905\n", "Epoch 2761, Loss 2.929896\n", "Epoch 2762, Loss 2.929891\n", "Epoch 2763, Loss 2.929882\n", "Epoch 2764, Loss 2.929874\n", "Epoch 2765, Loss 2.929869\n", "Epoch 2766, Loss 2.929859\n", "Epoch 2767, Loss 2.929852\n", "Epoch 2768, Loss 2.929845\n", "Epoch 2769, Loss 2.929838\n", "Epoch 2770, Loss 2.929830\n", "Epoch 2771, Loss 2.929822\n", "Epoch 2772, Loss 2.929816\n", "Epoch 2773, Loss 2.929806\n", "Epoch 2774, Loss 2.929799\n", "Epoch 2775, Loss 2.929793\n", "Epoch 2776, Loss 2.929786\n", "Epoch 2777, Loss 2.929778\n", "Epoch 2778, Loss 2.929771\n", "Epoch 2779, Loss 2.929765\n", "Epoch 2780, Loss 2.929757\n", "Epoch 2781, Loss 2.929750\n", "Epoch 2782, Loss 2.929743\n", "Epoch 2783, Loss 2.929735\n", "Epoch 2784, Loss 2.929729\n", "Epoch 2785, Loss 2.929722\n", "Epoch 2786, Loss 2.929714\n", "Epoch 2787, Loss 2.929707\n", "Epoch 2788, Loss 2.929701\n", "Epoch 2789, Loss 2.929692\n", "Epoch 2790, Loss 2.929685\n", "Epoch 2791, Loss 2.929680\n", "Epoch 2792, Loss 2.929672\n", "Epoch 2793, Loss 2.929666\n", "Epoch 2794, Loss 2.929659\n", "Epoch 2795, Loss 2.929652\n", "Epoch 2796, Loss 2.929646\n", "Epoch 2797, Loss 2.929638\n", "Epoch 2798, Loss 2.929632\n", "Epoch 2799, Loss 2.929626\n", "Epoch 2800, Loss 2.929620\n", "Epoch 2801, Loss 2.929611\n", "Epoch 2802, Loss 2.929605\n", "Epoch 2803, Loss 2.929600\n", "Epoch 2804, Loss 2.929593\n", "Epoch 2805, Loss 2.929586\n", "Epoch 2806, Loss 2.929579\n", "Epoch 2807, Loss 
2.929572\n", "Epoch 2808, Loss 2.929566\n", "Epoch 2809, Loss 2.929559\n", "Epoch 2810, Loss 2.929552\n", "Epoch 2811, Loss 2.929545\n", "Epoch 2812, Loss 2.929540\n", "Epoch 2813, Loss 2.929533\n", "Epoch 2814, Loss 2.929527\n", "Epoch 2815, Loss 2.929520\n", "Epoch 2816, Loss 2.929513\n", "Epoch 2817, Loss 2.929507\n", "Epoch 2818, Loss 2.929501\n", "Epoch 2819, Loss 2.929496\n", "Epoch 2820, Loss 2.929489\n", "Epoch 2821, Loss 2.929482\n", "Epoch 2822, Loss 2.929476\n", "Epoch 2823, Loss 2.929471\n", "Epoch 2824, Loss 2.929463\n", "Epoch 2825, Loss 2.929457\n", "Epoch 2826, Loss 2.929452\n", "Epoch 2827, Loss 2.929445\n", "Epoch 2828, Loss 2.929439\n", "Epoch 2829, Loss 2.929433\n", "Epoch 2830, Loss 2.929427\n", "Epoch 2831, Loss 2.929421\n", "Epoch 2832, Loss 2.929415\n", "Epoch 2833, Loss 2.929409\n", "Epoch 2834, Loss 2.929404\n", "Epoch 2835, Loss 2.929396\n", "Epoch 2836, Loss 2.929391\n", "Epoch 2837, Loss 2.929383\n", "Epoch 2838, Loss 2.929380\n", "Epoch 2839, Loss 2.929373\n", "Epoch 2840, Loss 2.929368\n", "Epoch 2841, Loss 2.929362\n", "Epoch 2842, Loss 2.929356\n", "Epoch 2843, Loss 2.929351\n", "Epoch 2844, Loss 2.929344\n", "Epoch 2845, Loss 2.929338\n", "Epoch 2846, Loss 2.929332\n", "Epoch 2847, Loss 2.929328\n", "Epoch 2848, Loss 2.929321\n", "Epoch 2849, Loss 2.929316\n", "Epoch 2850, Loss 2.929309\n", "Epoch 2851, Loss 2.929304\n", "Epoch 2852, Loss 2.929300\n", "Epoch 2853, Loss 2.929293\n", "Epoch 2854, Loss 2.929288\n", "Epoch 2855, Loss 2.929282\n", "Epoch 2856, Loss 2.929277\n", "Epoch 2857, Loss 2.929271\n", "Epoch 2858, Loss 2.929266\n", "Epoch 2859, Loss 2.929260\n", "Epoch 2860, Loss 2.929255\n", "Epoch 2861, Loss 2.929250\n", "Epoch 2862, Loss 2.929244\n", "Epoch 2863, Loss 2.929238\n", "Epoch 2864, Loss 2.929234\n", "Epoch 2865, Loss 2.929228\n", "Epoch 2866, Loss 2.929222\n", "Epoch 2867, Loss 2.929217\n", "Epoch 2868, Loss 2.929211\n", "Epoch 2869, Loss 2.929207\n", "Epoch 2870, Loss 2.929201\n", "Epoch 2871, Loss 2.929195\n", "Epoch 2872, Loss 2.929191\n", "Epoch 2873, Loss 2.929185\n", "Epoch 2874, Loss 2.929181\n", "Epoch 2875, Loss 2.929175\n", "Epoch 2876, Loss 2.929170\n", "Epoch 2877, Loss 2.929165\n", "Epoch 2878, Loss 2.929160\n", "Epoch 2879, Loss 2.929154\n", "Epoch 2880, Loss 2.929148\n", "Epoch 2881, Loss 2.929143\n", "Epoch 2882, Loss 2.929140\n", "Epoch 2883, Loss 2.929133\n", "Epoch 2884, Loss 2.929128\n", "Epoch 2885, Loss 2.929122\n", "Epoch 2886, Loss 2.929119\n", "Epoch 2887, Loss 2.929112\n", "Epoch 2888, Loss 2.929108\n", "Epoch 2889, Loss 2.929104\n", "Epoch 2890, Loss 2.929099\n", "Epoch 2891, Loss 2.929093\n", "Epoch 2892, Loss 2.929088\n", "Epoch 2893, Loss 2.929083\n", "Epoch 2894, Loss 2.929080\n", "Epoch 2895, Loss 2.929074\n", "Epoch 2896, Loss 2.929069\n", "Epoch 2897, Loss 2.929065\n", "Epoch 2898, Loss 2.929059\n", "Epoch 2899, Loss 2.929054\n", "Epoch 2900, Loss 2.929050\n", "Epoch 2901, Loss 2.929044\n", "Epoch 2902, Loss 2.929041\n", "Epoch 2903, Loss 2.929036\n", "Epoch 2904, Loss 2.929031\n", "Epoch 2905, Loss 2.929025\n", "Epoch 2906, Loss 2.929021\n", "Epoch 2907, Loss 2.929017\n", "Epoch 2908, Loss 2.929012\n", "Epoch 2909, Loss 2.929007\n", "Epoch 2910, Loss 2.929003\n", "Epoch 2911, Loss 2.928999\n", "Epoch 2912, Loss 2.928994\n", "Epoch 2913, Loss 2.928989\n", "Epoch 2914, Loss 2.928985\n", "Epoch 2915, Loss 2.928980\n", "Epoch 2916, Loss 2.928976\n", "Epoch 2917, Loss 2.928971\n", "Epoch 2918, Loss 2.928967\n", "Epoch 2919, Loss 2.928962\n", "Epoch 2920, Loss 2.928958\n", "Epoch 2921, Loss 2.928953\n", "Epoch 
2922, Loss 2.928947\n", "Epoch 2923, Loss 2.928944\n", "Epoch 2924, Loss 2.928941\n", "Epoch 2925, Loss 2.928935\n", "Epoch 2926, Loss 2.928932\n", "Epoch 2927, Loss 2.928926\n", "Epoch 2928, Loss 2.928924\n", "Epoch 2929, Loss 2.928918\n", "Epoch 2930, Loss 2.928913\n", "Epoch 2931, Loss 2.928909\n", "Epoch 2932, Loss 2.928904\n", "Epoch 2933, Loss 2.928902\n", "Epoch 2934, Loss 2.928897\n", "Epoch 2935, Loss 2.928893\n", "Epoch 2936, Loss 2.928887\n", "Epoch 2937, Loss 2.928883\n", "Epoch 2938, Loss 2.928880\n", "Epoch 2939, Loss 2.928877\n", "Epoch 2940, Loss 2.928871\n", "Epoch 2941, Loss 2.928867\n", "Epoch 2942, Loss 2.928864\n", "Epoch 2943, Loss 2.928860\n", "Epoch 2944, Loss 2.928855\n", "Epoch 2945, Loss 2.928850\n", "Epoch 2946, Loss 2.928845\n", "Epoch 2947, Loss 2.928843\n", "Epoch 2948, Loss 2.928838\n", "Epoch 2949, Loss 2.928833\n", "Epoch 2950, Loss 2.928830\n", "Epoch 2951, Loss 2.928826\n", "Epoch 2952, Loss 2.928822\n", "Epoch 2953, Loss 2.928818\n", "Epoch 2954, Loss 2.928815\n", "Epoch 2955, Loss 2.928811\n", "Epoch 2956, Loss 2.928805\n", "Epoch 2957, Loss 2.928801\n", "Epoch 2958, Loss 2.928799\n", "Epoch 2959, Loss 2.928795\n", "Epoch 2960, Loss 2.928789\n", "Epoch 2961, Loss 2.928789\n", "Epoch 2962, Loss 2.928783\n", "Epoch 2963, Loss 2.928779\n", "Epoch 2964, Loss 2.928775\n", "Epoch 2965, Loss 2.928771\n", "Epoch 2966, Loss 2.928767\n", "Epoch 2967, Loss 2.928765\n", "Epoch 2968, Loss 2.928761\n", "Epoch 2969, Loss 2.928758\n", "Epoch 2970, Loss 2.928752\n", "Epoch 2971, Loss 2.928750\n", "Epoch 2972, Loss 2.928745\n", "Epoch 2973, Loss 2.928741\n", "Epoch 2974, Loss 2.928737\n", "Epoch 2975, Loss 2.928735\n", "Epoch 2976, Loss 2.928730\n", "Epoch 2977, Loss 2.928727\n", "Epoch 2978, Loss 2.928723\n", "Epoch 2979, Loss 2.928719\n", "Epoch 2980, Loss 2.928716\n", "Epoch 2981, Loss 2.928712\n", "Epoch 2982, Loss 2.928708\n", "Epoch 2983, Loss 2.928705\n", "Epoch 2984, Loss 2.928700\n", "Epoch 2985, Loss 2.928698\n", "Epoch 2986, Loss 2.928695\n", "Epoch 2987, Loss 2.928690\n", "Epoch 2988, Loss 2.928687\n", "Epoch 2989, Loss 2.928684\n", "Epoch 2990, Loss 2.928679\n", "Epoch 2991, Loss 2.928677\n", "Epoch 2992, Loss 2.928673\n", "Epoch 2993, Loss 2.928669\n", "Epoch 2994, Loss 2.928666\n", "Epoch 2995, Loss 2.928662\n", "Epoch 2996, Loss 2.928660\n", "Epoch 2997, Loss 2.928656\n", "Epoch 2998, Loss 2.928651\n", "Epoch 2999, Loss 2.928648\n", "Epoch 3000, Loss 2.928646\n", "Epoch 3001, Loss 2.928643\n", "Epoch 3002, Loss 2.928638\n", "Epoch 3003, Loss 2.928635\n", "Epoch 3004, Loss 2.928632\n", "Epoch 3005, Loss 2.928629\n", "Epoch 3006, Loss 2.928625\n", "Epoch 3007, Loss 2.928621\n", "Epoch 3008, Loss 2.928617\n", "Epoch 3009, Loss 2.928616\n", "Epoch 3010, Loss 2.928612\n", "Epoch 3011, Loss 2.928608\n", "Epoch 3012, Loss 2.928604\n", "Epoch 3013, Loss 2.928601\n", "Epoch 3014, Loss 2.928599\n", "Epoch 3015, Loss 2.928595\n", "Epoch 3016, Loss 2.928592\n", "Epoch 3017, Loss 2.928588\n", "Epoch 3018, Loss 2.928586\n", "Epoch 3019, Loss 2.928583\n", "Epoch 3020, Loss 2.928580\n", "Epoch 3021, Loss 2.928576\n", "Epoch 3022, Loss 2.928574\n", "Epoch 3023, Loss 2.928569\n", "Epoch 3024, Loss 2.928567\n", "Epoch 3025, Loss 2.928564\n", "Epoch 3026, Loss 2.928560\n", "Epoch 3027, Loss 2.928557\n", "Epoch 3028, Loss 2.928555\n", "Epoch 3029, Loss 2.928551\n", "Epoch 3030, Loss 2.928548\n", "Epoch 3031, Loss 2.928545\n", "Epoch 3032, Loss 2.928543\n", "Epoch 3033, Loss 2.928539\n", "Epoch 3034, Loss 2.928536\n", "Epoch 3035, Loss 2.928532\n", "Epoch 3036, Loss 
2.928531\n", "Epoch 3037, Loss 2.928528\n", "Epoch 3038, Loss 2.928524\n", "Epoch 3039, Loss 2.928521\n", "Epoch 3040, Loss 2.928519\n", "Epoch 3041, Loss 2.928514\n", "Epoch 3042, Loss 2.928512\n", "Epoch 3043, Loss 2.928509\n", "Epoch 3044, Loss 2.928505\n", "Epoch 3045, Loss 2.928503\n", "Epoch 3046, Loss 2.928500\n", "Epoch 3047, Loss 2.928498\n", "Epoch 3048, Loss 2.928495\n", "Epoch 3049, Loss 2.928491\n", "Epoch 3050, Loss 2.928488\n", "Epoch 3051, Loss 2.928486\n", "Epoch 3052, Loss 2.928484\n", "Epoch 3053, Loss 2.928480\n", "Epoch 3054, Loss 2.928477\n", "Epoch 3055, Loss 2.928475\n", "Epoch 3056, Loss 2.928473\n", "Epoch 3057, Loss 2.928469\n", "Epoch 3058, Loss 2.928468\n", "Epoch 3059, Loss 2.928463\n", "Epoch 3060, Loss 2.928460\n", "Epoch 3061, Loss 2.928458\n", "Epoch 3062, Loss 2.928456\n", "Epoch 3063, Loss 2.928452\n", "Epoch 3064, Loss 2.928450\n", "Epoch 3065, Loss 2.928447\n", "Epoch 3066, Loss 2.928443\n", "Epoch 3067, Loss 2.928443\n", "Epoch 3068, Loss 2.928440\n", "Epoch 3069, Loss 2.928435\n", "Epoch 3070, Loss 2.928436\n", "Epoch 3071, Loss 2.928430\n", "Epoch 3072, Loss 2.928428\n", "Epoch 3073, Loss 2.928426\n", "Epoch 3074, Loss 2.928423\n", "Epoch 3075, Loss 2.928421\n", "Epoch 3076, Loss 2.928417\n", "Epoch 3077, Loss 2.928415\n", "Epoch 3078, Loss 2.928411\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 3079, Loss 2.928410\n", "Epoch 3080, Loss 2.928407\n", "Epoch 3081, Loss 2.928404\n", "Epoch 3082, Loss 2.928402\n", "Epoch 3083, Loss 2.928399\n", "Epoch 3084, Loss 2.928396\n", "Epoch 3085, Loss 2.928396\n", "Epoch 3086, Loss 2.928392\n", "Epoch 3087, Loss 2.928389\n", "Epoch 3088, Loss 2.928386\n", "Epoch 3089, Loss 2.928383\n", "Epoch 3090, Loss 2.928383\n", "Epoch 3091, Loss 2.928379\n", "Epoch 3092, Loss 2.928378\n", "Epoch 3093, Loss 2.928375\n", "Epoch 3094, Loss 2.928372\n", "Epoch 3095, Loss 2.928370\n", "Epoch 3096, Loss 2.928368\n", "Epoch 3097, Loss 2.928364\n", "Epoch 3098, Loss 2.928362\n", "Epoch 3099, Loss 2.928361\n", "Epoch 3100, Loss 2.928357\n", "Epoch 3101, Loss 2.928355\n", "Epoch 3102, Loss 2.928353\n", "Epoch 3103, Loss 2.928349\n", "Epoch 3104, Loss 2.928348\n", "Epoch 3105, Loss 2.928345\n", "Epoch 3106, Loss 2.928343\n", "Epoch 3107, Loss 2.928340\n", "Epoch 3108, Loss 2.928339\n", "Epoch 3109, Loss 2.928337\n", "Epoch 3110, Loss 2.928333\n", "Epoch 3111, Loss 2.928332\n", "Epoch 3112, Loss 2.928328\n", "Epoch 3113, Loss 2.928329\n", "Epoch 3114, Loss 2.928324\n", "Epoch 3115, Loss 2.928323\n", "Epoch 3116, Loss 2.928320\n", "Epoch 3117, Loss 2.928318\n", "Epoch 3118, Loss 2.928315\n", "Epoch 3119, Loss 2.928313\n", "Epoch 3120, Loss 2.928311\n", "Epoch 3121, Loss 2.928308\n", "Epoch 3122, Loss 2.928306\n", "Epoch 3123, Loss 2.928304\n", "Epoch 3124, Loss 2.928303\n", "Epoch 3125, Loss 2.928300\n", "Epoch 3126, Loss 2.928296\n", "Epoch 3127, Loss 2.928295\n", "Epoch 3128, Loss 2.928292\n", "Epoch 3129, Loss 2.928292\n", "Epoch 3130, Loss 2.928288\n", "Epoch 3131, Loss 2.928287\n", "Epoch 3132, Loss 2.928285\n", "Epoch 3133, Loss 2.928282\n", "Epoch 3134, Loss 2.928279\n", "Epoch 3135, Loss 2.928276\n", "Epoch 3136, Loss 2.928275\n", "Epoch 3137, Loss 2.928273\n", "Epoch 3138, Loss 2.928272\n", "Epoch 3139, Loss 2.928268\n", "Epoch 3140, Loss 2.928267\n", "Epoch 3141, Loss 2.928265\n", "Epoch 3142, Loss 2.928263\n", "Epoch 3143, Loss 2.928260\n", "Epoch 3144, Loss 2.928259\n", "Epoch 3145, Loss 2.928256\n", "Epoch 3146, Loss 2.928255\n", "Epoch 3147, Loss 2.928252\n", "Epoch 3148, Loss 2.928251\n", "Epoch 
3149, Loss 2.928248\n", "Epoch 3150, Loss 2.928246\n", "Epoch 3151, Loss 2.928245\n", "Epoch 3152, Loss 2.928242\n", "Epoch 3153, Loss 2.928240\n", "Epoch 3154, Loss 2.928236\n", "Epoch 3155, Loss 2.928236\n", "Epoch 3156, Loss 2.928233\n", "Epoch 3157, Loss 2.928231\n", "Epoch 3158, Loss 2.928230\n", "Epoch 3159, Loss 2.928227\n", "Epoch 3160, Loss 2.928226\n", "Epoch 3161, Loss 2.928225\n", "Epoch 3162, Loss 2.928223\n", "Epoch 3163, Loss 2.928219\n", "Epoch 3164, Loss 2.928218\n", "Epoch 3165, Loss 2.928216\n", "Epoch 3166, Loss 2.928215\n", "Epoch 3167, Loss 2.928212\n", "Epoch 3168, Loss 2.928211\n", "Epoch 3169, Loss 2.928210\n", "Epoch 3170, Loss 2.928206\n", "Epoch 3171, Loss 2.928205\n", "Epoch 3172, Loss 2.928204\n", "Epoch 3173, Loss 2.928202\n", "Epoch 3174, Loss 2.928200\n", "Epoch 3175, Loss 2.928196\n", "Epoch 3176, Loss 2.928195\n", "Epoch 3177, Loss 2.928195\n", "Epoch 3178, Loss 2.928192\n", "Epoch 3179, Loss 2.928190\n", "Epoch 3180, Loss 2.928188\n", "Epoch 3181, Loss 2.928186\n", "Epoch 3182, Loss 2.928185\n", "Epoch 3183, Loss 2.928184\n", "Epoch 3184, Loss 2.928182\n", "Epoch 3185, Loss 2.928180\n", "Epoch 3186, Loss 2.928178\n", "Epoch 3187, Loss 2.928175\n", "Epoch 3188, Loss 2.928172\n", "Epoch 3189, Loss 2.928170\n", "Epoch 3190, Loss 2.928170\n", "Epoch 3191, Loss 2.928169\n", "Epoch 3192, Loss 2.928167\n", "Epoch 3193, Loss 2.928164\n", "Epoch 3194, Loss 2.928164\n", "Epoch 3195, Loss 2.928162\n", "Epoch 3196, Loss 2.928160\n", "Epoch 3197, Loss 2.928158\n", "Epoch 3198, Loss 2.928158\n", "Epoch 3199, Loss 2.928154\n", "Epoch 3200, Loss 2.928152\n", "Epoch 3201, Loss 2.928149\n", "Epoch 3202, Loss 2.928150\n", "Epoch 3203, Loss 2.928147\n", "Epoch 3204, Loss 2.928146\n", "Epoch 3205, Loss 2.928144\n", "Epoch 3206, Loss 2.928142\n", "Epoch 3207, Loss 2.928140\n", "Epoch 3208, Loss 2.928139\n", "Epoch 3209, Loss 2.928137\n", "Epoch 3210, Loss 2.928135\n", "Epoch 3211, Loss 2.928135\n", "Epoch 3212, Loss 2.928133\n", "Epoch 3213, Loss 2.928131\n", "Epoch 3214, Loss 2.928130\n", "Epoch 3215, Loss 2.928125\n", "Epoch 3216, Loss 2.928125\n", "Epoch 3217, Loss 2.928124\n", "Epoch 3218, Loss 2.928121\n", "Epoch 3219, Loss 2.928121\n", "Epoch 3220, Loss 2.928120\n", "Epoch 3221, Loss 2.928118\n", "Epoch 3222, Loss 2.928117\n", "Epoch 3223, Loss 2.928115\n", "Epoch 3224, Loss 2.928113\n", "Epoch 3225, Loss 2.928110\n", "Epoch 3226, Loss 2.928109\n", "Epoch 3227, Loss 2.928108\n", "Epoch 3228, Loss 2.928104\n", "Epoch 3229, Loss 2.928105\n", "Epoch 3230, Loss 2.928104\n", "Epoch 3231, Loss 2.928102\n", "Epoch 3232, Loss 2.928101\n", "Epoch 3233, Loss 2.928098\n", "Epoch 3234, Loss 2.928097\n", "Epoch 3235, Loss 2.928095\n", "Epoch 3236, Loss 2.928094\n", "Epoch 3237, Loss 2.928093\n", "Epoch 3238, Loss 2.928091\n", "Epoch 3239, Loss 2.928090\n", "Epoch 3240, Loss 2.928088\n", "Epoch 3241, Loss 2.928086\n", "Epoch 3242, Loss 2.928085\n", "Epoch 3243, Loss 2.928084\n", "Epoch 3244, Loss 2.928082\n", "Epoch 3245, Loss 2.928080\n", "Epoch 3246, Loss 2.928079\n", "Epoch 3247, Loss 2.928076\n", "Epoch 3248, Loss 2.928076\n", "Epoch 3249, Loss 2.928075\n", "Epoch 3250, Loss 2.928072\n", "Epoch 3251, Loss 2.928072\n", "Epoch 3252, Loss 2.928071\n", "Epoch 3253, Loss 2.928068\n", "Epoch 3254, Loss 2.928068\n", "Epoch 3255, Loss 2.928066\n", "Epoch 3256, Loss 2.928065\n", "Epoch 3257, Loss 2.928063\n", "Epoch 3258, Loss 2.928061\n", "Epoch 3259, Loss 2.928061\n", "Epoch 3260, Loss 2.928057\n", "Epoch 3261, Loss 2.928058\n", "Epoch 3262, Loss 2.928056\n", "Epoch 3263, Loss 
2.928055\n", "Epoch 3264, Loss 2.928052\n", "Epoch 3265, Loss 2.928053\n", "Epoch 3266, Loss 2.928051\n", "Epoch 3267, Loss 2.928050\n", "Epoch 3268, Loss 2.928047\n", "Epoch 3269, Loss 2.928046\n", "Epoch 3270, Loss 2.928046\n", "Epoch 3271, Loss 2.928044\n", "Epoch 3272, Loss 2.928042\n", "Epoch 3273, Loss 2.928040\n", "Epoch 3274, Loss 2.928040\n", "Epoch 3275, Loss 2.928037\n", "Epoch 3276, Loss 2.928036\n", "Epoch 3277, Loss 2.928037\n", "Epoch 3278, Loss 2.928034\n", "Epoch 3279, Loss 2.928034\n", "Epoch 3280, Loss 2.928031\n", "Epoch 3281, Loss 2.928032\n", "Epoch 3282, Loss 2.928029\n", "Epoch 3283, Loss 2.928027\n", "Epoch 3284, Loss 2.928026\n", "Epoch 3285, Loss 2.928025\n", "Epoch 3286, Loss 2.928024\n", "Epoch 3287, Loss 2.928023\n", "Epoch 3288, Loss 2.928022\n", "Epoch 3289, Loss 2.928021\n", "Epoch 3290, Loss 2.928019\n", "Epoch 3291, Loss 2.928018\n", "Epoch 3292, Loss 2.928017\n", "Epoch 3293, Loss 2.928015\n", "Epoch 3294, Loss 2.928013\n", "Epoch 3295, Loss 2.928012\n", "Epoch 3296, Loss 2.928011\n", "Epoch 3297, Loss 2.928009\n", "Epoch 3298, Loss 2.928009\n", "Epoch 3299, Loss 2.928006\n", "Epoch 3300, Loss 2.928007\n", "Epoch 3301, Loss 2.928007\n", "Epoch 3302, Loss 2.928005\n", "Epoch 3303, Loss 2.928002\n", "Epoch 3304, Loss 2.928001\n", "Epoch 3305, Loss 2.928000\n", "Epoch 3306, Loss 2.928000\n", "Epoch 3307, Loss 2.927998\n", "Epoch 3308, Loss 2.927995\n", "Epoch 3309, Loss 2.927995\n", "Epoch 3310, Loss 2.927994\n", "Epoch 3311, Loss 2.927994\n", "Epoch 3312, Loss 2.927992\n", "Epoch 3313, Loss 2.927992\n", "Epoch 3314, Loss 2.927990\n", "Epoch 3315, Loss 2.927989\n", "Epoch 3316, Loss 2.927987\n", "Epoch 3317, Loss 2.927986\n", "Epoch 3318, Loss 2.927985\n", "Epoch 3319, Loss 2.927983\n", "Epoch 3320, Loss 2.927983\n", "Epoch 3321, Loss 2.927981\n", "Epoch 3322, Loss 2.927980\n", "Epoch 3323, Loss 2.927979\n", "Epoch 3324, Loss 2.927978\n", "Epoch 3325, Loss 2.927977\n", "Epoch 3326, Loss 2.927975\n", "Epoch 3327, Loss 2.927973\n", "Epoch 3328, Loss 2.927973\n", "Epoch 3329, Loss 2.927974\n", "Epoch 3330, Loss 2.927971\n", "Epoch 3331, Loss 2.927972\n", "Epoch 3332, Loss 2.927969\n", "Epoch 3333, Loss 2.927969\n", "Epoch 3334, Loss 2.927967\n", "Epoch 3335, Loss 2.927967\n", "Epoch 3336, Loss 2.927963\n", "Epoch 3337, Loss 2.927963\n", "Epoch 3338, Loss 2.927962\n", "Epoch 3339, Loss 2.927962\n", "Epoch 3340, Loss 2.927961\n", "Epoch 3341, Loss 2.927960\n", "Epoch 3342, Loss 2.927959\n", "Epoch 3343, Loss 2.927958\n", "Epoch 3344, Loss 2.927956\n", "Epoch 3345, Loss 2.927955\n", "Epoch 3346, Loss 2.927954\n", "Epoch 3347, Loss 2.927953\n", "Epoch 3348, Loss 2.927953\n", "Epoch 3349, Loss 2.927950\n", "Epoch 3350, Loss 2.927950\n", "Epoch 3351, Loss 2.927948\n", "Epoch 3352, Loss 2.927947\n", "Epoch 3353, Loss 2.927948\n", "Epoch 3354, Loss 2.927945\n", "Epoch 3355, Loss 2.927944\n", "Epoch 3356, Loss 2.927944\n", "Epoch 3357, Loss 2.927944\n", "Epoch 3358, Loss 2.927942\n", "Epoch 3359, Loss 2.927941\n", "Epoch 3360, Loss 2.927940\n", "Epoch 3361, Loss 2.927938\n", "Epoch 3362, Loss 2.927938\n", "Epoch 3363, Loss 2.927936\n", "Epoch 3364, Loss 2.927936\n", "Epoch 3365, Loss 2.927937\n", "Epoch 3366, Loss 2.927934\n", "Epoch 3367, Loss 2.927934\n", "Epoch 3368, Loss 2.927933\n", "Epoch 3369, Loss 2.927930\n", "Epoch 3370, Loss 2.927929\n", "Epoch 3371, Loss 2.927931\n", "Epoch 3372, Loss 2.927929\n", "Epoch 3373, Loss 2.927927\n", "Epoch 3374, Loss 2.927926\n", "Epoch 3375, Loss 2.927925\n", "Epoch 3376, Loss 2.927924\n", "Epoch 3377, Loss 2.927922\n", "Epoch 
3378, Loss 2.927924\n", "Epoch 3379, Loss 2.927922\n", "Epoch 3380, Loss 2.927921\n", "Epoch 3381, Loss 2.927920\n", "Epoch 3382, Loss 2.927918\n", "Epoch 3383, Loss 2.927917\n", "Epoch 3384, Loss 2.927917\n", "Epoch 3385, Loss 2.927915\n", "Epoch 3386, Loss 2.927916\n", "Epoch 3387, Loss 2.927914\n", "Epoch 3388, Loss 2.927914\n", "Epoch 3389, Loss 2.927912\n", "Epoch 3390, Loss 2.927913\n", "Epoch 3391, Loss 2.927911\n", "Epoch 3392, Loss 2.927910\n", "Epoch 3393, Loss 2.927909\n", "Epoch 3394, Loss 2.927908\n", "Epoch 3395, Loss 2.927907\n", "Epoch 3396, Loss 2.927906\n", "Epoch 3397, Loss 2.927905\n", "Epoch 3398, Loss 2.927905\n", "Epoch 3399, Loss 2.927904\n", "Epoch 3400, Loss 2.927902\n", "Epoch 3401, Loss 2.927902\n", "Epoch 3402, Loss 2.927902\n", "Epoch 3403, Loss 2.927899\n", "Epoch 3404, Loss 2.927899\n", "Epoch 3405, Loss 2.927898\n", "Epoch 3406, Loss 2.927899\n", "Epoch 3407, Loss 2.927896\n", "Epoch 3408, Loss 2.927895\n", "Epoch 3409, Loss 2.927896\n", "Epoch 3410, Loss 2.927894\n", "Epoch 3411, Loss 2.927892\n", "Epoch 3412, Loss 2.927893\n", "Epoch 3413, Loss 2.927891\n", "Epoch 3414, Loss 2.927891\n", "Epoch 3415, Loss 2.927890\n", "Epoch 3416, Loss 2.927891\n", "Epoch 3417, Loss 2.927888\n", "Epoch 3418, Loss 2.927888\n", "Epoch 3419, Loss 2.927886\n", "Epoch 3420, Loss 2.927887\n", "Epoch 3421, Loss 2.927885\n", "Epoch 3422, Loss 2.927883\n", "Epoch 3423, Loss 2.927883\n", "Epoch 3424, Loss 2.927882\n", "Epoch 3425, Loss 2.927882\n", "Epoch 3426, Loss 2.927880\n", "Epoch 3427, Loss 2.927879\n", "Epoch 3428, Loss 2.927879\n", "Epoch 3429, Loss 2.927879\n", "Epoch 3430, Loss 2.927877\n", "Epoch 3431, Loss 2.927876\n", "Epoch 3432, Loss 2.927876\n", "Epoch 3433, Loss 2.927875\n", "Epoch 3434, Loss 2.927875\n", "Epoch 3435, Loss 2.927875\n", "Epoch 3436, Loss 2.927873\n", "Epoch 3437, Loss 2.927872\n", "Epoch 3438, Loss 2.927872\n", "Epoch 3439, Loss 2.927870\n", "Epoch 3440, Loss 2.927870\n", "Epoch 3441, Loss 2.927869\n", "Epoch 3442, Loss 2.927869\n", "Epoch 3443, Loss 2.927869\n", "Epoch 3444, Loss 2.927865\n", "Epoch 3445, Loss 2.927866\n", "Epoch 3446, Loss 2.927865\n", "Epoch 3447, Loss 2.927864\n", "Epoch 3448, Loss 2.927863\n", "Epoch 3449, Loss 2.927863\n", "Epoch 3450, Loss 2.927862\n", "Epoch 3451, Loss 2.927863\n", "Epoch 3452, Loss 2.927860\n", "Epoch 3453, Loss 2.927860\n", "Epoch 3454, Loss 2.927860\n", "Epoch 3455, Loss 2.927859\n", "Epoch 3456, Loss 2.927858\n", "Epoch 3457, Loss 2.927858\n", "Epoch 3458, Loss 2.927855\n", "Epoch 3459, Loss 2.927857\n", "Epoch 3460, Loss 2.927854\n", "Epoch 3461, Loss 2.927855\n", "Epoch 3462, Loss 2.927853\n", "Epoch 3463, Loss 2.927853\n", "Epoch 3464, Loss 2.927852\n", "Epoch 3465, Loss 2.927852\n", "Epoch 3466, Loss 2.927852\n", "Epoch 3467, Loss 2.927850\n", "Epoch 3468, Loss 2.927849\n", "Epoch 3469, Loss 2.927849\n", "Epoch 3470, Loss 2.927848\n", "Epoch 3471, Loss 2.927848\n", "Epoch 3472, Loss 2.927847\n", "Epoch 3473, Loss 2.927846\n", "Epoch 3474, Loss 2.927846\n", "Epoch 3475, Loss 2.927844\n", "Epoch 3476, Loss 2.927844\n", "Epoch 3477, Loss 2.927843\n", "Epoch 3478, Loss 2.927843\n", "Epoch 3479, Loss 2.927841\n", "Epoch 3480, Loss 2.927841\n", "Epoch 3481, Loss 2.927840\n", "Epoch 3482, Loss 2.927841\n", "Epoch 3483, Loss 2.927839\n", "Epoch 3484, Loss 2.927839\n", "Epoch 3485, Loss 2.927837\n", "Epoch 3486, Loss 2.927837\n", "Epoch 3487, Loss 2.927836\n", "Epoch 3488, Loss 2.927837\n", "Epoch 3489, Loss 2.927837\n", "Epoch 3490, Loss 2.927834\n", "Epoch 3491, Loss 2.927834\n", "Epoch 3492, Loss 
2.927833\n", "Epoch 3493, Loss 2.927833\n", "Epoch 3494, Loss 2.927833\n", "Epoch 3495, Loss 2.927832\n", "Epoch 3496, Loss 2.927831\n", "Epoch 3497, Loss 2.927831\n", "Epoch 3498, Loss 2.927830\n", "Epoch 3499, Loss 2.927831\n", "Epoch 3500, Loss 2.927828\n", "Epoch 3501, Loss 2.927826\n", "Epoch 3502, Loss 2.927828\n", "Epoch 3503, Loss 2.927826\n", "Epoch 3504, Loss 2.927825\n", "Epoch 3505, Loss 2.927827\n", "Epoch 3506, Loss 2.927824\n", "Epoch 3507, Loss 2.927822\n", "Epoch 3508, Loss 2.927824\n", "Epoch 3509, Loss 2.927823\n", "Epoch 3510, Loss 2.927822\n", "Epoch 3511, Loss 2.927823\n", "Epoch 3512, Loss 2.927822\n", "Epoch 3513, Loss 2.927821\n", "Epoch 3514, Loss 2.927821\n", "Epoch 3515, Loss 2.927820\n", "Epoch 3516, Loss 2.927819\n", "Epoch 3517, Loss 2.927818\n", "Epoch 3518, Loss 2.927817\n", "Epoch 3519, Loss 2.927817\n", "Epoch 3520, Loss 2.927817\n", "Epoch 3521, Loss 2.927817\n", "Epoch 3522, Loss 2.927815\n", "Epoch 3523, Loss 2.927815\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 3524, Loss 2.927814\n", "Epoch 3525, Loss 2.927813\n", "Epoch 3526, Loss 2.927814\n", "Epoch 3527, Loss 2.927814\n", "Epoch 3528, Loss 2.927812\n", "Epoch 3529, Loss 2.927812\n", "Epoch 3530, Loss 2.927810\n", "Epoch 3531, Loss 2.927811\n", "Epoch 3532, Loss 2.927810\n", "Epoch 3533, Loss 2.927808\n", "Epoch 3534, Loss 2.927809\n", "Epoch 3535, Loss 2.927808\n", "Epoch 3536, Loss 2.927807\n", "Epoch 3537, Loss 2.927808\n", "Epoch 3538, Loss 2.927806\n", "Epoch 3539, Loss 2.927805\n", "Epoch 3540, Loss 2.927806\n", "Epoch 3541, Loss 2.927805\n", "Epoch 3542, Loss 2.927804\n", "Epoch 3543, Loss 2.927805\n", "Epoch 3544, Loss 2.927803\n", "Epoch 3545, Loss 2.927802\n", "Epoch 3546, Loss 2.927803\n", "Epoch 3547, Loss 2.927801\n", "Epoch 3548, Loss 2.927802\n", "Epoch 3549, Loss 2.927799\n", "Epoch 3550, Loss 2.927800\n", "Epoch 3551, Loss 2.927799\n", "Epoch 3552, Loss 2.927799\n", "Epoch 3553, Loss 2.927798\n", "Epoch 3554, Loss 2.927798\n", "Epoch 3555, Loss 2.927797\n", "Epoch 3556, Loss 2.927798\n", "Epoch 3557, Loss 2.927795\n", "Epoch 3558, Loss 2.927796\n", "Epoch 3559, Loss 2.927795\n", "Epoch 3560, Loss 2.927795\n", "Epoch 3561, Loss 2.927793\n", "Epoch 3562, Loss 2.927793\n", "Epoch 3563, Loss 2.927793\n", "Epoch 3564, Loss 2.927792\n", "Epoch 3565, Loss 2.927794\n", "Epoch 3566, Loss 2.927791\n", "Epoch 3567, Loss 2.927793\n", "Epoch 3568, Loss 2.927791\n", "Epoch 3569, Loss 2.927789\n", "Epoch 3570, Loss 2.927791\n", "Epoch 3571, Loss 2.927790\n", "Epoch 3572, Loss 2.927788\n", "Epoch 3573, Loss 2.927788\n", "Epoch 3574, Loss 2.927789\n", "Epoch 3575, Loss 2.927788\n", "Epoch 3576, Loss 2.927787\n", "Epoch 3577, Loss 2.927787\n", "Epoch 3578, Loss 2.927786\n", "Epoch 3579, Loss 2.927787\n", "Epoch 3580, Loss 2.927785\n", "Epoch 3581, Loss 2.927784\n", "Epoch 3582, Loss 2.927785\n", "Epoch 3583, Loss 2.927784\n", "Epoch 3584, Loss 2.927784\n", "Epoch 3585, Loss 2.927783\n", "Epoch 3586, Loss 2.927782\n", "Epoch 3587, Loss 2.927781\n", "Epoch 3588, Loss 2.927782\n", "Epoch 3589, Loss 2.927780\n", "Epoch 3590, Loss 2.927781\n", "Epoch 3591, Loss 2.927779\n", "Epoch 3592, Loss 2.927779\n", "Epoch 3593, Loss 2.927779\n", "Epoch 3594, Loss 2.927780\n", "Epoch 3595, Loss 2.927779\n", "Epoch 3596, Loss 2.927778\n", "Epoch 3597, Loss 2.927778\n", "Epoch 3598, Loss 2.927776\n", "Epoch 3599, Loss 2.927776\n", "Epoch 3600, Loss 2.927775\n", "Epoch 3601, Loss 2.927776\n", "Epoch 3602, Loss 2.927774\n", "Epoch 3603, Loss 2.927774\n", "Epoch 3604, Loss 2.927773\n", "Epoch 
3605, Loss 2.927773\n", "Epoch 3606, Loss 2.927774\n", "Epoch 3607, Loss 2.927773\n", "Epoch 3608, Loss 2.927773\n", "Epoch 3609, Loss 2.927773\n", "Epoch 3610, Loss 2.927773\n", "Epoch 3611, Loss 2.927771\n", "Epoch 3612, Loss 2.927769\n", "Epoch 3613, Loss 2.927770\n", "Epoch 3614, Loss 2.927770\n", "Epoch 3615, Loss 2.927771\n", "Epoch 3616, Loss 2.927769\n", "Epoch 3617, Loss 2.927769\n", "Epoch 3618, Loss 2.927769\n", "Epoch 3619, Loss 2.927768\n", "Epoch 3620, Loss 2.927767\n", "Epoch 3621, Loss 2.927766\n", "Epoch 3622, Loss 2.927767\n", "Epoch 3623, Loss 2.927767\n", "Epoch 3624, Loss 2.927766\n", "Epoch 3625, Loss 2.927767\n", "Epoch 3626, Loss 2.927766\n", "Epoch 3627, Loss 2.927765\n", "Epoch 3628, Loss 2.927764\n", "Epoch 3629, Loss 2.927765\n", "Epoch 3630, Loss 2.927762\n", "Epoch 3631, Loss 2.927763\n", "Epoch 3632, Loss 2.927763\n", "Epoch 3633, Loss 2.927762\n", "Epoch 3634, Loss 2.927761\n", "Epoch 3635, Loss 2.927762\n", "Epoch 3636, Loss 2.927759\n", "Epoch 3637, Loss 2.927762\n", "Epoch 3638, Loss 2.927761\n", "Epoch 3639, Loss 2.927761\n", "Epoch 3640, Loss 2.927759\n", "Epoch 3641, Loss 2.927760\n", "Epoch 3642, Loss 2.927759\n", "Epoch 3643, Loss 2.927758\n", "Epoch 3644, Loss 2.927758\n", "Epoch 3645, Loss 2.927759\n", "Epoch 3646, Loss 2.927758\n", "Epoch 3647, Loss 2.927757\n", "Epoch 3648, Loss 2.927757\n", "Epoch 3649, Loss 2.927757\n", "Epoch 3650, Loss 2.927756\n", "Epoch 3651, Loss 2.927754\n", "Epoch 3652, Loss 2.927754\n", "Epoch 3653, Loss 2.927756\n", "Epoch 3654, Loss 2.927754\n", "Epoch 3655, Loss 2.927754\n", "Epoch 3656, Loss 2.927752\n", "Epoch 3657, Loss 2.927752\n", "Epoch 3658, Loss 2.927753\n", "Epoch 3659, Loss 2.927752\n", "Epoch 3660, Loss 2.927752\n", "Epoch 3661, Loss 2.927752\n", "Epoch 3662, Loss 2.927751\n", "Epoch 3663, Loss 2.927751\n", "Epoch 3664, Loss 2.927749\n", "Epoch 3665, Loss 2.927751\n", "Epoch 3666, Loss 2.927750\n", "Epoch 3667, Loss 2.927750\n", "Epoch 3668, Loss 2.927750\n", "Epoch 3669, Loss 2.927749\n", "Epoch 3670, Loss 2.927748\n", "Epoch 3671, Loss 2.927749\n", "Epoch 3672, Loss 2.927749\n", "Epoch 3673, Loss 2.927748\n", "Epoch 3674, Loss 2.927747\n", "Epoch 3675, Loss 2.927747\n", "Epoch 3676, Loss 2.927746\n", "Epoch 3677, Loss 2.927746\n", "Epoch 3678, Loss 2.927745\n", "Epoch 3679, Loss 2.927747\n", "Epoch 3680, Loss 2.927743\n", "Epoch 3681, Loss 2.927744\n", "Epoch 3682, Loss 2.927743\n", "Epoch 3683, Loss 2.927744\n", "Epoch 3684, Loss 2.927744\n", "Epoch 3685, Loss 2.927743\n", "Epoch 3686, Loss 2.927743\n", "Epoch 3687, Loss 2.927743\n", "Epoch 3688, Loss 2.927743\n", "Epoch 3689, Loss 2.927743\n", "Epoch 3690, Loss 2.927742\n", "Epoch 3691, Loss 2.927742\n", "Epoch 3692, Loss 2.927739\n", "Epoch 3693, Loss 2.927741\n", "Epoch 3694, Loss 2.927740\n", "Epoch 3695, Loss 2.927742\n", "Epoch 3696, Loss 2.927740\n", "Epoch 3697, Loss 2.927740\n", "Epoch 3698, Loss 2.927738\n", "Epoch 3699, Loss 2.927738\n", "Epoch 3700, Loss 2.927740\n", "Epoch 3701, Loss 2.927738\n", "Epoch 3702, Loss 2.927737\n", "Epoch 3703, Loss 2.927738\n", "Epoch 3704, Loss 2.927737\n", "Epoch 3705, Loss 2.927737\n", "Epoch 3706, Loss 2.927736\n", "Epoch 3707, Loss 2.927735\n", "Epoch 3708, Loss 2.927735\n", "Epoch 3709, Loss 2.927734\n", "Epoch 3710, Loss 2.927734\n", "Epoch 3711, Loss 2.927734\n", "Epoch 3712, Loss 2.927736\n", "Epoch 3713, Loss 2.927734\n", "Epoch 3714, Loss 2.927734\n", "Epoch 3715, Loss 2.927734\n", "Epoch 3716, Loss 2.927735\n", "Epoch 3717, Loss 2.927734\n", "Epoch 3718, Loss 2.927735\n", "Epoch 3719, Loss 
2.927733\n", "Epoch 3720, Loss 2.927733\n", "Epoch 3721, Loss 2.927732\n", "Epoch 3722, Loss 2.927732\n", "Epoch 3723, Loss 2.927732\n", "Epoch 3724, Loss 2.927731\n", "Epoch 3725, Loss 2.927731\n", "Epoch 3726, Loss 2.927731\n", "Epoch 3727, Loss 2.927730\n", "Epoch 3728, Loss 2.927730\n", "Epoch 3729, Loss 2.927729\n", "Epoch 3730, Loss 2.927731\n", "Epoch 3731, Loss 2.927731\n", "Epoch 3732, Loss 2.927728\n", "Epoch 3733, Loss 2.927729\n", "Epoch 3734, Loss 2.927729\n", "Epoch 3735, Loss 2.927728\n", "Epoch 3736, Loss 2.927727\n", "Epoch 3737, Loss 2.927728\n", "Epoch 3738, Loss 2.927728\n", "Epoch 3739, Loss 2.927727\n", "Epoch 3740, Loss 2.927727\n", "Epoch 3741, Loss 2.927727\n", "Epoch 3742, Loss 2.927727\n", "Epoch 3743, Loss 2.927727\n", "Epoch 3744, Loss 2.927725\n", "Epoch 3745, Loss 2.927724\n", "Epoch 3746, Loss 2.927726\n", "Epoch 3747, Loss 2.927725\n", "Epoch 3748, Loss 2.927723\n", "Epoch 3749, Loss 2.927725\n", "Epoch 3750, Loss 2.927724\n", "Epoch 3751, Loss 2.927725\n", "Epoch 3752, Loss 2.927724\n", "Epoch 3753, Loss 2.927724\n", "Epoch 3754, Loss 2.927723\n", "Epoch 3755, Loss 2.927723\n", "Epoch 3756, Loss 2.927721\n", "Epoch 3757, Loss 2.927722\n", "Epoch 3758, Loss 2.927723\n", "Epoch 3759, Loss 2.927721\n", "Epoch 3760, Loss 2.927722\n", "Epoch 3761, Loss 2.927721\n", "Epoch 3762, Loss 2.927721\n", "Epoch 3763, Loss 2.927720\n", "Epoch 3764, Loss 2.927720\n", "Epoch 3765, Loss 2.927719\n", "Epoch 3766, Loss 2.927720\n", "Epoch 3767, Loss 2.927719\n", "Epoch 3768, Loss 2.927721\n", "Epoch 3769, Loss 2.927719\n", "Epoch 3770, Loss 2.927719\n", "Epoch 3771, Loss 2.927718\n", "Epoch 3772, Loss 2.927719\n", "Epoch 3773, Loss 2.927717\n", "Epoch 3774, Loss 2.927718\n", "Epoch 3775, Loss 2.927717\n", "Epoch 3776, Loss 2.927718\n", "Epoch 3777, Loss 2.927717\n", "Epoch 3778, Loss 2.927717\n", "Epoch 3779, Loss 2.927716\n", "Epoch 3780, Loss 2.927716\n", "Epoch 3781, Loss 2.927717\n", "Epoch 3782, Loss 2.927717\n", "Epoch 3783, Loss 2.927716\n", "Epoch 3784, Loss 2.927715\n", "Epoch 3785, Loss 2.927715\n", "Epoch 3786, Loss 2.927715\n", "Epoch 3787, Loss 2.927715\n", "Epoch 3788, Loss 2.927715\n", "Epoch 3789, Loss 2.927715\n", "Epoch 3790, Loss 2.927714\n", "Epoch 3791, Loss 2.927714\n", "Epoch 3792, Loss 2.927714\n", "Epoch 3793, Loss 2.927713\n", "Epoch 3794, Loss 2.927713\n", "Epoch 3795, Loss 2.927714\n", "Epoch 3796, Loss 2.927713\n", "Epoch 3797, Loss 2.927712\n", "Epoch 3798, Loss 2.927712\n", "Epoch 3799, Loss 2.927712\n", "Epoch 3800, Loss 2.927711\n", "Epoch 3801, Loss 2.927711\n", "Epoch 3802, Loss 2.927713\n", "Epoch 3803, Loss 2.927711\n", "Epoch 3804, Loss 2.927712\n", "Epoch 3805, Loss 2.927711\n", "Epoch 3806, Loss 2.927711\n", "Epoch 3807, Loss 2.927711\n", "Epoch 3808, Loss 2.927709\n", "Epoch 3809, Loss 2.927711\n", "Epoch 3810, Loss 2.927710\n", "Epoch 3811, Loss 2.927708\n", "Epoch 3812, Loss 2.927708\n", "Epoch 3813, Loss 2.927709\n", "Epoch 3814, Loss 2.927709\n", "Epoch 3815, Loss 2.927710\n", "Epoch 3816, Loss 2.927708\n", "Epoch 3817, Loss 2.927708\n", "Epoch 3818, Loss 2.927706\n", "Epoch 3819, Loss 2.927707\n", "Epoch 3820, Loss 2.927708\n", "Epoch 3821, Loss 2.927707\n", "Epoch 3822, Loss 2.927707\n", "Epoch 3823, Loss 2.927707\n", "Epoch 3824, Loss 2.927708\n", "Epoch 3825, Loss 2.927708\n", "Epoch 3826, Loss 2.927706\n", "Epoch 3827, Loss 2.927707\n", "Epoch 3828, Loss 2.927706\n", "Epoch 3829, Loss 2.927706\n", "Epoch 3830, Loss 2.927706\n", "Epoch 3831, Loss 2.927705\n", "Epoch 3832, Loss 2.927705\n", "Epoch 3833, Loss 2.927705\n", "Epoch 
3834, Loss 2.927705\n", "Epoch 3835, Loss 2.927705\n", "Epoch 3836, Loss 2.927704\n", "Epoch 3837, Loss 2.927703\n", "Epoch 3838, Loss 2.927704\n", "Epoch 3839, Loss 2.927704\n", "Epoch 3840, Loss 2.927703\n", "Epoch 3841, Loss 2.927702\n", "Epoch 3842, Loss 2.927703\n", "Epoch 3843, Loss 2.927703\n", "Epoch 3844, Loss 2.927704\n", "Epoch 3845, Loss 2.927702\n", "Epoch 3846, Loss 2.927701\n", "Epoch 3847, Loss 2.927703\n", "Epoch 3848, Loss 2.927702\n", "Epoch 3849, Loss 2.927701\n", "Epoch 3850, Loss 2.927701\n", "Epoch 3851, Loss 2.927703\n", "Epoch 3852, Loss 2.927700\n", "Epoch 3853, Loss 2.927701\n", "Epoch 3854, Loss 2.927701\n", "Epoch 3855, Loss 2.927700\n", "Epoch 3856, Loss 2.927700\n", "Epoch 3857, Loss 2.927700\n", "Epoch 3858, Loss 2.927701\n", "Epoch 3859, Loss 2.927700\n", "Epoch 3860, Loss 2.927700\n", "Epoch 3861, Loss 2.927700\n", "Epoch 3862, Loss 2.927699\n", "Epoch 3863, Loss 2.927698\n", "Epoch 3864, Loss 2.927700\n", "Epoch 3865, Loss 2.927697\n", "Epoch 3866, Loss 2.927700\n", "Epoch 3867, Loss 2.927700\n", "Epoch 3868, Loss 2.927698\n", "Epoch 3869, Loss 2.927697\n", "Epoch 3870, Loss 2.927698\n", "Epoch 3871, Loss 2.927696\n", "Epoch 3872, Loss 2.927699\n", "Epoch 3873, Loss 2.927697\n", "Epoch 3874, Loss 2.927696\n", "Epoch 3875, Loss 2.927699\n", "Epoch 3876, Loss 2.927697\n", "Epoch 3877, Loss 2.927696\n", "Epoch 3878, Loss 2.927697\n", "Epoch 3879, Loss 2.927696\n", "Epoch 3880, Loss 2.927696\n", "Epoch 3881, Loss 2.927696\n", "Epoch 3882, Loss 2.927696\n", "Epoch 3883, Loss 2.927695\n", "Epoch 3884, Loss 2.927695\n", "Epoch 3885, Loss 2.927696\n", "Epoch 3886, Loss 2.927696\n", "Epoch 3887, Loss 2.927695\n", "Epoch 3888, Loss 2.927694\n", "Epoch 3889, Loss 2.927694\n", "Epoch 3890, Loss 2.927694\n", "Epoch 3891, Loss 2.927693\n", "Epoch 3892, Loss 2.927695\n", "Epoch 3893, Loss 2.927695\n", "Epoch 3894, Loss 2.927694\n", "Epoch 3895, Loss 2.927695\n", "Epoch 3896, Loss 2.927693\n", "Epoch 3897, Loss 2.927693\n", "Epoch 3898, Loss 2.927695\n", "Epoch 3899, Loss 2.927693\n", "Epoch 3900, Loss 2.927692\n", "Epoch 3901, Loss 2.927694\n", "Epoch 3902, Loss 2.927692\n", "Epoch 3903, Loss 2.927693\n", "Epoch 3904, Loss 2.927691\n", "Epoch 3905, Loss 2.927692\n", "Epoch 3906, Loss 2.927692\n", "Epoch 3907, Loss 2.927692\n", "Epoch 3908, Loss 2.927692\n", "Epoch 3909, Loss 2.927692\n", "Epoch 3910, Loss 2.927690\n", "Epoch 3911, Loss 2.927692\n", "Epoch 3912, Loss 2.927691\n", "Epoch 3913, Loss 2.927691\n", "Epoch 3914, Loss 2.927689\n", "Epoch 3915, Loss 2.927691\n", "Epoch 3916, Loss 2.927691\n", "Epoch 3917, Loss 2.927689\n", "Epoch 3918, Loss 2.927690\n", "Epoch 3919, Loss 2.927690\n", "Epoch 3920, Loss 2.927690\n", "Epoch 3921, Loss 2.927690\n", "Epoch 3922, Loss 2.927689\n", "Epoch 3923, Loss 2.927688\n", "Epoch 3924, Loss 2.927689\n", "Epoch 3925, Loss 2.927688\n", "Epoch 3926, Loss 2.927689\n", "Epoch 3927, Loss 2.927689\n", "Epoch 3928, Loss 2.927689\n", "Epoch 3929, Loss 2.927688\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 3930, Loss 2.927688\n", "Epoch 3931, Loss 2.927688\n", "Epoch 3932, Loss 2.927687\n", "Epoch 3933, Loss 2.927689\n", "Epoch 3934, Loss 2.927688\n", "Epoch 3935, Loss 2.927687\n", "Epoch 3936, Loss 2.927688\n", "Epoch 3937, Loss 2.927686\n", "Epoch 3938, Loss 2.927686\n", "Epoch 3939, Loss 2.927686\n", "Epoch 3940, Loss 2.927687\n", "Epoch 3941, Loss 2.927687\n", "Epoch 3942, Loss 2.927686\n", "Epoch 3943, Loss 2.927687\n", "Epoch 3944, Loss 2.927686\n", "Epoch 3945, Loss 2.927685\n", "Epoch 3946, Loss 2.927685\n", 
"Epoch 3947, Loss 2.927686\n", "Epoch 3948, Loss 2.927685\n", "Epoch 3949, Loss 2.927686\n", "Epoch 3950, Loss 2.927686\n", "Epoch 3951, Loss 2.927686\n", "Epoch 3952, Loss 2.927685\n", "Epoch 3953, Loss 2.927686\n", "Epoch 3954, Loss 2.927685\n", "Epoch 3955, Loss 2.927685\n", "Epoch 3956, Loss 2.927683\n", "Epoch 3957, Loss 2.927684\n", "Epoch 3958, Loss 2.927685\n", "Epoch 3959, Loss 2.927684\n", "Epoch 3960, Loss 2.927684\n", "Epoch 3961, Loss 2.927684\n", "Epoch 3962, Loss 2.927685\n", "Epoch 3963, Loss 2.927683\n", "Epoch 3964, Loss 2.927685\n", "Epoch 3965, Loss 2.927685\n", "Epoch 3966, Loss 2.927684\n", "Epoch 3967, Loss 2.927683\n", "Epoch 3968, Loss 2.927683\n", "Epoch 3969, Loss 2.927682\n", "Epoch 3970, Loss 2.927683\n", "Epoch 3971, Loss 2.927684\n", "Epoch 3972, Loss 2.927682\n", "Epoch 3973, Loss 2.927683\n", "Epoch 3974, Loss 2.927683\n", "Epoch 3975, Loss 2.927683\n", "Epoch 3976, Loss 2.927683\n", "Epoch 3977, Loss 2.927682\n", "Epoch 3978, Loss 2.927682\n", "Epoch 3979, Loss 2.927681\n", "Epoch 3980, Loss 2.927682\n", "Epoch 3981, Loss 2.927682\n", "Epoch 3982, Loss 2.927682\n", "Epoch 3983, Loss 2.927682\n", "Epoch 3984, Loss 2.927682\n", "Epoch 3985, Loss 2.927683\n", "Epoch 3986, Loss 2.927680\n", "Epoch 3987, Loss 2.927682\n", "Epoch 3988, Loss 2.927681\n", "Epoch 3989, Loss 2.927681\n", "Epoch 3990, Loss 2.927680\n", "Epoch 3991, Loss 2.927681\n", "Epoch 3992, Loss 2.927680\n", "Epoch 3993, Loss 2.927680\n", "Epoch 3994, Loss 2.927681\n", "Epoch 3995, Loss 2.927681\n", "Epoch 3996, Loss 2.927679\n", "Epoch 3997, Loss 2.927680\n", "Epoch 3998, Loss 2.927679\n", "Epoch 3999, Loss 2.927680\n", "Epoch 4000, Loss 2.927680\n", "Epoch 4001, Loss 2.927681\n", "Epoch 4002, Loss 2.927679\n", "Epoch 4003, Loss 2.927679\n", "Epoch 4004, Loss 2.927680\n", "Epoch 4005, Loss 2.927680\n", "Epoch 4006, Loss 2.927677\n", "Epoch 4007, Loss 2.927678\n", "Epoch 4008, Loss 2.927679\n", "Epoch 4009, Loss 2.927678\n", "Epoch 4010, Loss 2.927679\n", "Epoch 4011, Loss 2.927678\n", "Epoch 4012, Loss 2.927679\n", "Epoch 4013, Loss 2.927677\n", "Epoch 4014, Loss 2.927677\n", "Epoch 4015, Loss 2.927676\n", "Epoch 4016, Loss 2.927679\n", "Epoch 4017, Loss 2.927677\n", "Epoch 4018, Loss 2.927677\n", "Epoch 4019, Loss 2.927678\n", "Epoch 4020, Loss 2.927677\n", "Epoch 4021, Loss 2.927677\n", "Epoch 4022, Loss 2.927677\n", "Epoch 4023, Loss 2.927677\n", "Epoch 4024, Loss 2.927677\n", "Epoch 4025, Loss 2.927676\n", "Epoch 4026, Loss 2.927677\n", "Epoch 4027, Loss 2.927675\n", "Epoch 4028, Loss 2.927677\n", "Epoch 4029, Loss 2.927674\n", "Epoch 4030, Loss 2.927676\n", "Epoch 4031, Loss 2.927675\n", "Epoch 4032, Loss 2.927675\n", "Epoch 4033, Loss 2.927676\n", "Epoch 4034, Loss 2.927675\n", "Epoch 4035, Loss 2.927674\n", "Epoch 4036, Loss 2.927674\n", "Epoch 4037, Loss 2.927677\n", "Epoch 4038, Loss 2.927674\n", "Epoch 4039, Loss 2.927676\n", "Epoch 4040, Loss 2.927675\n", "Epoch 4041, Loss 2.927675\n", "Epoch 4042, Loss 2.927675\n", "Epoch 4043, Loss 2.927674\n", "Epoch 4044, Loss 2.927673\n", "Epoch 4045, Loss 2.927675\n", "Epoch 4046, Loss 2.927674\n", "Epoch 4047, Loss 2.927674\n", "Epoch 4048, Loss 2.927675\n", "Epoch 4049, Loss 2.927673\n", "Epoch 4050, Loss 2.927675\n", "Epoch 4051, Loss 2.927675\n", "Epoch 4052, Loss 2.927673\n", "Epoch 4053, Loss 2.927673\n", "Epoch 4054, Loss 2.927674\n", "Epoch 4055, Loss 2.927673\n", "Epoch 4056, Loss 2.927674\n", "Epoch 4057, Loss 2.927672\n", "Epoch 4058, Loss 2.927674\n", "Epoch 4059, Loss 2.927675\n", "Epoch 4060, Loss 2.927673\n", "Epoch 4061, Loss 
2.927673\n", "Epoch 4062, Loss 2.927675\n", "Epoch 4063, Loss 2.927673\n", "Epoch 4064, Loss 2.927673\n", "Epoch 4065, Loss 2.927672\n", "Epoch 4066, Loss 2.927673\n", "Epoch 4067, Loss 2.927673\n", "Epoch 4068, Loss 2.927672\n", "Epoch 4069, Loss 2.927672\n", "Epoch 4070, Loss 2.927674\n", "Epoch 4071, Loss 2.927673\n", "Epoch 4072, Loss 2.927671\n", "Epoch 4073, Loss 2.927672\n", "Epoch 4074, Loss 2.927672\n", "Epoch 4075, Loss 2.927672\n", "Epoch 4076, Loss 2.927672\n", "Epoch 4077, Loss 2.927671\n", "Epoch 4078, Loss 2.927673\n", "Epoch 4079, Loss 2.927672\n", "Epoch 4080, Loss 2.927671\n", "Epoch 4081, Loss 2.927670\n", "Epoch 4082, Loss 2.927673\n", "Epoch 4083, Loss 2.927672\n", "Epoch 4084, Loss 2.927670\n", "Epoch 4085, Loss 2.927670\n", "Epoch 4086, Loss 2.927670\n", "Epoch 4087, Loss 2.927672\n", "Epoch 4088, Loss 2.927670\n", "Epoch 4089, Loss 2.927670\n", "Epoch 4090, Loss 2.927670\n", "Epoch 4091, Loss 2.927670\n", "Epoch 4092, Loss 2.927671\n", "Epoch 4093, Loss 2.927670\n", "Epoch 4094, Loss 2.927669\n", "Epoch 4095, Loss 2.927670\n", "Epoch 4096, Loss 2.927670\n", "Epoch 4097, Loss 2.927671\n", "Epoch 4098, Loss 2.927670\n", "Epoch 4099, Loss 2.927671\n", "Epoch 4100, Loss 2.927670\n", "Epoch 4101, Loss 2.927670\n", "Epoch 4102, Loss 2.927671\n", "Epoch 4103, Loss 2.927669\n", "Epoch 4104, Loss 2.927670\n", "Epoch 4105, Loss 2.927670\n", "Epoch 4106, Loss 2.927671\n", "Epoch 4107, Loss 2.927670\n", "Epoch 4108, Loss 2.927668\n", "Epoch 4109, Loss 2.927668\n", "Epoch 4110, Loss 2.927668\n", "Epoch 4111, Loss 2.927670\n", "Epoch 4112, Loss 2.927670\n", "Epoch 4113, Loss 2.927669\n", "Epoch 4114, Loss 2.927670\n", "Epoch 4115, Loss 2.927669\n", "Epoch 4116, Loss 2.927669\n", "Epoch 4117, Loss 2.927670\n", "Epoch 4118, Loss 2.927669\n", "Epoch 4119, Loss 2.927670\n", "Epoch 4120, Loss 2.927668\n", "Epoch 4121, Loss 2.927668\n", "Epoch 4122, Loss 2.927670\n", "Epoch 4123, Loss 2.927668\n", "Epoch 4124, Loss 2.927670\n", "Epoch 4125, Loss 2.927666\n", "Epoch 4126, Loss 2.927669\n", "Epoch 4127, Loss 2.927668\n", "Epoch 4128, Loss 2.927669\n", "Epoch 4129, Loss 2.927667\n", "Epoch 4130, Loss 2.927667\n", "Epoch 4131, Loss 2.927668\n", "Epoch 4132, Loss 2.927668\n", "Epoch 4133, Loss 2.927667\n", "Epoch 4134, Loss 2.927667\n", "Epoch 4135, Loss 2.927666\n", "Epoch 4136, Loss 2.927669\n", "Epoch 4137, Loss 2.927666\n", "Epoch 4138, Loss 2.927668\n", "Epoch 4139, Loss 2.927666\n", "Epoch 4140, Loss 2.927667\n", "Epoch 4141, Loss 2.927668\n", "Epoch 4142, Loss 2.927666\n", "Epoch 4143, Loss 2.927667\n", "Epoch 4144, Loss 2.927666\n", "Epoch 4145, Loss 2.927666\n", "Epoch 4146, Loss 2.927667\n", "Epoch 4147, Loss 2.927667\n", "Epoch 4148, Loss 2.927667\n", "Epoch 4149, Loss 2.927665\n", "Epoch 4150, Loss 2.927666\n", "Epoch 4151, Loss 2.927666\n", "Epoch 4152, Loss 2.927666\n", "Epoch 4153, Loss 2.927666\n", "Epoch 4154, Loss 2.927666\n", "Epoch 4155, Loss 2.927665\n", "Epoch 4156, Loss 2.927666\n", "Epoch 4157, Loss 2.927665\n", "Epoch 4158, Loss 2.927666\n", "Epoch 4159, Loss 2.927666\n", "Epoch 4160, Loss 2.927664\n", "Epoch 4161, Loss 2.927666\n", "Epoch 4162, Loss 2.927665\n", "Epoch 4163, Loss 2.927666\n", "Epoch 4164, Loss 2.927664\n", "Epoch 4165, Loss 2.927665\n", "Epoch 4166, Loss 2.927665\n", "Epoch 4167, Loss 2.927665\n", "Epoch 4168, Loss 2.927666\n", "Epoch 4169, Loss 2.927664\n", "Epoch 4170, Loss 2.927665\n", "Epoch 4171, Loss 2.927666\n", "Epoch 4172, Loss 2.927663\n", "Epoch 4173, Loss 2.927664\n", "Epoch 4174, Loss 2.927664\n", "Epoch 4175, Loss 2.927665\n", "Epoch 
4176, Loss 2.927663\n", "Epoch 4177, Loss 2.927664\n", "Epoch 4178, Loss 2.927664\n", "Epoch 4179, Loss 2.927663\n", "Epoch 4180, Loss 2.927664\n", "Epoch 4181, Loss 2.927664\n", "Epoch 4182, Loss 2.927663\n", "Epoch 4183, Loss 2.927664\n", "Epoch 4184, Loss 2.927664\n", "Epoch 4185, Loss 2.927662\n", "Epoch 4186, Loss 2.927665\n", "Epoch 4187, Loss 2.927663\n", "Epoch 4188, Loss 2.927662\n", "Epoch 4189, Loss 2.927663\n", "Epoch 4190, Loss 2.927664\n", "Epoch 4191, Loss 2.927664\n", "Epoch 4192, Loss 2.927662\n", "Epoch 4193, Loss 2.927663\n", "Epoch 4194, Loss 2.927663\n", "Epoch 4195, Loss 2.927665\n", "Epoch 4196, Loss 2.927664\n", "Epoch 4197, Loss 2.927663\n", "Epoch 4198, Loss 2.927662\n", "Epoch 4199, Loss 2.927664\n", "Epoch 4200, Loss 2.927663\n", "Epoch 4201, Loss 2.927662\n", "Epoch 4202, Loss 2.927662\n", "Epoch 4203, Loss 2.927662\n", "Epoch 4204, Loss 2.927663\n", "Epoch 4205, Loss 2.927663\n", "Epoch 4206, Loss 2.927662\n", "Epoch 4207, Loss 2.927662\n", "Epoch 4208, Loss 2.927663\n", "Epoch 4209, Loss 2.927664\n", "Epoch 4210, Loss 2.927662\n", "Epoch 4211, Loss 2.927660\n", "Epoch 4212, Loss 2.927662\n", "Epoch 4213, Loss 2.927662\n", "Epoch 4214, Loss 2.927662\n", "Epoch 4215, Loss 2.927662\n", "Epoch 4216, Loss 2.927661\n", "Epoch 4217, Loss 2.927662\n", "Epoch 4218, Loss 2.927662\n", "Epoch 4219, Loss 2.927663\n", "Epoch 4220, Loss 2.927662\n", "Epoch 4221, Loss 2.927662\n", "Epoch 4222, Loss 2.927662\n", "Epoch 4223, Loss 2.927662\n", "Epoch 4224, Loss 2.927660\n", "Epoch 4225, Loss 2.927662\n", "Epoch 4226, Loss 2.927661\n", "Epoch 4227, Loss 2.927661\n", "Epoch 4228, Loss 2.927661\n", "Epoch 4229, Loss 2.927660\n", "Epoch 4230, Loss 2.927662\n", "Epoch 4231, Loss 2.927662\n", "Epoch 4232, Loss 2.927660\n", "Epoch 4233, Loss 2.927662\n", "Epoch 4234, Loss 2.927661\n", "Epoch 4235, Loss 2.927661\n", "Epoch 4236, Loss 2.927661\n", "Epoch 4237, Loss 2.927660\n", "Epoch 4238, Loss 2.927661\n", "Epoch 4239, Loss 2.927660\n", "Epoch 4240, Loss 2.927662\n", "Epoch 4241, Loss 2.927660\n", "Epoch 4242, Loss 2.927659\n", "Epoch 4243, Loss 2.927660\n", "Epoch 4244, Loss 2.927661\n", "Epoch 4245, Loss 2.927662\n", "Epoch 4246, Loss 2.927660\n", "Epoch 4247, Loss 2.927659\n", "Epoch 4248, Loss 2.927660\n", "Epoch 4249, Loss 2.927660\n", "Epoch 4250, Loss 2.927662\n", "Epoch 4251, Loss 2.927660\n", "Epoch 4252, Loss 2.927660\n", "Epoch 4253, Loss 2.927660\n", "Epoch 4254, Loss 2.927660\n", "Epoch 4255, Loss 2.927660\n", "Epoch 4256, Loss 2.927660\n", "Epoch 4257, Loss 2.927659\n", "Epoch 4258, Loss 2.927660\n", "Epoch 4259, Loss 2.927658\n", "Epoch 4260, Loss 2.927659\n", "Epoch 4261, Loss 2.927662\n", "Epoch 4262, Loss 2.927658\n", "Epoch 4263, Loss 2.927659\n", "Epoch 4264, Loss 2.927660\n", "Epoch 4265, Loss 2.927659\n", "Epoch 4266, Loss 2.927660\n", "Epoch 4267, Loss 2.927659\n", "Epoch 4268, Loss 2.927659\n", "Epoch 4269, Loss 2.927660\n", "Epoch 4270, Loss 2.927660\n", "Epoch 4271, Loss 2.927660\n", "Epoch 4272, Loss 2.927659\n", "Epoch 4273, Loss 2.927658\n", "Epoch 4274, Loss 2.927659\n", "Epoch 4275, Loss 2.927659\n", "Epoch 4276, Loss 2.927659\n", "Epoch 4277, Loss 2.927659\n", "Epoch 4278, Loss 2.927658\n", "Epoch 4279, Loss 2.927658\n", "Epoch 4280, Loss 2.927659\n", "Epoch 4281, Loss 2.927659\n", "Epoch 4282, Loss 2.927660\n", "Epoch 4283, Loss 2.927660\n", "Epoch 4284, Loss 2.927658\n", "Epoch 4285, Loss 2.927659\n", "Epoch 4286, Loss 2.927658\n", "Epoch 4287, Loss 2.927659\n", "Epoch 4288, Loss 2.927658\n", "Epoch 4289, Loss 2.927657\n", "Epoch 4290, Loss 
2.927660\n", "Epoch 4291, Loss 2.927659\n", "Epoch 4292, Loss 2.927659\n", "Epoch 4293, Loss 2.927659\n", "Epoch 4294, Loss 2.927659\n", "Epoch 4295, Loss 2.927657\n", "Epoch 4296, Loss 2.927657\n", "Epoch 4297, Loss 2.927657\n", "Epoch 4298, Loss 2.927658\n", "Epoch 4299, Loss 2.927659\n", "Epoch 4300, Loss 2.927660\n", "Epoch 4301, Loss 2.927657\n", "Epoch 4302, Loss 2.927658\n", "Epoch 4303, Loss 2.927658\n", "Epoch 4304, Loss 2.927658\n", "Epoch 4305, Loss 2.927658\n", "Epoch 4306, Loss 2.927658\n", "Epoch 4307, Loss 2.927657\n", "Epoch 4308, Loss 2.927658\n", "Epoch 4309, Loss 2.927657\n", "Epoch 4310, Loss 2.927659\n", "Epoch 4311, Loss 2.927659\n", "Epoch 4312, Loss 2.927658\n", "Epoch 4313, Loss 2.927656\n", "Epoch 4314, Loss 2.927658\n", "Epoch 4315, Loss 2.927657\n", "Epoch 4316, Loss 2.927657\n", "Epoch 4317, Loss 2.927658\n", "Epoch 4318, Loss 2.927658\n", "Epoch 4319, Loss 2.927657\n", "Epoch 4320, Loss 2.927655\n", "Epoch 4321, Loss 2.927657\n", "Epoch 4322, Loss 2.927657\n", "Epoch 4323, Loss 2.927657\n", "Epoch 4324, Loss 2.927658\n", "Epoch 4325, Loss 2.927657\n", "Epoch 4326, Loss 2.927657\n", "Epoch 4327, Loss 2.927657\n", "Epoch 4328, Loss 2.927656\n", "Epoch 4329, Loss 2.927657\n", "Epoch 4330, Loss 2.927656\n", "Epoch 4331, Loss 2.927656\n", "Epoch 4332, Loss 2.927657\n", "Epoch 4333, Loss 2.927656\n", "Epoch 4334, Loss 2.927656\n", "Epoch 4335, Loss 2.927657\n", "Epoch 4336, Loss 2.927656\n", "Epoch 4337, Loss 2.927656\n", "Epoch 4338, Loss 2.927655\n", "Epoch 4339, Loss 2.927657\n", "Epoch 4340, Loss 2.927656\n", "Epoch 4341, Loss 2.927657\n", "Epoch 4342, Loss 2.927656\n", "Epoch 4343, Loss 2.927656\n", "Epoch 4344, Loss 2.927657\n", "Epoch 4345, Loss 2.927656\n", "Epoch 4346, Loss 2.927656\n", "Epoch 4347, Loss 2.927657\n", "Epoch 4348, Loss 2.927656\n", "Epoch 4349, Loss 2.927656\n", "Epoch 4350, Loss 2.927657\n", "Epoch 4351, Loss 2.927657\n", "Epoch 4352, Loss 2.927657\n", "Epoch 4353, Loss 2.927656\n", "Epoch 4354, Loss 2.927656\n", "Epoch 4355, Loss 2.927656\n", "Epoch 4356, Loss 2.927656\n", "Epoch 4357, Loss 2.927657\n", "Epoch 4358, Loss 2.927656\n", "Epoch 4359, Loss 2.927656\n", "Epoch 4360, Loss 2.927656\n", "Epoch 4361, Loss 2.927655\n", "Epoch 4362, Loss 2.927657\n", "Epoch 4363, Loss 2.927654\n", "Epoch 4364, Loss 2.927656\n", "Epoch 4365, Loss 2.927655\n", "Epoch 4366, Loss 2.927656\n", "Epoch 4367, Loss 2.927655\n", "Epoch 4368, Loss 2.927655\n", "Epoch 4369, Loss 2.927656\n", "Epoch 4370, Loss 2.927654\n", "Epoch 4371, Loss 2.927657\n", "Epoch 4372, Loss 2.927655\n", "Epoch 4373, Loss 2.927657\n", "Epoch 4374, Loss 2.927656\n", "Epoch 4375, Loss 2.927655\n", "Epoch 4376, Loss 2.927654\n", "Epoch 4377, Loss 2.927655\n", "Epoch 4378, Loss 2.927655\n", "Epoch 4379, Loss 2.927654\n", "Epoch 4380, Loss 2.927656\n", "Epoch 4381, Loss 2.927655\n", "Epoch 4382, Loss 2.927655\n", "Epoch 4383, Loss 2.927655\n", "Epoch 4384, Loss 2.927656\n", "Epoch 4385, Loss 2.927656\n", "Epoch 4386, Loss 2.927655\n", "Epoch 4387, Loss 2.927654\n", "Epoch 4388, Loss 2.927654\n", "Epoch 4389, Loss 2.927654\n", "Epoch 4390, Loss 2.927655\n", "Epoch 4391, Loss 2.927656\n", "Epoch 4392, Loss 2.927655\n", "Epoch 4393, Loss 2.927656\n", "Epoch 4394, Loss 2.927655\n", "Epoch 4395, Loss 2.927654\n", "Epoch 4396, Loss 2.927655\n", "Epoch 4397, Loss 2.927655\n", "Epoch 4398, Loss 2.927656\n", "Epoch 4399, Loss 2.927654\n", "Epoch 4400, Loss 2.927655\n", "Epoch 4401, Loss 2.927654\n", "Epoch 4402, Loss 2.927655\n", "Epoch 4403, Loss 2.927655\n", "Epoch 4404, Loss 2.927656\n", "Epoch 
4405, Loss 2.927655\n", "Epoch 4406, Loss 2.927655\n", "Epoch 4407, Loss 2.927654\n", "Epoch 4408, Loss 2.927653\n", "Epoch 4409, Loss 2.927655\n", "Epoch 4410, Loss 2.927654\n", "Epoch 4411, Loss 2.927654\n", "Epoch 4412, Loss 2.927655\n", "Epoch 4413, Loss 2.927655\n", "Epoch 4414, Loss 2.927653\n", "Epoch 4415, Loss 2.927654\n", "Epoch 4416, Loss 2.927654\n", "Epoch 4417, Loss 2.927654\n", "Epoch 4418, Loss 2.927654\n", "Epoch 4419, Loss 2.927655\n", "Epoch 4420, Loss 2.927654\n", "Epoch 4421, Loss 2.927653\n", "Epoch 4422, Loss 2.927655\n", "Epoch 4423, Loss 2.927653\n", "Epoch 4424, Loss 2.927654\n", "Epoch 4425, Loss 2.927655\n", "Epoch 4426, Loss 2.927654\n", "Epoch 4427, Loss 2.927654\n", "Epoch 4428, Loss 2.927653\n", "Epoch 4429, Loss 2.927654\n", "Epoch 4430, Loss 2.927652\n", "Epoch 4431, Loss 2.927654\n", "Epoch 4432, Loss 2.927655\n", "Epoch 4433, Loss 2.927654\n", "Epoch 4434, Loss 2.927655\n", "Epoch 4435, Loss 2.927652\n", "Epoch 4436, Loss 2.927653\n", "Epoch 4437, Loss 2.927654\n", "Epoch 4438, Loss 2.927654\n", "Epoch 4439, Loss 2.927654\n", "Epoch 4440, Loss 2.927654\n", "Epoch 4441, Loss 2.927653\n", "Epoch 4442, Loss 2.927653\n", "Epoch 4443, Loss 2.927653\n", "Epoch 4444, Loss 2.927653\n", "Epoch 4445, Loss 2.927653\n", "Epoch 4446, Loss 2.927652\n", "Epoch 4447, Loss 2.927655\n", "Epoch 4448, Loss 2.927654\n", "Epoch 4449, Loss 2.927654\n", "Epoch 4450, Loss 2.927653\n", "Epoch 4451, Loss 2.927651\n", "Epoch 4452, Loss 2.927653\n", "Epoch 4453, Loss 2.927654\n", "Epoch 4454, Loss 2.927653\n", "Epoch 4455, Loss 2.927655\n", "Epoch 4456, Loss 2.927653\n", "Epoch 4457, Loss 2.927652\n", "Epoch 4458, Loss 2.927654\n", "Epoch 4459, Loss 2.927653\n", "Epoch 4460, Loss 2.927654\n", "Epoch 4461, Loss 2.927653\n", "Epoch 4462, Loss 2.927652\n", "Epoch 4463, Loss 2.927653\n", "Epoch 4464, Loss 2.927653\n", "Epoch 4465, Loss 2.927654\n", "Epoch 4466, Loss 2.927653\n", "Epoch 4467, Loss 2.927652\n", "Epoch 4468, Loss 2.927653\n", "Epoch 4469, Loss 2.927653\n", "Epoch 4470, Loss 2.927653\n", "Epoch 4471, Loss 2.927653\n", "Epoch 4472, Loss 2.927652\n", "Epoch 4473, Loss 2.927652\n", "Epoch 4474, Loss 2.927652\n", "Epoch 4475, Loss 2.927652\n", "Epoch 4476, Loss 2.927653\n", "Epoch 4477, Loss 2.927652\n", "Epoch 4478, Loss 2.927653\n", "Epoch 4479, Loss 2.927653\n", "Epoch 4480, Loss 2.927653\n", "Epoch 4481, Loss 2.927653\n", "Epoch 4482, Loss 2.927654\n", "Epoch 4483, Loss 2.927653\n", "Epoch 4484, Loss 2.927652\n", "Epoch 4485, Loss 2.927653\n", "Epoch 4486, Loss 2.927653\n", "Epoch 4487, Loss 2.927652\n", "Epoch 4488, Loss 2.927653\n", "Epoch 4489, Loss 2.927652\n", "Epoch 4490, Loss 2.927651\n", "Epoch 4491, Loss 2.927651\n", "Epoch 4492, Loss 2.927653\n", "Epoch 4493, Loss 2.927653\n", "Epoch 4494, Loss 2.927653\n", "Epoch 4495, Loss 2.927651\n", "Epoch 4496, Loss 2.927653\n", "Epoch 4497, Loss 2.927652\n", "Epoch 4498, Loss 2.927653\n", "Epoch 4499, Loss 2.927651\n", "Epoch 4500, Loss 2.927652\n", "Epoch 4501, Loss 2.927653\n", "Epoch 4502, Loss 2.927653\n", "Epoch 4503, Loss 2.927652\n", "Epoch 4504, Loss 2.927651\n", "Epoch 4505, Loss 2.927652\n", "Epoch 4506, Loss 2.927651\n", "Epoch 4507, Loss 2.927652\n", "Epoch 4508, Loss 2.927651\n", "Epoch 4509, Loss 2.927652\n", "Epoch 4510, Loss 2.927650\n", "Epoch 4511, Loss 2.927651\n", "Epoch 4512, Loss 2.927652\n", "Epoch 4513, Loss 2.927652\n", "Epoch 4514, Loss 2.927650\n", "Epoch 4515, Loss 2.927651\n", "Epoch 4516, Loss 2.927653\n", "Epoch 4517, Loss 2.927652\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ 
"Epoch 4518, Loss 2.927650\n", "Epoch 4519, Loss 2.927651\n", "Epoch 4520, Loss 2.927651\n", "Epoch 4521, Loss 2.927652\n", "Epoch 4522, Loss 2.927652\n", "Epoch 4523, Loss 2.927653\n", "Epoch 4524, Loss 2.927652\n", "Epoch 4525, Loss 2.927653\n", "Epoch 4526, Loss 2.927653\n", "Epoch 4527, Loss 2.927652\n", "Epoch 4528, Loss 2.927651\n", "Epoch 4529, Loss 2.927651\n", "Epoch 4530, Loss 2.927651\n", "Epoch 4531, Loss 2.927651\n", "Epoch 4532, Loss 2.927652\n", "Epoch 4533, Loss 2.927651\n", "Epoch 4534, Loss 2.927651\n", "Epoch 4535, Loss 2.927651\n", "Epoch 4536, Loss 2.927653\n", "Epoch 4537, Loss 2.927651\n", "Epoch 4538, Loss 2.927650\n", "Epoch 4539, Loss 2.927651\n", "Epoch 4540, Loss 2.927651\n", "Epoch 4541, Loss 2.927651\n", "Epoch 4542, Loss 2.927651\n", "Epoch 4543, Loss 2.927650\n", "Epoch 4544, Loss 2.927651\n", "Epoch 4545, Loss 2.927652\n", "Epoch 4546, Loss 2.927651\n", "Epoch 4547, Loss 2.927650\n", "Epoch 4548, Loss 2.927651\n", "Epoch 4549, Loss 2.927652\n", "Epoch 4550, Loss 2.927652\n", "Epoch 4551, Loss 2.927653\n", "Epoch 4552, Loss 2.927651\n", "Epoch 4553, Loss 2.927652\n", "Epoch 4554, Loss 2.927651\n", "Epoch 4555, Loss 2.927650\n", "Epoch 4556, Loss 2.927652\n", "Epoch 4557, Loss 2.927651\n", "Epoch 4558, Loss 2.927651\n", "Epoch 4559, Loss 2.927650\n", "Epoch 4560, Loss 2.927651\n", "Epoch 4561, Loss 2.927653\n", "Epoch 4562, Loss 2.927651\n", "Epoch 4563, Loss 2.927650\n", "Epoch 4564, Loss 2.927651\n", "Epoch 4565, Loss 2.927652\n", "Epoch 4566, Loss 2.927650\n", "Epoch 4567, Loss 2.927650\n", "Epoch 4568, Loss 2.927651\n", "Epoch 4569, Loss 2.927652\n", "Epoch 4570, Loss 2.927650\n", "Epoch 4571, Loss 2.927649\n", "Epoch 4572, Loss 2.927650\n", "Epoch 4573, Loss 2.927650\n", "Epoch 4574, Loss 2.927650\n", "Epoch 4575, Loss 2.927651\n", "Epoch 4576, Loss 2.927650\n", "Epoch 4577, Loss 2.927651\n", "Epoch 4578, Loss 2.927651\n", "Epoch 4579, Loss 2.927652\n", "Epoch 4580, Loss 2.927649\n", "Epoch 4581, Loss 2.927652\n", "Epoch 4582, Loss 2.927651\n", "Epoch 4583, Loss 2.927649\n", "Epoch 4584, Loss 2.927650\n", "Epoch 4585, Loss 2.927650\n", "Epoch 4586, Loss 2.927650\n", "Epoch 4587, Loss 2.927651\n", "Epoch 4588, Loss 2.927650\n", "Epoch 4589, Loss 2.927650\n", "Epoch 4590, Loss 2.927652\n", "Epoch 4591, Loss 2.927650\n", "Epoch 4592, Loss 2.927650\n", "Epoch 4593, Loss 2.927651\n", "Epoch 4594, Loss 2.927650\n", "Epoch 4595, Loss 2.927651\n", "Epoch 4596, Loss 2.927650\n", "Epoch 4597, Loss 2.927651\n", "Epoch 4598, Loss 2.927652\n", "Epoch 4599, Loss 2.927650\n", "Epoch 4600, Loss 2.927651\n", "Epoch 4601, Loss 2.927650\n", "Epoch 4602, Loss 2.927649\n", "Epoch 4603, Loss 2.927650\n", "Epoch 4604, Loss 2.927651\n", "Epoch 4605, Loss 2.927651\n", "Epoch 4606, Loss 2.927651\n", "Epoch 4607, Loss 2.927651\n", "Epoch 4608, Loss 2.927651\n", "Epoch 4609, Loss 2.927649\n", "Epoch 4610, Loss 2.927649\n", "Epoch 4611, Loss 2.927650\n", "Epoch 4612, Loss 2.927649\n", "Epoch 4613, Loss 2.927649\n", "Epoch 4614, Loss 2.927650\n", "Epoch 4615, Loss 2.927650\n", "Epoch 4616, Loss 2.927651\n", "Epoch 4617, Loss 2.927650\n", "Epoch 4618, Loss 2.927651\n", "Epoch 4619, Loss 2.927649\n", "Epoch 4620, Loss 2.927650\n", "Epoch 4621, Loss 2.927651\n", "Epoch 4622, Loss 2.927650\n", "Epoch 4623, Loss 2.927651\n", "Epoch 4624, Loss 2.927651\n", "Epoch 4625, Loss 2.927651\n", "Epoch 4626, Loss 2.927651\n", "Epoch 4627, Loss 2.927649\n", "Epoch 4628, Loss 2.927651\n", "Epoch 4629, Loss 2.927651\n", "Epoch 4630, Loss 2.927650\n", "Epoch 4631, Loss 2.927650\n", "Epoch 4632, Loss 
2.927651\n", "Epoch 4633, Loss 2.927650\n", "Epoch 4634, Loss 2.927650\n", "Epoch 4635, Loss 2.927650\n", "Epoch 4636, Loss 2.927650\n", "Epoch 4637, Loss 2.927650\n", "Epoch 4638, Loss 2.927649\n", "Epoch 4639, Loss 2.927649\n", "Epoch 4640, Loss 2.927650\n", "Epoch 4641, Loss 2.927649\n", "Epoch 4642, Loss 2.927650\n", "Epoch 4643, Loss 2.927650\n", "Epoch 4644, Loss 2.927650\n", "Epoch 4645, Loss 2.927649\n", "Epoch 4646, Loss 2.927650\n", "Epoch 4647, Loss 2.927650\n", "Epoch 4648, Loss 2.927650\n", "Epoch 4649, Loss 2.927650\n", "Epoch 4650, Loss 2.927649\n", "Epoch 4651, Loss 2.927650\n", "Epoch 4652, Loss 2.927651\n", "Epoch 4653, Loss 2.927650\n", "Epoch 4654, Loss 2.927650\n", "Epoch 4655, Loss 2.927651\n", "Epoch 4656, Loss 2.927650\n", "Epoch 4657, Loss 2.927651\n", "Epoch 4658, Loss 2.927650\n", "Epoch 4659, Loss 2.927649\n", "Epoch 4660, Loss 2.927649\n", "Epoch 4661, Loss 2.927649\n", "Epoch 4662, Loss 2.927649\n", "Epoch 4663, Loss 2.927649\n", "Epoch 4664, Loss 2.927648\n", "Epoch 4665, Loss 2.927650\n", "Epoch 4666, Loss 2.927649\n", "Epoch 4667, Loss 2.927649\n", "Epoch 4668, Loss 2.927649\n", "Epoch 4669, Loss 2.927650\n", "Epoch 4670, Loss 2.927649\n", "Epoch 4671, Loss 2.927650\n", "Epoch 4672, Loss 2.927649\n", "Epoch 4673, Loss 2.927650\n", "Epoch 4674, Loss 2.927650\n", "Epoch 4675, Loss 2.927649\n", "Epoch 4676, Loss 2.927649\n", "Epoch 4677, Loss 2.927650\n", "Epoch 4678, Loss 2.927650\n", "Epoch 4679, Loss 2.927648\n", "Epoch 4680, Loss 2.927648\n", "Epoch 4681, Loss 2.927649\n", "Epoch 4682, Loss 2.927649\n", "Epoch 4683, Loss 2.927648\n", "Epoch 4684, Loss 2.927650\n", "Epoch 4685, Loss 2.927649\n", "Epoch 4686, Loss 2.927651\n", "Epoch 4687, Loss 2.927649\n", "Epoch 4688, Loss 2.927649\n", "Epoch 4689, Loss 2.927649\n", "Epoch 4690, Loss 2.927650\n", "Epoch 4691, Loss 2.927649\n", "Epoch 4692, Loss 2.927649\n", "Epoch 4693, Loss 2.927649\n", "Epoch 4694, Loss 2.927650\n", "Epoch 4695, Loss 2.927648\n", "Epoch 4696, Loss 2.927649\n", "Epoch 4697, Loss 2.927648\n", "Epoch 4698, Loss 2.927650\n", "Epoch 4699, Loss 2.927650\n", "Epoch 4700, Loss 2.927649\n", "Epoch 4701, Loss 2.927649\n", "Epoch 4702, Loss 2.927649\n", "Epoch 4703, Loss 2.927648\n", "Epoch 4704, Loss 2.927650\n", "Epoch 4705, Loss 2.927649\n", "Epoch 4706, Loss 2.927649\n", "Epoch 4707, Loss 2.927649\n", "Epoch 4708, Loss 2.927649\n", "Epoch 4709, Loss 2.927649\n", "Epoch 4710, Loss 2.927650\n", "Epoch 4711, Loss 2.927648\n", "Epoch 4712, Loss 2.927649\n", "Epoch 4713, Loss 2.927649\n", "Epoch 4714, Loss 2.927650\n", "Epoch 4715, Loss 2.927648\n", "Epoch 4716, Loss 2.927649\n", "Epoch 4717, Loss 2.927648\n", "Epoch 4718, Loss 2.927648\n", "Epoch 4719, Loss 2.927649\n", "Epoch 4720, Loss 2.927649\n", "Epoch 4721, Loss 2.927649\n", "Epoch 4722, Loss 2.927649\n", "Epoch 4723, Loss 2.927648\n", "Epoch 4724, Loss 2.927649\n", "Epoch 4725, Loss 2.927650\n", "Epoch 4726, Loss 2.927648\n", "Epoch 4727, Loss 2.927648\n", "Epoch 4728, Loss 2.927649\n", "Epoch 4729, Loss 2.927649\n", "Epoch 4730, Loss 2.927648\n", "Epoch 4731, Loss 2.927649\n", "Epoch 4732, Loss 2.927649\n", "Epoch 4733, Loss 2.927649\n", "Epoch 4734, Loss 2.927649\n", "Epoch 4735, Loss 2.927650\n", "Epoch 4736, Loss 2.927649\n", "Epoch 4737, Loss 2.927650\n", "Epoch 4738, Loss 2.927648\n", "Epoch 4739, Loss 2.927649\n", "Epoch 4740, Loss 2.927648\n", "Epoch 4741, Loss 2.927648\n", "Epoch 4742, Loss 2.927649\n", "Epoch 4743, Loss 2.927649\n", "Epoch 4744, Loss 2.927649\n", "Epoch 4745, Loss 2.927648\n", "Epoch 4746, Loss 2.927649\n", "Epoch 
4747, Loss 2.927648\n", "Epoch 4748, Loss 2.927648\n", "Epoch 4749, Loss 2.927650\n", "Epoch 4750, Loss 2.927649\n", "Epoch 4751, Loss 2.927648\n", "Epoch 4752, Loss 2.927648\n", "Epoch 4753, Loss 2.927648\n", "Epoch 4754, Loss 2.927647\n", "Epoch 4755, Loss 2.927649\n", "Epoch 4756, Loss 2.927647\n", "Epoch 4757, Loss 2.927649\n", "Epoch 4758, Loss 2.927648\n", "Epoch 4759, Loss 2.927649\n", "Epoch 4760, Loss 2.927649\n", "Epoch 4761, Loss 2.927648\n", "Epoch 4762, Loss 2.927649\n", "Epoch 4763, Loss 2.927647\n", "Epoch 4764, Loss 2.927650\n", "Epoch 4765, Loss 2.927649\n", "Epoch 4766, Loss 2.927649\n", "Epoch 4767, Loss 2.927649\n", "Epoch 4768, Loss 2.927648\n", "Epoch 4769, Loss 2.927648\n", "Epoch 4770, Loss 2.927648\n", "Epoch 4771, Loss 2.927648\n", "Epoch 4772, Loss 2.927649\n", "Epoch 4773, Loss 2.927649\n", "Epoch 4774, Loss 2.927650\n", "Epoch 4775, Loss 2.927647\n", "Epoch 4776, Loss 2.927648\n", "Epoch 4777, Loss 2.927648\n", "Epoch 4778, Loss 2.927650\n", "Epoch 4779, Loss 2.927649\n", "Epoch 4780, Loss 2.927648\n", "Epoch 4781, Loss 2.927648\n", "Epoch 4782, Loss 2.927649\n", "Epoch 4783, Loss 2.927649\n", "Epoch 4784, Loss 2.927648\n", "Epoch 4785, Loss 2.927648\n", "Epoch 4786, Loss 2.927650\n", "Epoch 4787, Loss 2.927648\n", "Epoch 4788, Loss 2.927648\n", "Epoch 4789, Loss 2.927648\n", "Epoch 4790, Loss 2.927648\n", "Epoch 4791, Loss 2.927647\n", "Epoch 4792, Loss 2.927650\n", "Epoch 4793, Loss 2.927648\n", "Epoch 4794, Loss 2.927649\n", "Epoch 4795, Loss 2.927648\n", "Epoch 4796, Loss 2.927648\n", "Epoch 4797, Loss 2.927648\n", "Epoch 4798, Loss 2.927648\n", "Epoch 4799, Loss 2.927648\n", "Epoch 4800, Loss 2.927647\n", "Epoch 4801, Loss 2.927649\n", "Epoch 4802, Loss 2.927647\n", "Epoch 4803, Loss 2.927648\n", "Epoch 4804, Loss 2.927649\n", "Epoch 4805, Loss 2.927646\n", "Epoch 4806, Loss 2.927648\n", "Epoch 4807, Loss 2.927648\n", "Epoch 4808, Loss 2.927649\n", "Epoch 4809, Loss 2.927648\n", "Epoch 4810, Loss 2.927649\n", "Epoch 4811, Loss 2.927649\n", "Epoch 4812, Loss 2.927647\n", "Epoch 4813, Loss 2.927648\n", "Epoch 4814, Loss 2.927648\n", "Epoch 4815, Loss 2.927647\n", "Epoch 4816, Loss 2.927648\n", "Epoch 4817, Loss 2.927648\n", "Epoch 4818, Loss 2.927647\n", "Epoch 4819, Loss 2.927648\n", "Epoch 4820, Loss 2.927649\n", "Epoch 4821, Loss 2.927648\n", "Epoch 4822, Loss 2.927647\n", "Epoch 4823, Loss 2.927649\n", "Epoch 4824, Loss 2.927649\n", "Epoch 4825, Loss 2.927649\n", "Epoch 4826, Loss 2.927648\n", "Epoch 4827, Loss 2.927649\n", "Epoch 4828, Loss 2.927649\n", "Epoch 4829, Loss 2.927647\n", "Epoch 4830, Loss 2.927648\n", "Epoch 4831, Loss 2.927648\n", "Epoch 4832, Loss 2.927646\n", "Epoch 4833, Loss 2.927648\n", "Epoch 4834, Loss 2.927648\n", "Epoch 4835, Loss 2.927647\n", "Epoch 4836, Loss 2.927648\n", "Epoch 4837, Loss 2.927647\n", "Epoch 4838, Loss 2.927648\n", "Epoch 4839, Loss 2.927647\n", "Epoch 4840, Loss 2.927650\n", "Epoch 4841, Loss 2.927648\n", "Epoch 4842, Loss 2.927648\n", "Epoch 4843, Loss 2.927649\n", "Epoch 4844, Loss 2.927647\n", "Epoch 4845, Loss 2.927647\n", "Epoch 4846, Loss 2.927648\n", "Epoch 4847, Loss 2.927647\n", "Epoch 4848, Loss 2.927648\n", "Epoch 4849, Loss 2.927647\n", "Epoch 4850, Loss 2.927648\n", "Epoch 4851, Loss 2.927647\n", "Epoch 4852, Loss 2.927648\n", "Epoch 4853, Loss 2.927649\n", "Epoch 4854, Loss 2.927647\n", "Epoch 4855, Loss 2.927648\n", "Epoch 4856, Loss 2.927647\n", "Epoch 4857, Loss 2.927648\n", "Epoch 4858, Loss 2.927648\n", "Epoch 4859, Loss 2.927649\n", "Epoch 4860, Loss 2.927648\n", "Epoch 4861, Loss 
2.927646\n", "Epoch 4862, Loss 2.927648\n", "Epoch 4863, Loss 2.927648\n", "Epoch 4864, Loss 2.927647\n", "Epoch 4865, Loss 2.927648\n", "Epoch 4866, Loss 2.927648\n", "Epoch 4867, Loss 2.927648\n", "Epoch 4868, Loss 2.927648\n", "Epoch 4869, Loss 2.927648\n", "Epoch 4870, Loss 2.927648\n", "Epoch 4871, Loss 2.927647\n", "Epoch 4872, Loss 2.927648\n", "Epoch 4873, Loss 2.927649\n", "Epoch 4874, Loss 2.927647\n", "Epoch 4875, Loss 2.927648\n", "Epoch 4876, Loss 2.927647\n", "Epoch 4877, Loss 2.927648\n", "Epoch 4878, Loss 2.927647\n", "Epoch 4879, Loss 2.927648\n", "Epoch 4880, Loss 2.927648\n", "Epoch 4881, Loss 2.927648\n", "Epoch 4882, Loss 2.927648\n", "Epoch 4883, Loss 2.927647\n", "Epoch 4884, Loss 2.927647\n", "Epoch 4885, Loss 2.927649\n", "Epoch 4886, Loss 2.927647\n", "Epoch 4887, Loss 2.927649\n", "Epoch 4888, Loss 2.927648\n", "Epoch 4889, Loss 2.927648\n", "Epoch 4890, Loss 2.927647\n", "Epoch 4891, Loss 2.927647\n", "Epoch 4892, Loss 2.927648\n", "Epoch 4893, Loss 2.927646\n", "Epoch 4894, Loss 2.927649\n", "Epoch 4895, Loss 2.927648\n", "Epoch 4896, Loss 2.927648\n", "Epoch 4897, Loss 2.927647\n", "Epoch 4898, Loss 2.927647\n", "Epoch 4899, Loss 2.927648\n", "Epoch 4900, Loss 2.927648\n", "Epoch 4901, Loss 2.927649\n", "Epoch 4902, Loss 2.927648\n", "Epoch 4903, Loss 2.927647\n", "Epoch 4904, Loss 2.927647\n", "Epoch 4905, Loss 2.927646\n", "Epoch 4906, Loss 2.927648\n", "Epoch 4907, Loss 2.927647\n", "Epoch 4908, Loss 2.927647\n", "Epoch 4909, Loss 2.927648\n", "Epoch 4910, Loss 2.927647\n", "Epoch 4911, Loss 2.927648\n", "Epoch 4912, Loss 2.927647\n", "Epoch 4913, Loss 2.927648\n", "Epoch 4914, Loss 2.927648\n", "Epoch 4915, Loss 2.927647\n", "Epoch 4916, Loss 2.927648\n", "Epoch 4917, Loss 2.927649\n", "Epoch 4918, Loss 2.927648\n", "Epoch 4919, Loss 2.927648\n", "Epoch 4920, Loss 2.927647\n", "Epoch 4921, Loss 2.927647\n", "Epoch 4922, Loss 2.927648\n", "Epoch 4923, Loss 2.927647\n", "Epoch 4924, Loss 2.927648\n", "Epoch 4925, Loss 2.927646\n", "Epoch 4926, Loss 2.927647\n", "Epoch 4927, Loss 2.927648\n", "Epoch 4928, Loss 2.927647\n", "Epoch 4929, Loss 2.927648\n", "Epoch 4930, Loss 2.927648\n", "Epoch 4931, Loss 2.927648\n", "Epoch 4932, Loss 2.927646\n", "Epoch 4933, Loss 2.927649\n", "Epoch 4934, Loss 2.927648\n", "Epoch 4935, Loss 2.927646\n", "Epoch 4936, Loss 2.927649\n", "Epoch 4937, Loss 2.927646\n", "Epoch 4938, Loss 2.927647\n", "Epoch 4939, Loss 2.927646\n", "Epoch 4940, Loss 2.927648\n", "Epoch 4941, Loss 2.927648\n", "Epoch 4942, Loss 2.927646\n", "Epoch 4943, Loss 2.927648\n", "Epoch 4944, Loss 2.927647\n", "Epoch 4945, Loss 2.927647\n", "Epoch 4946, Loss 2.927648\n", "Epoch 4947, Loss 2.927649\n", "Epoch 4948, Loss 2.927646\n", "Epoch 4949, Loss 2.927648\n", "Epoch 4950, Loss 2.927646\n", "Epoch 4951, Loss 2.927646\n", "Epoch 4952, Loss 2.927647\n", "Epoch 4953, Loss 2.927647\n", "Epoch 4954, Loss 2.927647\n", "Epoch 4955, Loss 2.927647\n", "Epoch 4956, Loss 2.927648\n", "Epoch 4957, Loss 2.927646\n", "Epoch 4958, Loss 2.927648\n", "Epoch 4959, Loss 2.927647\n", "Epoch 4960, Loss 2.927648\n", "Epoch 4961, Loss 2.927646\n", "Epoch 4962, Loss 2.927648\n", "Epoch 4963, Loss 2.927647\n", "Epoch 4964, Loss 2.927648\n", "Epoch 4965, Loss 2.927648\n", "Epoch 4966, Loss 2.927646\n", "Epoch 4967, Loss 2.927647\n", "Epoch 4968, Loss 2.927648\n", "Epoch 4969, Loss 2.927648\n", "Epoch 4970, Loss 2.927648\n", "Epoch 4971, Loss 2.927647\n", "Epoch 4972, Loss 2.927648\n", "Epoch 4973, Loss 2.927646\n", "Epoch 4974, Loss 2.927647\n", "Epoch 4975, Loss 2.927647\n", "Epoch 
4976, Loss 2.927647\n", "Epoch 4977, Loss 2.927647\n", "Epoch 4978, Loss 2.927648\n", "Epoch 4979, Loss 2.927647\n", "Epoch 4980, Loss 2.927646\n", "Epoch 4981, Loss 2.927648\n", "Epoch 4982, Loss 2.927646\n", "Epoch 4983, Loss 2.927648\n", "Epoch 4984, Loss 2.927646\n", "Epoch 4985, Loss 2.927648\n", "Epoch 4986, Loss 2.927647\n", "Epoch 4987, Loss 2.927647\n", "Epoch 4988, Loss 2.927647\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 4989, Loss 2.927648\n", "Epoch 4990, Loss 2.927646\n", "Epoch 4991, Loss 2.927648\n", "Epoch 4992, Loss 2.927647\n", "Epoch 4993, Loss 2.927646\n", "Epoch 4994, Loss 2.927647\n", "Epoch 4995, Loss 2.927648\n", "Epoch 4996, Loss 2.927647\n", "Epoch 4997, Loss 2.927647\n", "Epoch 4998, Loss 2.927647\n", "Epoch 4999, Loss 2.927648\n" ] }, { "data": { "text/plain": [ "tensor([ 5.3671, -17.3012])" ] }, "execution_count": 84, "metadata": {}, "output_type": "execute_result" } ], "source": [ "params = torch.tensor([1.0, 0.0])\n", "\n", "nepochs = 5000\n", "\n", "learning_rate = 1e-2\n", "\n", "for epoch in range(nepochs):\n", " # forward pass\n", " w, b = params\n", " t_p = model(t_un, w, b)\n", "\n", " loss = loss_fn(t_p, t_c)\n", " print('Epoch %d, Loss %f' % (epoch, float(loss)))\n", " \n", " # backward pass\n", " grad = grad_fn(t_un, t_c, t_p, w, b)\n", "\n", " params = params - learning_rate * grad\n", " \n", "params" ] }, { "cell_type": "code", "execution_count": 95, "metadata": {}, "outputs": [], "source": [ "def model(t_u, w, b):\n", " return w * t_u + b\n", "\n", "def loss_fn(t_p, t_c):\n", " sq_diffs = (t_p - t_c)**2\n", " return sq_diffs.mean()" ] }, { "cell_type": "code", "execution_count": 96, "metadata": {}, "outputs": [], "source": [ "params = torch.tensor([1.0, 0.0], requires_grad=True)\n", "\n", "loss = loss_fn(model(t_u, *params), t_c)" ] }, { "cell_type": "code", "execution_count": 97, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "True" ] }, "execution_count": 97, "metadata": {}, "output_type": "execute_result" } ], "source": [ "params.grad is None" ] }, { "cell_type": "code", "execution_count": 98, "metadata": {}, "outputs": [], "source": [ "loss.backward()" ] }, { "cell_type": "code", "execution_count": 99, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "tensor([4517.2969, 82.6000])" ] }, "execution_count": 99, "metadata": {}, "output_type": "execute_result" } ], "source": [ "params.grad" ] }, { "cell_type": "code", "execution_count": 22, "metadata": {}, "outputs": [], "source": [ "if params.grad is not None:\n", " params.grad.zero_()" ] }, { "cell_type": "code", "execution_count": 100, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Epoch 0, Loss 80.364342\n", "Epoch 1, Loss 37.574917\n", "Epoch 2, Loss 30.871077\n", "Epoch 3, Loss 29.756193\n", "Epoch 4, Loss 29.507149\n", "Epoch 5, Loss 29.392458\n", "Epoch 6, Loss 29.298828\n", "Epoch 7, Loss 29.208717\n", "Epoch 8, Loss 29.119417\n", "Epoch 9, Loss 29.030487\n", "Epoch 10, Loss 28.941875\n", "Epoch 11, Loss 28.853565\n", "Epoch 12, Loss 28.765556\n", "Epoch 13, Loss 28.677851\n", "Epoch 14, Loss 28.590431\n", "Epoch 15, Loss 28.503321\n", "Epoch 16, Loss 28.416496\n", "Epoch 17, Loss 28.329973\n", "Epoch 18, Loss 28.243738\n", "Epoch 19, Loss 28.157801\n", "Epoch 20, Loss 28.072151\n", "Epoch 21, Loss 27.986799\n", "Epoch 22, Loss 27.901731\n", "Epoch 23, Loss 27.816954\n", "Epoch 24, Loss 27.732460\n", "Epoch 25, Loss 27.648256\n", "Epoch 26, Loss 27.564342\n", "Epoch 27, Loss 27.480711\n", "Epoch 28, Loss 
27.397358\n", "Epoch 29, Loss 27.314293\n", "Epoch 30, Loss 27.231512\n", "Epoch 31, Loss 27.149006\n", "Epoch 32, Loss 27.066790\n", "Epoch 33, Loss 26.984844\n", "Epoch 34, Loss 26.903173\n", "Epoch 35, Loss 26.821791\n", "Epoch 36, Loss 26.740675\n", "Epoch 37, Loss 26.659838\n", "Epoch 38, Loss 26.579279\n", "Epoch 39, Loss 26.498987\n", "Epoch 40, Loss 26.418974\n", "Epoch 41, Loss 26.339228\n", "Epoch 42, Loss 26.259752\n", "Epoch 43, Loss 26.180548\n", "Epoch 44, Loss 26.101616\n", "Epoch 45, Loss 26.022947\n", "Epoch 46, Loss 25.944550\n", "Epoch 47, Loss 25.866417\n", "Epoch 48, Loss 25.788549\n", "Epoch 49, Loss 25.710936\n", "Epoch 50, Loss 25.633600\n", "Epoch 51, Loss 25.556524\n", "Epoch 52, Loss 25.479700\n", "Epoch 53, Loss 25.403145\n", "Epoch 54, Loss 25.326849\n", "Epoch 55, Loss 25.250811\n", "Epoch 56, Loss 25.175035\n", "Epoch 57, Loss 25.099510\n", "Epoch 58, Loss 25.024248\n", "Epoch 59, Loss 24.949238\n", "Epoch 60, Loss 24.874483\n", "Epoch 61, Loss 24.799980\n", "Epoch 62, Loss 24.725737\n", "Epoch 63, Loss 24.651735\n", "Epoch 64, Loss 24.577990\n", "Epoch 65, Loss 24.504494\n", "Epoch 66, Loss 24.431250\n", "Epoch 67, Loss 24.358257\n", "Epoch 68, Loss 24.285503\n", "Epoch 69, Loss 24.212996\n", "Epoch 70, Loss 24.140747\n", "Epoch 71, Loss 24.068733\n", "Epoch 72, Loss 23.996967\n", "Epoch 73, Loss 23.925446\n", "Epoch 74, Loss 23.854168\n", "Epoch 75, Loss 23.783129\n", "Epoch 76, Loss 23.712328\n", "Epoch 77, Loss 23.641771\n", "Epoch 78, Loss 23.571455\n", "Epoch 79, Loss 23.501379\n", "Epoch 80, Loss 23.431538\n", "Epoch 81, Loss 23.361933\n", "Epoch 82, Loss 23.292566\n", "Epoch 83, Loss 23.223436\n", "Epoch 84, Loss 23.154539\n", "Epoch 85, Loss 23.085882\n", "Epoch 86, Loss 23.017447\n", "Epoch 87, Loss 22.949249\n", "Epoch 88, Loss 22.881281\n", "Epoch 89, Loss 22.813547\n", "Epoch 90, Loss 22.746044\n", "Epoch 91, Loss 22.678768\n", "Epoch 92, Loss 22.611719\n", "Epoch 93, Loss 22.544899\n", "Epoch 94, Loss 22.478306\n", "Epoch 95, Loss 22.411940\n", "Epoch 96, Loss 22.345793\n", "Epoch 97, Loss 22.279875\n", "Epoch 98, Loss 22.214186\n", "Epoch 99, Loss 22.148710\n", "Epoch 100, Loss 22.083464\n", "Epoch 101, Loss 22.018436\n", "Epoch 102, Loss 21.953630\n", "Epoch 103, Loss 21.889046\n", "Epoch 104, Loss 21.824677\n", "Epoch 105, Loss 21.760530\n", "Epoch 106, Loss 21.696600\n", "Epoch 107, Loss 21.632881\n", "Epoch 108, Loss 21.569389\n", "Epoch 109, Loss 21.506104\n", "Epoch 110, Loss 21.443037\n", "Epoch 111, Loss 21.380190\n", "Epoch 112, Loss 21.317547\n", "Epoch 113, Loss 21.255119\n", "Epoch 114, Loss 21.192904\n", "Epoch 115, Loss 21.130901\n", "Epoch 116, Loss 21.069105\n", "Epoch 117, Loss 21.007528\n", "Epoch 118, Loss 20.946150\n", "Epoch 119, Loss 20.884983\n", "Epoch 120, Loss 20.824026\n", "Epoch 121, Loss 20.763273\n", "Epoch 122, Loss 20.702726\n", "Epoch 123, Loss 20.642384\n", "Epoch 124, Loss 20.582251\n", "Epoch 125, Loss 20.522322\n", "Epoch 126, Loss 20.462589\n", "Epoch 127, Loss 20.403067\n", "Epoch 128, Loss 20.343746\n", "Epoch 129, Loss 20.284622\n", "Epoch 130, Loss 20.225702\n", "Epoch 131, Loss 20.166983\n", "Epoch 132, Loss 20.108461\n", "Epoch 133, Loss 20.050135\n", "Epoch 134, Loss 19.992014\n", "Epoch 135, Loss 19.934088\n", "Epoch 136, Loss 19.876352\n", "Epoch 137, Loss 19.818821\n", "Epoch 138, Loss 19.761480\n", "Epoch 139, Loss 19.704332\n", "Epoch 140, Loss 19.647387\n", "Epoch 141, Loss 19.590626\n", "Epoch 142, Loss 19.534063\n", "Epoch 143, Loss 19.477690\n", "Epoch 144, Loss 19.421507\n", "Epoch 145, 
Loss 19.365517\n", "Epoch 146, Loss 19.309715\n", "Epoch 147, Loss 19.254107\n", "Epoch 148, Loss 19.198685\n", "Epoch 149, Loss 19.143446\n", "Epoch 150, Loss 19.088400\n", "Epoch 151, Loss 19.033545\n", "Epoch 152, Loss 18.978868\n", "Epoch 153, Loss 18.924377\n", "Epoch 154, Loss 18.870081\n", "Epoch 155, Loss 18.815960\n", "Epoch 156, Loss 18.762022\n", "Epoch 157, Loss 18.708269\n", "Epoch 158, Loss 18.654703\n", "Epoch 159, Loss 18.601313\n", "Epoch 160, Loss 18.548111\n", "Epoch 161, Loss 18.495081\n", "Epoch 162, Loss 18.442234\n", "Epoch 163, Loss 18.389570\n", "Epoch 164, Loss 18.337080\n", "Epoch 165, Loss 18.284777\n", "Epoch 166, Loss 18.232643\n", "Epoch 167, Loss 18.180687\n", "Epoch 168, Loss 18.128904\n", "Epoch 169, Loss 18.077303\n", "Epoch 170, Loss 18.025879\n", "Epoch 171, Loss 17.974623\n", "Epoch 172, Loss 17.923546\n", "Epoch 173, Loss 17.872641\n", "Epoch 174, Loss 17.821907\n", "Epoch 175, Loss 17.771343\n", "Epoch 176, Loss 17.720955\n", "Epoch 177, Loss 17.670738\n", "Epoch 178, Loss 17.620691\n", "Epoch 179, Loss 17.570814\n", "Epoch 180, Loss 17.521105\n", "Epoch 181, Loss 17.471563\n", "Epoch 182, Loss 17.422194\n", "Epoch 183, Loss 17.372992\n", "Epoch 184, Loss 17.323954\n", "Epoch 185, Loss 17.275085\n", "Epoch 186, Loss 17.226379\n", "Epoch 187, Loss 17.177839\n", "Epoch 188, Loss 17.129467\n", "Epoch 189, Loss 17.081255\n", "Epoch 190, Loss 17.033207\n", "Epoch 191, Loss 16.985327\n", "Epoch 192, Loss 16.937605\n", "Epoch 193, Loss 16.890047\n", "Epoch 194, Loss 16.842649\n", "Epoch 195, Loss 16.795412\n", "Epoch 196, Loss 16.748339\n", "Epoch 197, Loss 16.701424\n", "Epoch 198, Loss 16.654661\n", "Epoch 199, Loss 16.608065\n", "Epoch 200, Loss 16.561625\n", "Epoch 201, Loss 16.515343\n", "Epoch 202, Loss 16.469219\n", "Epoch 203, Loss 16.423250\n", "Epoch 204, Loss 16.377434\n", "Epoch 205, Loss 16.331776\n", "Epoch 206, Loss 16.286276\n", "Epoch 207, Loss 16.240925\n", "Epoch 208, Loss 16.195734\n", "Epoch 209, Loss 16.150694\n", "Epoch 210, Loss 16.105806\n", "Epoch 211, Loss 16.061071\n", "Epoch 212, Loss 16.016487\n", "Epoch 213, Loss 15.972058\n", "Epoch 214, Loss 15.927777\n", "Epoch 215, Loss 15.883645\n", "Epoch 216, Loss 15.839664\n", "Epoch 217, Loss 15.795832\n", "Epoch 218, Loss 15.752149\n", "Epoch 219, Loss 15.708612\n", "Epoch 220, Loss 15.665228\n", "Epoch 221, Loss 15.621990\n", "Epoch 222, Loss 15.578897\n", "Epoch 223, Loss 15.535950\n", "Epoch 224, Loss 15.493152\n", "Epoch 225, Loss 15.450497\n", "Epoch 226, Loss 15.407981\n", "Epoch 227, Loss 15.365615\n", "Epoch 228, Loss 15.323395\n", "Epoch 229, Loss 15.281318\n", "Epoch 230, Loss 15.239380\n", "Epoch 231, Loss 15.197586\n", "Epoch 232, Loss 15.155931\n", "Epoch 233, Loss 15.114425\n", "Epoch 234, Loss 15.073053\n", "Epoch 235, Loss 15.031823\n", "Epoch 236, Loss 14.990737\n", "Epoch 237, Loss 14.949784\n", "Epoch 238, Loss 14.908973\n", "Epoch 239, Loss 14.868304\n", "Epoch 240, Loss 14.827767\n", "Epoch 241, Loss 14.787370\n", "Epoch 242, Loss 14.747110\n", "Epoch 243, Loss 14.706989\n", "Epoch 244, Loss 14.667002\n", "Epoch 245, Loss 14.627149\n", "Epoch 246, Loss 14.587436\n", "Epoch 247, Loss 14.547854\n", "Epoch 248, Loss 14.508408\n", "Epoch 249, Loss 14.469095\n", "Epoch 250, Loss 14.429919\n", "Epoch 251, Loss 14.390872\n", "Epoch 252, Loss 14.351956\n", "Epoch 253, Loss 14.313177\n", "Epoch 254, Loss 14.274525\n", "Epoch 255, Loss 14.236008\n", "Epoch 256, Loss 14.197620\n", "Epoch 257, Loss 14.159363\n", "Epoch 258, Loss 14.121234\n", "Epoch 259, Loss 14.083237\n", 
"Epoch 260, Loss 14.045368\n", "Epoch 261, Loss 14.007627\n", "Epoch 262, Loss 13.970016\n", "Epoch 263, Loss 13.932532\n", "Epoch 264, Loss 13.895172\n", "Epoch 265, Loss 13.857942\n", "Epoch 266, Loss 13.820837\n", "Epoch 267, Loss 13.783858\n", "Epoch 268, Loss 13.747006\n", "Epoch 269, Loss 13.710278\n", "Epoch 270, Loss 13.673676\n", "Epoch 271, Loss 13.637196\n", "Epoch 272, Loss 13.600842\n", "Epoch 273, Loss 13.564609\n", "Epoch 274, Loss 13.528501\n", "Epoch 275, Loss 13.492515\n", "Epoch 276, Loss 13.456651\n", "Epoch 277, Loss 13.420910\n", "Epoch 278, Loss 13.385287\n", "Epoch 279, Loss 13.349787\n", "Epoch 280, Loss 13.314410\n", "Epoch 281, Loss 13.279148\n", "Epoch 282, Loss 13.244009\n", "Epoch 283, Loss 13.208993\n", "Epoch 284, Loss 13.174088\n", "Epoch 285, Loss 13.139307\n", "Epoch 286, Loss 13.104638\n", "Epoch 287, Loss 13.070093\n", "Epoch 288, Loss 13.035663\n", "Epoch 289, Loss 13.001349\n", "Epoch 290, Loss 12.967154\n", "Epoch 291, Loss 12.933074\n", "Epoch 292, Loss 12.899109\n", "Epoch 293, Loss 12.865259\n", "Epoch 294, Loss 12.831525\n", "Epoch 295, Loss 12.797904\n", "Epoch 296, Loss 12.764399\n", "Epoch 297, Loss 12.731007\n", "Epoch 298, Loss 12.697727\n", "Epoch 299, Loss 12.664560\n", "Epoch 300, Loss 12.631507\n", "Epoch 301, Loss 12.598566\n", "Epoch 302, Loss 12.565738\n", "Epoch 303, Loss 12.533021\n", "Epoch 304, Loss 12.500415\n", "Epoch 305, Loss 12.467919\n", "Epoch 306, Loss 12.435533\n", "Epoch 307, Loss 12.403255\n", "Epoch 308, Loss 12.371088\n", "Epoch 309, Loss 12.339031\n", "Epoch 310, Loss 12.307083\n", "Epoch 311, Loss 12.275247\n", "Epoch 312, Loss 12.243509\n", "Epoch 313, Loss 12.211887\n", "Epoch 314, Loss 12.180370\n", "Epoch 315, Loss 12.148962\n", "Epoch 316, Loss 12.117655\n", "Epoch 317, Loss 12.086463\n", "Epoch 318, Loss 12.055373\n", "Epoch 319, Loss 12.024384\n", "Epoch 320, Loss 11.993508\n", "Epoch 321, Loss 11.962732\n", "Epoch 322, Loss 11.932056\n", "Epoch 323, Loss 11.901492\n", "Epoch 324, Loss 11.871029\n", "Epoch 325, Loss 11.840671\n", "Epoch 326, Loss 11.810413\n", "Epoch 327, Loss 11.780257\n", "Epoch 328, Loss 11.750208\n", "Epoch 329, Loss 11.720258\n", "Epoch 330, Loss 11.690412\n", "Epoch 331, Loss 11.660664\n", "Epoch 332, Loss 11.631016\n", "Epoch 333, Loss 11.601473\n", "Epoch 334, Loss 11.572030\n", "Epoch 335, Loss 11.542686\n", "Epoch 336, Loss 11.513440\n", "Epoch 337, Loss 11.484293\n", "Epoch 338, Loss 11.455247\n", "Epoch 339, Loss 11.426300\n", "Epoch 340, Loss 11.397448\n", "Epoch 341, Loss 11.368696\n", "Epoch 342, Loss 11.340043\n", "Epoch 343, Loss 11.311487\n", "Epoch 344, Loss 11.283028\n", "Epoch 345, Loss 11.254662\n", "Epoch 346, Loss 11.226396\n", "Epoch 347, Loss 11.198221\n", "Epoch 348, Loss 11.170149\n", "Epoch 349, Loss 11.142170\n", "Epoch 350, Loss 11.114283\n", "Epoch 351, Loss 11.086493\n", "Epoch 352, Loss 11.058796\n", "Epoch 353, Loss 11.031192\n", "Epoch 354, Loss 11.003686\n", "Epoch 355, Loss 10.976271\n", "Epoch 356, Loss 10.948948\n", "Epoch 357, Loss 10.921718\n", "Epoch 358, Loss 10.894581\n", "Epoch 359, Loss 10.867537\n", "Epoch 360, Loss 10.840583\n", "Epoch 361, Loss 10.813720\n", "Epoch 362, Loss 10.786951\n", "Epoch 363, Loss 10.760270\n", "Epoch 364, Loss 10.733681\n", "Epoch 365, Loss 10.707183\n", "Epoch 366, Loss 10.680775\n", "Epoch 367, Loss 10.654453\n", "Epoch 368, Loss 10.628225\n", "Epoch 369, Loss 10.602084\n", "Epoch 370, Loss 10.576032\n", "Epoch 371, Loss 10.550071\n", "Epoch 372, Loss 10.524195\n", "Epoch 373, Loss 10.498408\n", "Epoch 374, Loss 
10.472707\n", "Epoch 375, Loss 10.447094\n", "Epoch 376, Loss 10.421568\n", "Epoch 377, Loss 10.396132\n", "Epoch 378, Loss 10.370778\n", "Epoch 379, Loss 10.345510\n", "Epoch 380, Loss 10.320329\n", "Epoch 381, Loss 10.295236\n", "Epoch 382, Loss 10.270224\n", "Epoch 383, Loss 10.245296\n", "Epoch 384, Loss 10.220456\n", "Epoch 385, Loss 10.195701\n", "Epoch 386, Loss 10.171027\n", "Epoch 387, Loss 10.146436\n", "Epoch 388, Loss 10.121934\n", "Epoch 389, Loss 10.097512\n", "Epoch 390, Loss 10.073174\n", "Epoch 391, Loss 10.048919\n", "Epoch 392, Loss 10.024742\n", "Epoch 393, Loss 10.000652\n", "Epoch 394, Loss 9.976640\n", "Epoch 395, Loss 9.952712\n", "Epoch 396, Loss 9.928863\n", "Epoch 397, Loss 9.905092\n", "Epoch 398, Loss 9.881409\n", "Epoch 399, Loss 9.857802\n", "Epoch 400, Loss 9.834277\n", "Epoch 401, Loss 9.810832\n", "Epoch 402, Loss 9.787466\n", "Epoch 403, Loss 9.764176\n", "Epoch 404, Loss 9.740971\n", "Epoch 405, Loss 9.717843\n", "Epoch 406, Loss 9.694793\n", "Epoch 407, Loss 9.671823\n", "Epoch 408, Loss 9.648926\n", "Epoch 409, Loss 9.626110\n", "Epoch 410, Loss 9.603373\n", "Epoch 411, Loss 9.580710\n", "Epoch 412, Loss 9.558124\n", "Epoch 413, Loss 9.535618\n", "Epoch 414, Loss 9.513185\n", "Epoch 415, Loss 9.490829\n", "Epoch 416, Loss 9.468551\n", "Epoch 417, Loss 9.446347\n", "Epoch 418, Loss 9.424216\n", "Epoch 419, Loss 9.402163\n", "Epoch 420, Loss 9.380185\n", "Epoch 421, Loss 9.358281\n", "Epoch 422, Loss 9.336448\n", "Epoch 423, Loss 9.314696\n", "Epoch 424, Loss 9.293013\n", "Epoch 425, Loss 9.271402\n", "Epoch 426, Loss 9.249870\n", "Epoch 427, Loss 9.228409\n", "Epoch 428, Loss 9.207021\n", "Epoch 429, Loss 9.185704\n", "Epoch 430, Loss 9.164462\n", "Epoch 431, Loss 9.143288\n", "Epoch 432, Loss 9.122189\n", "Epoch 433, Loss 9.101160\n", "Epoch 434, Loss 9.080204\n", "Epoch 435, Loss 9.059317\n", "Epoch 436, Loss 9.038502\n", "Epoch 437, Loss 9.017757\n", "Epoch 438, Loss 8.997085\n", "Epoch 439, Loss 8.976479\n", "Epoch 440, Loss 8.955945\n", "Epoch 441, Loss 8.935481\n", "Epoch 442, Loss 8.915089\n", "Epoch 443, Loss 8.894763\n", "Epoch 444, Loss 8.874508\n", "Epoch 445, Loss 8.854318\n", "Epoch 446, Loss 8.834197\n", "Epoch 447, Loss 8.814149\n", "Epoch 448, Loss 8.794162\n", "Epoch 449, Loss 8.774252\n", "Epoch 450, Loss 8.754406\n", "Epoch 451, Loss 8.734625\n", "Epoch 452, Loss 8.714911\n", "Epoch 453, Loss 8.695266\n", "Epoch 454, Loss 8.675689\n", "Epoch 455, Loss 8.656174\n", "Epoch 456, Loss 8.636728\n", "Epoch 457, Loss 8.617346\n", "Epoch 458, Loss 8.598029\n", "Epoch 459, Loss 8.578781\n", "Epoch 460, Loss 8.559597\n", "Epoch 461, Loss 8.540478\n", "Epoch 462, Loss 8.521426\n", "Epoch 463, Loss 8.502438\n", "Epoch 464, Loss 8.483516\n", "Epoch 465, Loss 8.464652\n", "Epoch 466, Loss 8.445858\n", "Epoch 467, Loss 8.427128\n", "Epoch 468, Loss 8.408456\n", "Epoch 469, Loss 8.389848\n", "Epoch 470, Loss 8.371305\n", "Epoch 471, Loss 8.352828\n", "Epoch 472, Loss 8.334408\n", "Epoch 473, Loss 8.316055\n", "Epoch 474, Loss 8.297764\n", "Epoch 475, Loss 8.279534\n", "Epoch 476, Loss 8.261369\n", "Epoch 477, Loss 8.243261\n", "Epoch 478, Loss 8.225213\n", "Epoch 479, Loss 8.207232\n", "Epoch 480, Loss 8.189310\n", "Epoch 481, Loss 8.171450\n", "Epoch 482, Loss 8.153648\n", "Epoch 483, Loss 8.135907\n", "Epoch 484, Loss 8.118226\n", "Epoch 485, Loss 8.100607\n", "Epoch 486, Loss 8.083045\n", "Epoch 487, Loss 8.065548\n", "Epoch 488, Loss 8.048104\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 489, Loss 8.030723\n", "Epoch 490, 
Loss 8.013400\n", "Epoch 491, Loss 7.996135\n", "Epoch 492, Loss 7.978929\n", "Epoch 493, Loss 7.961784\n", "Epoch 494, Loss 7.944690\n", "Epoch 495, Loss 7.927662\n", "Epoch 496, Loss 7.910690\n", "Epoch 497, Loss 7.893775\n", "Epoch 498, Loss 7.876915\n", "Epoch 499, Loss 7.860116\n", "Epoch 500, Loss 7.843370\n", "Epoch 501, Loss 7.826681\n", "Epoch 502, Loss 7.810053\n", "Epoch 503, Loss 7.793480\n", "Epoch 504, Loss 7.776962\n", "Epoch 505, Loss 7.760498\n", "Epoch 506, Loss 7.744092\n", "Epoch 507, Loss 7.727745\n", "Epoch 508, Loss 7.711447\n", "Epoch 509, Loss 7.695212\n", "Epoch 510, Loss 7.679024\n", "Epoch 511, Loss 7.662895\n", "Epoch 512, Loss 7.646819\n", "Epoch 513, Loss 7.630803\n", "Epoch 514, Loss 7.614836\n", "Epoch 515, Loss 7.598925\n", "Epoch 516, Loss 7.583069\n", "Epoch 517, Loss 7.567266\n", "Epoch 518, Loss 7.551516\n", "Epoch 519, Loss 7.535819\n", "Epoch 520, Loss 7.520176\n", "Epoch 521, Loss 7.504588\n", "Epoch 522, Loss 7.489048\n", "Epoch 523, Loss 7.473566\n", "Epoch 524, Loss 7.458135\n", "Epoch 525, Loss 7.442751\n", "Epoch 526, Loss 7.427426\n", "Epoch 527, Loss 7.412152\n", "Epoch 528, Loss 7.396928\n", "Epoch 529, Loss 7.381756\n", "Epoch 530, Loss 7.366636\n", "Epoch 531, Loss 7.351566\n", "Epoch 532, Loss 7.336550\n", "Epoch 533, Loss 7.321585\n", "Epoch 534, Loss 7.306670\n", "Epoch 535, Loss 7.291803\n", "Epoch 536, Loss 7.276989\n", "Epoch 537, Loss 7.262227\n", "Epoch 538, Loss 7.247512\n", "Epoch 539, Loss 7.232846\n", "Epoch 540, Loss 7.218231\n", "Epoch 541, Loss 7.203666\n", "Epoch 542, Loss 7.189151\n", "Epoch 543, Loss 7.174683\n", "Epoch 544, Loss 7.160267\n", "Epoch 545, Loss 7.145897\n", "Epoch 546, Loss 7.131578\n", "Epoch 547, Loss 7.117305\n", "Epoch 548, Loss 7.103083\n", "Epoch 549, Loss 7.088911\n", "Epoch 550, Loss 7.074785\n", "Epoch 551, Loss 7.060707\n", "Epoch 552, Loss 7.046677\n", "Epoch 553, Loss 7.032695\n", "Epoch 554, Loss 7.018756\n", "Epoch 555, Loss 7.004869\n", "Epoch 556, Loss 6.991029\n", "Epoch 557, Loss 6.977232\n", "Epoch 558, Loss 6.963488\n", "Epoch 559, Loss 6.949786\n", "Epoch 560, Loss 6.936135\n", "Epoch 561, Loss 6.922528\n", "Epoch 562, Loss 6.908967\n", "Epoch 563, Loss 6.895452\n", "Epoch 564, Loss 6.881980\n", "Epoch 565, Loss 6.868558\n", "Epoch 566, Loss 6.855180\n", "Epoch 567, Loss 6.841848\n", "Epoch 568, Loss 6.828561\n", "Epoch 569, Loss 6.815319\n", "Epoch 570, Loss 6.802118\n", "Epoch 571, Loss 6.788968\n", "Epoch 572, Loss 6.775864\n", "Epoch 573, Loss 6.762798\n", "Epoch 574, Loss 6.749779\n", "Epoch 575, Loss 6.736803\n", "Epoch 576, Loss 6.723875\n", "Epoch 577, Loss 6.710986\n", "Epoch 578, Loss 6.698142\n", "Epoch 579, Loss 6.685344\n", "Epoch 580, Loss 6.672589\n", "Epoch 581, Loss 6.659874\n", "Epoch 582, Loss 6.647207\n", "Epoch 583, Loss 6.634577\n", "Epoch 584, Loss 6.621995\n", "Epoch 585, Loss 6.609454\n", "Epoch 586, Loss 6.596954\n", "Epoch 587, Loss 6.584500\n", "Epoch 588, Loss 6.572087\n", "Epoch 589, Loss 6.559712\n", "Epoch 590, Loss 6.547384\n", "Epoch 591, Loss 6.535097\n", "Epoch 592, Loss 6.522851\n", "Epoch 593, Loss 6.510646\n", "Epoch 594, Loss 6.498481\n", "Epoch 595, Loss 6.486362\n", "Epoch 596, Loss 6.474282\n", "Epoch 597, Loss 6.462242\n", "Epoch 598, Loss 6.450243\n", "Epoch 599, Loss 6.438284\n", "Epoch 600, Loss 6.426367\n", "Epoch 601, Loss 6.414490\n", "Epoch 602, Loss 6.402655\n", "Epoch 603, Loss 6.390859\n", "Epoch 604, Loss 6.379102\n", "Epoch 605, Loss 6.367384\n", "Epoch 606, Loss 6.355706\n", "Epoch 607, Loss 6.344071\n", "Epoch 608, Loss 
6.332472\n", "Epoch 609, Loss 6.320912\n", "Epoch 610, Loss 6.309395\n", "Epoch 611, Loss 6.297915\n", "Epoch 612, Loss 6.286473\n", "Epoch 613, Loss 6.275074\n", "Epoch 614, Loss 6.263707\n", "Epoch 615, Loss 6.252382\n", "Epoch 616, Loss 6.241098\n", "Epoch 617, Loss 6.229849\n", "Epoch 618, Loss 6.218639\n", "Epoch 619, Loss 6.207471\n", "Epoch 620, Loss 6.196334\n", "Epoch 621, Loss 6.185240\n", "Epoch 622, Loss 6.174181\n", "Epoch 623, Loss 6.163159\n", "Epoch 624, Loss 6.152177\n", "Epoch 625, Loss 6.141229\n", "Epoch 626, Loss 6.130321\n", "Epoch 627, Loss 6.119448\n", "Epoch 628, Loss 6.108614\n", "Epoch 629, Loss 6.097815\n", "Epoch 630, Loss 6.087054\n", "Epoch 631, Loss 6.076329\n", "Epoch 632, Loss 6.065643\n", "Epoch 633, Loss 6.054988\n", "Epoch 634, Loss 6.044372\n", "Epoch 635, Loss 6.033794\n", "Epoch 636, Loss 6.023247\n", "Epoch 637, Loss 6.012738\n", "Epoch 638, Loss 6.002264\n", "Epoch 639, Loss 5.991829\n", "Epoch 640, Loss 5.981426\n", "Epoch 641, Loss 5.971057\n", "Epoch 642, Loss 5.960727\n", "Epoch 643, Loss 5.950432\n", "Epoch 644, Loss 5.940171\n", "Epoch 645, Loss 5.929944\n", "Epoch 646, Loss 5.919752\n", "Epoch 647, Loss 5.909597\n", "Epoch 648, Loss 5.899473\n", "Epoch 649, Loss 5.889384\n", "Epoch 650, Loss 5.879326\n", "Epoch 651, Loss 5.869310\n", "Epoch 652, Loss 5.859322\n", "Epoch 653, Loss 5.849374\n", "Epoch 654, Loss 5.839453\n", "Epoch 655, Loss 5.829570\n", "Epoch 656, Loss 5.819718\n", "Epoch 657, Loss 5.809900\n", "Epoch 658, Loss 5.800117\n", "Epoch 659, Loss 5.790367\n", "Epoch 660, Loss 5.780647\n", "Epoch 661, Loss 5.770962\n", "Epoch 662, Loss 5.761312\n", "Epoch 663, Loss 5.751693\n", "Epoch 664, Loss 5.742105\n", "Epoch 665, Loss 5.732550\n", "Epoch 666, Loss 5.723031\n", "Epoch 667, Loss 5.713540\n", "Epoch 668, Loss 5.704084\n", "Epoch 669, Loss 5.694658\n", "Epoch 670, Loss 5.685265\n", "Epoch 671, Loss 5.675904\n", "Epoch 672, Loss 5.666573\n", "Epoch 673, Loss 5.657277\n", "Epoch 674, Loss 5.648010\n", "Epoch 675, Loss 5.638776\n", "Epoch 676, Loss 5.629575\n", "Epoch 677, Loss 5.620402\n", "Epoch 678, Loss 5.611260\n", "Epoch 679, Loss 5.602148\n", "Epoch 680, Loss 5.593071\n", "Epoch 681, Loss 5.584022\n", "Epoch 682, Loss 5.575005\n", "Epoch 683, Loss 5.566019\n", "Epoch 684, Loss 5.557063\n", "Epoch 685, Loss 5.548136\n", "Epoch 686, Loss 5.539241\n", "Epoch 687, Loss 5.530376\n", "Epoch 688, Loss 5.521540\n", "Epoch 689, Loss 5.512733\n", "Epoch 690, Loss 5.503958\n", "Epoch 691, Loss 5.495212\n", "Epoch 692, Loss 5.486496\n", "Epoch 693, Loss 5.477808\n", "Epoch 694, Loss 5.469152\n", "Epoch 695, Loss 5.460525\n", "Epoch 696, Loss 5.451928\n", "Epoch 697, Loss 5.443359\n", "Epoch 698, Loss 5.434820\n", "Epoch 699, Loss 5.426310\n", "Epoch 700, Loss 5.417827\n", "Epoch 701, Loss 5.409373\n", "Epoch 702, Loss 5.400949\n", "Epoch 703, Loss 5.392551\n", "Epoch 704, Loss 5.384184\n", "Epoch 705, Loss 5.375845\n", "Epoch 706, Loss 5.367537\n", "Epoch 707, Loss 5.359253\n", "Epoch 708, Loss 5.350998\n", "Epoch 709, Loss 5.342771\n", "Epoch 710, Loss 5.334575\n", "Epoch 711, Loss 5.326403\n", "Epoch 712, Loss 5.318259\n", "Epoch 713, Loss 5.310144\n", "Epoch 714, Loss 5.302055\n", "Epoch 715, Loss 5.293994\n", "Epoch 716, Loss 5.285964\n", "Epoch 717, Loss 5.277958\n", "Epoch 718, Loss 5.269979\n", "Epoch 719, Loss 5.262026\n", "Epoch 720, Loss 5.254103\n", "Epoch 721, Loss 5.246205\n", "Epoch 722, Loss 5.238335\n", "Epoch 723, Loss 5.230491\n", "Epoch 724, Loss 5.222673\n", "Epoch 725, Loss 5.214881\n", "Epoch 726, Loss 5.207120\n", 
"Epoch 727, Loss 5.199381\n", "Epoch 728, Loss 5.191670\n", "Epoch 729, Loss 5.183984\n", "Epoch 730, Loss 5.176324\n", "Epoch 731, Loss 5.168688\n", "Epoch 732, Loss 5.161084\n", "Epoch 733, Loss 5.153500\n", "Epoch 734, Loss 5.145943\n", "Epoch 735, Loss 5.138412\n", "Epoch 736, Loss 5.130910\n", "Epoch 737, Loss 5.123428\n", "Epoch 738, Loss 5.115977\n", "Epoch 739, Loss 5.108547\n", "Epoch 740, Loss 5.101144\n", "Epoch 741, Loss 5.093765\n", "Epoch 742, Loss 5.086413\n", "Epoch 743, Loss 5.079085\n", "Epoch 744, Loss 5.071782\n", "Epoch 745, Loss 5.064505\n", "Epoch 746, Loss 5.057247\n", "Epoch 747, Loss 5.050022\n", "Epoch 748, Loss 5.042817\n", "Epoch 749, Loss 5.035636\n", "Epoch 750, Loss 5.028476\n", "Epoch 751, Loss 5.021346\n", "Epoch 752, Loss 5.014239\n", "Epoch 753, Loss 5.007157\n", "Epoch 754, Loss 5.000099\n", "Epoch 755, Loss 4.993064\n", "Epoch 756, Loss 4.986051\n", "Epoch 757, Loss 4.979065\n", "Epoch 758, Loss 4.972100\n", "Epoch 759, Loss 4.965159\n", "Epoch 760, Loss 4.958245\n", "Epoch 761, Loss 4.951350\n", "Epoch 762, Loss 4.944479\n", "Epoch 763, Loss 4.937633\n", "Epoch 764, Loss 4.930812\n", "Epoch 765, Loss 4.924009\n", "Epoch 766, Loss 4.917234\n", "Epoch 767, Loss 4.910480\n", "Epoch 768, Loss 4.903749\n", "Epoch 769, Loss 4.897040\n", "Epoch 770, Loss 4.890356\n", "Epoch 771, Loss 4.883691\n", "Epoch 772, Loss 4.877052\n", "Epoch 773, Loss 4.870436\n", "Epoch 774, Loss 4.863839\n", "Epoch 775, Loss 4.857267\n", "Epoch 776, Loss 4.850717\n", "Epoch 777, Loss 4.844189\n", "Epoch 778, Loss 4.837683\n", "Epoch 779, Loss 4.831196\n", "Epoch 780, Loss 4.824737\n", "Epoch 781, Loss 4.818298\n", "Epoch 782, Loss 4.811880\n", "Epoch 783, Loss 4.805481\n", "Epoch 784, Loss 4.799106\n", "Epoch 785, Loss 4.792755\n", "Epoch 786, Loss 4.786422\n", "Epoch 787, Loss 4.780112\n", "Epoch 788, Loss 4.773824\n", "Epoch 789, Loss 4.767559\n", "Epoch 790, Loss 4.761311\n", "Epoch 791, Loss 4.755087\n", "Epoch 792, Loss 4.748885\n", "Epoch 793, Loss 4.742701\n", "Epoch 794, Loss 4.736537\n", "Epoch 795, Loss 4.730397\n", "Epoch 796, Loss 4.724279\n", "Epoch 797, Loss 4.718181\n", "Epoch 798, Loss 4.712101\n", "Epoch 799, Loss 4.706046\n", "Epoch 800, Loss 4.700009\n", "Epoch 801, Loss 4.693989\n", "Epoch 802, Loss 4.687995\n", "Epoch 803, Loss 4.682020\n", "Epoch 804, Loss 4.676063\n", "Epoch 805, Loss 4.670130\n", "Epoch 806, Loss 4.664214\n", "Epoch 807, Loss 4.658320\n", "Epoch 808, Loss 4.652445\n", "Epoch 809, Loss 4.646592\n", "Epoch 810, Loss 4.640753\n", "Epoch 811, Loss 4.634938\n", "Epoch 812, Loss 4.629142\n", "Epoch 813, Loss 4.623368\n", "Epoch 814, Loss 4.617611\n", "Epoch 815, Loss 4.611873\n", "Epoch 816, Loss 4.606156\n", "Epoch 817, Loss 4.600458\n", "Epoch 818, Loss 4.594780\n", "Epoch 819, Loss 4.589119\n", "Epoch 820, Loss 4.583479\n", "Epoch 821, Loss 4.577857\n", "Epoch 822, Loss 4.572256\n", "Epoch 823, Loss 4.566675\n", "Epoch 824, Loss 4.561109\n", "Epoch 825, Loss 4.555565\n", "Epoch 826, Loss 4.550039\n", "Epoch 827, Loss 4.544533\n", "Epoch 828, Loss 4.539044\n", "Epoch 829, Loss 4.533575\n", "Epoch 830, Loss 4.528122\n", "Epoch 831, Loss 4.522691\n", "Epoch 832, Loss 4.517276\n", "Epoch 833, Loss 4.511879\n", "Epoch 834, Loss 4.506504\n", "Epoch 835, Loss 4.501141\n", "Epoch 836, Loss 4.495801\n", "Epoch 837, Loss 4.490474\n", "Epoch 838, Loss 4.485170\n", "Epoch 839, Loss 4.479884\n", "Epoch 840, Loss 4.474614\n", "Epoch 841, Loss 4.469364\n", "Epoch 842, Loss 4.464129\n", "Epoch 843, Loss 4.458913\n", "Epoch 844, Loss 4.453716\n", "Epoch 845, 
Loss 4.448534\n", "Epoch 846, Loss 4.443372\n", "Epoch 847, Loss 4.438227\n", "Epoch 848, Loss 4.433099\n", "Epoch 849, Loss 4.427989\n", "Epoch 850, Loss 4.422897\n", "Epoch 851, Loss 4.417819\n", "Epoch 852, Loss 4.412762\n", "Epoch 853, Loss 4.407720\n", "Epoch 854, Loss 4.402697\n", "Epoch 855, Loss 4.397688\n", "Epoch 856, Loss 4.392697\n", "Epoch 857, Loss 4.387725\n", "Epoch 858, Loss 4.382769\n", "Epoch 859, Loss 4.377828\n", "Epoch 860, Loss 4.372905\n", "Epoch 861, Loss 4.368000\n", "Epoch 862, Loss 4.363111\n", "Epoch 863, Loss 4.358238\n", "Epoch 864, Loss 4.353383\n", "Epoch 865, Loss 4.348542\n", "Epoch 866, Loss 4.343716\n", "Epoch 867, Loss 4.338911\n", "Epoch 868, Loss 4.334121\n", "Epoch 869, Loss 4.329345\n", "Epoch 870, Loss 4.324588\n", "Epoch 871, Loss 4.319845\n", "Epoch 872, Loss 4.315118\n", "Epoch 873, Loss 4.310409\n", "Epoch 874, Loss 4.305714\n", "Epoch 875, Loss 4.301035\n", "Epoch 876, Loss 4.296376\n", "Epoch 877, Loss 4.291727\n", "Epoch 878, Loss 4.287097\n", "Epoch 879, Loss 4.282482\n", "Epoch 880, Loss 4.277882\n", "Epoch 881, Loss 4.273299\n", "Epoch 882, Loss 4.268732\n", "Epoch 883, Loss 4.264178\n", "Epoch 884, Loss 4.259643\n", "Epoch 885, Loss 4.255120\n", "Epoch 886, Loss 4.250613\n", "Epoch 887, Loss 4.246124\n", "Epoch 888, Loss 4.241648\n", "Epoch 889, Loss 4.237185\n", "Epoch 890, Loss 4.232740\n", "Epoch 891, Loss 4.228308\n", "Epoch 892, Loss 4.223895\n", "Epoch 893, Loss 4.219494\n", "Epoch 894, Loss 4.215109\n", "Epoch 895, Loss 4.210737\n", "Epoch 896, Loss 4.206383\n", "Epoch 897, Loss 4.202042\n", "Epoch 898, Loss 4.197715\n", "Epoch 899, Loss 4.193405\n", "Epoch 900, Loss 4.189108\n", "Epoch 901, Loss 4.184825\n", "Epoch 902, Loss 4.180559\n", "Epoch 903, Loss 4.176305\n", "Epoch 904, Loss 4.172065\n", "Epoch 905, Loss 4.167842\n", "Epoch 906, Loss 4.163631\n", "Epoch 907, Loss 4.159436\n", "Epoch 908, Loss 4.155253\n", "Epoch 909, Loss 4.151086\n", "Epoch 910, Loss 4.146934\n", "Epoch 911, Loss 4.142795\n", "Epoch 912, Loss 4.138669\n", "Epoch 913, Loss 4.134559\n", "Epoch 914, Loss 4.130464\n", "Epoch 915, Loss 4.126378\n", "Epoch 916, Loss 4.122310\n", "Epoch 917, Loss 4.118254\n", "Epoch 918, Loss 4.114213\n", "Epoch 919, Loss 4.110184\n", "Epoch 920, Loss 4.106169\n", "Epoch 921, Loss 4.102170\n", "Epoch 922, Loss 4.098181\n", "Epoch 923, Loss 4.094210\n", "Epoch 924, Loss 4.090249\n", "Epoch 925, Loss 4.086300\n", "Epoch 926, Loss 4.082366\n", "Epoch 927, Loss 4.078448\n", "Epoch 928, Loss 4.074541\n", "Epoch 929, Loss 4.070649\n", "Epoch 930, Loss 4.066768\n", "Epoch 931, Loss 4.062900\n", "Epoch 932, Loss 4.059047\n", "Epoch 933, Loss 4.055204\n", "Epoch 934, Loss 4.051379\n", "Epoch 935, Loss 4.047564\n", "Epoch 936, Loss 4.043762\n", "Epoch 937, Loss 4.039972\n", "Epoch 938, Loss 4.036197\n", "Epoch 939, Loss 4.032434\n", "Epoch 940, Loss 4.028686\n", "Epoch 941, Loss 4.024947\n", "Epoch 942, Loss 4.021224\n", "Epoch 943, Loss 4.017509\n", "Epoch 944, Loss 4.013810\n", "Epoch 945, Loss 4.010122\n", "Epoch 946, Loss 4.006450\n", "Epoch 947, Loss 4.002785\n", "Epoch 948, Loss 3.999137\n", "Epoch 949, Loss 3.995498\n", "Epoch 950, Loss 3.991874\n", "Epoch 951, Loss 3.988262\n", "Epoch 952, Loss 3.984659\n", "Epoch 953, Loss 3.981071\n", "Epoch 954, Loss 3.977497\n", "Epoch 955, Loss 3.973931\n", "Epoch 956, Loss 3.970381\n", "Epoch 957, Loss 3.966840\n", "Epoch 958, Loss 3.963312\n", "Epoch 959, Loss 3.959797\n", "Epoch 960, Loss 3.956295\n", "Epoch 961, Loss 3.952801\n", "Epoch 962, Loss 3.949323\n", "Epoch 963, Loss 
3.945855\n", "Epoch 964, Loss 3.942398\n", "Epoch 965, Loss 3.938953\n", "Epoch 966, Loss 3.935521\n", "Epoch 967, Loss 3.932096\n", "Epoch 968, Loss 3.928688\n", "Epoch 969, Loss 3.925292\n", "Epoch 970, Loss 3.921906\n", "Epoch 971, Loss 3.918528\n", "Epoch 972, Loss 3.915166\n", "Epoch 973, Loss 3.911815\n", "Epoch 974, Loss 3.908474\n", "Epoch 975, Loss 3.905144\n", "Epoch 976, Loss 3.901824\n", "Epoch 977, Loss 3.898517\n", "Epoch 978, Loss 3.895222\n", "Epoch 979, Loss 3.891935\n", "Epoch 980, Loss 3.888664\n", "Epoch 981, Loss 3.885400\n", "Epoch 982, Loss 3.882150\n", "Epoch 983, Loss 3.878911\n", "Epoch 984, Loss 3.875680\n", "Epoch 985, Loss 3.872463\n", "Epoch 986, Loss 3.869256\n", "Epoch 987, Loss 3.866060\n", "Epoch 988, Loss 3.862873\n", "Epoch 989, Loss 3.859699\n", "Epoch 990, Loss 3.856535\n", "Epoch 991, Loss 3.853381\n", "Epoch 992, Loss 3.850237\n", "Epoch 993, Loss 3.847109\n", "Epoch 994, Loss 3.843984\n", "Epoch 995, Loss 3.840876\n", "Epoch 996, Loss 3.837775\n", "Epoch 997, Loss 3.834686\n", "Epoch 998, Loss 3.831606\n", "Epoch 999, Loss 3.828538\n", "Epoch 1000, Loss 3.825484\n", "Epoch 1001, Loss 3.822433\n", "Epoch 1002, Loss 3.819398\n", "Epoch 1003, Loss 3.816369\n", "Epoch 1004, Loss 3.813350\n", "Epoch 1005, Loss 3.810344\n", "Epoch 1006, Loss 3.807348\n", "Epoch 1007, Loss 3.804360\n", "Epoch 1008, Loss 3.801384\n", "Epoch 1009, Loss 3.798421\n", "Epoch 1010, Loss 3.795465\n", "Epoch 1011, Loss 3.792518\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1012, Loss 3.789584\n", "Epoch 1013, Loss 3.786658\n", "Epoch 1014, Loss 3.783740\n", "Epoch 1015, Loss 3.780832\n", "Epoch 1016, Loss 3.777939\n", "Epoch 1017, Loss 3.775053\n", "Epoch 1018, Loss 3.772173\n", "Epoch 1019, Loss 3.769310\n", "Epoch 1020, Loss 3.766451\n", "Epoch 1021, Loss 3.763602\n", "Epoch 1022, Loss 3.760766\n", "Epoch 1023, Loss 3.757936\n", "Epoch 1024, Loss 3.755118\n", "Epoch 1025, Loss 3.752309\n", "Epoch 1026, Loss 3.749511\n", "Epoch 1027, Loss 3.746722\n", "Epoch 1028, Loss 3.743940\n", "Epoch 1029, Loss 3.741169\n", "Epoch 1030, Loss 3.738407\n", "Epoch 1031, Loss 3.735656\n", "Epoch 1032, Loss 3.732914\n", "Epoch 1033, Loss 3.730181\n", "Epoch 1034, Loss 3.727456\n", "Epoch 1035, Loss 3.724741\n", "Epoch 1036, Loss 3.722034\n", "Epoch 1037, Loss 3.719337\n", "Epoch 1038, Loss 3.716650\n", "Epoch 1039, Loss 3.713972\n", "Epoch 1040, Loss 3.711302\n", "Epoch 1041, Loss 3.708643\n", "Epoch 1042, Loss 3.705990\n", "Epoch 1043, Loss 3.703351\n", "Epoch 1044, Loss 3.700716\n", "Epoch 1045, Loss 3.698092\n", "Epoch 1046, Loss 3.695476\n", "Epoch 1047, Loss 3.692869\n", "Epoch 1048, Loss 3.690273\n", "Epoch 1049, Loss 3.687683\n", "Epoch 1050, Loss 3.685103\n", "Epoch 1051, Loss 3.682532\n", "Epoch 1052, Loss 3.679969\n", "Epoch 1053, Loss 3.677417\n", "Epoch 1054, Loss 3.674871\n", "Epoch 1055, Loss 3.672334\n", "Epoch 1056, Loss 3.669805\n", "Epoch 1057, Loss 3.667287\n", "Epoch 1058, Loss 3.664775\n", "Epoch 1059, Loss 3.662273\n", "Epoch 1060, Loss 3.659778\n", "Epoch 1061, Loss 3.657295\n", "Epoch 1062, Loss 3.654816\n", "Epoch 1063, Loss 3.652350\n", "Epoch 1064, Loss 3.649889\n", "Epoch 1065, Loss 3.647437\n", "Epoch 1066, Loss 3.644991\n", "Epoch 1067, Loss 3.642559\n", "Epoch 1068, Loss 3.640131\n", "Epoch 1069, Loss 3.637711\n", "Epoch 1070, Loss 3.635302\n", "Epoch 1071, Loss 3.632902\n", "Epoch 1072, Loss 3.630508\n", "Epoch 1073, Loss 3.628119\n", "Epoch 1074, Loss 3.625741\n", "Epoch 1075, Loss 3.623374\n", "Epoch 1076, Loss 3.621010\n", "Epoch 1077, 
Loss 3.618659\n", "Epoch 1078, Loss 3.616311\n", "Epoch 1079, Loss 3.613973\n", "Epoch 1080, Loss 3.611643\n", "Epoch 1081, Loss 3.609322\n", "Epoch 1082, Loss 3.607007\n", "Epoch 1083, Loss 3.604701\n", "Epoch 1084, Loss 3.602404\n", "Epoch 1085, Loss 3.600114\n", "Epoch 1086, Loss 3.597830\n", "Epoch 1087, Loss 3.595553\n", "Epoch 1088, Loss 3.593287\n", "Epoch 1089, Loss 3.591030\n", "Epoch 1090, Loss 3.588776\n", "Epoch 1091, Loss 3.586534\n", "Epoch 1092, Loss 3.584295\n", "Epoch 1093, Loss 3.582067\n", "Epoch 1094, Loss 3.579846\n", "Epoch 1095, Loss 3.577631\n", "Epoch 1096, Loss 3.575424\n", "Epoch 1097, Loss 3.573225\n", "Epoch 1098, Loss 3.571034\n", "Epoch 1099, Loss 3.568848\n", "Epoch 1100, Loss 3.566673\n", "Epoch 1101, Loss 3.564506\n", "Epoch 1102, Loss 3.562340\n", "Epoch 1103, Loss 3.560185\n", "Epoch 1104, Loss 3.558040\n", "Epoch 1105, Loss 3.555901\n", "Epoch 1106, Loss 3.553767\n", "Epoch 1107, Loss 3.551641\n", "Epoch 1108, Loss 3.549524\n", "Epoch 1109, Loss 3.547411\n", "Epoch 1110, Loss 3.545309\n", "Epoch 1111, Loss 3.543211\n", "Epoch 1112, Loss 3.541124\n", "Epoch 1113, Loss 3.539041\n", "Epoch 1114, Loss 3.536966\n", "Epoch 1115, Loss 3.534897\n", "Epoch 1116, Loss 3.532835\n", "Epoch 1117, Loss 3.530781\n", "Epoch 1118, Loss 3.528734\n", "Epoch 1119, Loss 3.526694\n", "Epoch 1120, Loss 3.524662\n", "Epoch 1121, Loss 3.522633\n", "Epoch 1122, Loss 3.520614\n", "Epoch 1123, Loss 3.518601\n", "Epoch 1124, Loss 3.516594\n", "Epoch 1125, Loss 3.514594\n", "Epoch 1126, Loss 3.512602\n", "Epoch 1127, Loss 3.510619\n", "Epoch 1128, Loss 3.508637\n", "Epoch 1129, Loss 3.506665\n", "Epoch 1130, Loss 3.504700\n", "Epoch 1131, Loss 3.502740\n", "Epoch 1132, Loss 3.500789\n", "Epoch 1133, Loss 3.498843\n", "Epoch 1134, Loss 3.496905\n", "Epoch 1135, Loss 3.494972\n", "Epoch 1136, Loss 3.493046\n", "Epoch 1137, Loss 3.491127\n", "Epoch 1138, Loss 3.489213\n", "Epoch 1139, Loss 3.487308\n", "Epoch 1140, Loss 3.485410\n", "Epoch 1141, Loss 3.483515\n", "Epoch 1142, Loss 3.481627\n", "Epoch 1143, Loss 3.479746\n", "Epoch 1144, Loss 3.477872\n", "Epoch 1145, Loss 3.476005\n", "Epoch 1146, Loss 3.474143\n", "Epoch 1147, Loss 3.472288\n", "Epoch 1148, Loss 3.470441\n", "Epoch 1149, Loss 3.468597\n", "Epoch 1150, Loss 3.466762\n", "Epoch 1151, Loss 3.464930\n", "Epoch 1152, Loss 3.463105\n", "Epoch 1153, Loss 3.461289\n", "Epoch 1154, Loss 3.459477\n", "Epoch 1155, Loss 3.457672\n", "Epoch 1156, Loss 3.455873\n", "Epoch 1157, Loss 3.454080\n", "Epoch 1158, Loss 3.452293\n", "Epoch 1159, Loss 3.450512\n", "Epoch 1160, Loss 3.448736\n", "Epoch 1161, Loss 3.446968\n", "Epoch 1162, Loss 3.445203\n", "Epoch 1163, Loss 3.443449\n", "Epoch 1164, Loss 3.441696\n", "Epoch 1165, Loss 3.439952\n", "Epoch 1166, Loss 3.438210\n", "Epoch 1167, Loss 3.436478\n", "Epoch 1168, Loss 3.434753\n", "Epoch 1169, Loss 3.433029\n", "Epoch 1170, Loss 3.431314\n", "Epoch 1171, Loss 3.429608\n", "Epoch 1172, Loss 3.427903\n", "Epoch 1173, Loss 3.426204\n", "Epoch 1174, Loss 3.424509\n", "Epoch 1175, Loss 3.422824\n", "Epoch 1176, Loss 3.421144\n", "Epoch 1177, Loss 3.419468\n", "Epoch 1178, Loss 3.417798\n", "Epoch 1179, Loss 3.416134\n", "Epoch 1180, Loss 3.414477\n", "Epoch 1181, Loss 3.412824\n", "Epoch 1182, Loss 3.411176\n", "Epoch 1183, Loss 3.409534\n", "Epoch 1184, Loss 3.407899\n", "Epoch 1185, Loss 3.406272\n", "Epoch 1186, Loss 3.404645\n", "Epoch 1187, Loss 3.403024\n", "Epoch 1188, Loss 3.401413\n", "Epoch 1189, Loss 3.399802\n", "Epoch 1190, Loss 3.398200\n", "Epoch 1191, Loss 3.396602\n", 
"Epoch 1192, Loss 3.395011\n", "Epoch 1193, Loss 3.393425\n", "Epoch 1194, Loss 3.391845\n", "Epoch 1195, Loss 3.390267\n", "Epoch 1196, Loss 3.388697\n", "Epoch 1197, Loss 3.387132\n", "Epoch 1198, Loss 3.385571\n", "Epoch 1199, Loss 3.384017\n", "Epoch 1200, Loss 3.382467\n", "Epoch 1201, Loss 3.380925\n", "Epoch 1202, Loss 3.379386\n", "Epoch 1203, Loss 3.377852\n", "Epoch 1204, Loss 3.376323\n", "Epoch 1205, Loss 3.374800\n", "Epoch 1206, Loss 3.373284\n", "Epoch 1207, Loss 3.371769\n", "Epoch 1208, Loss 3.370261\n", "Epoch 1209, Loss 3.368759\n", "Epoch 1210, Loss 3.367262\n", "Epoch 1211, Loss 3.365771\n", "Epoch 1212, Loss 3.364282\n", "Epoch 1213, Loss 3.362800\n", "Epoch 1214, Loss 3.361325\n", "Epoch 1215, Loss 3.359851\n", "Epoch 1216, Loss 3.358383\n", "Epoch 1217, Loss 3.356921\n", "Epoch 1218, Loss 3.355464\n", "Epoch 1219, Loss 3.354013\n", "Epoch 1220, Loss 3.352564\n", "Epoch 1221, Loss 3.351122\n", "Epoch 1222, Loss 3.349685\n", "Epoch 1223, Loss 3.348251\n", "Epoch 1224, Loss 3.346825\n", "Epoch 1225, Loss 3.345403\n", "Epoch 1226, Loss 3.343982\n", "Epoch 1227, Loss 3.342571\n", "Epoch 1228, Loss 3.341161\n", "Epoch 1229, Loss 3.339758\n", "Epoch 1230, Loss 3.338359\n", "Epoch 1231, Loss 3.336965\n", "Epoch 1232, Loss 3.335577\n", "Epoch 1233, Loss 3.334191\n", "Epoch 1234, Loss 3.332811\n", "Epoch 1235, Loss 3.331435\n", "Epoch 1236, Loss 3.330065\n", "Epoch 1237, Loss 3.328699\n", "Epoch 1238, Loss 3.327338\n", "Epoch 1239, Loss 3.325980\n", "Epoch 1240, Loss 3.324628\n", "Epoch 1241, Loss 3.323280\n", "Epoch 1242, Loss 3.321935\n", "Epoch 1243, Loss 3.320599\n", "Epoch 1244, Loss 3.319264\n", "Epoch 1245, Loss 3.317935\n", "Epoch 1246, Loss 3.316610\n", "Epoch 1247, Loss 3.315289\n", "Epoch 1248, Loss 3.313974\n", "Epoch 1249, Loss 3.312663\n", "Epoch 1250, Loss 3.311353\n", "Epoch 1251, Loss 3.310053\n", "Epoch 1252, Loss 3.308756\n", "Epoch 1253, Loss 3.307462\n", "Epoch 1254, Loss 3.306170\n", "Epoch 1255, Loss 3.304887\n", "Epoch 1256, Loss 3.303605\n", "Epoch 1257, Loss 3.302329\n", "Epoch 1258, Loss 3.301058\n", "Epoch 1259, Loss 3.299791\n", "Epoch 1260, Loss 3.298527\n", "Epoch 1261, Loss 3.297266\n", "Epoch 1262, Loss 3.296014\n", "Epoch 1263, Loss 3.294762\n", "Epoch 1264, Loss 3.293517\n", "Epoch 1265, Loss 3.292275\n", "Epoch 1266, Loss 3.291036\n", "Epoch 1267, Loss 3.289804\n", "Epoch 1268, Loss 3.288573\n", "Epoch 1269, Loss 3.287347\n", "Epoch 1270, Loss 3.286129\n", "Epoch 1271, Loss 3.284911\n", "Epoch 1272, Loss 3.283698\n", "Epoch 1273, Loss 3.282488\n", "Epoch 1274, Loss 3.281284\n", "Epoch 1275, Loss 3.280086\n", "Epoch 1276, Loss 3.278888\n", "Epoch 1277, Loss 3.277696\n", "Epoch 1278, Loss 3.276506\n", "Epoch 1279, Loss 3.275322\n", "Epoch 1280, Loss 3.274142\n", "Epoch 1281, Loss 3.272967\n", "Epoch 1282, Loss 3.271793\n", "Epoch 1283, Loss 3.270625\n", "Epoch 1284, Loss 3.269460\n", "Epoch 1285, Loss 3.268301\n", "Epoch 1286, Loss 3.267143\n", "Epoch 1287, Loss 3.265991\n", "Epoch 1288, Loss 3.264842\n", "Epoch 1289, Loss 3.263700\n", "Epoch 1290, Loss 3.262556\n", "Epoch 1291, Loss 3.261421\n", "Epoch 1292, Loss 3.260288\n", "Epoch 1293, Loss 3.259161\n", "Epoch 1294, Loss 3.258033\n", "Epoch 1295, Loss 3.256912\n", "Epoch 1296, Loss 3.255795\n", "Epoch 1297, Loss 3.254681\n", "Epoch 1298, Loss 3.253569\n", "Epoch 1299, Loss 3.252462\n", "Epoch 1300, Loss 3.251362\n", "Epoch 1301, Loss 3.250264\n", "Epoch 1302, Loss 3.249168\n", "Epoch 1303, Loss 3.248077\n", "Epoch 1304, Loss 3.246989\n", "Epoch 1305, Loss 3.245904\n", "Epoch 1306, Loss 
3.244824\n", "Epoch 1307, Loss 3.243747\n", "Epoch 1308, Loss 3.242674\n", "Epoch 1309, Loss 3.241606\n", "Epoch 1310, Loss 3.240538\n", "Epoch 1311, Loss 3.239475\n", "Epoch 1312, Loss 3.238420\n", "Epoch 1313, Loss 3.237364\n", "Epoch 1314, Loss 3.236314\n", "Epoch 1315, Loss 3.235264\n", "Epoch 1316, Loss 3.234218\n", "Epoch 1317, Loss 3.233179\n", "Epoch 1318, Loss 3.232143\n", "Epoch 1319, Loss 3.231108\n", "Epoch 1320, Loss 3.230078\n", "Epoch 1321, Loss 3.229051\n", "Epoch 1322, Loss 3.228027\n", "Epoch 1323, Loss 3.227010\n", "Epoch 1324, Loss 3.225993\n", "Epoch 1325, Loss 3.224979\n", "Epoch 1326, Loss 3.223971\n", "Epoch 1327, Loss 3.222965\n", "Epoch 1328, Loss 3.221961\n", "Epoch 1329, Loss 3.220962\n", "Epoch 1330, Loss 3.219967\n", "Epoch 1331, Loss 3.218975\n", "Epoch 1332, Loss 3.217986\n", "Epoch 1333, Loss 3.217000\n", "Epoch 1334, Loss 3.216017\n", "Epoch 1335, Loss 3.215039\n", "Epoch 1336, Loss 3.214062\n", "Epoch 1337, Loss 3.213092\n", "Epoch 1338, Loss 3.212122\n", "Epoch 1339, Loss 3.211157\n", "Epoch 1340, Loss 3.210193\n", "Epoch 1341, Loss 3.209235\n", "Epoch 1342, Loss 3.208279\n", "Epoch 1343, Loss 3.207326\n", "Epoch 1344, Loss 3.206376\n", "Epoch 1345, Loss 3.205430\n", "Epoch 1346, Loss 3.204488\n", "Epoch 1347, Loss 3.203547\n", "Epoch 1348, Loss 3.202611\n", "Epoch 1349, Loss 3.201678\n", "Epoch 1350, Loss 3.200747\n", "Epoch 1351, Loss 3.199820\n", "Epoch 1352, Loss 3.198897\n", "Epoch 1353, Loss 3.197976\n", "Epoch 1354, Loss 3.197060\n", "Epoch 1355, Loss 3.196144\n", "Epoch 1356, Loss 3.195231\n", "Epoch 1357, Loss 3.194324\n", "Epoch 1358, Loss 3.193419\n", "Epoch 1359, Loss 3.192517\n", "Epoch 1360, Loss 3.191616\n", "Epoch 1361, Loss 3.190720\n", "Epoch 1362, Loss 3.189829\n", "Epoch 1363, Loss 3.188937\n", "Epoch 1364, Loss 3.188051\n", "Epoch 1365, Loss 3.187166\n", "Epoch 1366, Loss 3.186288\n", "Epoch 1367, Loss 3.185409\n", "Epoch 1368, Loss 3.184535\n", "Epoch 1369, Loss 3.183662\n", "Epoch 1370, Loss 3.182791\n", "Epoch 1371, Loss 3.181925\n", "Epoch 1372, Loss 3.181063\n", "Epoch 1373, Loss 3.180201\n", "Epoch 1374, Loss 3.179347\n", "Epoch 1375, Loss 3.178490\n", "Epoch 1376, Loss 3.177638\n", "Epoch 1377, Loss 3.176789\n", "Epoch 1378, Loss 3.175945\n", "Epoch 1379, Loss 3.175101\n", "Epoch 1380, Loss 3.174262\n", "Epoch 1381, Loss 3.173424\n", "Epoch 1382, Loss 3.172590\n", "Epoch 1383, Loss 3.171759\n", "Epoch 1384, Loss 3.170929\n", "Epoch 1385, Loss 3.170103\n", "Epoch 1386, Loss 3.169280\n", "Epoch 1387, Loss 3.168462\n", "Epoch 1388, Loss 3.167644\n", "Epoch 1389, Loss 3.166827\n", "Epoch 1390, Loss 3.166017\n", "Epoch 1391, Loss 3.165206\n", "Epoch 1392, Loss 3.164401\n", "Epoch 1393, Loss 3.163594\n", "Epoch 1394, Loss 3.162795\n", "Epoch 1395, Loss 3.161996\n", "Epoch 1396, Loss 3.161201\n", "Epoch 1397, Loss 3.160410\n", "Epoch 1398, Loss 3.159618\n", "Epoch 1399, Loss 3.158831\n", "Epoch 1400, Loss 3.158046\n", "Epoch 1401, Loss 3.157263\n", "Epoch 1402, Loss 3.156484\n", "Epoch 1403, Loss 3.155708\n", "Epoch 1404, Loss 3.154933\n", "Epoch 1405, Loss 3.154162\n", "Epoch 1406, Loss 3.153393\n", "Epoch 1407, Loss 3.152627\n", "Epoch 1408, Loss 3.151864\n", "Epoch 1409, Loss 3.151101\n", "Epoch 1410, Loss 3.150343\n", "Epoch 1411, Loss 3.149587\n", "Epoch 1412, Loss 3.148833\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1413, Loss 3.148082\n", "Epoch 1414, Loss 3.147335\n", "Epoch 1415, Loss 3.146588\n", "Epoch 1416, Loss 3.145845\n", "Epoch 1417, Loss 3.145105\n", "Epoch 1418, Loss 3.144367\n", "Epoch 
1419, Loss 3.143630\n", "...\n", "Epoch 1500, Loss 3.091631\n", "...\n", "Epoch 1750, Loss 2.997730\n", "...\n", "Epoch 2000, Loss 2.957596\n", "...\n", "Epoch 2250, Loss 2.940446\n", "...\n", "Epoch 2500, Loss 2.933116\n", "...\n", "Epoch 2750, Loss 2.929983\n", "...\n", "Epoch 3000, Loss 2.928646\n", "...\n", "Epoch 3250, Loss 2.928072\n", "...\n", "Epoch 3359, Loss 2.927941\n",
"Epoch 3360, Loss 2.927940\n", "Epoch 3361, Loss 2.927938\n", "Epoch 3362, Loss 2.927938\n", "Epoch 3363, Loss 2.927936\n", "Epoch 3364, Loss 2.927936\n", "Epoch 3365, Loss 2.927937\n", "Epoch 3366, Loss 2.927934\n", "Epoch 3367, Loss 2.927934\n", "Epoch 3368, Loss 2.927933\n", "Epoch 3369, Loss 2.927930\n", "Epoch 3370, Loss 2.927929\n", "Epoch 3371, Loss 2.927931\n", "Epoch 3372, Loss 2.927929\n", "Epoch 3373, Loss 2.927927\n", "Epoch 3374, Loss 2.927926\n", "Epoch 3375, Loss 2.927925\n", "Epoch 3376, Loss 2.927924\n", "Epoch 3377, Loss 2.927922\n", "Epoch 3378, Loss 2.927924\n", "Epoch 3379, Loss 2.927922\n", "Epoch 3380, Loss 2.927921\n", "Epoch 3381, Loss 2.927920\n", "Epoch 3382, Loss 2.927918\n", "Epoch 3383, Loss 2.927917\n", "Epoch 3384, Loss 2.927917\n", "Epoch 3385, Loss 2.927915\n", "Epoch 3386, Loss 2.927916\n", "Epoch 3387, Loss 2.927914\n", "Epoch 3388, Loss 2.927914\n", "Epoch 3389, Loss 2.927912\n", "Epoch 3390, Loss 2.927913\n", "Epoch 3391, Loss 2.927911\n", "Epoch 3392, Loss 2.927910\n", "Epoch 3393, Loss 2.927909\n", "Epoch 3394, Loss 2.927908\n", "Epoch 3395, Loss 2.927907\n", "Epoch 3396, Loss 2.927906\n", "Epoch 3397, Loss 2.927905\n", "Epoch 3398, Loss 2.927905\n", "Epoch 3399, Loss 2.927904\n", "Epoch 3400, Loss 2.927902\n", "Epoch 3401, Loss 2.927902\n", "Epoch 3402, Loss 2.927902\n", "Epoch 3403, Loss 2.927899\n", "Epoch 3404, Loss 2.927899\n", "Epoch 3405, Loss 2.927898\n", "Epoch 3406, Loss 2.927899\n", "Epoch 3407, Loss 2.927896\n", "Epoch 3408, Loss 2.927895\n", "Epoch 3409, Loss 2.927896\n", "Epoch 3410, Loss 2.927894\n", "Epoch 3411, Loss 2.927892\n", "Epoch 3412, Loss 2.927893\n", "Epoch 3413, Loss 2.927891\n", "Epoch 3414, Loss 2.927891\n", "Epoch 3415, Loss 2.927890\n", "Epoch 3416, Loss 2.927891\n", "Epoch 3417, Loss 2.927888\n", "Epoch 3418, Loss 2.927888\n", "Epoch 3419, Loss 2.927886\n", "Epoch 3420, Loss 2.927887\n", "Epoch 3421, Loss 2.927885\n", "Epoch 3422, Loss 2.927884\n", "Epoch 3423, Loss 2.927883\n", "Epoch 3424, Loss 2.927881\n", "Epoch 3425, Loss 2.927881\n", "Epoch 3426, Loss 2.927880\n", "Epoch 3427, Loss 2.927880\n", "Epoch 3428, Loss 2.927879\n", "Epoch 3429, Loss 2.927878\n", "Epoch 3430, Loss 2.927877\n", "Epoch 3431, Loss 2.927876\n", "Epoch 3432, Loss 2.927876\n", "Epoch 3433, Loss 2.927875\n", "Epoch 3434, Loss 2.927875\n", "Epoch 3435, Loss 2.927875\n", "Epoch 3436, Loss 2.927873\n", "Epoch 3437, Loss 2.927872\n", "Epoch 3438, Loss 2.927870\n", "Epoch 3439, Loss 2.927871\n", "Epoch 3440, Loss 2.927871\n", "Epoch 3441, Loss 2.927869\n", "Epoch 3442, Loss 2.927869\n", "Epoch 3443, Loss 2.927866\n", "Epoch 3444, Loss 2.927865\n", "Epoch 3445, Loss 2.927866\n", "Epoch 3446, Loss 2.927866\n", "Epoch 3447, Loss 2.927864\n", "Epoch 3448, Loss 2.927863\n", "Epoch 3449, Loss 2.927863\n", "Epoch 3450, Loss 2.927862\n", "Epoch 3451, Loss 2.927863\n", "Epoch 3452, Loss 2.927860\n", "Epoch 3453, Loss 2.927860\n", "Epoch 3454, Loss 2.927860\n", "Epoch 3455, Loss 2.927859\n", "Epoch 3456, Loss 2.927858\n", "Epoch 3457, Loss 2.927858\n", "Epoch 3458, Loss 2.927855\n", "Epoch 3459, Loss 2.927857\n", "Epoch 3460, Loss 2.927854\n", "Epoch 3461, Loss 2.927855\n", "Epoch 3462, Loss 2.927854\n", "Epoch 3463, Loss 2.927854\n", "Epoch 3464, Loss 2.927851\n", "Epoch 3465, Loss 2.927853\n", "Epoch 3466, Loss 2.927852\n", "Epoch 3467, Loss 2.927850\n", "Epoch 3468, Loss 2.927849\n", "Epoch 3469, Loss 2.927849\n", "Epoch 3470, Loss 2.927848\n", "Epoch 3471, Loss 2.927848\n", "Epoch 3472, Loss 2.927846\n", "Epoch 3473, Loss 2.927846\n", "Epoch 3474, Loss 
2.927845\n", "Epoch 3475, Loss 2.927844\n", "Epoch 3476, Loss 2.927844\n", "Epoch 3477, Loss 2.927844\n", "Epoch 3478, Loss 2.927843\n", "Epoch 3479, Loss 2.927842\n", "Epoch 3480, Loss 2.927842\n", "Epoch 3481, Loss 2.927840\n", "Epoch 3482, Loss 2.927841\n", "Epoch 3483, Loss 2.927839\n", "Epoch 3484, Loss 2.927838\n", "Epoch 3485, Loss 2.927839\n", "Epoch 3486, Loss 2.927839\n", "Epoch 3487, Loss 2.927837\n", "Epoch 3488, Loss 2.927835\n", "Epoch 3489, Loss 2.927837\n", "Epoch 3490, Loss 2.927835\n", "Epoch 3491, Loss 2.927834\n", "Epoch 3492, Loss 2.927833\n", "Epoch 3493, Loss 2.927833\n", "Epoch 3494, Loss 2.927833\n", "Epoch 3495, Loss 2.927832\n", "Epoch 3496, Loss 2.927831\n", "Epoch 3497, Loss 2.927830\n", "Epoch 3498, Loss 2.927830\n", "Epoch 3499, Loss 2.927830\n", "Epoch 3500, Loss 2.927830\n", "Epoch 3501, Loss 2.927828\n", "Epoch 3502, Loss 2.927828\n", "Epoch 3503, Loss 2.927827\n", "Epoch 3504, Loss 2.927825\n", "Epoch 3505, Loss 2.927827\n", "Epoch 3506, Loss 2.927825\n", "Epoch 3507, Loss 2.927824\n", "Epoch 3508, Loss 2.927824\n", "Epoch 3509, Loss 2.927824\n", "Epoch 3510, Loss 2.927822\n", "Epoch 3511, Loss 2.927822\n", "Epoch 3512, Loss 2.927821\n", "Epoch 3513, Loss 2.927820\n", "Epoch 3514, Loss 2.927819\n", "Epoch 3515, Loss 2.927821\n", "Epoch 3516, Loss 2.927819\n", "Epoch 3517, Loss 2.927819\n", "Epoch 3518, Loss 2.927818\n", "Epoch 3519, Loss 2.927818\n", "Epoch 3520, Loss 2.927817\n", "Epoch 3521, Loss 2.927816\n", "Epoch 3522, Loss 2.927815\n", "Epoch 3523, Loss 2.927816\n", "Epoch 3524, Loss 2.927814\n", "Epoch 3525, Loss 2.927813\n", "Epoch 3526, Loss 2.927813\n", "Epoch 3527, Loss 2.927812\n", "Epoch 3528, Loss 2.927811\n", "Epoch 3529, Loss 2.927811\n", "Epoch 3530, Loss 2.927811\n", "Epoch 3531, Loss 2.927810\n", "Epoch 3532, Loss 2.927809\n", "Epoch 3533, Loss 2.927810\n", "Epoch 3534, Loss 2.927809\n", "Epoch 3535, Loss 2.927808\n", "Epoch 3536, Loss 2.927809\n", "Epoch 3537, Loss 2.927806\n", "Epoch 3538, Loss 2.927806\n", "Epoch 3539, Loss 2.927805\n", "Epoch 3540, Loss 2.927804\n", "Epoch 3541, Loss 2.927804\n", "Epoch 3542, Loss 2.927804\n", "Epoch 3543, Loss 2.927805\n", "Epoch 3544, Loss 2.927804\n", "Epoch 3545, Loss 2.927804\n", "Epoch 3546, Loss 2.927802\n", "Epoch 3547, Loss 2.927802\n", "Epoch 3548, Loss 2.927801\n", "Epoch 3549, Loss 2.927801\n", "Epoch 3550, Loss 2.927799\n", "Epoch 3551, Loss 2.927801\n", "Epoch 3552, Loss 2.927798\n", "Epoch 3553, Loss 2.927798\n", "Epoch 3554, Loss 2.927798\n", "Epoch 3555, Loss 2.927798\n", "Epoch 3556, Loss 2.927798\n", "Epoch 3557, Loss 2.927796\n", "Epoch 3558, Loss 2.927796\n", "Epoch 3559, Loss 2.927796\n", "Epoch 3560, Loss 2.927794\n", "Epoch 3561, Loss 2.927796\n", "Epoch 3562, Loss 2.927795\n", "Epoch 3563, Loss 2.927794\n", "Epoch 3564, Loss 2.927794\n", "Epoch 3565, Loss 2.927791\n", "Epoch 3566, Loss 2.927792\n", "Epoch 3567, Loss 2.927792\n", "Epoch 3568, Loss 2.927790\n", "Epoch 3569, Loss 2.927790\n", "Epoch 3570, Loss 2.927789\n", "Epoch 3571, Loss 2.927790\n", "Epoch 3572, Loss 2.927789\n", "Epoch 3573, Loss 2.927790\n", "Epoch 3574, Loss 2.927789\n", "Epoch 3575, Loss 2.927787\n", "Epoch 3576, Loss 2.927786\n", "Epoch 3577, Loss 2.927788\n", "Epoch 3578, Loss 2.927785\n", "Epoch 3579, Loss 2.927785\n", "Epoch 3580, Loss 2.927786\n", "Epoch 3581, Loss 2.927785\n", "Epoch 3582, Loss 2.927785\n", "Epoch 3583, Loss 2.927784\n", "Epoch 3584, Loss 2.927783\n", "Epoch 3585, Loss 2.927783\n", "Epoch 3586, Loss 2.927781\n", "Epoch 3587, Loss 2.927782\n", "Epoch 3588, Loss 2.927780\n", "Epoch 
3589, Loss 2.927781\n", "Epoch 3590, Loss 2.927781\n", "Epoch 3591, Loss 2.927780\n", "Epoch 3592, Loss 2.927780\n", "Epoch 3593, Loss 2.927778\n", "Epoch 3594, Loss 2.927779\n", "Epoch 3595, Loss 2.927778\n", "Epoch 3596, Loss 2.927778\n", "Epoch 3597, Loss 2.927778\n", "Epoch 3598, Loss 2.927777\n", "Epoch 3599, Loss 2.927776\n", "Epoch 3600, Loss 2.927775\n", "Epoch 3601, Loss 2.927775\n", "Epoch 3602, Loss 2.927773\n", "Epoch 3603, Loss 2.927775\n", "Epoch 3604, Loss 2.927775\n", "Epoch 3605, Loss 2.927774\n", "Epoch 3606, Loss 2.927773\n", "Epoch 3607, Loss 2.927773\n", "Epoch 3608, Loss 2.927772\n", "Epoch 3609, Loss 2.927772\n", "Epoch 3610, Loss 2.927772\n", "Epoch 3611, Loss 2.927770\n", "Epoch 3612, Loss 2.927772\n", "Epoch 3613, Loss 2.927772\n", "Epoch 3614, Loss 2.927770\n", "Epoch 3615, Loss 2.927770\n", "Epoch 3616, Loss 2.927769\n", "Epoch 3617, Loss 2.927768\n", "Epoch 3618, Loss 2.927769\n", "Epoch 3619, Loss 2.927768\n", "Epoch 3620, Loss 2.927766\n", "Epoch 3621, Loss 2.927767\n", "Epoch 3622, Loss 2.927767\n", "Epoch 3623, Loss 2.927765\n", "Epoch 3624, Loss 2.927766\n", "Epoch 3625, Loss 2.927765\n", "Epoch 3626, Loss 2.927766\n", "Epoch 3627, Loss 2.927764\n", "Epoch 3628, Loss 2.927764\n", "Epoch 3629, Loss 2.927764\n", "Epoch 3630, Loss 2.927762\n", "Epoch 3631, Loss 2.927763\n", "Epoch 3632, Loss 2.927763\n", "Epoch 3633, Loss 2.927762\n", "Epoch 3634, Loss 2.927761\n", "Epoch 3635, Loss 2.927762\n", "Epoch 3636, Loss 2.927759\n", "Epoch 3637, Loss 2.927761\n", "Epoch 3638, Loss 2.927761\n", "Epoch 3639, Loss 2.927760\n", "Epoch 3640, Loss 2.927759\n", "Epoch 3641, Loss 2.927758\n", "Epoch 3642, Loss 2.927759\n", "Epoch 3643, Loss 2.927757\n", "Epoch 3644, Loss 2.927758\n", "Epoch 3645, Loss 2.927757\n", "Epoch 3646, Loss 2.927757\n", "Epoch 3647, Loss 2.927757\n", "Epoch 3648, Loss 2.927756\n", "Epoch 3649, Loss 2.927758\n", "Epoch 3650, Loss 2.927756\n", "Epoch 3651, Loss 2.927756\n", "Epoch 3652, Loss 2.927755\n", "Epoch 3653, Loss 2.927755\n", "Epoch 3654, Loss 2.927754\n", "Epoch 3655, Loss 2.927754\n", "Epoch 3656, Loss 2.927755\n", "Epoch 3657, Loss 2.927753\n", "Epoch 3658, Loss 2.927752\n", "Epoch 3659, Loss 2.927754\n", "Epoch 3660, Loss 2.927752\n", "Epoch 3661, Loss 2.927751\n", "Epoch 3662, Loss 2.927752\n", "Epoch 3663, Loss 2.927750\n", "Epoch 3664, Loss 2.927750\n", "Epoch 3665, Loss 2.927752\n", "Epoch 3666, Loss 2.927750\n", "Epoch 3667, Loss 2.927750\n", "Epoch 3668, Loss 2.927747\n", "Epoch 3669, Loss 2.927749\n", "Epoch 3670, Loss 2.927748\n", "Epoch 3671, Loss 2.927748\n", "Epoch 3672, Loss 2.927749\n", "Epoch 3673, Loss 2.927747\n", "Epoch 3674, Loss 2.927747\n", "Epoch 3675, Loss 2.927748\n", "Epoch 3676, Loss 2.927747\n", "Epoch 3677, Loss 2.927746\n", "Epoch 3678, Loss 2.927746\n", "Epoch 3679, Loss 2.927745\n", "Epoch 3680, Loss 2.927745\n", "Epoch 3681, Loss 2.927744\n", "Epoch 3682, Loss 2.927744\n", "Epoch 3683, Loss 2.927743\n", "Epoch 3684, Loss 2.927742\n", "Epoch 3685, Loss 2.927743\n", "Epoch 3686, Loss 2.927743\n", "Epoch 3687, Loss 2.927744\n", "Epoch 3688, Loss 2.927743\n", "Epoch 3689, Loss 2.927742\n", "Epoch 3690, Loss 2.927742\n", "Epoch 3691, Loss 2.927742\n", "Epoch 3692, Loss 2.927741\n", "Epoch 3693, Loss 2.927741\n", "Epoch 3694, Loss 2.927741\n", "Epoch 3695, Loss 2.927742\n", "Epoch 3696, Loss 2.927741\n", "Epoch 3697, Loss 2.927740\n", "Epoch 3698, Loss 2.927740\n", "Epoch 3699, Loss 2.927738\n", "Epoch 3700, Loss 2.927738\n", "Epoch 3701, Loss 2.927738\n", "Epoch 3702, Loss 2.927737\n", "Epoch 3703, Loss 
2.927737\n", "Epoch 3704, Loss 2.927738\n", "Epoch 3705, Loss 2.927738\n", "Epoch 3706, Loss 2.927737\n", "Epoch 3707, Loss 2.927737\n", "Epoch 3708, Loss 2.927736\n", "Epoch 3709, Loss 2.927735\n", "Epoch 3710, Loss 2.927734\n", "Epoch 3711, Loss 2.927735\n", "Epoch 3712, Loss 2.927736\n", "Epoch 3713, Loss 2.927734\n", "Epoch 3714, Loss 2.927734\n", "Epoch 3715, Loss 2.927733\n", "Epoch 3716, Loss 2.927734\n", "Epoch 3717, Loss 2.927733\n", "Epoch 3718, Loss 2.927733\n", "Epoch 3719, Loss 2.927733\n", "Epoch 3720, Loss 2.927733\n", "Epoch 3721, Loss 2.927731\n", "Epoch 3722, Loss 2.927731\n", "Epoch 3723, Loss 2.927732\n", "Epoch 3724, Loss 2.927730\n", "Epoch 3725, Loss 2.927730\n", "Epoch 3726, Loss 2.927731\n", "Epoch 3727, Loss 2.927730\n", "Epoch 3728, Loss 2.927732\n", "Epoch 3729, Loss 2.927732\n", "Epoch 3730, Loss 2.927730\n", "Epoch 3731, Loss 2.927728\n", "Epoch 3732, Loss 2.927729\n", "Epoch 3733, Loss 2.927730\n", "Epoch 3734, Loss 2.927729\n", "Epoch 3735, Loss 2.927728\n", "Epoch 3736, Loss 2.927728\n", "Epoch 3737, Loss 2.927728\n", "Epoch 3738, Loss 2.927727\n", "Epoch 3739, Loss 2.927728\n", "Epoch 3740, Loss 2.927728\n", "Epoch 3741, Loss 2.927727\n", "Epoch 3742, Loss 2.927727\n", "Epoch 3743, Loss 2.927726\n", "Epoch 3744, Loss 2.927726\n", "Epoch 3745, Loss 2.927725\n", "Epoch 3746, Loss 2.927725\n", "Epoch 3747, Loss 2.927725\n", "Epoch 3748, Loss 2.927724\n", "Epoch 3749, Loss 2.927724\n", "Epoch 3750, Loss 2.927724\n", "Epoch 3751, Loss 2.927725\n", "Epoch 3752, Loss 2.927724\n", "Epoch 3753, Loss 2.927724\n", "Epoch 3754, Loss 2.927723\n", "Epoch 3755, Loss 2.927723\n", "Epoch 3756, Loss 2.927722\n", "Epoch 3757, Loss 2.927722\n", "Epoch 3758, Loss 2.927723\n", "Epoch 3759, Loss 2.927722\n", "Epoch 3760, Loss 2.927723\n", "Epoch 3761, Loss 2.927721\n", "Epoch 3762, Loss 2.927721\n", "Epoch 3763, Loss 2.927720\n", "Epoch 3764, Loss 2.927720\n", "Epoch 3765, Loss 2.927719\n", "Epoch 3766, Loss 2.927721\n", "Epoch 3767, Loss 2.927719\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 3768, Loss 2.927719\n", "Epoch 3769, Loss 2.927719\n", "Epoch 3770, Loss 2.927719\n", "Epoch 3771, Loss 2.927718\n", "Epoch 3772, Loss 2.927720\n", "Epoch 3773, Loss 2.927718\n", "Epoch 3774, Loss 2.927718\n", "Epoch 3775, Loss 2.927717\n", "Epoch 3776, Loss 2.927718\n", "Epoch 3777, Loss 2.927717\n", "Epoch 3778, Loss 2.927717\n", "Epoch 3779, Loss 2.927716\n", "Epoch 3780, Loss 2.927716\n", "Epoch 3781, Loss 2.927717\n", "Epoch 3782, Loss 2.927717\n", "Epoch 3783, Loss 2.927716\n", "Epoch 3784, Loss 2.927715\n", "Epoch 3785, Loss 2.927715\n", "Epoch 3786, Loss 2.927715\n", "Epoch 3787, Loss 2.927715\n", "Epoch 3788, Loss 2.927715\n", "Epoch 3789, Loss 2.927715\n", "Epoch 3790, Loss 2.927714\n", "Epoch 3791, Loss 2.927714\n", "Epoch 3792, Loss 2.927714\n", "Epoch 3793, Loss 2.927713\n", "Epoch 3794, Loss 2.927713\n", "Epoch 3795, Loss 2.927714\n", "Epoch 3796, Loss 2.927713\n", "Epoch 3797, Loss 2.927712\n", "Epoch 3798, Loss 2.927712\n", "Epoch 3799, Loss 2.927712\n", "Epoch 3800, Loss 2.927711\n", "Epoch 3801, Loss 2.927711\n", "Epoch 3802, Loss 2.927713\n", "Epoch 3803, Loss 2.927711\n", "Epoch 3804, Loss 2.927712\n", "Epoch 3805, Loss 2.927711\n", "Epoch 3806, Loss 2.927711\n", "Epoch 3807, Loss 2.927711\n", "Epoch 3808, Loss 2.927709\n", "Epoch 3809, Loss 2.927711\n", "Epoch 3810, Loss 2.927710\n", "Epoch 3811, Loss 2.927708\n", "Epoch 3812, Loss 2.927708\n", "Epoch 3813, Loss 2.927709\n", "Epoch 3814, Loss 2.927709\n", "Epoch 3815, Loss 2.927710\n", "Epoch 
3816, Loss 2.927708\n", "Epoch 3817, Loss 2.927708\n", "Epoch 3818, Loss 2.927706\n", "Epoch 3819, Loss 2.927707\n", "Epoch 3820, Loss 2.927708\n", "Epoch 3821, Loss 2.927707\n", "Epoch 3822, Loss 2.927707\n", "Epoch 3823, Loss 2.927707\n", "Epoch 3824, Loss 2.927708\n", "Epoch 3825, Loss 2.927708\n", "Epoch 3826, Loss 2.927706\n", "Epoch 3827, Loss 2.927707\n", "Epoch 3828, Loss 2.927706\n", "Epoch 3829, Loss 2.927706\n", "Epoch 3830, Loss 2.927706\n", "Epoch 3831, Loss 2.927705\n", "Epoch 3832, Loss 2.927705\n", "Epoch 3833, Loss 2.927705\n", "Epoch 3834, Loss 2.927705\n", "Epoch 3835, Loss 2.927705\n", "Epoch 3836, Loss 2.927704\n", "Epoch 3837, Loss 2.927703\n", "Epoch 3838, Loss 2.927704\n", "Epoch 3839, Loss 2.927704\n", "Epoch 3840, Loss 2.927703\n", "Epoch 3841, Loss 2.927702\n", "Epoch 3842, Loss 2.927703\n", "Epoch 3843, Loss 2.927702\n", "Epoch 3844, Loss 2.927704\n", "Epoch 3845, Loss 2.927702\n", "Epoch 3846, Loss 2.927701\n", "Epoch 3847, Loss 2.927703\n", "Epoch 3848, Loss 2.927702\n", "Epoch 3849, Loss 2.927701\n", "Epoch 3850, Loss 2.927701\n", "Epoch 3851, Loss 2.927703\n", "Epoch 3852, Loss 2.927700\n", "Epoch 3853, Loss 2.927701\n", "Epoch 3854, Loss 2.927701\n", "Epoch 3855, Loss 2.927700\n", "Epoch 3856, Loss 2.927700\n", "Epoch 3857, Loss 2.927700\n", "Epoch 3858, Loss 2.927701\n", "Epoch 3859, Loss 2.927700\n", "Epoch 3860, Loss 2.927700\n", "Epoch 3861, Loss 2.927700\n", "Epoch 3862, Loss 2.927699\n", "Epoch 3863, Loss 2.927698\n", "Epoch 3864, Loss 2.927700\n", "Epoch 3865, Loss 2.927697\n", "Epoch 3866, Loss 2.927700\n", "Epoch 3867, Loss 2.927700\n", "Epoch 3868, Loss 2.927698\n", "Epoch 3869, Loss 2.927697\n", "Epoch 3870, Loss 2.927698\n", "Epoch 3871, Loss 2.927696\n", "Epoch 3872, Loss 2.927699\n", "Epoch 3873, Loss 2.927697\n", "Epoch 3874, Loss 2.927696\n", "Epoch 3875, Loss 2.927699\n", "Epoch 3876, Loss 2.927697\n", "Epoch 3877, Loss 2.927696\n", "Epoch 3878, Loss 2.927697\n", "Epoch 3879, Loss 2.927696\n", "Epoch 3880, Loss 2.927696\n", "Epoch 3881, Loss 2.927696\n", "Epoch 3882, Loss 2.927696\n", "Epoch 3883, Loss 2.927695\n", "Epoch 3884, Loss 2.927695\n", "Epoch 3885, Loss 2.927696\n", "Epoch 3886, Loss 2.927696\n", "Epoch 3887, Loss 2.927695\n", "Epoch 3888, Loss 2.927694\n", "Epoch 3889, Loss 2.927694\n", "Epoch 3890, Loss 2.927694\n", "Epoch 3891, Loss 2.927693\n", "Epoch 3892, Loss 2.927695\n", "Epoch 3893, Loss 2.927695\n", "Epoch 3894, Loss 2.927694\n", "Epoch 3895, Loss 2.927695\n", "Epoch 3896, Loss 2.927693\n", "Epoch 3897, Loss 2.927693\n", "Epoch 3898, Loss 2.927695\n", "Epoch 3899, Loss 2.927693\n", "Epoch 3900, Loss 2.927692\n", "Epoch 3901, Loss 2.927694\n", "Epoch 3902, Loss 2.927692\n", "Epoch 3903, Loss 2.927693\n", "Epoch 3904, Loss 2.927691\n", "Epoch 3905, Loss 2.927692\n", "Epoch 3906, Loss 2.927692\n", "Epoch 3907, Loss 2.927692\n", "Epoch 3908, Loss 2.927692\n", "Epoch 3909, Loss 2.927692\n", "Epoch 3910, Loss 2.927690\n", "Epoch 3911, Loss 2.927692\n", "Epoch 3912, Loss 2.927691\n", "Epoch 3913, Loss 2.927691\n", "Epoch 3914, Loss 2.927689\n", "Epoch 3915, Loss 2.927691\n", "Epoch 3916, Loss 2.927691\n", "Epoch 3917, Loss 2.927689\n", "Epoch 3918, Loss 2.927690\n", "Epoch 3919, Loss 2.927690\n", "Epoch 3920, Loss 2.927690\n", "Epoch 3921, Loss 2.927690\n", "Epoch 3922, Loss 2.927689\n", "Epoch 3923, Loss 2.927688\n", "Epoch 3924, Loss 2.927689\n", "Epoch 3925, Loss 2.927688\n", "Epoch 3926, Loss 2.927689\n", "Epoch 3927, Loss 2.927689\n", "Epoch 3928, Loss 2.927689\n", "Epoch 3929, Loss 2.927688\n", "Epoch 3930, Loss 
2.927688\n", "Epoch 3931, Loss 2.927688\n", "Epoch 3932, Loss 2.927687\n", "Epoch 3933, Loss 2.927689\n", "Epoch 3934, Loss 2.927688\n", "Epoch 3935, Loss 2.927687\n", "Epoch 3936, Loss 2.927688\n", "Epoch 3937, Loss 2.927686\n", "Epoch 3938, Loss 2.927686\n", "Epoch 3939, Loss 2.927686\n", "Epoch 3940, Loss 2.927687\n", "Epoch 3941, Loss 2.927687\n", "Epoch 3942, Loss 2.927686\n", "Epoch 3943, Loss 2.927687\n", "Epoch 3944, Loss 2.927686\n", "Epoch 3945, Loss 2.927685\n", "Epoch 3946, Loss 2.927685\n", "Epoch 3947, Loss 2.927686\n", "Epoch 3948, Loss 2.927685\n", "Epoch 3949, Loss 2.927686\n", "Epoch 3950, Loss 2.927686\n", "Epoch 3951, Loss 2.927686\n", "Epoch 3952, Loss 2.927685\n", "Epoch 3953, Loss 2.927686\n", "Epoch 3954, Loss 2.927685\n", "Epoch 3955, Loss 2.927685\n", "Epoch 3956, Loss 2.927683\n", "Epoch 3957, Loss 2.927684\n", "Epoch 3958, Loss 2.927685\n", "Epoch 3959, Loss 2.927684\n", "Epoch 3960, Loss 2.927684\n", "Epoch 3961, Loss 2.927684\n", "Epoch 3962, Loss 2.927685\n", "Epoch 3963, Loss 2.927683\n", "Epoch 3964, Loss 2.927685\n", "Epoch 3965, Loss 2.927684\n", "Epoch 3966, Loss 2.927683\n", "Epoch 3967, Loss 2.927683\n", "Epoch 3968, Loss 2.927683\n", "Epoch 3969, Loss 2.927682\n", "Epoch 3970, Loss 2.927682\n", "Epoch 3971, Loss 2.927684\n", "Epoch 3972, Loss 2.927683\n", "Epoch 3973, Loss 2.927684\n", "Epoch 3974, Loss 2.927683\n", "Epoch 3975, Loss 2.927682\n", "Epoch 3976, Loss 2.927682\n", "Epoch 3977, Loss 2.927682\n", "Epoch 3978, Loss 2.927682\n", "Epoch 3979, Loss 2.927681\n", "Epoch 3980, Loss 2.927682\n", "Epoch 3981, Loss 2.927681\n", "Epoch 3982, Loss 2.927681\n", "Epoch 3983, Loss 2.927682\n", "Epoch 3984, Loss 2.927681\n", "Epoch 3985, Loss 2.927681\n", "Epoch 3986, Loss 2.927681\n", "Epoch 3987, Loss 2.927680\n", "Epoch 3988, Loss 2.927681\n", "Epoch 3989, Loss 2.927681\n", "Epoch 3990, Loss 2.927680\n", "Epoch 3991, Loss 2.927682\n", "Epoch 3992, Loss 2.927681\n", "Epoch 3993, Loss 2.927680\n", "Epoch 3994, Loss 2.927679\n", "Epoch 3995, Loss 2.927680\n", "Epoch 3996, Loss 2.927679\n", "Epoch 3997, Loss 2.927680\n", "Epoch 3998, Loss 2.927680\n", "Epoch 3999, Loss 2.927679\n", "Epoch 4000, Loss 2.927679\n", "Epoch 4001, Loss 2.927679\n", "Epoch 4002, Loss 2.927679\n", "Epoch 4003, Loss 2.927679\n", "Epoch 4004, Loss 2.927680\n", "Epoch 4005, Loss 2.927681\n", "Epoch 4006, Loss 2.927679\n", "Epoch 4007, Loss 2.927679\n", "Epoch 4008, Loss 2.927679\n", "Epoch 4009, Loss 2.927679\n", "Epoch 4010, Loss 2.927678\n", "Epoch 4011, Loss 2.927679\n", "Epoch 4012, Loss 2.927679\n", "Epoch 4013, Loss 2.927677\n", "Epoch 4014, Loss 2.927678\n", "Epoch 4015, Loss 2.927677\n", "Epoch 4016, Loss 2.927678\n", "Epoch 4017, Loss 2.927677\n", "Epoch 4018, Loss 2.927677\n", "Epoch 4019, Loss 2.927676\n", "Epoch 4020, Loss 2.927678\n", "Epoch 4021, Loss 2.927677\n", "Epoch 4022, Loss 2.927677\n", "Epoch 4023, Loss 2.927678\n", "Epoch 4024, Loss 2.927678\n", "Epoch 4025, Loss 2.927677\n", "Epoch 4026, Loss 2.927676\n", "Epoch 4027, Loss 2.927676\n", "Epoch 4028, Loss 2.927677\n", "Epoch 4029, Loss 2.927676\n", "Epoch 4030, Loss 2.927675\n", "Epoch 4031, Loss 2.927676\n", "Epoch 4032, Loss 2.927676\n", "Epoch 4033, Loss 2.927675\n", "Epoch 4034, Loss 2.927676\n", "Epoch 4035, Loss 2.927676\n", "Epoch 4036, Loss 2.927676\n", "Epoch 4037, Loss 2.927677\n", "Epoch 4038, Loss 2.927676\n", "Epoch 4039, Loss 2.927675\n", "Epoch 4040, Loss 2.927676\n", "Epoch 4041, Loss 2.927675\n", "Epoch 4042, Loss 2.927675\n", "Epoch 4043, Loss 2.927675\n", "Epoch 4044, Loss 2.927675\n", "Epoch 
4045, Loss 2.927675\n", "Epoch 4046, Loss 2.927675\n", "Epoch 4047, Loss 2.927675\n", "Epoch 4048, Loss 2.927675\n", "Epoch 4049, Loss 2.927673\n", "Epoch 4050, Loss 2.927675\n", "Epoch 4051, Loss 2.927675\n", "Epoch 4052, Loss 2.927675\n", "Epoch 4053, Loss 2.927673\n", "Epoch 4054, Loss 2.927674\n", "Epoch 4055, Loss 2.927675\n", "Epoch 4056, Loss 2.927673\n", "Epoch 4057, Loss 2.927673\n", "Epoch 4058, Loss 2.927673\n", "Epoch 4059, Loss 2.927675\n", "Epoch 4060, Loss 2.927673\n", "Epoch 4061, Loss 2.927673\n", "Epoch 4062, Loss 2.927673\n", "Epoch 4063, Loss 2.927673\n", "Epoch 4064, Loss 2.927673\n", "Epoch 4065, Loss 2.927672\n", "Epoch 4066, Loss 2.927673\n", "Epoch 4067, Loss 2.927672\n", "Epoch 4068, Loss 2.927672\n", "Epoch 4069, Loss 2.927673\n", "Epoch 4070, Loss 2.927672\n", "Epoch 4071, Loss 2.927672\n", "Epoch 4072, Loss 2.927672\n", "Epoch 4073, Loss 2.927672\n", "Epoch 4074, Loss 2.927672\n", "Epoch 4075, Loss 2.927672\n", "Epoch 4076, Loss 2.927671\n", "Epoch 4077, Loss 2.927671\n", "Epoch 4078, Loss 2.927673\n", "Epoch 4079, Loss 2.927671\n", "Epoch 4080, Loss 2.927670\n", "Epoch 4081, Loss 2.927672\n", "Epoch 4082, Loss 2.927671\n", "Epoch 4083, Loss 2.927673\n", "Epoch 4084, Loss 2.927670\n", "Epoch 4085, Loss 2.927670\n", "Epoch 4086, Loss 2.927671\n", "Epoch 4087, Loss 2.927672\n", "Epoch 4088, Loss 2.927670\n", "Epoch 4089, Loss 2.927670\n", "Epoch 4090, Loss 2.927670\n", "Epoch 4091, Loss 2.927671\n", "Epoch 4092, Loss 2.927670\n", "Epoch 4093, Loss 2.927671\n", "Epoch 4094, Loss 2.927670\n", "Epoch 4095, Loss 2.927670\n", "Epoch 4096, Loss 2.927670\n", "Epoch 4097, Loss 2.927670\n", "Epoch 4098, Loss 2.927671\n", "Epoch 4099, Loss 2.927670\n", "Epoch 4100, Loss 2.927669\n", "Epoch 4101, Loss 2.927669\n", "Epoch 4102, Loss 2.927671\n", "Epoch 4103, Loss 2.927670\n", "Epoch 4104, Loss 2.927670\n", "Epoch 4105, Loss 2.927670\n", "Epoch 4106, Loss 2.927670\n", "Epoch 4107, Loss 2.927670\n", "Epoch 4108, Loss 2.927669\n", "Epoch 4109, Loss 2.927668\n", "Epoch 4110, Loss 2.927670\n", "Epoch 4111, Loss 2.927669\n", "Epoch 4112, Loss 2.927669\n", "Epoch 4113, Loss 2.927669\n", "Epoch 4114, Loss 2.927670\n", "Epoch 4115, Loss 2.927669\n", "Epoch 4116, Loss 2.927668\n", "Epoch 4117, Loss 2.927667\n", "Epoch 4118, Loss 2.927669\n", "Epoch 4119, Loss 2.927668\n", "Epoch 4120, Loss 2.927668\n", "Epoch 4121, Loss 2.927669\n", "Epoch 4122, Loss 2.927669\n", "Epoch 4123, Loss 2.927668\n", "Epoch 4124, Loss 2.927668\n", "Epoch 4125, Loss 2.927668\n", "Epoch 4126, Loss 2.927668\n", "Epoch 4127, Loss 2.927667\n", "Epoch 4128, Loss 2.927668\n", "Epoch 4129, Loss 2.927667\n", "Epoch 4130, Loss 2.927667\n", "Epoch 4131, Loss 2.927667\n", "Epoch 4132, Loss 2.927668\n", "Epoch 4133, Loss 2.927666\n", "Epoch 4134, Loss 2.927667\n", "Epoch 4135, Loss 2.927667\n", "Epoch 4136, Loss 2.927667\n", "Epoch 4137, Loss 2.927667\n", "Epoch 4138, Loss 2.927666\n", "Epoch 4139, Loss 2.927669\n", "Epoch 4140, Loss 2.927667\n", "Epoch 4141, Loss 2.927666\n", "Epoch 4142, Loss 2.927667\n", "Epoch 4143, Loss 2.927665\n", "Epoch 4144, Loss 2.927667\n", "Epoch 4145, Loss 2.927666\n", "Epoch 4146, Loss 2.927666\n", "Epoch 4147, Loss 2.927667\n", "Epoch 4148, Loss 2.927666\n", "Epoch 4149, Loss 2.927667\n", "Epoch 4150, Loss 2.927666\n", "Epoch 4151, Loss 2.927666\n", "Epoch 4152, Loss 2.927667\n", "Epoch 4153, Loss 2.927666\n", "Epoch 4154, Loss 2.927666\n", "Epoch 4155, Loss 2.927666\n", "Epoch 4156, Loss 2.927666\n", "Epoch 4157, Loss 2.927666\n", "Epoch 4158, Loss 2.927666\n", "Epoch 4159, Loss 
2.927665\n", "Epoch 4160, Loss 2.927666\n", "Epoch 4161, Loss 2.927665\n", "Epoch 4162, Loss 2.927665\n", "Epoch 4163, Loss 2.927666\n", "Epoch 4164, Loss 2.927666\n", "Epoch 4165, Loss 2.927665\n", "Epoch 4166, Loss 2.927664\n", "Epoch 4167, Loss 2.927666\n", "Epoch 4168, Loss 2.927664\n", "Epoch 4169, Loss 2.927665\n", "Epoch 4170, Loss 2.927665\n", "Epoch 4171, Loss 2.927664\n", "Epoch 4172, Loss 2.927666\n", "Epoch 4173, Loss 2.927665\n", "Epoch 4174, Loss 2.927664\n", "Epoch 4175, Loss 2.927665\n", "Epoch 4176, Loss 2.927665\n", "Epoch 4177, Loss 2.927665\n", "Epoch 4178, Loss 2.927663\n", "Epoch 4179, Loss 2.927665\n", "Epoch 4180, Loss 2.927664\n", "Epoch 4181, Loss 2.927664\n", "Epoch 4182, Loss 2.927664\n", "Epoch 4183, Loss 2.927663\n", "Epoch 4184, Loss 2.927663\n", "Epoch 4185, Loss 2.927665\n", "Epoch 4186, Loss 2.927664\n", "Epoch 4187, Loss 2.927663\n", "Epoch 4188, Loss 2.927664\n", "Epoch 4189, Loss 2.927664\n", "Epoch 4190, Loss 2.927663\n", "Epoch 4191, Loss 2.927662\n", "Epoch 4192, Loss 2.927663\n", "Epoch 4193, Loss 2.927663\n", "Epoch 4194, Loss 2.927663\n", "Epoch 4195, Loss 2.927663\n", "Epoch 4196, Loss 2.927662\n", "Epoch 4197, Loss 2.927663\n", "Epoch 4198, Loss 2.927664\n", "Epoch 4199, Loss 2.927663\n", "Epoch 4200, Loss 2.927664\n", "Epoch 4201, Loss 2.927663\n", "Epoch 4202, Loss 2.927664\n", "Epoch 4203, Loss 2.927663\n", "Epoch 4204, Loss 2.927662\n", "Epoch 4205, Loss 2.927662\n", "Epoch 4206, Loss 2.927662\n", "Epoch 4207, Loss 2.927663\n", "Epoch 4208, Loss 2.927662\n", "Epoch 4209, Loss 2.927663\n", "Epoch 4210, Loss 2.927662\n", "Epoch 4211, Loss 2.927664\n", "Epoch 4212, Loss 2.927663\n", "Epoch 4213, Loss 2.927662\n", "Epoch 4214, Loss 2.927662\n", "Epoch 4215, Loss 2.927664\n", "Epoch 4216, Loss 2.927662\n", "Epoch 4217, Loss 2.927660\n", "Epoch 4218, Loss 2.927663\n", "Epoch 4219, Loss 2.927662\n", "Epoch 4220, Loss 2.927663\n", "Epoch 4221, Loss 2.927662\n", "Epoch 4222, Loss 2.927661\n", "Epoch 4223, Loss 2.927660\n", "Epoch 4224, Loss 2.927662\n", "Epoch 4225, Loss 2.927663\n", "Epoch 4226, Loss 2.927661\n", "Epoch 4227, Loss 2.927661\n", "Epoch 4228, Loss 2.927663\n", "Epoch 4229, Loss 2.927662\n", "Epoch 4230, Loss 2.927662\n", "Epoch 4231, Loss 2.927661\n", "Epoch 4232, Loss 2.927660\n", "Epoch 4233, Loss 2.927661\n", "Epoch 4234, Loss 2.927662\n", "Epoch 4235, Loss 2.927661\n", "Epoch 4236, Loss 2.927660\n", "Epoch 4237, Loss 2.927662\n", "Epoch 4238, Loss 2.927661\n", "Epoch 4239, Loss 2.927661\n", "Epoch 4240, Loss 2.927660\n", "Epoch 4241, Loss 2.927660\n", "Epoch 4242, Loss 2.927662\n", "Epoch 4243, Loss 2.927662\n", "Epoch 4244, Loss 2.927661\n", "Epoch 4245, Loss 2.927661\n", "Epoch 4246, Loss 2.927661\n", "Epoch 4247, Loss 2.927661\n", "Epoch 4248, Loss 2.927660\n", "Epoch 4249, Loss 2.927661\n", "Epoch 4250, Loss 2.927660\n", "Epoch 4251, Loss 2.927660\n", "Epoch 4252, Loss 2.927660\n", "Epoch 4253, Loss 2.927659\n", "Epoch 4254, Loss 2.927659\n", "Epoch 4255, Loss 2.927661\n", "Epoch 4256, Loss 2.927662\n", "Epoch 4257, Loss 2.927661\n", "Epoch 4258, Loss 2.927660\n", "Epoch 4259, Loss 2.927660\n", "Epoch 4260, Loss 2.927660\n", "Epoch 4261, Loss 2.927658\n", "Epoch 4262, Loss 2.927658\n", "Epoch 4263, Loss 2.927660\n", "Epoch 4264, Loss 2.927659\n", "Epoch 4265, Loss 2.927660\n", "Epoch 4266, Loss 2.927660\n", "Epoch 4267, Loss 2.927660\n", "Epoch 4268, Loss 2.927660\n", "Epoch 4269, Loss 2.927660\n", "Epoch 4270, Loss 2.927660\n", "Epoch 4271, Loss 2.927660\n", "Epoch 4272, Loss 2.927659\n", "Epoch 4273, Loss 2.927659\n", "Epoch 
4274, Loss 2.927660\n", "Epoch 4275, Loss 2.927660\n", "Epoch 4276, Loss 2.927660\n", "Epoch 4277, Loss 2.927659\n", "Epoch 4278, Loss 2.927658\n", "Epoch 4279, Loss 2.927659\n", "Epoch 4280, Loss 2.927659\n", "Epoch 4281, Loss 2.927660\n", "Epoch 4282, Loss 2.927658\n", "Epoch 4283, Loss 2.927659\n", "Epoch 4284, Loss 2.927658\n", "Epoch 4285, Loss 2.927657\n", "Epoch 4286, Loss 2.927659\n", "Epoch 4287, Loss 2.927659\n", "Epoch 4288, Loss 2.927660\n", "Epoch 4289, Loss 2.927658\n", "Epoch 4290, Loss 2.927658\n", "Epoch 4291, Loss 2.927658\n", "Epoch 4292, Loss 2.927658\n", "Epoch 4293, Loss 2.927657\n", "Epoch 4294, Loss 2.927658\n", "Epoch 4295, Loss 2.927659\n", "Epoch 4296, Loss 2.927660\n", "Epoch 4297, Loss 2.927658\n", "Epoch 4298, Loss 2.927659\n", "Epoch 4299, Loss 2.927658\n", "Epoch 4300, Loss 2.927657\n", "Epoch 4301, Loss 2.927658\n", "Epoch 4302, Loss 2.927658\n", "Epoch 4303, Loss 2.927658\n", "Epoch 4304, Loss 2.927657\n", "Epoch 4305, Loss 2.927659\n", "Epoch 4306, Loss 2.927657\n", "Epoch 4307, Loss 2.927658\n", "Epoch 4308, Loss 2.927658\n", "Epoch 4309, Loss 2.927658\n", "Epoch 4310, Loss 2.927658\n", "Epoch 4311, Loss 2.927657\n", "Epoch 4312, Loss 2.927657\n", "Epoch 4313, Loss 2.927657\n", "Epoch 4314, Loss 2.927656\n", "Epoch 4315, Loss 2.927657\n", "Epoch 4316, Loss 2.927657\n", "Epoch 4317, Loss 2.927657\n", "Epoch 4318, Loss 2.927656\n", "Epoch 4319, Loss 2.927657\n", "Epoch 4320, Loss 2.927657\n", "Epoch 4321, Loss 2.927656\n", "Epoch 4322, Loss 2.927658\n", "Epoch 4323, Loss 2.927658\n", "Epoch 4324, Loss 2.927657\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 4325, Loss 2.927656\n", "Epoch 4326, Loss 2.927657\n", "Epoch 4327, Loss 2.927658\n", "Epoch 4328, Loss 2.927657\n", "Epoch 4329, Loss 2.927657\n", "Epoch 4330, Loss 2.927657\n", "Epoch 4331, Loss 2.927658\n", "Epoch 4332, Loss 2.927658\n", "Epoch 4333, Loss 2.927657\n", "Epoch 4334, Loss 2.927658\n", "Epoch 4335, Loss 2.927657\n", "Epoch 4336, Loss 2.927657\n", "Epoch 4337, Loss 2.927657\n", "Epoch 4338, Loss 2.927657\n", "Epoch 4339, Loss 2.927657\n", "Epoch 4340, Loss 2.927656\n", "Epoch 4341, Loss 2.927657\n", "Epoch 4342, Loss 2.927655\n", "Epoch 4343, Loss 2.927656\n", "Epoch 4344, Loss 2.927656\n", "Epoch 4345, Loss 2.927657\n", "Epoch 4346, Loss 2.927656\n", "Epoch 4347, Loss 2.927657\n", "Epoch 4348, Loss 2.927655\n", "Epoch 4349, Loss 2.927656\n", "Epoch 4350, Loss 2.927656\n", "Epoch 4351, Loss 2.927655\n", "Epoch 4352, Loss 2.927656\n", "Epoch 4353, Loss 2.927656\n", "Epoch 4354, Loss 2.927655\n", "Epoch 4355, Loss 2.927655\n", "Epoch 4356, Loss 2.927656\n", "Epoch 4357, Loss 2.927655\n", "Epoch 4358, Loss 2.927657\n", "Epoch 4359, Loss 2.927656\n", "Epoch 4360, Loss 2.927655\n", "Epoch 4361, Loss 2.927656\n", "Epoch 4362, Loss 2.927655\n", "Epoch 4363, Loss 2.927656\n", "Epoch 4364, Loss 2.927656\n", "Epoch 4365, Loss 2.927656\n", "Epoch 4366, Loss 2.927656\n", "Epoch 4367, Loss 2.927655\n", "Epoch 4368, Loss 2.927654\n", "Epoch 4369, Loss 2.927655\n", "Epoch 4370, Loss 2.927656\n", "Epoch 4371, Loss 2.927655\n", "Epoch 4372, Loss 2.927656\n", "Epoch 4373, Loss 2.927656\n", "Epoch 4374, Loss 2.927655\n", "Epoch 4375, Loss 2.927656\n", "Epoch 4376, Loss 2.927655\n", "Epoch 4377, Loss 2.927656\n", "Epoch 4378, Loss 2.927655\n", "Epoch 4379, Loss 2.927655\n", "Epoch 4380, Loss 2.927654\n", "Epoch 4381, Loss 2.927656\n", "Epoch 4382, Loss 2.927655\n", "Epoch 4383, Loss 2.927656\n", "Epoch 4384, Loss 2.927656\n", "Epoch 4385, Loss 2.927655\n", "Epoch 4386, Loss 2.927656\n", 
"Epoch 4387, Loss 2.927654\n", "Epoch 4388, Loss 2.927656\n", "Epoch 4389, Loss 2.927654\n", "Epoch 4390, Loss 2.927655\n", "Epoch 4391, Loss 2.927654\n", "Epoch 4392, Loss 2.927655\n", "Epoch 4393, Loss 2.927655\n", "Epoch 4394, Loss 2.927654\n", "Epoch 4395, Loss 2.927654\n", "Epoch 4396, Loss 2.927655\n", "Epoch 4397, Loss 2.927655\n", "Epoch 4398, Loss 2.927654\n", "Epoch 4399, Loss 2.927655\n", "Epoch 4400, Loss 2.927654\n", "Epoch 4401, Loss 2.927655\n", "Epoch 4402, Loss 2.927654\n", "Epoch 4403, Loss 2.927654\n", "Epoch 4404, Loss 2.927654\n", "Epoch 4405, Loss 2.927654\n", "Epoch 4406, Loss 2.927655\n", "Epoch 4407, Loss 2.927654\n", "Epoch 4408, Loss 2.927654\n", "Epoch 4409, Loss 2.927654\n", "Epoch 4410, Loss 2.927655\n", "Epoch 4411, Loss 2.927655\n", "Epoch 4412, Loss 2.927656\n", "Epoch 4413, Loss 2.927654\n", "Epoch 4414, Loss 2.927655\n", "Epoch 4415, Loss 2.927654\n", "Epoch 4416, Loss 2.927653\n", "Epoch 4417, Loss 2.927655\n", "Epoch 4418, Loss 2.927653\n", "Epoch 4419, Loss 2.927655\n", "Epoch 4420, Loss 2.927653\n", "Epoch 4421, Loss 2.927654\n", "Epoch 4422, Loss 2.927653\n", "Epoch 4423, Loss 2.927655\n", "Epoch 4424, Loss 2.927654\n", "Epoch 4425, Loss 2.927655\n", "Epoch 4426, Loss 2.927653\n", "Epoch 4427, Loss 2.927654\n", "Epoch 4428, Loss 2.927655\n", "Epoch 4429, Loss 2.927654\n", "Epoch 4430, Loss 2.927654\n", "Epoch 4431, Loss 2.927653\n", "Epoch 4432, Loss 2.927654\n", "Epoch 4433, Loss 2.927654\n", "Epoch 4434, Loss 2.927654\n", "Epoch 4435, Loss 2.927655\n", "Epoch 4436, Loss 2.927653\n", "Epoch 4437, Loss 2.927652\n", "Epoch 4438, Loss 2.927653\n", "Epoch 4439, Loss 2.927654\n", "Epoch 4440, Loss 2.927655\n", "Epoch 4441, Loss 2.927655\n", "Epoch 4442, Loss 2.927652\n", "Epoch 4443, Loss 2.927653\n", "Epoch 4444, Loss 2.927651\n", "Epoch 4445, Loss 2.927654\n", "Epoch 4446, Loss 2.927654\n", "Epoch 4447, Loss 2.927653\n", "Epoch 4448, Loss 2.927654\n", "Epoch 4449, Loss 2.927655\n", "Epoch 4450, Loss 2.927654\n", "Epoch 4451, Loss 2.927654\n", "Epoch 4452, Loss 2.927653\n", "Epoch 4453, Loss 2.927652\n", "Epoch 4454, Loss 2.927653\n", "Epoch 4455, Loss 2.927653\n", "Epoch 4456, Loss 2.927654\n", "Epoch 4457, Loss 2.927653\n", "Epoch 4458, Loss 2.927652\n", "Epoch 4459, Loss 2.927653\n", "Epoch 4460, Loss 2.927652\n", "Epoch 4461, Loss 2.927654\n", "Epoch 4462, Loss 2.927654\n", "Epoch 4463, Loss 2.927654\n", "Epoch 4464, Loss 2.927653\n", "Epoch 4465, Loss 2.927653\n", "Epoch 4466, Loss 2.927652\n", "Epoch 4467, Loss 2.927654\n", "Epoch 4468, Loss 2.927653\n", "Epoch 4469, Loss 2.927653\n", "Epoch 4470, Loss 2.927653\n", "Epoch 4471, Loss 2.927653\n", "Epoch 4472, Loss 2.927653\n", "Epoch 4473, Loss 2.927654\n", "Epoch 4474, Loss 2.927653\n", "Epoch 4475, Loss 2.927653\n", "Epoch 4476, Loss 2.927652\n", "Epoch 4477, Loss 2.927653\n", "Epoch 4478, Loss 2.927654\n", "Epoch 4479, Loss 2.927653\n", "Epoch 4480, Loss 2.927651\n", "Epoch 4481, Loss 2.927653\n", "Epoch 4482, Loss 2.927653\n", "Epoch 4483, Loss 2.927654\n", "Epoch 4484, Loss 2.927653\n", "Epoch 4485, Loss 2.927653\n", "Epoch 4486, Loss 2.927652\n", "Epoch 4487, Loss 2.927651\n", "Epoch 4488, Loss 2.927652\n", "Epoch 4489, Loss 2.927653\n", "Epoch 4490, Loss 2.927654\n", "Epoch 4491, Loss 2.927653\n", "Epoch 4492, Loss 2.927652\n", "Epoch 4493, Loss 2.927652\n", "Epoch 4494, Loss 2.927651\n", "Epoch 4495, Loss 2.927652\n", "Epoch 4496, Loss 2.927653\n", "Epoch 4497, Loss 2.927653\n", "Epoch 4498, Loss 2.927652\n", "Epoch 4499, Loss 2.927652\n", "Epoch 4500, Loss 2.927652\n", "Epoch 4501, Loss 
2.927652\n", "Epoch 4502, Loss 2.927654\n", "Epoch 4503, Loss 2.927651\n", "Epoch 4504, Loss 2.927652\n", "Epoch 4505, Loss 2.927653\n", "Epoch 4506, Loss 2.927653\n", "Epoch 4507, Loss 2.927651\n", "Epoch 4508, Loss 2.927651\n", "Epoch 4509, Loss 2.927653\n", "Epoch 4510, Loss 2.927653\n", "Epoch 4511, Loss 2.927651\n", "Epoch 4512, Loss 2.927651\n", "Epoch 4513, Loss 2.927653\n", "Epoch 4514, Loss 2.927653\n", "Epoch 4515, Loss 2.927652\n", "Epoch 4516, Loss 2.927653\n", "Epoch 4517, Loss 2.927652\n", "Epoch 4518, Loss 2.927652\n", "Epoch 4519, Loss 2.927653\n", "Epoch 4520, Loss 2.927652\n", "Epoch 4521, Loss 2.927652\n", "Epoch 4522, Loss 2.927651\n", "Epoch 4523, Loss 2.927651\n", "Epoch 4524, Loss 2.927652\n", "Epoch 4525, Loss 2.927652\n", "Epoch 4526, Loss 2.927651\n", "Epoch 4527, Loss 2.927651\n", "Epoch 4528, Loss 2.927650\n", "Epoch 4529, Loss 2.927651\n", "Epoch 4530, Loss 2.927653\n", "Epoch 4531, Loss 2.927651\n", "Epoch 4532, Loss 2.927651\n", "Epoch 4533, Loss 2.927652\n", "Epoch 4534, Loss 2.927653\n", "Epoch 4535, Loss 2.927651\n", "Epoch 4536, Loss 2.927651\n", "Epoch 4537, Loss 2.927650\n", "Epoch 4538, Loss 2.927651\n", "Epoch 4539, Loss 2.927650\n", "Epoch 4540, Loss 2.927652\n", "Epoch 4541, Loss 2.927652\n", "Epoch 4542, Loss 2.927651\n", "Epoch 4543, Loss 2.927652\n", "Epoch 4544, Loss 2.927652\n", "Epoch 4545, Loss 2.927651\n", "Epoch 4546, Loss 2.927650\n", "Epoch 4547, Loss 2.927651\n", "Epoch 4548, Loss 2.927652\n", "Epoch 4549, Loss 2.927651\n", "Epoch 4550, Loss 2.927653\n", "Epoch 4551, Loss 2.927651\n", "Epoch 4552, Loss 2.927652\n", "Epoch 4553, Loss 2.927651\n", "Epoch 4554, Loss 2.927652\n", "Epoch 4555, Loss 2.927650\n", "Epoch 4556, Loss 2.927650\n", "Epoch 4557, Loss 2.927650\n", "Epoch 4558, Loss 2.927652\n", "Epoch 4559, Loss 2.927650\n", "Epoch 4560, Loss 2.927650\n", "Epoch 4561, Loss 2.927651\n", "Epoch 4562, Loss 2.927652\n", "Epoch 4563, Loss 2.927650\n", "Epoch 4564, Loss 2.927651\n", "Epoch 4565, Loss 2.927650\n", "Epoch 4566, Loss 2.927651\n", "Epoch 4567, Loss 2.927650\n", "Epoch 4568, Loss 2.927651\n", "Epoch 4569, Loss 2.927650\n", "Epoch 4570, Loss 2.927650\n", "Epoch 4571, Loss 2.927650\n", "Epoch 4572, Loss 2.927651\n", "Epoch 4573, Loss 2.927652\n", "Epoch 4574, Loss 2.927650\n", "Epoch 4575, Loss 2.927651\n", "Epoch 4576, Loss 2.927651\n", "Epoch 4577, Loss 2.927651\n", "Epoch 4578, Loss 2.927652\n", "Epoch 4579, Loss 2.927650\n", "Epoch 4580, Loss 2.927650\n", "Epoch 4581, Loss 2.927650\n", "Epoch 4582, Loss 2.927651\n", "Epoch 4583, Loss 2.927650\n", "Epoch 4584, Loss 2.927650\n", "Epoch 4585, Loss 2.927652\n", "Epoch 4586, Loss 2.927650\n", "Epoch 4587, Loss 2.927651\n", "Epoch 4588, Loss 2.927650\n", "Epoch 4589, Loss 2.927650\n", "Epoch 4590, Loss 2.927652\n", "Epoch 4591, Loss 2.927650\n", "Epoch 4592, Loss 2.927651\n", "Epoch 4593, Loss 2.927651\n", "Epoch 4594, Loss 2.927650\n", "Epoch 4595, Loss 2.927650\n", "Epoch 4596, Loss 2.927651\n", "Epoch 4597, Loss 2.927651\n", "Epoch 4598, Loss 2.927652\n", "Epoch 4599, Loss 2.927649\n", "Epoch 4600, Loss 2.927650\n", "Epoch 4601, Loss 2.927650\n", "Epoch 4602, Loss 2.927649\n", "Epoch 4603, Loss 2.927649\n", "Epoch 4604, Loss 2.927649\n", "Epoch 4605, Loss 2.927650\n", "Epoch 4606, Loss 2.927650\n", "Epoch 4607, Loss 2.927650\n", "Epoch 4608, Loss 2.927651\n", "Epoch 4609, Loss 2.927650\n", "Epoch 4610, Loss 2.927651\n", "Epoch 4611, Loss 2.927650\n", "Epoch 4612, Loss 2.927650\n", "Epoch 4613, Loss 2.927650\n", "Epoch 4614, Loss 2.927649\n", "Epoch 4615, Loss 2.927650\n", "Epoch 
4616, Loss 2.927651\n", "Epoch 4617, Loss 2.927650\n", "Epoch 4618, Loss 2.927651\n", "Epoch 4619, Loss 2.927649\n", "Epoch 4620, Loss 2.927650\n", "Epoch 4621, Loss 2.927650\n", "Epoch 4622, Loss 2.927650\n", "Epoch 4623, Loss 2.927651\n", "Epoch 4624, Loss 2.927651\n", "Epoch 4625, Loss 2.927649\n", "Epoch 4626, Loss 2.927650\n", "Epoch 4627, Loss 2.927650\n", "Epoch 4628, Loss 2.927651\n", "Epoch 4629, Loss 2.927650\n", "Epoch 4630, Loss 2.927647\n", "Epoch 4631, Loss 2.927648\n", "Epoch 4632, Loss 2.927649\n", "Epoch 4633, Loss 2.927649\n", "Epoch 4634, Loss 2.927650\n", "Epoch 4635, Loss 2.927649\n", "Epoch 4636, Loss 2.927650\n", "Epoch 4637, Loss 2.927650\n", "Epoch 4638, Loss 2.927651\n", "Epoch 4639, Loss 2.927649\n", "Epoch 4640, Loss 2.927649\n", "Epoch 4641, Loss 2.927650\n", "Epoch 4642, Loss 2.927650\n", "Epoch 4643, Loss 2.927649\n", "Epoch 4644, Loss 2.927650\n", "Epoch 4645, Loss 2.927650\n", "Epoch 4646, Loss 2.927650\n", "Epoch 4647, Loss 2.927651\n", "Epoch 4648, Loss 2.927650\n", "Epoch 4649, Loss 2.927650\n", "Epoch 4650, Loss 2.927649\n", "Epoch 4651, Loss 2.927650\n", "Epoch 4652, Loss 2.927651\n", "Epoch 4653, Loss 2.927650\n", "Epoch 4654, Loss 2.927651\n", "Epoch 4655, Loss 2.927651\n", "Epoch 4656, Loss 2.927650\n", "Epoch 4657, Loss 2.927649\n", "Epoch 4658, Loss 2.927649\n", "Epoch 4659, Loss 2.927649\n", "Epoch 4660, Loss 2.927649\n", "Epoch 4661, Loss 2.927648\n", "Epoch 4662, Loss 2.927650\n", "Epoch 4663, Loss 2.927649\n", "Epoch 4664, Loss 2.927648\n", "Epoch 4665, Loss 2.927649\n", "Epoch 4666, Loss 2.927649\n", "Epoch 4667, Loss 2.927649\n", "Epoch 4668, Loss 2.927649\n", "Epoch 4669, Loss 2.927649\n", "Epoch 4670, Loss 2.927649\n", "Epoch 4671, Loss 2.927649\n", "Epoch 4672, Loss 2.927649\n", "Epoch 4673, Loss 2.927650\n", "Epoch 4674, Loss 2.927650\n", "Epoch 4675, Loss 2.927650\n", "Epoch 4676, Loss 2.927650\n", "Epoch 4677, Loss 2.927650\n", "Epoch 4678, Loss 2.927650\n", "Epoch 4679, Loss 2.927649\n", "Epoch 4680, Loss 2.927648\n", "Epoch 4681, Loss 2.927649\n", "Epoch 4682, Loss 2.927649\n", "Epoch 4683, Loss 2.927648\n", "Epoch 4684, Loss 2.927648\n", "Epoch 4685, Loss 2.927649\n", "Epoch 4686, Loss 2.927651\n", "Epoch 4687, Loss 2.927649\n", "Epoch 4688, Loss 2.927650\n", "Epoch 4689, Loss 2.927649\n", "Epoch 4690, Loss 2.927650\n", "Epoch 4691, Loss 2.927648\n", "Epoch 4692, Loss 2.927649\n", "Epoch 4693, Loss 2.927649\n", "Epoch 4694, Loss 2.927650\n", "Epoch 4695, Loss 2.927648\n", "Epoch 4696, Loss 2.927649\n", "Epoch 4697, Loss 2.927648\n", "Epoch 4698, Loss 2.927649\n", "Epoch 4699, Loss 2.927649\n", "Epoch 4700, Loss 2.927650\n", "Epoch 4701, Loss 2.927650\n", "Epoch 4702, Loss 2.927650\n", "Epoch 4703, Loss 2.927648\n", "Epoch 4704, Loss 2.927649\n", "Epoch 4705, Loss 2.927649\n", "Epoch 4706, Loss 2.927649\n", "Epoch 4707, Loss 2.927649\n", "Epoch 4708, Loss 2.927648\n", "Epoch 4709, Loss 2.927650\n", "Epoch 4710, Loss 2.927650\n", "Epoch 4711, Loss 2.927649\n", "Epoch 4712, Loss 2.927649\n", "Epoch 4713, Loss 2.927649\n", "Epoch 4714, Loss 2.927649\n", "Epoch 4715, Loss 2.927650\n", "Epoch 4716, Loss 2.927649\n", "Epoch 4717, Loss 2.927649\n", "Epoch 4718, Loss 2.927649\n", "Epoch 4719, Loss 2.927648\n", "Epoch 4720, Loss 2.927649\n", "Epoch 4721, Loss 2.927649\n", "Epoch 4722, Loss 2.927650\n", "Epoch 4723, Loss 2.927650\n", "Epoch 4724, Loss 2.927649\n", "Epoch 4725, Loss 2.927650\n", "Epoch 4726, Loss 2.927649\n", "Epoch 4727, Loss 2.927648\n", "Epoch 4728, Loss 2.927647\n", "Epoch 4729, Loss 2.927649\n", "Epoch 4730, Loss 
2.927648\n", "Epoch 4731, Loss 2.927649\n", "Epoch 4732, Loss 2.927648\n", "Epoch 4733, Loss 2.927649\n", "Epoch 4734, Loss 2.927647\n", "Epoch 4735, Loss 2.927650\n", "Epoch 4736, Loss 2.927650\n", "Epoch 4737, Loss 2.927650\n", "Epoch 4738, Loss 2.927649\n", "Epoch 4739, Loss 2.927648\n", "Epoch 4740, Loss 2.927648\n", "Epoch 4741, Loss 2.927648\n", "Epoch 4742, Loss 2.927647\n", "Epoch 4743, Loss 2.927648\n", "Epoch 4744, Loss 2.927649\n", "Epoch 4745, Loss 2.927648\n", "Epoch 4746, Loss 2.927648\n", "Epoch 4747, Loss 2.927649\n", "Epoch 4748, Loss 2.927648\n", "Epoch 4749, Loss 2.927649\n", "Epoch 4750, Loss 2.927649\n", "Epoch 4751, Loss 2.927649\n", "Epoch 4752, Loss 2.927648\n", "Epoch 4753, Loss 2.927648\n", "Epoch 4754, Loss 2.927647\n", "Epoch 4755, Loss 2.927648\n", "Epoch 4756, Loss 2.927648\n", "Epoch 4757, Loss 2.927648\n", "Epoch 4758, Loss 2.927648\n", "Epoch 4759, Loss 2.927649\n", "Epoch 4760, Loss 2.927649\n", "Epoch 4761, Loss 2.927649\n", "Epoch 4762, Loss 2.927648\n", "Epoch 4763, Loss 2.927647\n", "Epoch 4764, Loss 2.927648\n", "Epoch 4765, Loss 2.927648\n", "Epoch 4766, Loss 2.927649\n", "Epoch 4767, Loss 2.927648\n", "Epoch 4768, Loss 2.927649\n", "Epoch 4769, Loss 2.927648\n", "Epoch 4770, Loss 2.927649\n", "Epoch 4771, Loss 2.927648\n", "Epoch 4772, Loss 2.927648\n", "Epoch 4773, Loss 2.927649\n", "Epoch 4774, Loss 2.927648\n", "Epoch 4775, Loss 2.927647\n", "Epoch 4776, Loss 2.927648\n", "Epoch 4777, Loss 2.927648\n", "Epoch 4778, Loss 2.927648\n", "Epoch 4779, Loss 2.927649\n", "Epoch 4780, Loss 2.927649\n", "Epoch 4781, Loss 2.927649\n", "Epoch 4782, Loss 2.927649\n", "Epoch 4783, Loss 2.927649\n", "Epoch 4784, Loss 2.927648\n", "Epoch 4785, Loss 2.927648\n", "Epoch 4786, Loss 2.927647\n", "Epoch 4787, Loss 2.927647\n", "Epoch 4788, Loss 2.927648\n", "Epoch 4789, Loss 2.927648\n", "Epoch 4790, Loss 2.927648\n", "Epoch 4791, Loss 2.927648\n", "Epoch 4792, Loss 2.927648\n", "Epoch 4793, Loss 2.927648\n", "Epoch 4794, Loss 2.927650\n", "Epoch 4795, Loss 2.927648\n", "Epoch 4796, Loss 2.927649\n", "Epoch 4797, Loss 2.927649\n", "Epoch 4798, Loss 2.927648\n", "Epoch 4799, Loss 2.927648\n", "Epoch 4800, Loss 2.927650\n", "Epoch 4801, Loss 2.927647\n", "Epoch 4802, Loss 2.927648\n", "Epoch 4803, Loss 2.927649\n", "Epoch 4804, Loss 2.927647\n", "Epoch 4805, Loss 2.927649\n", "Epoch 4806, Loss 2.927648\n", "Epoch 4807, Loss 2.927649\n", "Epoch 4808, Loss 2.927648\n", "Epoch 4809, Loss 2.927649\n", "Epoch 4810, Loss 2.927649\n", "Epoch 4811, Loss 2.927647\n", "Epoch 4812, Loss 2.927649\n", "Epoch 4813, Loss 2.927648\n", "Epoch 4814, Loss 2.927647\n", "Epoch 4815, Loss 2.927649\n", "Epoch 4816, Loss 2.927647\n", "Epoch 4817, Loss 2.927648\n", "Epoch 4818, Loss 2.927646\n", "Epoch 4819, Loss 2.927649\n", "Epoch 4820, Loss 2.927647\n", "Epoch 4821, Loss 2.927649\n", "Epoch 4822, Loss 2.927649\n", "Epoch 4823, Loss 2.927648\n", "Epoch 4824, Loss 2.927648\n", "Epoch 4825, Loss 2.927649\n", "Epoch 4826, Loss 2.927648\n", "Epoch 4827, Loss 2.927649\n", "Epoch 4828, Loss 2.927649\n", "Epoch 4829, Loss 2.927648\n", "Epoch 4830, Loss 2.927648\n", "Epoch 4831, Loss 2.927646\n", "Epoch 4832, Loss 2.927648\n", "Epoch 4833, Loss 2.927647\n", "Epoch 4834, Loss 2.927648\n", "Epoch 4835, Loss 2.927649\n", "Epoch 4836, Loss 2.927647\n", "Epoch 4837, Loss 2.927648\n", "Epoch 4838, Loss 2.927648\n", "Epoch 4839, Loss 2.927648\n", "Epoch 4840, Loss 2.927648\n", "Epoch 4841, Loss 2.927648\n", "Epoch 4842, Loss 2.927649\n", "Epoch 4843, Loss 2.927647\n", "Epoch 4844, Loss 2.927648\n", "Epoch 
4845, Loss 2.927647\n", "Epoch 4846, Loss 2.927647\n", "Epoch 4847, Loss 2.927648\n", "Epoch 4848, Loss 2.927648\n", "Epoch 4849, Loss 2.927649\n", "Epoch 4850, Loss 2.927647\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 4851, Loss 2.927649\n", "Epoch 4852, Loss 2.927648\n", "Epoch 4853, Loss 2.927647\n", "Epoch 4854, Loss 2.927649\n", "Epoch 4855, Loss 2.927648\n", "Epoch 4856, Loss 2.927649\n", "Epoch 4857, Loss 2.927649\n", "Epoch 4858, Loss 2.927647\n", "Epoch 4859, Loss 2.927648\n", "Epoch 4860, Loss 2.927648\n", "Epoch 4861, Loss 2.927649\n", "Epoch 4862, Loss 2.927647\n", "Epoch 4863, Loss 2.927647\n", "Epoch 4864, Loss 2.927648\n", "Epoch 4865, Loss 2.927646\n", "Epoch 4866, Loss 2.927648\n", "Epoch 4867, Loss 2.927648\n", "Epoch 4868, Loss 2.927648\n", "Epoch 4869, Loss 2.927648\n", "Epoch 4870, Loss 2.927647\n", "Epoch 4871, Loss 2.927649\n", "Epoch 4872, Loss 2.927648\n", "Epoch 4873, Loss 2.927647\n", "Epoch 4874, Loss 2.927648\n", "Epoch 4875, Loss 2.927648\n", "Epoch 4876, Loss 2.927649\n", "Epoch 4877, Loss 2.927648\n", "Epoch 4878, Loss 2.927648\n", "Epoch 4879, Loss 2.927647\n", "Epoch 4880, Loss 2.927648\n", "Epoch 4881, Loss 2.927648\n", "Epoch 4882, Loss 2.927647\n", "Epoch 4883, Loss 2.927649\n", "Epoch 4884, Loss 2.927648\n", "Epoch 4885, Loss 2.927647\n", "Epoch 4886, Loss 2.927649\n", "Epoch 4887, Loss 2.927648\n", "Epoch 4888, Loss 2.927647\n", "Epoch 4889, Loss 2.927646\n", "Epoch 4890, Loss 2.927647\n", "Epoch 4891, Loss 2.927648\n", "Epoch 4892, Loss 2.927646\n", "Epoch 4893, Loss 2.927649\n", "Epoch 4894, Loss 2.927648\n", "Epoch 4895, Loss 2.927648\n", "Epoch 4896, Loss 2.927649\n", "Epoch 4897, Loss 2.927647\n", "Epoch 4898, Loss 2.927648\n", "Epoch 4899, Loss 2.927648\n", "Epoch 4900, Loss 2.927648\n", "Epoch 4901, Loss 2.927649\n", "Epoch 4902, Loss 2.927647\n", "Epoch 4903, Loss 2.927647\n", "Epoch 4904, Loss 2.927646\n", "Epoch 4905, Loss 2.927647\n", "Epoch 4906, Loss 2.927647\n", "Epoch 4907, Loss 2.927647\n", "Epoch 4908, Loss 2.927647\n", "Epoch 4909, Loss 2.927648\n", "Epoch 4910, Loss 2.927649\n", "Epoch 4911, Loss 2.927646\n", "Epoch 4912, Loss 2.927648\n", "Epoch 4913, Loss 2.927648\n", "Epoch 4914, Loss 2.927647\n", "Epoch 4915, Loss 2.927649\n", "Epoch 4916, Loss 2.927648\n", "Epoch 4917, Loss 2.927647\n", "Epoch 4918, Loss 2.927647\n", "Epoch 4919, Loss 2.927647\n", "Epoch 4920, Loss 2.927647\n", "Epoch 4921, Loss 2.927646\n", "Epoch 4922, Loss 2.927649\n", "Epoch 4923, Loss 2.927647\n", "Epoch 4924, Loss 2.927646\n", "Epoch 4925, Loss 2.927647\n", "Epoch 4926, Loss 2.927648\n", "Epoch 4927, Loss 2.927647\n", "Epoch 4928, Loss 2.927649\n", "Epoch 4929, Loss 2.927648\n", "Epoch 4930, Loss 2.927648\n", "Epoch 4931, Loss 2.927646\n", "Epoch 4932, Loss 2.927648\n", "Epoch 4933, Loss 2.927646\n", "Epoch 4934, Loss 2.927646\n", "Epoch 4935, Loss 2.927649\n", "Epoch 4936, Loss 2.927647\n", "Epoch 4937, Loss 2.927647\n", "Epoch 4938, Loss 2.927647\n", "Epoch 4939, Loss 2.927647\n", "Epoch 4940, Loss 2.927647\n", "Epoch 4941, Loss 2.927646\n", "Epoch 4942, Loss 2.927649\n", "Epoch 4943, Loss 2.927647\n", "Epoch 4944, Loss 2.927648\n", "Epoch 4945, Loss 2.927647\n", "Epoch 4946, Loss 2.927649\n", "Epoch 4947, Loss 2.927646\n", "Epoch 4948, Loss 2.927649\n", "Epoch 4949, Loss 2.927648\n", "Epoch 4950, Loss 2.927647\n", "Epoch 4951, Loss 2.927648\n", "Epoch 4952, Loss 2.927647\n", "Epoch 4953, Loss 2.927647\n", "Epoch 4954, Loss 2.927647\n", "Epoch 4955, Loss 2.927648\n", "Epoch 4956, Loss 2.927646\n", "Epoch 4957, Loss 2.927648\n", 
"Epoch 4958, Loss 2.927647\n", "Epoch 4959, Loss 2.927647\n", "Epoch 4960, Loss 2.927647\n", "Epoch 4961, Loss 2.927648\n", "Epoch 4962, Loss 2.927647\n", "Epoch 4963, Loss 2.927648\n", "Epoch 4964, Loss 2.927648\n", "Epoch 4965, Loss 2.927648\n", "Epoch 4966, Loss 2.927648\n", "Epoch 4967, Loss 2.927648\n", "Epoch 4968, Loss 2.927647\n", "Epoch 4969, Loss 2.927647\n", "Epoch 4970, Loss 2.927646\n", "Epoch 4971, Loss 2.927647\n", "Epoch 4972, Loss 2.927647\n", "Epoch 4973, Loss 2.927647\n", "Epoch 4974, Loss 2.927647\n", "Epoch 4975, Loss 2.927647\n", "Epoch 4976, Loss 2.927647\n", "Epoch 4977, Loss 2.927648\n", "Epoch 4978, Loss 2.927647\n", "Epoch 4979, Loss 2.927648\n", "Epoch 4980, Loss 2.927647\n", "Epoch 4981, Loss 2.927648\n", "Epoch 4982, Loss 2.927648\n", "Epoch 4983, Loss 2.927646\n", "Epoch 4984, Loss 2.927648\n", "Epoch 4985, Loss 2.927647\n", "Epoch 4986, Loss 2.927648\n", "Epoch 4987, Loss 2.927648\n", "Epoch 4988, Loss 2.927648\n", "Epoch 4989, Loss 2.927646\n", "Epoch 4990, Loss 2.927648\n", "Epoch 4991, Loss 2.927647\n", "Epoch 4992, Loss 2.927647\n", "Epoch 4993, Loss 2.927647\n", "Epoch 4994, Loss 2.927648\n", "Epoch 4995, Loss 2.927647\n", "Epoch 4996, Loss 2.927646\n", "Epoch 4997, Loss 2.927647\n", "Epoch 4998, Loss 2.927648\n", "Epoch 4999, Loss 2.927647\n" ] }, { "data": { "text/plain": [ "tensor([ 5.3671, -17.3012], requires_grad=True)" ] }, "execution_count": 100, "metadata": {}, "output_type": "execute_result" } ], "source": [ "def model(t_u, w, b):\n", " return w * t_u + b\n", "\n", "def loss_fn(t_p, t_c):\n", " sq_diffs = (t_p - t_c)**2\n", " return sq_diffs.mean()\n", "\n", "params = torch.tensor([1.0, 0.0], requires_grad=True)\n", "\n", "nepochs = 5000\n", "\n", "learning_rate = 1e-2\n", "\n", "for epoch in range(nepochs):\n", " # forward pass\n", " t_p = model(t_un, *params)\n", " loss = loss_fn(t_p, t_c)\n", "\n", " print('Epoch %d, Loss %f' % (epoch, float(loss)))\n", " \n", " # backward pass\n", " if params.grad is not None:\n", " params.grad.zero_()\n", "\n", " loss.backward()\n", "\n", " #params.grad.clamp_(-1.0, 1.0)\n", " #print(params, params.grad)\n", "\n", " params = (params - learning_rate * params.grad).detach().requires_grad_()\n", "\n", "params\n", "#t_p = model(t_un, *params)\n", "#t_p" ] }, { "cell_type": "code", "execution_count": 24, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "['ASGD',\n", " 'Adadelta',\n", " 'Adagrad',\n", " 'Adam',\n", " 'Adamax',\n", " 'LBFGS',\n", " 'Optimizer',\n", " 'RMSprop',\n", " 'Rprop',\n", " 'SGD',\n", " 'SparseAdam',\n", " '__builtins__',\n", " '__cached__',\n", " '__doc__',\n", " '__file__',\n", " '__loader__',\n", " '__name__',\n", " '__package__',\n", " '__path__',\n", " '__spec__',\n", " 'lr_scheduler']" ] }, "execution_count": 24, "metadata": {}, "output_type": "execute_result" } ], "source": [ "import torch.optim as optim\n", "\n", "dir(optim)" ] }, { "cell_type": "code", "execution_count": 25, "metadata": {}, "outputs": [], "source": [ "params = torch.tensor([1.0, 0.0], requires_grad=True)\n", "\n", "learning_rate = 1e-5\n", "\n", "optimizer = optim.SGD([params], lr=learning_rate)" ] }, { "cell_type": "code", "execution_count": 26, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "tensor([ 0.9548, -0.0008], requires_grad=True)" ] }, "execution_count": 26, "metadata": {}, "output_type": "execute_result" } ], "source": [ "t_p = model(t_u, *params)\n", "\n", "loss = loss_fn(t_p, t_c)\n", "\n", "loss.backward()\n", "\n", "optimizer.step()\n", "\n", "params" ] }, { "cell_type": "code", 
"execution_count": 27, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "tensor([1.7761, 0.1064], requires_grad=True)" ] }, "execution_count": 27, "metadata": {}, "output_type": "execute_result" } ], "source": [ "params = torch.tensor([1.0, 0.0], requires_grad=True)\n", "\n", "learning_rate = 1e-2\n", "\n", "optimizer = optim.SGD([params], lr=learning_rate)\n", "\n", "t_p = model(t_un, *params)\n", "\n", "loss = loss_fn(t_p, t_c)\n", "\n", "optimizer.zero_grad()\n", "\n", "loss.backward()\n", "\n", "optimizer.step()\n", "\n", "params" ] }, { "cell_type": "code", "execution_count": 101, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Epoch 0, Loss 80.364342\n", "Epoch 1, Loss 37.574917\n", "Epoch 2, Loss 30.871077\n", "Epoch 3, Loss 29.756193\n", "Epoch 4, Loss 29.507149\n", "Epoch 5, Loss 29.392458\n", "Epoch 6, Loss 29.298828\n", "Epoch 7, Loss 29.208717\n", "Epoch 8, Loss 29.119417\n", "Epoch 9, Loss 29.030487\n", "Epoch 10, Loss 28.941875\n", "Epoch 11, Loss 28.853565\n", "Epoch 12, Loss 28.765556\n", "Epoch 13, Loss 28.677851\n", "Epoch 14, Loss 28.590431\n", "Epoch 15, Loss 28.503321\n", "Epoch 16, Loss 28.416496\n", "Epoch 17, Loss 28.329973\n", "Epoch 18, Loss 28.243738\n", "Epoch 19, Loss 28.157801\n", "Epoch 20, Loss 28.072151\n", "Epoch 21, Loss 27.986799\n", "Epoch 22, Loss 27.901731\n", "Epoch 23, Loss 27.816954\n", "Epoch 24, Loss 27.732460\n", "Epoch 25, Loss 27.648256\n", "Epoch 26, Loss 27.564342\n", "Epoch 27, Loss 27.480711\n", "Epoch 28, Loss 27.397358\n", "Epoch 29, Loss 27.314293\n", "Epoch 30, Loss 27.231512\n", "Epoch 31, Loss 27.149006\n", "Epoch 32, Loss 27.066790\n", "Epoch 33, Loss 26.984844\n", "Epoch 34, Loss 26.903173\n", "Epoch 35, Loss 26.821791\n", "Epoch 36, Loss 26.740675\n", "Epoch 37, Loss 26.659838\n", "Epoch 38, Loss 26.579279\n", "Epoch 39, Loss 26.498987\n", "Epoch 40, Loss 26.418974\n", "Epoch 41, Loss 26.339228\n", "Epoch 42, Loss 26.259752\n", "Epoch 43, Loss 26.180548\n", "Epoch 44, Loss 26.101616\n", "Epoch 45, Loss 26.022947\n", "Epoch 46, Loss 25.944550\n", "Epoch 47, Loss 25.866417\n", "Epoch 48, Loss 25.788549\n", "Epoch 49, Loss 25.710936\n", "Epoch 50, Loss 25.633600\n", "Epoch 51, Loss 25.556524\n", "Epoch 52, Loss 25.479700\n", "Epoch 53, Loss 25.403145\n", "Epoch 54, Loss 25.326849\n", "Epoch 55, Loss 25.250811\n", "Epoch 56, Loss 25.175035\n", "Epoch 57, Loss 25.099510\n", "Epoch 58, Loss 25.024248\n", "Epoch 59, Loss 24.949238\n", "Epoch 60, Loss 24.874483\n", "Epoch 61, Loss 24.799980\n", "Epoch 62, Loss 24.725737\n", "Epoch 63, Loss 24.651735\n", "Epoch 64, Loss 24.577990\n", "Epoch 65, Loss 24.504494\n", "Epoch 66, Loss 24.431250\n", "Epoch 67, Loss 24.358257\n", "Epoch 68, Loss 24.285503\n", "Epoch 69, Loss 24.212996\n", "Epoch 70, Loss 24.140747\n", "Epoch 71, Loss 24.068733\n", "Epoch 72, Loss 23.996967\n", "Epoch 73, Loss 23.925446\n", "Epoch 74, Loss 23.854168\n", "Epoch 75, Loss 23.783129\n", "Epoch 76, Loss 23.712328\n", "Epoch 77, Loss 23.641771\n", "Epoch 78, Loss 23.571455\n", "Epoch 79, Loss 23.501379\n", "Epoch 80, Loss 23.431538\n", "Epoch 81, Loss 23.361933\n", "Epoch 82, Loss 23.292566\n", "Epoch 83, Loss 23.223436\n", "Epoch 84, Loss 23.154539\n", "Epoch 85, Loss 23.085882\n", "Epoch 86, Loss 23.017447\n", "Epoch 87, Loss 22.949249\n", "Epoch 88, Loss 22.881281\n", "Epoch 89, Loss 22.813547\n", "Epoch 90, Loss 22.746044\n", "Epoch 91, Loss 22.678768\n", "Epoch 92, Loss 22.611719\n", "Epoch 93, Loss 22.544899\n", "Epoch 94, Loss 22.478306\n", "Epoch 95, Loss 22.411940\n", 
"Epoch 96, Loss 22.345793\n", "Epoch 97, Loss 22.279875\n", "Epoch 98, Loss 22.214186\n", "Epoch 99, Loss 22.148710\n", "Epoch 100, Loss 22.083464\n", "Epoch 101, Loss 22.018436\n", "Epoch 102, Loss 21.953630\n", "Epoch 103, Loss 21.889046\n", "Epoch 104, Loss 21.824677\n", "Epoch 105, Loss 21.760530\n", "Epoch 106, Loss 21.696600\n", "Epoch 107, Loss 21.632881\n", "Epoch 108, Loss 21.569389\n", "Epoch 109, Loss 21.506104\n", "Epoch 110, Loss 21.443037\n", "Epoch 111, Loss 21.380190\n", "Epoch 112, Loss 21.317547\n", "Epoch 113, Loss 21.255119\n", "Epoch 114, Loss 21.192904\n", "Epoch 115, Loss 21.130901\n", "Epoch 116, Loss 21.069105\n", "Epoch 117, Loss 21.007528\n", "Epoch 118, Loss 20.946150\n", "Epoch 119, Loss 20.884983\n", "Epoch 120, Loss 20.824026\n", "Epoch 121, Loss 20.763273\n", "Epoch 122, Loss 20.702726\n", "Epoch 123, Loss 20.642384\n", "Epoch 124, Loss 20.582251\n", "Epoch 125, Loss 20.522322\n", "Epoch 126, Loss 20.462589\n", "Epoch 127, Loss 20.403067\n", "Epoch 128, Loss 20.343746\n", "Epoch 129, Loss 20.284622\n", "Epoch 130, Loss 20.225702\n", "Epoch 131, Loss 20.166983\n", "Epoch 132, Loss 20.108461\n", "Epoch 133, Loss 20.050135\n", "Epoch 134, Loss 19.992014\n", "Epoch 135, Loss 19.934088\n", "Epoch 136, Loss 19.876352\n", "Epoch 137, Loss 19.818821\n", "Epoch 138, Loss 19.761480\n", "Epoch 139, Loss 19.704332\n", "Epoch 140, Loss 19.647387\n", "Epoch 141, Loss 19.590626\n", "Epoch 142, Loss 19.534063\n", "Epoch 143, Loss 19.477690\n", "Epoch 144, Loss 19.421507\n", "Epoch 145, Loss 19.365517\n", "Epoch 146, Loss 19.309715\n", "Epoch 147, Loss 19.254107\n", "Epoch 148, Loss 19.198685\n", "Epoch 149, Loss 19.143446\n", "Epoch 150, Loss 19.088400\n", "Epoch 151, Loss 19.033545\n", "Epoch 152, Loss 18.978868\n", "Epoch 153, Loss 18.924377\n", "Epoch 154, Loss 18.870081\n", "Epoch 155, Loss 18.815960\n", "Epoch 156, Loss 18.762022\n", "Epoch 157, Loss 18.708269\n", "Epoch 158, Loss 18.654703\n", "Epoch 159, Loss 18.601313\n", "Epoch 160, Loss 18.548111\n", "Epoch 161, Loss 18.495081\n", "Epoch 162, Loss 18.442234\n", "Epoch 163, Loss 18.389570\n", "Epoch 164, Loss 18.337080\n", "Epoch 165, Loss 18.284777\n", "Epoch 166, Loss 18.232643\n", "Epoch 167, Loss 18.180687\n", "Epoch 168, Loss 18.128904\n", "Epoch 169, Loss 18.077303\n", "Epoch 170, Loss 18.025879\n", "Epoch 171, Loss 17.974623\n", "Epoch 172, Loss 17.923546\n", "Epoch 173, Loss 17.872641\n", "Epoch 174, Loss 17.821907\n", "Epoch 175, Loss 17.771343\n", "Epoch 176, Loss 17.720955\n", "Epoch 177, Loss 17.670738\n", "Epoch 178, Loss 17.620691\n", "Epoch 179, Loss 17.570814\n", "Epoch 180, Loss 17.521105\n", "Epoch 181, Loss 17.471563\n", "Epoch 182, Loss 17.422194\n", "Epoch 183, Loss 17.372992\n", "Epoch 184, Loss 17.323954\n", "Epoch 185, Loss 17.275085\n", "Epoch 186, Loss 17.226379\n", "Epoch 187, Loss 17.177839\n", "Epoch 188, Loss 17.129467\n", "Epoch 189, Loss 17.081255\n", "Epoch 190, Loss 17.033207\n", "Epoch 191, Loss 16.985327\n", "Epoch 192, Loss 16.937605\n", "Epoch 193, Loss 16.890047\n", "Epoch 194, Loss 16.842649\n", "Epoch 195, Loss 16.795412\n", "Epoch 196, Loss 16.748339\n", "Epoch 197, Loss 16.701424\n", "Epoch 198, Loss 16.654661\n", "Epoch 199, Loss 16.608065\n", "Epoch 200, Loss 16.561625\n", "Epoch 201, Loss 16.515343\n", "Epoch 202, Loss 16.469219\n", "Epoch 203, Loss 16.423250\n", "Epoch 204, Loss 16.377434\n", "Epoch 205, Loss 16.331776\n", "Epoch 206, Loss 16.286276\n", "Epoch 207, Loss 16.240925\n", "Epoch 208, Loss 16.195734\n", "Epoch 209, Loss 16.150694\n", "Epoch 210, Loss 
16.105806\n", "Epoch 211, Loss 16.061071\n", "Epoch 212, Loss 16.016487\n", "Epoch 213, Loss 15.972058\n", "Epoch 214, Loss 15.927777\n", "Epoch 215, Loss 15.883645\n", "Epoch 216, Loss 15.839664\n", "Epoch 217, Loss 15.795832\n", "Epoch 218, Loss 15.752149\n", "Epoch 219, Loss 15.708612\n", "Epoch 220, Loss 15.665228\n", "Epoch 221, Loss 15.621990\n", "Epoch 222, Loss 15.578897\n", "Epoch 223, Loss 15.535950\n", "Epoch 224, Loss 15.493152\n", "Epoch 225, Loss 15.450497\n", "Epoch 226, Loss 15.407981\n", "Epoch 227, Loss 15.365615\n", "Epoch 228, Loss 15.323395\n", "Epoch 229, Loss 15.281318\n", "Epoch 230, Loss 15.239380\n", "Epoch 231, Loss 15.197586\n", "Epoch 232, Loss 15.155931\n", "Epoch 233, Loss 15.114425\n", "Epoch 234, Loss 15.073053\n", "Epoch 235, Loss 15.031823\n", "Epoch 236, Loss 14.990737\n", "Epoch 237, Loss 14.949784\n", "Epoch 238, Loss 14.908973\n", "Epoch 239, Loss 14.868304\n", "Epoch 240, Loss 14.827767\n", "Epoch 241, Loss 14.787370\n", "Epoch 242, Loss 14.747110\n", "Epoch 243, Loss 14.706989\n", "Epoch 244, Loss 14.667002\n", "Epoch 245, Loss 14.627149\n", "Epoch 246, Loss 14.587436\n", "Epoch 247, Loss 14.547854\n", "Epoch 248, Loss 14.508408\n", "Epoch 249, Loss 14.469095\n", "Epoch 250, Loss 14.429919\n", "Epoch 251, Loss 14.390872\n", "Epoch 252, Loss 14.351956\n", "Epoch 253, Loss 14.313177\n", "Epoch 254, Loss 14.274525\n", "Epoch 255, Loss 14.236008\n", "Epoch 256, Loss 14.197620\n", "Epoch 257, Loss 14.159363\n", "Epoch 258, Loss 14.121234\n", "Epoch 259, Loss 14.083237\n", "Epoch 260, Loss 14.045368\n", "Epoch 261, Loss 14.007627\n", "Epoch 262, Loss 13.970016\n", "Epoch 263, Loss 13.932532\n", "Epoch 264, Loss 13.895172\n", "Epoch 265, Loss 13.857942\n", "Epoch 266, Loss 13.820837\n", "Epoch 267, Loss 13.783858\n", "Epoch 268, Loss 13.747006\n", "Epoch 269, Loss 13.710278\n", "Epoch 270, Loss 13.673676\n", "Epoch 271, Loss 13.637196\n", "Epoch 272, Loss 13.600842\n", "Epoch 273, Loss 13.564609\n", "Epoch 274, Loss 13.528501\n", "Epoch 275, Loss 13.492515\n", "Epoch 276, Loss 13.456651\n", "Epoch 277, Loss 13.420910\n", "Epoch 278, Loss 13.385287\n", "Epoch 279, Loss 13.349787\n", "Epoch 280, Loss 13.314410\n", "Epoch 281, Loss 13.279148\n", "Epoch 282, Loss 13.244009\n", "Epoch 283, Loss 13.208993\n", "Epoch 284, Loss 13.174088\n", "Epoch 285, Loss 13.139307\n", "Epoch 286, Loss 13.104638\n", "Epoch 287, Loss 13.070093\n", "Epoch 288, Loss 13.035663\n", "Epoch 289, Loss 13.001349\n", "Epoch 290, Loss 12.967154\n", "Epoch 291, Loss 12.933074\n", "Epoch 292, Loss 12.899109\n", "Epoch 293, Loss 12.865259\n", "Epoch 294, Loss 12.831525\n", "Epoch 295, Loss 12.797904\n", "Epoch 296, Loss 12.764399\n", "Epoch 297, Loss 12.731007\n", "Epoch 298, Loss 12.697727\n", "Epoch 299, Loss 12.664560\n", "Epoch 300, Loss 12.631507\n", "Epoch 301, Loss 12.598566\n", "Epoch 302, Loss 12.565738\n", "Epoch 303, Loss 12.533021\n", "Epoch 304, Loss 12.500415\n", "Epoch 305, Loss 12.467919\n", "Epoch 306, Loss 12.435533\n", "Epoch 307, Loss 12.403255\n", "Epoch 308, Loss 12.371088\n", "Epoch 309, Loss 12.339031\n", "Epoch 310, Loss 12.307083\n", "Epoch 311, Loss 12.275247\n", "Epoch 312, Loss 12.243509\n", "Epoch 313, Loss 12.211887\n", "Epoch 314, Loss 12.180370\n", "Epoch 315, Loss 12.148962\n", "Epoch 316, Loss 12.117655\n", "Epoch 317, Loss 12.086463\n", "Epoch 318, Loss 12.055373\n", "Epoch 319, Loss 12.024384\n", "Epoch 320, Loss 11.993508\n", "Epoch 321, Loss 11.962732\n", "Epoch 322, Loss 11.932056\n", "Epoch 323, Loss 11.901492\n", "Epoch 324, Loss 11.871029\n", "Epoch 
325, Loss 11.840671\n", "Epoch 326, Loss 11.810413\n", "Epoch 327, Loss 11.780257\n", "Epoch 328, Loss 11.750208\n", "Epoch 329, Loss 11.720258\n", "Epoch 330, Loss 11.690412\n", "Epoch 331, Loss 11.660664\n", "Epoch 332, Loss 11.631016\n", "Epoch 333, Loss 11.601473\n", "Epoch 334, Loss 11.572030\n", "Epoch 335, Loss 11.542686\n", "Epoch 336, Loss 11.513440\n", "Epoch 337, Loss 11.484293\n", "Epoch 338, Loss 11.455247\n", "Epoch 339, Loss 11.426300\n", "Epoch 340, Loss 11.397448\n", "Epoch 341, Loss 11.368696\n", "Epoch 342, Loss 11.340043\n", "Epoch 343, Loss 11.311487\n", "Epoch 344, Loss 11.283028\n", "Epoch 345, Loss 11.254662\n", "Epoch 346, Loss 11.226396\n", "Epoch 347, Loss 11.198221\n", "Epoch 348, Loss 11.170149\n", "Epoch 349, Loss 11.142170\n", "Epoch 350, Loss 11.114283\n", "Epoch 351, Loss 11.086493\n", "Epoch 352, Loss 11.058796\n", "Epoch 353, Loss 11.031192\n", "Epoch 354, Loss 11.003686\n", "Epoch 355, Loss 10.976271\n", "Epoch 356, Loss 10.948948\n", "Epoch 357, Loss 10.921718\n", "Epoch 358, Loss 10.894581\n", "Epoch 359, Loss 10.867537\n", "Epoch 360, Loss 10.840583\n", "Epoch 361, Loss 10.813720\n", "Epoch 362, Loss 10.786951\n", "Epoch 363, Loss 10.760270\n", "Epoch 364, Loss 10.733681\n", "Epoch 365, Loss 10.707183\n", "Epoch 366, Loss 10.680775\n", "Epoch 367, Loss 10.654453\n", "Epoch 368, Loss 10.628225\n", "Epoch 369, Loss 10.602084\n", "Epoch 370, Loss 10.576032\n", "Epoch 371, Loss 10.550071\n", "Epoch 372, Loss 10.524195\n", "Epoch 373, Loss 10.498408\n", "Epoch 374, Loss 10.472707\n", "Epoch 375, Loss 10.447094\n", "Epoch 376, Loss 10.421568\n", "Epoch 377, Loss 10.396132\n", "Epoch 378, Loss 10.370778\n", "Epoch 379, Loss 10.345510\n", "Epoch 380, Loss 10.320329\n", "Epoch 381, Loss 10.295236\n", "Epoch 382, Loss 10.270224\n", "Epoch 383, Loss 10.245296\n", "Epoch 384, Loss 10.220456\n", "Epoch 385, Loss 10.195701\n", "Epoch 386, Loss 10.171027\n", "Epoch 387, Loss 10.146436\n", "Epoch 388, Loss 10.121934\n", "Epoch 389, Loss 10.097512\n", "Epoch 390, Loss 10.073174\n", "Epoch 391, Loss 10.048919\n", "Epoch 392, Loss 10.024742\n", "Epoch 393, Loss 10.000652\n", "Epoch 394, Loss 9.976640\n", "Epoch 395, Loss 9.952712\n", "Epoch 396, Loss 9.928863\n", "Epoch 397, Loss 9.905092\n", "Epoch 398, Loss 9.881409\n", "Epoch 399, Loss 9.857802\n", "Epoch 400, Loss 9.834277\n", "Epoch 401, Loss 9.810832\n", "Epoch 402, Loss 9.787466\n", "Epoch 403, Loss 9.764176\n", "Epoch 404, Loss 9.740971\n", "Epoch 405, Loss 9.717843\n", "Epoch 406, Loss 9.694793\n", "Epoch 407, Loss 9.671823\n", "Epoch 408, Loss 9.648926\n", "Epoch 409, Loss 9.626110\n", "Epoch 410, Loss 9.603373\n", "Epoch 411, Loss 9.580710\n", "Epoch 412, Loss 9.558124\n", "Epoch 413, Loss 9.535618\n", "Epoch 414, Loss 9.513185\n", "Epoch 415, Loss 9.490829\n", "Epoch 416, Loss 9.468551\n", "Epoch 417, Loss 9.446347\n", "Epoch 418, Loss 9.424216\n", "Epoch 419, Loss 9.402163\n", "Epoch 420, Loss 9.380185\n", "Epoch 421, Loss 9.358281\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 422, Loss 9.336448\n", "Epoch 423, Loss 9.314696\n", "Epoch 424, Loss 9.293013\n", "Epoch 425, Loss 9.271402\n", "Epoch 426, Loss 9.249870\n", "Epoch 427, Loss 9.228409\n", "Epoch 428, Loss 9.207021\n", "Epoch 429, Loss 9.185704\n", "Epoch 430, Loss 9.164462\n", "Epoch 431, Loss 9.143288\n", "Epoch 432, Loss 9.122189\n", "Epoch 433, Loss 9.101160\n", "Epoch 434, Loss 9.080204\n", "Epoch 435, Loss 9.059317\n", "Epoch 436, Loss 9.038502\n", "Epoch 437, Loss 9.017757\n", "Epoch 438, Loss 8.997085\n", "Epoch 439, 
Loss 8.976479\n", "Epoch 440, Loss 8.955945\n", "Epoch 441, Loss 8.935481\n", "Epoch 442, Loss 8.915089\n", "Epoch 443, Loss 8.894763\n", "Epoch 444, Loss 8.874508\n", "Epoch 445, Loss 8.854318\n", "Epoch 446, Loss 8.834197\n", "Epoch 447, Loss 8.814149\n", "Epoch 448, Loss 8.794162\n", "Epoch 449, Loss 8.774252\n", "Epoch 450, Loss 8.754406\n", "Epoch 451, Loss 8.734625\n", "Epoch 452, Loss 8.714911\n", "Epoch 453, Loss 8.695266\n", "Epoch 454, Loss 8.675689\n", "Epoch 455, Loss 8.656174\n", "Epoch 456, Loss 8.636728\n", "Epoch 457, Loss 8.617346\n", "Epoch 458, Loss 8.598029\n", "Epoch 459, Loss 8.578781\n", "Epoch 460, Loss 8.559597\n", "Epoch 461, Loss 8.540478\n", "Epoch 462, Loss 8.521426\n", "Epoch 463, Loss 8.502438\n", "Epoch 464, Loss 8.483516\n", "Epoch 465, Loss 8.464652\n", "Epoch 466, Loss 8.445858\n", "Epoch 467, Loss 8.427128\n", "Epoch 468, Loss 8.408456\n", "Epoch 469, Loss 8.389848\n", "Epoch 470, Loss 8.371305\n", "Epoch 471, Loss 8.352828\n", "Epoch 472, Loss 8.334408\n", "Epoch 473, Loss 8.316055\n", "Epoch 474, Loss 8.297764\n", "Epoch 475, Loss 8.279534\n", "Epoch 476, Loss 8.261369\n", "Epoch 477, Loss 8.243261\n", "Epoch 478, Loss 8.225213\n", "Epoch 479, Loss 8.207232\n", "Epoch 480, Loss 8.189310\n", "Epoch 481, Loss 8.171450\n", "Epoch 482, Loss 8.153648\n", "Epoch 483, Loss 8.135907\n", "Epoch 484, Loss 8.118226\n", "Epoch 485, Loss 8.100607\n", "Epoch 486, Loss 8.083045\n", "Epoch 487, Loss 8.065548\n", "Epoch 488, Loss 8.048104\n", "Epoch 489, Loss 8.030723\n", "Epoch 490, Loss 8.013400\n", "Epoch 491, Loss 7.996135\n", "Epoch 492, Loss 7.978929\n", "Epoch 493, Loss 7.961784\n", "Epoch 494, Loss 7.944690\n", "Epoch 495, Loss 7.927662\n", "Epoch 496, Loss 7.910690\n", "Epoch 497, Loss 7.893775\n", "Epoch 498, Loss 7.876915\n", "Epoch 499, Loss 7.860116\n", "Epoch 500, Loss 7.843370\n", "Epoch 501, Loss 7.826681\n", "Epoch 502, Loss 7.810053\n", "Epoch 503, Loss 7.793480\n", "Epoch 504, Loss 7.776962\n", "Epoch 505, Loss 7.760498\n", "Epoch 506, Loss 7.744092\n", "Epoch 507, Loss 7.727745\n", "Epoch 508, Loss 7.711447\n", "Epoch 509, Loss 7.695212\n", "Epoch 510, Loss 7.679024\n", "Epoch 511, Loss 7.662895\n", "Epoch 512, Loss 7.646819\n", "Epoch 513, Loss 7.630803\n", "Epoch 514, Loss 7.614836\n", "Epoch 515, Loss 7.598925\n", "Epoch 516, Loss 7.583069\n", "Epoch 517, Loss 7.567266\n", "Epoch 518, Loss 7.551516\n", "Epoch 519, Loss 7.535819\n", "Epoch 520, Loss 7.520176\n", "Epoch 521, Loss 7.504588\n", "Epoch 522, Loss 7.489048\n", "Epoch 523, Loss 7.473566\n", "Epoch 524, Loss 7.458135\n", "Epoch 525, Loss 7.442751\n", "Epoch 526, Loss 7.427426\n", "Epoch 527, Loss 7.412152\n", "Epoch 528, Loss 7.396928\n", "Epoch 529, Loss 7.381756\n", "Epoch 530, Loss 7.366636\n", "Epoch 531, Loss 7.351566\n", "Epoch 532, Loss 7.336550\n", "Epoch 533, Loss 7.321585\n", "Epoch 534, Loss 7.306670\n", "Epoch 535, Loss 7.291803\n", "Epoch 536, Loss 7.276989\n", "Epoch 537, Loss 7.262227\n", "Epoch 538, Loss 7.247512\n", "Epoch 539, Loss 7.232846\n", "Epoch 540, Loss 7.218231\n", "Epoch 541, Loss 7.203666\n", "Epoch 542, Loss 7.189151\n", "Epoch 543, Loss 7.174683\n", "Epoch 544, Loss 7.160267\n", "Epoch 545, Loss 7.145897\n", "Epoch 546, Loss 7.131578\n", "Epoch 547, Loss 7.117305\n", "Epoch 548, Loss 7.103083\n", "Epoch 549, Loss 7.088911\n", "Epoch 550, Loss 7.074785\n", "Epoch 551, Loss 7.060707\n", "Epoch 552, Loss 7.046677\n", "Epoch 553, Loss 7.032695\n", "Epoch 554, Loss 7.018756\n", "Epoch 555, Loss 7.004869\n", "Epoch 556, Loss 6.991029\n", "Epoch 557, Loss 
6.977232\n", "Epoch 558, Loss 6.963488\n", "Epoch 559, Loss 6.949786\n", "Epoch 560, Loss 6.936135\n", "Epoch 561, Loss 6.922528\n", "Epoch 562, Loss 6.908967\n", "Epoch 563, Loss 6.895452\n", "Epoch 564, Loss 6.881980\n", "Epoch 565, Loss 6.868558\n", "Epoch 566, Loss 6.855180\n", "Epoch 567, Loss 6.841848\n", "Epoch 568, Loss 6.828561\n", "Epoch 569, Loss 6.815319\n", "Epoch 570, Loss 6.802118\n", "Epoch 571, Loss 6.788968\n", "Epoch 572, Loss 6.775864\n", "Epoch 573, Loss 6.762798\n", "Epoch 574, Loss 6.749779\n", "Epoch 575, Loss 6.736803\n", "Epoch 576, Loss 6.723875\n", "Epoch 577, Loss 6.710986\n", "Epoch 578, Loss 6.698142\n", "Epoch 579, Loss 6.685344\n", "Epoch 580, Loss 6.672589\n", "Epoch 581, Loss 6.659874\n", "Epoch 582, Loss 6.647207\n", "Epoch 583, Loss 6.634577\n", "Epoch 584, Loss 6.621995\n", "Epoch 585, Loss 6.609454\n", "Epoch 586, Loss 6.596954\n", "Epoch 587, Loss 6.584500\n", "Epoch 588, Loss 6.572087\n", "Epoch 589, Loss 6.559712\n", "Epoch 590, Loss 6.547384\n", "Epoch 591, Loss 6.535097\n", "Epoch 592, Loss 6.522851\n", "Epoch 593, Loss 6.510646\n", "Epoch 594, Loss 6.498481\n", "Epoch 595, Loss 6.486362\n", "Epoch 596, Loss 6.474282\n", "Epoch 597, Loss 6.462242\n", "Epoch 598, Loss 6.450243\n", "Epoch 599, Loss 6.438284\n", "Epoch 600, Loss 6.426367\n", "Epoch 601, Loss 6.414490\n", "Epoch 602, Loss 6.402655\n", "Epoch 603, Loss 6.390859\n", "Epoch 604, Loss 6.379102\n", "Epoch 605, Loss 6.367384\n", "Epoch 606, Loss 6.355706\n", "Epoch 607, Loss 6.344071\n", "Epoch 608, Loss 6.332472\n", "Epoch 609, Loss 6.320912\n", "Epoch 610, Loss 6.309395\n", "Epoch 611, Loss 6.297915\n", "Epoch 612, Loss 6.286473\n", "Epoch 613, Loss 6.275074\n", "Epoch 614, Loss 6.263707\n", "Epoch 615, Loss 6.252382\n", "Epoch 616, Loss 6.241098\n", "Epoch 617, Loss 6.229849\n", "Epoch 618, Loss 6.218639\n", "Epoch 619, Loss 6.207471\n", "Epoch 620, Loss 6.196334\n", "Epoch 621, Loss 6.185240\n", "Epoch 622, Loss 6.174181\n", "Epoch 623, Loss 6.163159\n", "Epoch 624, Loss 6.152177\n", "Epoch 625, Loss 6.141229\n", "Epoch 626, Loss 6.130321\n", "Epoch 627, Loss 6.119448\n", "Epoch 628, Loss 6.108614\n", "Epoch 629, Loss 6.097815\n", "Epoch 630, Loss 6.087054\n", "Epoch 631, Loss 6.076329\n", "Epoch 632, Loss 6.065643\n", "Epoch 633, Loss 6.054988\n", "Epoch 634, Loss 6.044372\n", "Epoch 635, Loss 6.033794\n", "Epoch 636, Loss 6.023247\n", "Epoch 637, Loss 6.012738\n", "Epoch 638, Loss 6.002264\n", "Epoch 639, Loss 5.991829\n", "Epoch 640, Loss 5.981426\n", "Epoch 641, Loss 5.971057\n", "Epoch 642, Loss 5.960727\n", "Epoch 643, Loss 5.950432\n", "Epoch 644, Loss 5.940171\n", "Epoch 645, Loss 5.929944\n", "Epoch 646, Loss 5.919752\n", "Epoch 647, Loss 5.909597\n", "Epoch 648, Loss 5.899473\n", "Epoch 649, Loss 5.889384\n", "Epoch 650, Loss 5.879326\n", "Epoch 651, Loss 5.869310\n", "Epoch 652, Loss 5.859322\n", "Epoch 653, Loss 5.849374\n", "Epoch 654, Loss 5.839453\n", "Epoch 655, Loss 5.829570\n", "Epoch 656, Loss 5.819718\n", "Epoch 657, Loss 5.809900\n", "Epoch 658, Loss 5.800117\n", "Epoch 659, Loss 5.790367\n", "Epoch 660, Loss 5.780647\n", "Epoch 661, Loss 5.770962\n", "Epoch 662, Loss 5.761312\n", "Epoch 663, Loss 5.751693\n", "Epoch 664, Loss 5.742105\n", "Epoch 665, Loss 5.732550\n", "Epoch 666, Loss 5.723031\n", "Epoch 667, Loss 5.713540\n", "Epoch 668, Loss 5.704084\n", "Epoch 669, Loss 5.694658\n", "Epoch 670, Loss 5.685265\n", "Epoch 671, Loss 5.675904\n", "Epoch 672, Loss 5.666573\n", "Epoch 673, Loss 5.657277\n", "Epoch 674, Loss 5.648010\n", "Epoch 675, Loss 5.638776\n", 
"Epoch 676, Loss 5.629575\n", "Epoch 677, Loss 5.620402\n", "Epoch 678, Loss 5.611260\n", "Epoch 679, Loss 5.602148\n", "Epoch 680, Loss 5.593071\n", "Epoch 681, Loss 5.584022\n", "Epoch 682, Loss 5.575005\n", "Epoch 683, Loss 5.566019\n", "Epoch 684, Loss 5.557063\n", "Epoch 685, Loss 5.548136\n", "Epoch 686, Loss 5.539241\n", "Epoch 687, Loss 5.530376\n", "Epoch 688, Loss 5.521540\n", "Epoch 689, Loss 5.512733\n", "Epoch 690, Loss 5.503958\n", "Epoch 691, Loss 5.495212\n", "Epoch 692, Loss 5.486496\n", "Epoch 693, Loss 5.477808\n", "Epoch 694, Loss 5.469152\n", "Epoch 695, Loss 5.460525\n", "Epoch 696, Loss 5.451928\n", "Epoch 697, Loss 5.443359\n", "Epoch 698, Loss 5.434820\n", "Epoch 699, Loss 5.426310\n", "Epoch 700, Loss 5.417827\n", "Epoch 701, Loss 5.409373\n", "Epoch 702, Loss 5.400949\n", "Epoch 703, Loss 5.392551\n", "Epoch 704, Loss 5.384184\n", "Epoch 705, Loss 5.375845\n", "Epoch 706, Loss 5.367537\n", "Epoch 707, Loss 5.359253\n", "Epoch 708, Loss 5.350998\n", "Epoch 709, Loss 5.342771\n", "Epoch 710, Loss 5.334575\n", "Epoch 711, Loss 5.326403\n", "Epoch 712, Loss 5.318259\n", "Epoch 713, Loss 5.310144\n", "Epoch 714, Loss 5.302055\n", "Epoch 715, Loss 5.293994\n", "Epoch 716, Loss 5.285964\n", "Epoch 717, Loss 5.277958\n", "Epoch 718, Loss 5.269979\n", "Epoch 719, Loss 5.262026\n", "Epoch 720, Loss 5.254103\n", "Epoch 721, Loss 5.246205\n", "Epoch 722, Loss 5.238335\n", "Epoch 723, Loss 5.230491\n", "Epoch 724, Loss 5.222673\n", "Epoch 725, Loss 5.214881\n", "Epoch 726, Loss 5.207120\n", "Epoch 727, Loss 5.199381\n", "Epoch 728, Loss 5.191670\n", "Epoch 729, Loss 5.183984\n", "Epoch 730, Loss 5.176324\n", "Epoch 731, Loss 5.168688\n", "Epoch 732, Loss 5.161084\n", "Epoch 733, Loss 5.153500\n", "Epoch 734, Loss 5.145943\n", "Epoch 735, Loss 5.138412\n", "Epoch 736, Loss 5.130910\n", "Epoch 737, Loss 5.123428\n", "Epoch 738, Loss 5.115977\n", "Epoch 739, Loss 5.108547\n", "Epoch 740, Loss 5.101144\n", "Epoch 741, Loss 5.093765\n", "Epoch 742, Loss 5.086413\n", "Epoch 743, Loss 5.079085\n", "Epoch 744, Loss 5.071782\n", "Epoch 745, Loss 5.064505\n", "Epoch 746, Loss 5.057247\n", "Epoch 747, Loss 5.050022\n", "Epoch 748, Loss 5.042817\n", "Epoch 749, Loss 5.035636\n", "Epoch 750, Loss 5.028476\n", "Epoch 751, Loss 5.021346\n", "Epoch 752, Loss 5.014239\n", "Epoch 753, Loss 5.007157\n", "Epoch 754, Loss 5.000099\n", "Epoch 755, Loss 4.993064\n", "Epoch 756, Loss 4.986051\n", "Epoch 757, Loss 4.979065\n", "Epoch 758, Loss 4.972100\n", "Epoch 759, Loss 4.965159\n", "Epoch 760, Loss 4.958245\n", "Epoch 761, Loss 4.951350\n", "Epoch 762, Loss 4.944479\n", "Epoch 763, Loss 4.937633\n", "Epoch 764, Loss 4.930812\n", "Epoch 765, Loss 4.924009\n", "Epoch 766, Loss 4.917234\n", "Epoch 767, Loss 4.910480\n", "Epoch 768, Loss 4.903749\n", "Epoch 769, Loss 4.897040\n", "Epoch 770, Loss 4.890356\n", "Epoch 771, Loss 4.883691\n", "Epoch 772, Loss 4.877052\n", "Epoch 773, Loss 4.870436\n", "Epoch 774, Loss 4.863839\n", "Epoch 775, Loss 4.857267\n", "Epoch 776, Loss 4.850717\n", "Epoch 777, Loss 4.844189\n", "Epoch 778, Loss 4.837683\n", "Epoch 779, Loss 4.831196\n", "Epoch 780, Loss 4.824737\n", "Epoch 781, Loss 4.818298\n", "Epoch 782, Loss 4.811880\n", "Epoch 783, Loss 4.805481\n", "Epoch 784, Loss 4.799106\n", "Epoch 785, Loss 4.792755\n", "Epoch 786, Loss 4.786422\n", "Epoch 787, Loss 4.780112\n", "Epoch 788, Loss 4.773824\n", "Epoch 789, Loss 4.767559\n", "Epoch 790, Loss 4.761311\n", "Epoch 791, Loss 4.755087\n", "Epoch 792, Loss 4.748885\n", "Epoch 793, Loss 4.742701\n", "Epoch 794, 
Loss 4.736537\n", "Epoch 795, Loss 4.730397\n", "Epoch 796, Loss 4.724279\n", "Epoch 797, Loss 4.718181\n", "Epoch 798, Loss 4.712101\n", "Epoch 799, Loss 4.706046\n", "Epoch 800, Loss 4.700009\n", "Epoch 801, Loss 4.693989\n", "Epoch 802, Loss 4.687995\n", "Epoch 803, Loss 4.682020\n", "Epoch 804, Loss 4.676063\n", "Epoch 805, Loss 4.670130\n", "Epoch 806, Loss 4.664214\n", "Epoch 807, Loss 4.658320\n", "Epoch 808, Loss 4.652445\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 809, Loss 4.646592\n", "Epoch 810, Loss 4.640753\n", "Epoch 811, Loss 4.634938\n", "Epoch 812, Loss 4.629142\n", "Epoch 813, Loss 4.623368\n", "Epoch 814, Loss 4.617611\n", "Epoch 815, Loss 4.611873\n", "Epoch 816, Loss 4.606156\n", "Epoch 817, Loss 4.600458\n", "Epoch 818, Loss 4.594780\n", "Epoch 819, Loss 4.589119\n", "Epoch 820, Loss 4.583479\n", "Epoch 821, Loss 4.577857\n", "Epoch 822, Loss 4.572256\n", "Epoch 823, Loss 4.566675\n", "Epoch 824, Loss 4.561109\n", "Epoch 825, Loss 4.555565\n", "Epoch 826, Loss 4.550039\n", "Epoch 827, Loss 4.544533\n", "Epoch 828, Loss 4.539044\n", "Epoch 829, Loss 4.533575\n", "Epoch 830, Loss 4.528122\n", "Epoch 831, Loss 4.522691\n", "Epoch 832, Loss 4.517276\n", "Epoch 833, Loss 4.511879\n", "Epoch 834, Loss 4.506504\n", "Epoch 835, Loss 4.501141\n", "Epoch 836, Loss 4.495801\n", "Epoch 837, Loss 4.490474\n", "Epoch 838, Loss 4.485170\n", "Epoch 839, Loss 4.479884\n", "Epoch 840, Loss 4.474614\n", "Epoch 841, Loss 4.469364\n", "Epoch 842, Loss 4.464129\n", "Epoch 843, Loss 4.458913\n", "Epoch 844, Loss 4.453716\n", "Epoch 845, Loss 4.448534\n", "Epoch 846, Loss 4.443372\n", "Epoch 847, Loss 4.438227\n", "Epoch 848, Loss 4.433099\n", "Epoch 849, Loss 4.427989\n", "Epoch 850, Loss 4.422897\n", "Epoch 851, Loss 4.417819\n", "Epoch 852, Loss 4.412762\n", "Epoch 853, Loss 4.407720\n", "Epoch 854, Loss 4.402697\n", "Epoch 855, Loss 4.397688\n", "Epoch 856, Loss 4.392697\n", "Epoch 857, Loss 4.387725\n", "Epoch 858, Loss 4.382769\n", "Epoch 859, Loss 4.377828\n", "Epoch 860, Loss 4.372905\n", "Epoch 861, Loss 4.368000\n", "Epoch 862, Loss 4.363111\n", "Epoch 863, Loss 4.358238\n", "Epoch 864, Loss 4.353383\n", "Epoch 865, Loss 4.348542\n", "Epoch 866, Loss 4.343716\n", "Epoch 867, Loss 4.338911\n", "Epoch 868, Loss 4.334121\n", "Epoch 869, Loss 4.329345\n", "Epoch 870, Loss 4.324588\n", "Epoch 871, Loss 4.319845\n", "Epoch 872, Loss 4.315118\n", "Epoch 873, Loss 4.310409\n", "Epoch 874, Loss 4.305714\n", "Epoch 875, Loss 4.301035\n", "Epoch 876, Loss 4.296376\n", "Epoch 877, Loss 4.291727\n", "Epoch 878, Loss 4.287097\n", "Epoch 879, Loss 4.282482\n", "Epoch 880, Loss 4.277882\n", "Epoch 881, Loss 4.273299\n", "Epoch 882, Loss 4.268732\n", "Epoch 883, Loss 4.264178\n", "Epoch 884, Loss 4.259643\n", "Epoch 885, Loss 4.255120\n", "Epoch 886, Loss 4.250613\n", "Epoch 887, Loss 4.246124\n", "Epoch 888, Loss 4.241648\n", "Epoch 889, Loss 4.237185\n", "Epoch 890, Loss 4.232740\n", "Epoch 891, Loss 4.228308\n", "Epoch 892, Loss 4.223895\n", "Epoch 893, Loss 4.219494\n", "Epoch 894, Loss 4.215109\n", "Epoch 895, Loss 4.210737\n", "Epoch 896, Loss 4.206383\n", "Epoch 897, Loss 4.202042\n", "Epoch 898, Loss 4.197715\n", "Epoch 899, Loss 4.193405\n", "Epoch 900, Loss 4.189108\n", "Epoch 901, Loss 4.184825\n", "Epoch 902, Loss 4.180559\n", "Epoch 903, Loss 4.176305\n", "Epoch 904, Loss 4.172065\n", "Epoch 905, Loss 4.167842\n", "Epoch 906, Loss 4.163631\n", "Epoch 907, Loss 4.159436\n", "Epoch 908, Loss 4.155253\n", "Epoch 909, Loss 4.151086\n", "Epoch 910, Loss 
4.146934\n", "Epoch 911, Loss 4.142795\n", "Epoch 912, Loss 4.138669\n", "Epoch 913, Loss 4.134559\n", "Epoch 914, Loss 4.130464\n", "Epoch 915, Loss 4.126378\n", "Epoch 916, Loss 4.122310\n", "Epoch 917, Loss 4.118254\n", "Epoch 918, Loss 4.114213\n", "Epoch 919, Loss 4.110184\n", "Epoch 920, Loss 4.106169\n", "Epoch 921, Loss 4.102170\n", "Epoch 922, Loss 4.098181\n", "Epoch 923, Loss 4.094210\n", "Epoch 924, Loss 4.090249\n", "Epoch 925, Loss 4.086300\n", "Epoch 926, Loss 4.082366\n", "Epoch 927, Loss 4.078448\n", "Epoch 928, Loss 4.074541\n", "Epoch 929, Loss 4.070649\n", "Epoch 930, Loss 4.066768\n", "Epoch 931, Loss 4.062900\n", "Epoch 932, Loss 4.059047\n", "Epoch 933, Loss 4.055204\n", "Epoch 934, Loss 4.051379\n", "Epoch 935, Loss 4.047564\n", "Epoch 936, Loss 4.043762\n", "Epoch 937, Loss 4.039972\n", "Epoch 938, Loss 4.036197\n", "Epoch 939, Loss 4.032434\n", "Epoch 940, Loss 4.028686\n", "Epoch 941, Loss 4.024947\n", "Epoch 942, Loss 4.021224\n", "Epoch 943, Loss 4.017509\n", "Epoch 944, Loss 4.013810\n", "Epoch 945, Loss 4.010122\n", "Epoch 946, Loss 4.006450\n", "Epoch 947, Loss 4.002785\n", "Epoch 948, Loss 3.999137\n", "Epoch 949, Loss 3.995498\n", "Epoch 950, Loss 3.991874\n", "Epoch 951, Loss 3.988262\n", "Epoch 952, Loss 3.984659\n", "Epoch 953, Loss 3.981071\n", "Epoch 954, Loss 3.977497\n", "Epoch 955, Loss 3.973931\n", "Epoch 956, Loss 3.970381\n", "Epoch 957, Loss 3.966840\n", "Epoch 958, Loss 3.963312\n", "Epoch 959, Loss 3.959797\n", "Epoch 960, Loss 3.956295\n", "Epoch 961, Loss 3.952801\n", "Epoch 962, Loss 3.949323\n", "Epoch 963, Loss 3.945855\n", "Epoch 964, Loss 3.942398\n", "Epoch 965, Loss 3.938953\n", "Epoch 966, Loss 3.935521\n", "Epoch 967, Loss 3.932096\n", "Epoch 968, Loss 3.928688\n", "Epoch 969, Loss 3.925292\n", "Epoch 970, Loss 3.921906\n", "Epoch 971, Loss 3.918528\n", "Epoch 972, Loss 3.915166\n", "Epoch 973, Loss 3.911815\n", "Epoch 974, Loss 3.908474\n", "Epoch 975, Loss 3.905144\n", "Epoch 976, Loss 3.901824\n", "Epoch 977, Loss 3.898517\n", "Epoch 978, Loss 3.895222\n", "Epoch 979, Loss 3.891935\n", "Epoch 980, Loss 3.888664\n", "Epoch 981, Loss 3.885400\n", "Epoch 982, Loss 3.882150\n", "Epoch 983, Loss 3.878911\n", "Epoch 984, Loss 3.875680\n", "Epoch 985, Loss 3.872463\n", "Epoch 986, Loss 3.869256\n", "Epoch 987, Loss 3.866060\n", "Epoch 988, Loss 3.862873\n", "Epoch 989, Loss 3.859699\n", "Epoch 990, Loss 3.856535\n", "Epoch 991, Loss 3.853381\n", "Epoch 992, Loss 3.850237\n", "Epoch 993, Loss 3.847109\n", "Epoch 994, Loss 3.843984\n", "Epoch 995, Loss 3.840876\n", "Epoch 996, Loss 3.837775\n", "Epoch 997, Loss 3.834686\n", "Epoch 998, Loss 3.831606\n", "Epoch 999, Loss 3.828538\n", "Epoch 1000, Loss 3.825484\n", "Epoch 1001, Loss 3.822433\n", "Epoch 1002, Loss 3.819398\n", "Epoch 1003, Loss 3.816369\n", "Epoch 1004, Loss 3.813350\n", "Epoch 1005, Loss 3.810344\n", "Epoch 1006, Loss 3.807348\n", "Epoch 1007, Loss 3.804360\n", "Epoch 1008, Loss 3.801384\n", "Epoch 1009, Loss 3.798421\n", "Epoch 1010, Loss 3.795465\n", "Epoch 1011, Loss 3.792518\n", "Epoch 1012, Loss 3.789584\n", "Epoch 1013, Loss 3.786658\n", "Epoch 1014, Loss 3.783740\n", "Epoch 1015, Loss 3.780832\n", "Epoch 1016, Loss 3.777939\n", "Epoch 1017, Loss 3.775053\n", "Epoch 1018, Loss 3.772173\n", "Epoch 1019, Loss 3.769310\n", "Epoch 1020, Loss 3.766451\n", "Epoch 1021, Loss 3.763602\n", "Epoch 1022, Loss 3.760766\n", "Epoch 1023, Loss 3.757936\n", "Epoch 1024, Loss 3.755118\n", "Epoch 1025, Loss 3.752309\n", "Epoch 1026, Loss 3.749511\n", "Epoch 1027, Loss 3.746722\n", 
"Epoch 1028, Loss 3.743940\n", "Epoch 1029, Loss 3.741169\n", "Epoch 1030, Loss 3.738407\n", "Epoch 1031, Loss 3.735656\n", "Epoch 1032, Loss 3.732914\n", "Epoch 1033, Loss 3.730181\n", "Epoch 1034, Loss 3.727456\n", "Epoch 1035, Loss 3.724741\n", "Epoch 1036, Loss 3.722034\n", "Epoch 1037, Loss 3.719337\n", "Epoch 1038, Loss 3.716650\n", "Epoch 1039, Loss 3.713972\n", "Epoch 1040, Loss 3.711302\n", "Epoch 1041, Loss 3.708643\n", "Epoch 1042, Loss 3.705990\n", "Epoch 1043, Loss 3.703351\n", "Epoch 1044, Loss 3.700716\n", "Epoch 1045, Loss 3.698092\n", "Epoch 1046, Loss 3.695476\n", "Epoch 1047, Loss 3.692869\n", "Epoch 1048, Loss 3.690273\n", "Epoch 1049, Loss 3.687683\n", "Epoch 1050, Loss 3.685103\n", "Epoch 1051, Loss 3.682532\n", "Epoch 1052, Loss 3.679969\n", "Epoch 1053, Loss 3.677417\n", "Epoch 1054, Loss 3.674871\n", "Epoch 1055, Loss 3.672334\n", "Epoch 1056, Loss 3.669805\n", "Epoch 1057, Loss 3.667287\n", "Epoch 1058, Loss 3.664775\n", "Epoch 1059, Loss 3.662273\n", "Epoch 1060, Loss 3.659778\n", "Epoch 1061, Loss 3.657295\n", "Epoch 1062, Loss 3.654816\n", "Epoch 1063, Loss 3.652350\n", "Epoch 1064, Loss 3.649889\n", "Epoch 1065, Loss 3.647437\n", "Epoch 1066, Loss 3.644991\n", "Epoch 1067, Loss 3.642559\n", "Epoch 1068, Loss 3.640131\n", "Epoch 1069, Loss 3.637711\n", "Epoch 1070, Loss 3.635302\n", "Epoch 1071, Loss 3.632902\n", "Epoch 1072, Loss 3.630508\n", "Epoch 1073, Loss 3.628119\n", "Epoch 1074, Loss 3.625741\n", "Epoch 1075, Loss 3.623374\n", "Epoch 1076, Loss 3.621010\n", "Epoch 1077, Loss 3.618659\n", "Epoch 1078, Loss 3.616311\n", "Epoch 1079, Loss 3.613973\n", "Epoch 1080, Loss 3.611643\n", "Epoch 1081, Loss 3.609322\n", "Epoch 1082, Loss 3.607007\n", "Epoch 1083, Loss 3.604701\n", "Epoch 1084, Loss 3.602404\n", "Epoch 1085, Loss 3.600114\n", "Epoch 1086, Loss 3.597830\n", "Epoch 1087, Loss 3.595553\n", "Epoch 1088, Loss 3.593287\n", "Epoch 1089, Loss 3.591030\n", "Epoch 1090, Loss 3.588776\n", "Epoch 1091, Loss 3.586534\n", "Epoch 1092, Loss 3.584295\n", "Epoch 1093, Loss 3.582067\n", "Epoch 1094, Loss 3.579846\n", "Epoch 1095, Loss 3.577631\n", "Epoch 1096, Loss 3.575424\n", "Epoch 1097, Loss 3.573225\n", "Epoch 1098, Loss 3.571034\n", "Epoch 1099, Loss 3.568848\n", "Epoch 1100, Loss 3.566673\n", "Epoch 1101, Loss 3.564506\n", "Epoch 1102, Loss 3.562340\n", "Epoch 1103, Loss 3.560185\n", "Epoch 1104, Loss 3.558040\n", "Epoch 1105, Loss 3.555901\n", "Epoch 1106, Loss 3.553767\n", "Epoch 1107, Loss 3.551641\n", "Epoch 1108, Loss 3.549524\n", "Epoch 1109, Loss 3.547411\n", "Epoch 1110, Loss 3.545309\n", "Epoch 1111, Loss 3.543211\n", "Epoch 1112, Loss 3.541124\n", "Epoch 1113, Loss 3.539041\n", "Epoch 1114, Loss 3.536966\n", "Epoch 1115, Loss 3.534897\n", "Epoch 1116, Loss 3.532835\n", "Epoch 1117, Loss 3.530781\n", "Epoch 1118, Loss 3.528734\n", "Epoch 1119, Loss 3.526694\n", "Epoch 1120, Loss 3.524662\n", "Epoch 1121, Loss 3.522633\n", "Epoch 1122, Loss 3.520614\n", "Epoch 1123, Loss 3.518601\n", "Epoch 1124, Loss 3.516594\n", "Epoch 1125, Loss 3.514594\n", "Epoch 1126, Loss 3.512602\n", "Epoch 1127, Loss 3.510619\n", "Epoch 1128, Loss 3.508637\n", "Epoch 1129, Loss 3.506665\n", "Epoch 1130, Loss 3.504700\n", "Epoch 1131, Loss 3.502740\n", "Epoch 1132, Loss 3.500789\n", "Epoch 1133, Loss 3.498843\n", "Epoch 1134, Loss 3.496905\n", "Epoch 1135, Loss 3.494972\n", "Epoch 1136, Loss 3.493046\n", "Epoch 1137, Loss 3.491127\n", "Epoch 1138, Loss 3.489213\n", "Epoch 1139, Loss 3.487308\n", "Epoch 1140, Loss 3.485410\n", "Epoch 1141, Loss 3.483515\n", "Epoch 1142, Loss 
3.481627\n", "Epoch 1143, Loss 3.479746\n", "Epoch 1144, Loss 3.477872\n", "Epoch 1145, Loss 3.476005\n", "Epoch 1146, Loss 3.474143\n", "Epoch 1147, Loss 3.472288\n", "Epoch 1148, Loss 3.470441\n", "Epoch 1149, Loss 3.468597\n", "Epoch 1150, Loss 3.466762\n", "Epoch 1151, Loss 3.464930\n", "Epoch 1152, Loss 3.463105\n", "Epoch 1153, Loss 3.461289\n", "Epoch 1154, Loss 3.459477\n", "Epoch 1155, Loss 3.457672\n", "Epoch 1156, Loss 3.455873\n", "Epoch 1157, Loss 3.454080\n", "Epoch 1158, Loss 3.452293\n", "Epoch 1159, Loss 3.450512\n", "Epoch 1160, Loss 3.448736\n", "Epoch 1161, Loss 3.446968\n", "Epoch 1162, Loss 3.445203\n", "Epoch 1163, Loss 3.443449\n", "Epoch 1164, Loss 3.441696\n", "Epoch 1165, Loss 3.439952\n", "Epoch 1166, Loss 3.438210\n", "Epoch 1167, Loss 3.436478\n", "Epoch 1168, Loss 3.434753\n", "Epoch 1169, Loss 3.433029\n", "Epoch 1170, Loss 3.431314\n", "Epoch 1171, Loss 3.429608\n", "Epoch 1172, Loss 3.427903\n", "Epoch 1173, Loss 3.426204\n", "Epoch 1174, Loss 3.424509\n", "Epoch 1175, Loss 3.422824\n", "Epoch 1176, Loss 3.421144\n", "Epoch 1177, Loss 3.419468\n", "Epoch 1178, Loss 3.417798\n", "Epoch 1179, Loss 3.416134\n", "Epoch 1180, Loss 3.414477\n", "Epoch 1181, Loss 3.412824\n", "Epoch 1182, Loss 3.411176\n", "Epoch 1183, Loss 3.409534\n", "Epoch 1184, Loss 3.407899\n", "Epoch 1185, Loss 3.406272\n", "Epoch 1186, Loss 3.404645\n", "Epoch 1187, Loss 3.403024\n", "Epoch 1188, Loss 3.401413\n", "Epoch 1189, Loss 3.399802\n", "Epoch 1190, Loss 3.398200\n", "Epoch 1191, Loss 3.396602\n", "Epoch 1192, Loss 3.395011\n", "Epoch 1193, Loss 3.393425\n", "Epoch 1194, Loss 3.391845\n", "Epoch 1195, Loss 3.390267\n", "Epoch 1196, Loss 3.388697\n", "Epoch 1197, Loss 3.387132\n", "Epoch 1198, Loss 3.385571\n", "Epoch 1199, Loss 3.384017\n", "Epoch 1200, Loss 3.382467\n", "Epoch 1201, Loss 3.380925\n", "Epoch 1202, Loss 3.379386\n", "Epoch 1203, Loss 3.377852\n", "Epoch 1204, Loss 3.376323\n", "Epoch 1205, Loss 3.374800\n", "Epoch 1206, Loss 3.373284\n", "Epoch 1207, Loss 3.371769\n", "Epoch 1208, Loss 3.370261\n", "Epoch 1209, Loss 3.368759\n", "Epoch 1210, Loss 3.367262\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1211, Loss 3.365771\n", "Epoch 1212, Loss 3.364282\n", "Epoch 1213, Loss 3.362800\n", "Epoch 1214, Loss 3.361325\n", "Epoch 1215, Loss 3.359851\n", "Epoch 1216, Loss 3.358383\n", "Epoch 1217, Loss 3.356921\n", "Epoch 1218, Loss 3.355464\n", "Epoch 1219, Loss 3.354013\n", "Epoch 1220, Loss 3.352564\n", "Epoch 1221, Loss 3.351122\n", "Epoch 1222, Loss 3.349685\n", "Epoch 1223, Loss 3.348251\n", "Epoch 1224, Loss 3.346825\n", "Epoch 1225, Loss 3.345403\n", "Epoch 1226, Loss 3.343982\n", "Epoch 1227, Loss 3.342571\n", "Epoch 1228, Loss 3.341161\n", "Epoch 1229, Loss 3.339758\n", "Epoch 1230, Loss 3.338359\n", "Epoch 1231, Loss 3.336965\n", "Epoch 1232, Loss 3.335577\n", "Epoch 1233, Loss 3.334191\n", "Epoch 1234, Loss 3.332811\n", "Epoch 1235, Loss 3.331435\n", "Epoch 1236, Loss 3.330065\n", "Epoch 1237, Loss 3.328699\n", "Epoch 1238, Loss 3.327338\n", "Epoch 1239, Loss 3.325980\n", "Epoch 1240, Loss 3.324628\n", "Epoch 1241, Loss 3.323280\n", "Epoch 1242, Loss 3.321935\n", "Epoch 1243, Loss 3.320599\n", "Epoch 1244, Loss 3.319264\n", "Epoch 1245, Loss 3.317935\n", "Epoch 1246, Loss 3.316610\n", "Epoch 1247, Loss 3.315289\n", "Epoch 1248, Loss 3.313974\n", "Epoch 1249, Loss 3.312663\n", "Epoch 1250, Loss 3.311353\n", "Epoch 1251, Loss 3.310053\n", "Epoch 1252, Loss 3.308756\n", "Epoch 1253, Loss 3.307462\n", "Epoch 1254, Loss 3.306170\n", "Epoch 
1255, Loss 3.304887\n", "Epoch 1256, Loss 3.303605\n", "Epoch 1257, Loss 3.302329\n", "Epoch 1258, Loss 3.301058\n", "Epoch 1259, Loss 3.299791\n", "Epoch 1260, Loss 3.298527\n", "Epoch 1261, Loss 3.297266\n", "Epoch 1262, Loss 3.296014\n", "Epoch 1263, Loss 3.294762\n", "Epoch 1264, Loss 3.293517\n", "Epoch 1265, Loss 3.292275\n", "Epoch 1266, Loss 3.291036\n", "Epoch 1267, Loss 3.289804\n", "Epoch 1268, Loss 3.288573\n", "Epoch 1269, Loss 3.287347\n", "Epoch 1270, Loss 3.286129\n", "Epoch 1271, Loss 3.284911\n", "Epoch 1272, Loss 3.283698\n", "Epoch 1273, Loss 3.282488\n", "Epoch 1274, Loss 3.281284\n", "Epoch 1275, Loss 3.280086\n", "Epoch 1276, Loss 3.278888\n", "Epoch 1277, Loss 3.277696\n", "Epoch 1278, Loss 3.276506\n", "Epoch 1279, Loss 3.275322\n", "Epoch 1280, Loss 3.274142\n", "Epoch 1281, Loss 3.272967\n", "Epoch 1282, Loss 3.271793\n", "Epoch 1283, Loss 3.270625\n", "Epoch 1284, Loss 3.269460\n", "Epoch 1285, Loss 3.268301\n", "Epoch 1286, Loss 3.267143\n", "Epoch 1287, Loss 3.265991\n", "Epoch 1288, Loss 3.264842\n", "Epoch 1289, Loss 3.263700\n", "Epoch 1290, Loss 3.262556\n", "Epoch 1291, Loss 3.261421\n", "Epoch 1292, Loss 3.260288\n", "Epoch 1293, Loss 3.259161\n", "Epoch 1294, Loss 3.258033\n", "Epoch 1295, Loss 3.256912\n", "Epoch 1296, Loss 3.255795\n", "Epoch 1297, Loss 3.254681\n", "Epoch 1298, Loss 3.253569\n", "Epoch 1299, Loss 3.252462\n", "Epoch 1300, Loss 3.251362\n", "Epoch 1301, Loss 3.250264\n", "Epoch 1302, Loss 3.249168\n", "Epoch 1303, Loss 3.248077\n", "Epoch 1304, Loss 3.246989\n", "Epoch 1305, Loss 3.245904\n", "Epoch 1306, Loss 3.244824\n", "Epoch 1307, Loss 3.243747\n", "Epoch 1308, Loss 3.242674\n", "Epoch 1309, Loss 3.241606\n", "Epoch 1310, Loss 3.240538\n", "Epoch 1311, Loss 3.239475\n", "Epoch 1312, Loss 3.238420\n", "Epoch 1313, Loss 3.237364\n", "Epoch 1314, Loss 3.236314\n", "Epoch 1315, Loss 3.235264\n", "Epoch 1316, Loss 3.234218\n", "Epoch 1317, Loss 3.233179\n", "Epoch 1318, Loss 3.232143\n", "Epoch 1319, Loss 3.231108\n", "Epoch 1320, Loss 3.230078\n", "Epoch 1321, Loss 3.229051\n", "Epoch 1322, Loss 3.228027\n", "Epoch 1323, Loss 3.227010\n", "Epoch 1324, Loss 3.225993\n", "Epoch 1325, Loss 3.224979\n", "Epoch 1326, Loss 3.223971\n", "Epoch 1327, Loss 3.222965\n", "Epoch 1328, Loss 3.221961\n", "Epoch 1329, Loss 3.220962\n", "Epoch 1330, Loss 3.219967\n", "Epoch 1331, Loss 3.218975\n", "Epoch 1332, Loss 3.217986\n", "Epoch 1333, Loss 3.217000\n", "Epoch 1334, Loss 3.216017\n", "Epoch 1335, Loss 3.215039\n", "Epoch 1336, Loss 3.214062\n", "Epoch 1337, Loss 3.213092\n", "Epoch 1338, Loss 3.212122\n", "Epoch 1339, Loss 3.211157\n", "Epoch 1340, Loss 3.210193\n", "Epoch 1341, Loss 3.209235\n", "Epoch 1342, Loss 3.208279\n", "Epoch 1343, Loss 3.207326\n", "Epoch 1344, Loss 3.206376\n", "Epoch 1345, Loss 3.205430\n", "Epoch 1346, Loss 3.204488\n", "Epoch 1347, Loss 3.203547\n", "Epoch 1348, Loss 3.202611\n", "Epoch 1349, Loss 3.201678\n", "Epoch 1350, Loss 3.200747\n", "Epoch 1351, Loss 3.199820\n", "Epoch 1352, Loss 3.198897\n", "Epoch 1353, Loss 3.197976\n", "Epoch 1354, Loss 3.197060\n", "Epoch 1355, Loss 3.196144\n", "Epoch 1356, Loss 3.195231\n", "Epoch 1357, Loss 3.194324\n", "Epoch 1358, Loss 3.193419\n", "Epoch 1359, Loss 3.192517\n", "Epoch 1360, Loss 3.191616\n", "Epoch 1361, Loss 3.190720\n", "Epoch 1362, Loss 3.189829\n", "Epoch 1363, Loss 3.188937\n", "Epoch 1364, Loss 3.188051\n", "Epoch 1365, Loss 3.187166\n", "Epoch 1366, Loss 3.186288\n", "Epoch 1367, Loss 3.185409\n", "Epoch 1368, Loss 3.184535\n", "Epoch 1369, Loss 
3.183662\n", "Epoch 1370, Loss 3.182791\n", "Epoch 1371, Loss 3.181925\n", "Epoch 1372, Loss 3.181063\n", "Epoch 1373, Loss 3.180201\n", "Epoch 1374, Loss 3.179347\n", "Epoch 1375, Loss 3.178490\n", "Epoch 1376, Loss 3.177638\n", "Epoch 1377, Loss 3.176789\n", "Epoch 1378, Loss 3.175945\n", "Epoch 1379, Loss 3.175101\n", "Epoch 1380, Loss 3.174262\n", "Epoch 1381, Loss 3.173424\n", "Epoch 1382, Loss 3.172590\n", "Epoch 1383, Loss 3.171759\n", "Epoch 1384, Loss 3.170929\n", "Epoch 1385, Loss 3.170103\n", "Epoch 1386, Loss 3.169280\n", "Epoch 1387, Loss 3.168462\n", "Epoch 1388, Loss 3.167644\n", "Epoch 1389, Loss 3.166827\n", "Epoch 1390, Loss 3.166017\n", "Epoch 1391, Loss 3.165206\n", "Epoch 1392, Loss 3.164401\n", "Epoch 1393, Loss 3.163594\n", "Epoch 1394, Loss 3.162795\n", "Epoch 1395, Loss 3.161996\n", "Epoch 1396, Loss 3.161201\n", "Epoch 1397, Loss 3.160410\n", "Epoch 1398, Loss 3.159618\n", "Epoch 1399, Loss 3.158831\n", "Epoch 1400, Loss 3.158046\n", "Epoch 1401, Loss 3.157263\n", "Epoch 1402, Loss 3.156484\n", "Epoch 1403, Loss 3.155708\n", "Epoch 1404, Loss 3.154933\n", "Epoch 1405, Loss 3.154162\n", "Epoch 1406, Loss 3.153393\n", "Epoch 1407, Loss 3.152627\n", "Epoch 1408, Loss 3.151864\n", "Epoch 1409, Loss 3.151101\n", "Epoch 1410, Loss 3.150343\n", "Epoch 1411, Loss 3.149587\n", "Epoch 1412, Loss 3.148833\n", "Epoch 1413, Loss 3.148082\n", "Epoch 1414, Loss 3.147335\n", "Epoch 1415, Loss 3.146588\n", "Epoch 1416, Loss 3.145845\n", "Epoch 1417, Loss 3.145105\n", "Epoch 1418, Loss 3.144367\n", "Epoch 1419, Loss 3.143630\n", "Epoch 1420, Loss 3.142899\n", "Epoch 1421, Loss 3.142166\n", "Epoch 1422, Loss 3.141439\n", "Epoch 1423, Loss 3.140712\n", "Epoch 1424, Loss 3.139989\n", "Epoch 1425, Loss 3.139271\n", "Epoch 1426, Loss 3.138551\n", "Epoch 1427, Loss 3.137834\n", "Epoch 1428, Loss 3.137121\n", "Epoch 1429, Loss 3.136410\n", "Epoch 1430, Loss 3.135700\n", "Epoch 1431, Loss 3.134995\n", "Epoch 1432, Loss 3.134291\n", "Epoch 1433, Loss 3.133590\n", "Epoch 1434, Loss 3.132889\n", "Epoch 1435, Loss 3.132194\n", "Epoch 1436, Loss 3.131500\n", "Epoch 1437, Loss 3.130810\n", "Epoch 1438, Loss 3.130119\n", "Epoch 1439, Loss 3.129432\n", "Epoch 1440, Loss 3.128746\n", "Epoch 1441, Loss 3.128064\n", "Epoch 1442, Loss 3.127381\n", "Epoch 1443, Loss 3.126705\n", "Epoch 1444, Loss 3.126031\n", "Epoch 1445, Loss 3.125356\n", "Epoch 1446, Loss 3.124683\n", "Epoch 1447, Loss 3.124017\n", "Epoch 1448, Loss 3.123348\n", "Epoch 1449, Loss 3.122685\n", "Epoch 1450, Loss 3.122022\n", "Epoch 1451, Loss 3.121362\n", "Epoch 1452, Loss 3.120706\n", "Epoch 1453, Loss 3.120049\n", "Epoch 1454, Loss 3.119396\n", "Epoch 1455, Loss 3.118746\n", "Epoch 1456, Loss 3.118098\n", "Epoch 1457, Loss 3.117452\n", "Epoch 1458, Loss 3.116805\n", "Epoch 1459, Loss 3.116164\n", "Epoch 1460, Loss 3.115525\n", "Epoch 1461, Loss 3.114886\n", "Epoch 1462, Loss 3.114250\n", "Epoch 1463, Loss 3.113617\n", "Epoch 1464, Loss 3.112984\n", "Epoch 1465, Loss 3.112358\n", "Epoch 1466, Loss 3.111731\n", "Epoch 1467, Loss 3.111103\n", "Epoch 1468, Loss 3.110484\n", "Epoch 1469, Loss 3.109860\n", "Epoch 1470, Loss 3.109242\n", "Epoch 1471, Loss 3.108627\n", "Epoch 1472, Loss 3.108011\n", "Epoch 1473, Loss 3.107401\n", "Epoch 1474, Loss 3.106791\n", "Epoch 1475, Loss 3.106180\n", "Epoch 1476, Loss 3.105575\n", "Epoch 1477, Loss 3.104972\n", "Epoch 1478, Loss 3.104370\n", "Epoch 1479, Loss 3.103770\n", "Epoch 1480, Loss 3.103172\n", "Epoch 1481, Loss 3.102576\n", "Epoch 1482, Loss 3.101982\n", "Epoch 1483, Loss 3.101390\n", "Epoch 
1484, Loss 3.100802\n", "Epoch 1485, Loss 3.100213\n", "Epoch 1486, Loss 3.099627\n", "Epoch 1487, Loss 3.099044\n", "Epoch 1488, Loss 3.098462\n", "Epoch 1489, Loss 3.097883\n", "Epoch 1490, Loss 3.097302\n", "Epoch 1491, Loss 3.096727\n", "Epoch 1492, Loss 3.096153\n", "Epoch 1493, Loss 3.095583\n", "Epoch 1494, Loss 3.095011\n", "Epoch 1495, Loss 3.094444\n", "Epoch 1496, Loss 3.093876\n", "Epoch 1497, Loss 3.093314\n", "Epoch 1498, Loss 3.092751\n", "Epoch 1499, Loss 3.092191\n", "Epoch 1500, Loss 3.091631\n", "Epoch 1501, Loss 3.091074\n", "Epoch 1502, Loss 3.090520\n", "Epoch 1503, Loss 3.089969\n", "Epoch 1504, Loss 3.089417\n", "Epoch 1505, Loss 3.088867\n", "Epoch 1506, Loss 3.088320\n", "Epoch 1507, Loss 3.087775\n", "Epoch 1508, Loss 3.087232\n", "Epoch 1509, Loss 3.086690\n", "Epoch 1510, Loss 3.086150\n", "Epoch 1511, Loss 3.085612\n", "Epoch 1512, Loss 3.085075\n", "Epoch 1513, Loss 3.084542\n", "Epoch 1514, Loss 3.084009\n", "Epoch 1515, Loss 3.083478\n", "Epoch 1516, Loss 3.082948\n", "Epoch 1517, Loss 3.082422\n", "Epoch 1518, Loss 3.081897\n", "Epoch 1519, Loss 3.081373\n", "Epoch 1520, Loss 3.080850\n", "Epoch 1521, Loss 3.080331\n", "Epoch 1522, Loss 3.079811\n", "Epoch 1523, Loss 3.079297\n", "Epoch 1524, Loss 3.078781\n", "Epoch 1525, Loss 3.078268\n", "Epoch 1526, Loss 3.077757\n", "Epoch 1527, Loss 3.077247\n", "Epoch 1528, Loss 3.076739\n", "Epoch 1529, Loss 3.076232\n", "Epoch 1530, Loss 3.075729\n", "Epoch 1531, Loss 3.075225\n", "Epoch 1532, Loss 3.074724\n", "Epoch 1533, Loss 3.074227\n", "Epoch 1534, Loss 3.073726\n", "Epoch 1535, Loss 3.073232\n", "Epoch 1536, Loss 3.072739\n", "Epoch 1537, Loss 3.072245\n", "Epoch 1538, Loss 3.071753\n", "Epoch 1539, Loss 3.071265\n", "Epoch 1540, Loss 3.070778\n", "Epoch 1541, Loss 3.070293\n", "Epoch 1542, Loss 3.069808\n", "Epoch 1543, Loss 3.069326\n", "Epoch 1544, Loss 3.068845\n", "Epoch 1545, Loss 3.068366\n", "Epoch 1546, Loss 3.067887\n", "Epoch 1547, Loss 3.067412\n", "Epoch 1548, Loss 3.066937\n", "Epoch 1549, Loss 3.066464\n", "Epoch 1550, Loss 3.065993\n", "Epoch 1551, Loss 3.065524\n", "Epoch 1552, Loss 3.065055\n", "Epoch 1553, Loss 3.064588\n", "Epoch 1554, Loss 3.064123\n", "Epoch 1555, Loss 3.063660\n", "Epoch 1556, Loss 3.063199\n", "Epoch 1557, Loss 3.062738\n", "Epoch 1558, Loss 3.062280\n", "Epoch 1559, Loss 3.061822\n", "Epoch 1560, Loss 3.061368\n", "Epoch 1561, Loss 3.060913\n", "Epoch 1562, Loss 3.060461\n", "Epoch 1563, Loss 3.060011\n", "Epoch 1564, Loss 3.059561\n", "Epoch 1565, Loss 3.059114\n", "Epoch 1566, Loss 3.058668\n", "Epoch 1567, Loss 3.058221\n", "Epoch 1568, Loss 3.057780\n", "Epoch 1569, Loss 3.057338\n", "Epoch 1570, Loss 3.056898\n", "Epoch 1571, Loss 3.056458\n", "Epoch 1572, Loss 3.056019\n", "Epoch 1573, Loss 3.055585\n", "Epoch 1574, Loss 3.055151\n", "Epoch 1575, Loss 3.054717\n", "Epoch 1576, Loss 3.054286\n", "Epoch 1577, Loss 3.053857\n", "Epoch 1578, Loss 3.053428\n", "Epoch 1579, Loss 3.053001\n", "Epoch 1580, Loss 3.052576\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1581, Loss 3.052152\n", "Epoch 1582, Loss 3.051730\n", "Epoch 1583, Loss 3.051307\n", "Epoch 1584, Loss 3.050888\n", "Epoch 1585, Loss 3.050471\n", "Epoch 1586, Loss 3.050052\n", "Epoch 1587, Loss 3.049639\n", "Epoch 1588, Loss 3.049223\n", "Epoch 1589, Loss 3.048811\n", "Epoch 1590, Loss 3.048398\n", "Epoch 1591, Loss 3.047991\n", "Epoch 1592, Loss 3.047581\n", "Epoch 1593, Loss 3.047173\n", "Epoch 1594, Loss 3.046768\n", "Epoch 1595, Loss 3.046363\n", "Epoch 1596, Loss 3.045960\n", 
"Epoch 1597, Loss 3.045559\n", "Epoch 1598, Loss 3.045160\n", "Epoch 1599, Loss 3.044759\n", "Epoch 1600, Loss 3.044361\n", "Epoch 1601, Loss 3.043966\n", "Epoch 1602, Loss 3.043571\n", "Epoch 1603, Loss 3.043176\n", "Epoch 1604, Loss 3.042785\n", "Epoch 1605, Loss 3.042395\n", "Epoch 1606, Loss 3.042004\n", "Epoch 1607, Loss 3.041615\n", "Epoch 1608, Loss 3.041230\n", "Epoch 1609, Loss 3.040844\n", "Epoch 1610, Loss 3.040460\n", "Epoch 1611, Loss 3.040077\n", "Epoch 1612, Loss 3.039695\n", "Epoch 1613, Loss 3.039314\n", "Epoch 1614, Loss 3.038934\n", "Epoch 1615, Loss 3.038557\n", "Epoch 1616, Loss 3.038182\n", "Epoch 1617, Loss 3.037806\n", "Epoch 1618, Loss 3.037431\n", "Epoch 1619, Loss 3.037059\n", "Epoch 1620, Loss 3.036689\n", "Epoch 1621, Loss 3.036319\n", "Epoch 1622, Loss 3.035949\n", "Epoch 1623, Loss 3.035583\n", "Epoch 1624, Loss 3.035215\n", "Epoch 1625, Loss 3.034849\n", "Epoch 1626, Loss 3.034485\n", "Epoch 1627, Loss 3.034122\n", "Epoch 1628, Loss 3.033762\n", "Epoch 1629, Loss 3.033402\n", "Epoch 1630, Loss 3.033042\n", "Epoch 1631, Loss 3.032685\n", "Epoch 1632, Loss 3.032329\n", "Epoch 1633, Loss 3.031973\n", "Epoch 1634, Loss 3.031619\n", "Epoch 1635, Loss 3.031265\n", "Epoch 1636, Loss 3.030913\n", "Epoch 1637, Loss 3.030564\n", "Epoch 1638, Loss 3.030215\n", "Epoch 1639, Loss 3.029866\n", "Epoch 1640, Loss 3.029518\n", "Epoch 1641, Loss 3.029172\n", "Epoch 1642, Loss 3.028829\n", "Epoch 1643, Loss 3.028486\n", "Epoch 1644, Loss 3.028142\n", "Epoch 1645, Loss 3.027802\n", "Epoch 1646, Loss 3.027462\n", "Epoch 1647, Loss 3.027122\n", "Epoch 1648, Loss 3.026784\n", "Epoch 1649, Loss 3.026447\n", "Epoch 1650, Loss 3.026111\n", "Epoch 1651, Loss 3.025780\n", "Epoch 1652, Loss 3.025446\n", "Epoch 1653, Loss 3.025114\n", "Epoch 1654, Loss 3.024782\n", "Epoch 1655, Loss 3.024452\n", "Epoch 1656, Loss 3.024125\n", "Epoch 1657, Loss 3.023797\n", "Epoch 1658, Loss 3.023471\n", "Epoch 1659, Loss 3.023146\n", "Epoch 1660, Loss 3.022821\n", "Epoch 1661, Loss 3.022498\n", "Epoch 1662, Loss 3.022177\n", "Epoch 1663, Loss 3.021855\n", "Epoch 1664, Loss 3.021534\n", "Epoch 1665, Loss 3.021217\n", "Epoch 1666, Loss 3.020898\n", "Epoch 1667, Loss 3.020582\n", "Epoch 1668, Loss 3.020266\n", "Epoch 1669, Loss 3.019952\n", "Epoch 1670, Loss 3.019639\n", "Epoch 1671, Loss 3.019325\n", "Epoch 1672, Loss 3.019016\n", "Epoch 1673, Loss 3.018706\n", "Epoch 1674, Loss 3.018395\n", "Epoch 1675, Loss 3.018089\n", "Epoch 1676, Loss 3.017780\n", "Epoch 1677, Loss 3.017475\n", "Epoch 1678, Loss 3.017170\n", "Epoch 1679, Loss 3.016867\n", "Epoch 1680, Loss 3.016564\n", "Epoch 1681, Loss 3.016262\n", "Epoch 1682, Loss 3.015959\n", "Epoch 1683, Loss 3.015661\n", "Epoch 1684, Loss 3.015362\n", "Epoch 1685, Loss 3.015064\n", "Epoch 1686, Loss 3.014768\n", "Epoch 1687, Loss 3.014472\n", "Epoch 1688, Loss 3.014179\n", "Epoch 1689, Loss 3.013884\n", "Epoch 1690, Loss 3.013591\n", "Epoch 1691, Loss 3.013299\n", "Epoch 1692, Loss 3.013008\n", "Epoch 1693, Loss 3.012719\n", "Epoch 1694, Loss 3.012431\n", "Epoch 1695, Loss 3.012141\n", "Epoch 1696, Loss 3.011855\n", "Epoch 1697, Loss 3.011570\n", "Epoch 1698, Loss 3.011284\n", "Epoch 1699, Loss 3.011001\n", "Epoch 1700, Loss 3.010718\n", "Epoch 1701, Loss 3.010436\n", "Epoch 1702, Loss 3.010156\n", "Epoch 1703, Loss 3.009876\n", "Epoch 1704, Loss 3.009595\n", "Epoch 1705, Loss 3.009319\n", "Epoch 1706, Loss 3.009040\n", "Epoch 1707, Loss 3.008763\n", "Epoch 1708, Loss 3.008487\n", "Epoch 1709, Loss 3.008214\n", "Epoch 1710, Loss 3.007941\n", "Epoch 1711, Loss 
3.007668\n", "Epoch 1712, Loss 3.007396\n", "Epoch 1713, Loss 3.007126\n", "Epoch 1714, Loss 3.006856\n", "Epoch 1715, Loss 3.006586\n", "Epoch 1716, Loss 3.006318\n", "Epoch 1717, Loss 3.006052\n", "Epoch 1718, Loss 3.005785\n", "Epoch 1719, Loss 3.005520\n", "Epoch 1720, Loss 3.005256\n", "Epoch 1721, Loss 3.004993\n", "Epoch 1722, Loss 3.004729\n", "Epoch 1723, Loss 3.004467\n", "Epoch 1724, Loss 3.004207\n", "Epoch 1725, Loss 3.003947\n", "Epoch 1726, Loss 3.003690\n", "Epoch 1727, Loss 3.003430\n", "Epoch 1728, Loss 3.003174\n", "Epoch 1729, Loss 3.002918\n", "Epoch 1730, Loss 3.002661\n", "Epoch 1731, Loss 3.002406\n", "Epoch 1732, Loss 3.002152\n", "Epoch 1733, Loss 3.001901\n", "Epoch 1734, Loss 3.001649\n", "Epoch 1735, Loss 3.001395\n", "Epoch 1736, Loss 3.001145\n", "Epoch 1737, Loss 3.000898\n", "Epoch 1738, Loss 3.000648\n", "Epoch 1739, Loss 3.000400\n", "Epoch 1740, Loss 3.000154\n", "Epoch 1741, Loss 2.999907\n", "Epoch 1742, Loss 2.999662\n", "Epoch 1743, Loss 2.999417\n", "Epoch 1744, Loss 2.999174\n", "Epoch 1745, Loss 2.998930\n", "Epoch 1746, Loss 2.998688\n", "Epoch 1747, Loss 2.998448\n", "Epoch 1748, Loss 2.998208\n", "Epoch 1749, Loss 2.997968\n", "Epoch 1750, Loss 2.997730\n", "Epoch 1751, Loss 2.997490\n", "Epoch 1752, Loss 2.997254\n", "Epoch 1753, Loss 2.997018\n", "Epoch 1754, Loss 2.996783\n", "Epoch 1755, Loss 2.996548\n", "Epoch 1756, Loss 2.996313\n", "Epoch 1757, Loss 2.996081\n", "Epoch 1758, Loss 2.995847\n", "Epoch 1759, Loss 2.995615\n", "Epoch 1760, Loss 2.995387\n", "Epoch 1761, Loss 2.995156\n", "Epoch 1762, Loss 2.994929\n", "Epoch 1763, Loss 2.994699\n", "Epoch 1764, Loss 2.994472\n", "Epoch 1765, Loss 2.994245\n", "Epoch 1766, Loss 2.994018\n", "Epoch 1767, Loss 2.993793\n", "Epoch 1768, Loss 2.993569\n", "Epoch 1769, Loss 2.993344\n", "Epoch 1770, Loss 2.993122\n", "Epoch 1771, Loss 2.992900\n", "Epoch 1772, Loss 2.992678\n", "Epoch 1773, Loss 2.992457\n", "Epoch 1774, Loss 2.992238\n", "Epoch 1775, Loss 2.992017\n", "Epoch 1776, Loss 2.991798\n", "Epoch 1777, Loss 2.991583\n", "Epoch 1778, Loss 2.991366\n", "Epoch 1779, Loss 2.991146\n", "Epoch 1780, Loss 2.990932\n", "Epoch 1781, Loss 2.990719\n", "Epoch 1782, Loss 2.990503\n", "Epoch 1783, Loss 2.990289\n", "Epoch 1784, Loss 2.990078\n", "Epoch 1785, Loss 2.989866\n", "Epoch 1786, Loss 2.989654\n", "Epoch 1787, Loss 2.989443\n", "Epoch 1788, Loss 2.989233\n", "Epoch 1789, Loss 2.989025\n", "Epoch 1790, Loss 2.988817\n", "Epoch 1791, Loss 2.988609\n", "Epoch 1792, Loss 2.988401\n", "Epoch 1793, Loss 2.988195\n", "Epoch 1794, Loss 2.987989\n", "Epoch 1795, Loss 2.987784\n", "Epoch 1796, Loss 2.987581\n", "Epoch 1797, Loss 2.987377\n", "Epoch 1798, Loss 2.987174\n", "Epoch 1799, Loss 2.986974\n", "Epoch 1800, Loss 2.986771\n", "Epoch 1801, Loss 2.986570\n", "Epoch 1802, Loss 2.986371\n", "Epoch 1803, Loss 2.986171\n", "Epoch 1804, Loss 2.985972\n", "Epoch 1805, Loss 2.985774\n", "Epoch 1806, Loss 2.985578\n", "Epoch 1807, Loss 2.985381\n", "Epoch 1808, Loss 2.985184\n", "Epoch 1809, Loss 2.984989\n", "Epoch 1810, Loss 2.984793\n", "Epoch 1811, Loss 2.984601\n", "Epoch 1812, Loss 2.984406\n", "Epoch 1813, Loss 2.984215\n", "Epoch 1814, Loss 2.984022\n", "Epoch 1815, Loss 2.983830\n", "Epoch 1816, Loss 2.983639\n", "Epoch 1817, Loss 2.983449\n", "Epoch 1818, Loss 2.983260\n", "Epoch 1819, Loss 2.983073\n", "Epoch 1820, Loss 2.982884\n", "Epoch 1821, Loss 2.982697\n", "Epoch 1822, Loss 2.982510\n", "Epoch 1823, Loss 2.982322\n", "Epoch 1824, Loss 2.982137\n", "Epoch 1825, Loss 2.981953\n", "Epoch 
1826, Loss 2.981769\n", "Epoch 1827, Loss 2.981586\n", "Epoch 1828, Loss 2.981402\n", "Epoch 1829, Loss 2.981219\n", "Epoch 1830, Loss 2.981037\n", "Epoch 1831, Loss 2.980856\n", "Epoch 1832, Loss 2.980676\n", "Epoch 1833, Loss 2.980495\n", "Epoch 1834, Loss 2.980316\n", "Epoch 1835, Loss 2.980137\n", "Epoch 1836, Loss 2.979958\n", "Epoch 1837, Loss 2.979782\n", "Epoch 1838, Loss 2.979604\n", "Epoch 1839, Loss 2.979428\n", "Epoch 1840, Loss 2.979253\n", "Epoch 1841, Loss 2.979078\n", "Epoch 1842, Loss 2.978902\n", "Epoch 1843, Loss 2.978729\n", "Epoch 1844, Loss 2.978555\n", "Epoch 1845, Loss 2.978382\n", "Epoch 1846, Loss 2.978211\n", "Epoch 1847, Loss 2.978039\n", "Epoch 1848, Loss 2.977867\n", "Epoch 1849, Loss 2.977696\n", "Epoch 1850, Loss 2.977527\n", "Epoch 1851, Loss 2.977357\n", "Epoch 1852, Loss 2.977188\n", "Epoch 1853, Loss 2.977021\n", "Epoch 1854, Loss 2.976853\n", "Epoch 1855, Loss 2.976687\n", "Epoch 1856, Loss 2.976520\n", "Epoch 1857, Loss 2.976354\n", "Epoch 1858, Loss 2.976189\n", "Epoch 1859, Loss 2.976023\n", "Epoch 1860, Loss 2.975860\n", "Epoch 1861, Loss 2.975697\n", "Epoch 1862, Loss 2.975532\n", "Epoch 1863, Loss 2.975369\n", "Epoch 1864, Loss 2.975208\n", "Epoch 1865, Loss 2.975046\n", "Epoch 1866, Loss 2.974886\n", "Epoch 1867, Loss 2.974725\n", "Epoch 1868, Loss 2.974565\n", "Epoch 1869, Loss 2.974406\n", "Epoch 1870, Loss 2.974248\n", "Epoch 1871, Loss 2.974088\n", "Epoch 1872, Loss 2.973930\n", "Epoch 1873, Loss 2.973776\n", "Epoch 1874, Loss 2.973618\n", "Epoch 1875, Loss 2.973463\n", "Epoch 1876, Loss 2.973307\n", "Epoch 1877, Loss 2.973150\n", "Epoch 1878, Loss 2.972996\n", "Epoch 1879, Loss 2.972844\n", "Epoch 1880, Loss 2.972690\n", "Epoch 1881, Loss 2.972536\n", "Epoch 1882, Loss 2.972383\n", "Epoch 1883, Loss 2.972232\n", "Epoch 1884, Loss 2.972081\n", "Epoch 1885, Loss 2.971931\n", "Epoch 1886, Loss 2.971780\n", "Epoch 1887, Loss 2.971629\n", "Epoch 1888, Loss 2.971481\n", "Epoch 1889, Loss 2.971332\n", "Epoch 1890, Loss 2.971185\n", "Epoch 1891, Loss 2.971035\n", "Epoch 1892, Loss 2.970888\n", "Epoch 1893, Loss 2.970741\n", "Epoch 1894, Loss 2.970596\n", "Epoch 1895, Loss 2.970449\n", "Epoch 1896, Loss 2.970304\n", "Epoch 1897, Loss 2.970159\n", "Epoch 1898, Loss 2.970015\n", "Epoch 1899, Loss 2.969871\n", "Epoch 1900, Loss 2.969727\n", "Epoch 1901, Loss 2.969586\n", "Epoch 1902, Loss 2.969443\n", "Epoch 1903, Loss 2.969302\n", "Epoch 1904, Loss 2.969160\n", "Epoch 1905, Loss 2.969017\n", "Epoch 1906, Loss 2.968879\n", "Epoch 1907, Loss 2.968739\n", "Epoch 1908, Loss 2.968599\n", "Epoch 1909, Loss 2.968460\n", "Epoch 1910, Loss 2.968322\n", "Epoch 1911, Loss 2.968184\n", "Epoch 1912, Loss 2.968046\n", "Epoch 1913, Loss 2.967908\n", "Epoch 1914, Loss 2.967772\n", "Epoch 1915, Loss 2.967636\n", "Epoch 1916, Loss 2.967499\n", "Epoch 1917, Loss 2.967365\n", "Epoch 1918, Loss 2.967230\n", "Epoch 1919, Loss 2.967095\n", "Epoch 1920, Loss 2.966961\n", "Epoch 1921, Loss 2.966827\n", "Epoch 1922, Loss 2.966693\n", "Epoch 1923, Loss 2.966561\n", "Epoch 1924, Loss 2.966429\n", "Epoch 1925, Loss 2.966297\n", "Epoch 1926, Loss 2.966167\n", "Epoch 1927, Loss 2.966036\n", "Epoch 1928, Loss 2.965904\n", "Epoch 1929, Loss 2.965776\n", "Epoch 1930, Loss 2.965646\n", "Epoch 1931, Loss 2.965517\n", "Epoch 1932, Loss 2.965387\n", "Epoch 1933, Loss 2.965261\n", "Epoch 1934, Loss 2.965131\n", "Epoch 1935, Loss 2.965005\n", "Epoch 1936, Loss 2.964878\n", "Epoch 1937, Loss 2.964751\n", "Epoch 1938, Loss 2.964625\n", "Epoch 1939, Loss 2.964500\n", "Epoch 1940, Loss 
2.964375\n", "Epoch 1941, Loss 2.964250\n", "Epoch 1942, Loss 2.964126\n", "Epoch 1943, Loss 2.964001\n", "Epoch 1944, Loss 2.963879\n", "Epoch 1945, Loss 2.963756\n", "Epoch 1946, Loss 2.963632\n", "Epoch 1947, Loss 2.963511\n", "Epoch 1948, Loss 2.963388\n", "Epoch 1949, Loss 2.963266\n", "Epoch 1950, Loss 2.963149\n", "Epoch 1951, Loss 2.963026\n", "Epoch 1952, Loss 2.962907\n", "Epoch 1953, Loss 2.962788\n", "Epoch 1954, Loss 2.962666\n", "Epoch 1955, Loss 2.962547\n", "Epoch 1956, Loss 2.962429\n", "Epoch 1957, Loss 2.962312\n", "Epoch 1958, Loss 2.962195\n", "Epoch 1959, Loss 2.962078\n", "Epoch 1960, Loss 2.961959\n", "Epoch 1961, Loss 2.961843\n", "Epoch 1962, Loss 2.961728\n", "Epoch 1963, Loss 2.961611\n", "Epoch 1964, Loss 2.961497\n", "Epoch 1965, Loss 2.961382\n", "Epoch 1966, Loss 2.961268\n", "Epoch 1967, Loss 2.961153\n", "Epoch 1968, Loss 2.961038\n", "Epoch 1969, Loss 2.960926\n", "Epoch 1970, Loss 2.960814\n", "Epoch 1971, Loss 2.960699\n", "Epoch 1972, Loss 2.960587\n", "Epoch 1973, Loss 2.960475\n", "Epoch 1974, Loss 2.960365\n", "Epoch 1975, Loss 2.960254\n", "Epoch 1976, Loss 2.960143\n", "Epoch 1977, Loss 2.960033\n", "Epoch 1978, Loss 2.959923\n", "Epoch 1979, Loss 2.959812\n", "Epoch 1980, Loss 2.959703\n", "Epoch 1981, Loss 2.959594\n", "Epoch 1982, Loss 2.959486\n", "Epoch 1983, Loss 2.959378\n", "Epoch 1984, Loss 2.959271\n", "Epoch 1985, Loss 2.959163\n", "Epoch 1986, Loss 2.959055\n", "Epoch 1987, Loss 2.958950\n", "Epoch 1988, Loss 2.958842\n", "Epoch 1989, Loss 2.958738\n", "Epoch 1990, Loss 2.958632\n", "Epoch 1991, Loss 2.958526\n", "Epoch 1992, Loss 2.958421\n", "Epoch 1993, Loss 2.958317\n", "Epoch 1994, Loss 2.958212\n", "Epoch 1995, Loss 2.958109\n", "Epoch 1996, Loss 2.958006\n", "Epoch 1997, Loss 2.957903\n", "Epoch 1998, Loss 2.957801\n", "Epoch 1999, Loss 2.957697\n", "Epoch 2000, Loss 2.957596\n", "Epoch 2001, Loss 2.957494\n", "Epoch 2002, Loss 2.957393\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 2003, Loss 2.957293\n", "Epoch 2004, Loss 2.957193\n", "Epoch 2005, Loss 2.957091\n", "Epoch 2006, Loss 2.956991\n", "Epoch 2007, Loss 2.956892\n", "Epoch 2008, Loss 2.956792\n", "Epoch 2009, Loss 2.956694\n", "Epoch 2010, Loss 2.956595\n", "Epoch 2011, Loss 2.956496\n", "Epoch 2012, Loss 2.956397\n", "Epoch 2013, Loss 2.956300\n", "Epoch 2014, Loss 2.956204\n", "Epoch 2015, Loss 2.956108\n", "Epoch 2016, Loss 2.956010\n", "Epoch 2017, Loss 2.955914\n", "Epoch 2018, Loss 2.955817\n", "Epoch 2019, Loss 2.955722\n", "Epoch 2020, Loss 2.955627\n", "Epoch 2021, Loss 2.955533\n", "Epoch 2022, Loss 2.955436\n", "Epoch 2023, Loss 2.955343\n", "Epoch 2024, Loss 2.955250\n", "Epoch 2025, Loss 2.955154\n", "Epoch 2026, Loss 2.955062\n", "Epoch 2027, Loss 2.954968\n", "Epoch 2028, Loss 2.954875\n", "Epoch 2029, Loss 2.954783\n", "Epoch 2030, Loss 2.954691\n", "Epoch 2031, Loss 2.954600\n", "Epoch 2032, Loss 2.954507\n", "Epoch 2033, Loss 2.954417\n", "Epoch 2034, Loss 2.954326\n", "Epoch 2035, Loss 2.954235\n", "Epoch 2036, Loss 2.954145\n", "Epoch 2037, Loss 2.954055\n", "Epoch 2038, Loss 2.953966\n", "Epoch 2039, Loss 2.953876\n", "Epoch 2040, Loss 2.953787\n", "Epoch 2041, Loss 2.953698\n", "Epoch 2042, Loss 2.953610\n", "Epoch 2043, Loss 2.953521\n", "Epoch 2044, Loss 2.953434\n", "Epoch 2045, Loss 2.953346\n", "Epoch 2046, Loss 2.953259\n", "Epoch 2047, Loss 2.953172\n", "Epoch 2048, Loss 2.953085\n", "Epoch 2049, Loss 2.953000\n", "Epoch 2050, Loss 2.952913\n", "Epoch 2051, Loss 2.952828\n", "Epoch 2052, Loss 2.952742\n", "Epoch 
2053, Loss 2.952657\n", "Epoch 2054, Loss 2.952571\n", "Epoch 2055, Loss 2.952487\n", "Epoch 2056, Loss 2.952403\n", "Epoch 2057, Loss 2.952318\n", "Epoch 2058, Loss 2.952235\n", "Epoch 2059, Loss 2.952152\n", "Epoch 2060, Loss 2.952068\n", "Epoch 2061, Loss 2.951985\n", "Epoch 2062, Loss 2.951903\n", "Epoch 2063, Loss 2.951820\n", "Epoch 2064, Loss 2.951738\n", "Epoch 2065, Loss 2.951656\n", "Epoch 2066, Loss 2.951575\n", "Epoch 2067, Loss 2.951494\n", "Epoch 2068, Loss 2.951413\n", "Epoch 2069, Loss 2.951333\n", "Epoch 2070, Loss 2.951252\n", "Epoch 2071, Loss 2.951172\n", "Epoch 2072, Loss 2.951093\n", "Epoch 2073, Loss 2.951012\n", "Epoch 2074, Loss 2.950932\n", "Epoch 2075, Loss 2.950853\n", "Epoch 2076, Loss 2.950774\n", "Epoch 2077, Loss 2.950698\n", "Epoch 2078, Loss 2.950618\n", "Epoch 2079, Loss 2.950540\n", "Epoch 2080, Loss 2.950463\n", "Epoch 2081, Loss 2.950385\n", "Epoch 2082, Loss 2.950308\n", "Epoch 2083, Loss 2.950231\n", "Epoch 2084, Loss 2.950155\n", "Epoch 2085, Loss 2.950079\n", "Epoch 2086, Loss 2.950003\n", "Epoch 2087, Loss 2.949925\n", "Epoch 2088, Loss 2.949850\n", "Epoch 2089, Loss 2.949776\n", "Epoch 2090, Loss 2.949699\n", "Epoch 2091, Loss 2.949626\n", "Epoch 2092, Loss 2.949551\n", "Epoch 2093, Loss 2.949476\n", "Epoch 2094, Loss 2.949401\n", "Epoch 2095, Loss 2.949328\n", "Epoch 2096, Loss 2.949254\n", "Epoch 2097, Loss 2.949182\n", "Epoch 2098, Loss 2.949108\n", "Epoch 2099, Loss 2.949036\n", "Epoch 2100, Loss 2.948962\n", "Epoch 2101, Loss 2.948890\n", "Epoch 2102, Loss 2.948818\n", "Epoch 2103, Loss 2.948746\n", "Epoch 2104, Loss 2.948675\n", "Epoch 2105, Loss 2.948602\n", "Epoch 2106, Loss 2.948532\n", "Epoch 2107, Loss 2.948462\n", "Epoch 2108, Loss 2.948391\n", "Epoch 2109, Loss 2.948321\n", "Epoch 2110, Loss 2.948250\n", "Epoch 2111, Loss 2.948180\n", "Epoch 2112, Loss 2.948109\n", "Epoch 2113, Loss 2.948040\n", "Epoch 2114, Loss 2.947971\n", "Epoch 2115, Loss 2.947902\n", "Epoch 2116, Loss 2.947833\n", "Epoch 2117, Loss 2.947765\n", "Epoch 2118, Loss 2.947696\n", "Epoch 2119, Loss 2.947628\n", "Epoch 2120, Loss 2.947560\n", "Epoch 2121, Loss 2.947494\n", "Epoch 2122, Loss 2.947426\n", "Epoch 2123, Loss 2.947358\n", "Epoch 2124, Loss 2.947294\n", "Epoch 2125, Loss 2.947226\n", "Epoch 2126, Loss 2.947158\n", "Epoch 2127, Loss 2.947091\n", "Epoch 2128, Loss 2.947026\n", "Epoch 2129, Loss 2.946960\n", "Epoch 2130, Loss 2.946895\n", "Epoch 2131, Loss 2.946830\n", "Epoch 2132, Loss 2.946764\n", "Epoch 2133, Loss 2.946700\n", "Epoch 2134, Loss 2.946635\n", "Epoch 2135, Loss 2.946571\n", "Epoch 2136, Loss 2.946507\n", "Epoch 2137, Loss 2.946442\n", "Epoch 2138, Loss 2.946378\n", "Epoch 2139, Loss 2.946314\n", "Epoch 2140, Loss 2.946251\n", "Epoch 2141, Loss 2.946189\n", "Epoch 2142, Loss 2.946125\n", "Epoch 2143, Loss 2.946063\n", "Epoch 2144, Loss 2.946001\n", "Epoch 2145, Loss 2.945937\n", "Epoch 2146, Loss 2.945876\n", "Epoch 2147, Loss 2.945815\n", "Epoch 2148, Loss 2.945753\n", "Epoch 2149, Loss 2.945690\n", "Epoch 2150, Loss 2.945630\n", "Epoch 2151, Loss 2.945567\n", "Epoch 2152, Loss 2.945509\n", "Epoch 2153, Loss 2.945448\n", "Epoch 2154, Loss 2.945386\n", "Epoch 2155, Loss 2.945326\n", "Epoch 2156, Loss 2.945267\n", "Epoch 2157, Loss 2.945207\n", "Epoch 2158, Loss 2.945146\n", "Epoch 2159, Loss 2.945088\n", "Epoch 2160, Loss 2.945028\n", "Epoch 2161, Loss 2.944969\n", "Epoch 2162, Loss 2.944911\n", "Epoch 2163, Loss 2.944852\n", "Epoch 2164, Loss 2.944792\n", "Epoch 2165, Loss 2.944736\n", "Epoch 2166, Loss 2.944678\n", "Epoch 2167, Loss 
2.944619\n", "Epoch 2168, Loss 2.944562\n", "Epoch 2169, Loss 2.944504\n", "Epoch 2170, Loss 2.944447\n", "Epoch 2171, Loss 2.944391\n", "Epoch 2172, Loss 2.944332\n", "Epoch 2173, Loss 2.944276\n", "Epoch 2174, Loss 2.944220\n", "Epoch 2175, Loss 2.944164\n", "Epoch 2176, Loss 2.944108\n", "Epoch 2177, Loss 2.944052\n", "Epoch 2178, Loss 2.943996\n", "Epoch 2179, Loss 2.943941\n", "Epoch 2180, Loss 2.943886\n", "Epoch 2181, Loss 2.943831\n", "Epoch 2182, Loss 2.943775\n", "Epoch 2183, Loss 2.943721\n", "Epoch 2184, Loss 2.943666\n", "Epoch 2185, Loss 2.943613\n", "Epoch 2186, Loss 2.943558\n", "Epoch 2187, Loss 2.943504\n", "Epoch 2188, Loss 2.943451\n", "Epoch 2189, Loss 2.943395\n", "Epoch 2190, Loss 2.943343\n", "Epoch 2191, Loss 2.943290\n", "Epoch 2192, Loss 2.943235\n", "Epoch 2193, Loss 2.943183\n", "Epoch 2194, Loss 2.943130\n", "Epoch 2195, Loss 2.943079\n", "Epoch 2196, Loss 2.943026\n", "Epoch 2197, Loss 2.942974\n", "Epoch 2198, Loss 2.942922\n", "Epoch 2199, Loss 2.942870\n", "Epoch 2200, Loss 2.942818\n", "Epoch 2201, Loss 2.942765\n", "Epoch 2202, Loss 2.942714\n", "Epoch 2203, Loss 2.942664\n", "Epoch 2204, Loss 2.942612\n", "Epoch 2205, Loss 2.942563\n", "Epoch 2206, Loss 2.942510\n", "Epoch 2207, Loss 2.942461\n", "Epoch 2208, Loss 2.942411\n", "Epoch 2209, Loss 2.942361\n", "Epoch 2210, Loss 2.942310\n", "Epoch 2211, Loss 2.942261\n", "Epoch 2212, Loss 2.942211\n", "Epoch 2213, Loss 2.942162\n", "Epoch 2214, Loss 2.942112\n", "Epoch 2215, Loss 2.942062\n", "Epoch 2216, Loss 2.942014\n", "Epoch 2217, Loss 2.941965\n", "Epoch 2218, Loss 2.941918\n", "Epoch 2219, Loss 2.941868\n", "Epoch 2220, Loss 2.941821\n", "Epoch 2221, Loss 2.941773\n", "Epoch 2222, Loss 2.941725\n", "Epoch 2223, Loss 2.941677\n", "Epoch 2224, Loss 2.941629\n", "Epoch 2225, Loss 2.941582\n", "Epoch 2226, Loss 2.941534\n", "Epoch 2227, Loss 2.941488\n", "Epoch 2228, Loss 2.941440\n", "Epoch 2229, Loss 2.941393\n", "Epoch 2230, Loss 2.941346\n", "Epoch 2231, Loss 2.941299\n", "Epoch 2232, Loss 2.941252\n", "Epoch 2233, Loss 2.941206\n", "Epoch 2234, Loss 2.941163\n", "Epoch 2235, Loss 2.941115\n", "Epoch 2236, Loss 2.941070\n", "Epoch 2237, Loss 2.941025\n", "Epoch 2238, Loss 2.940979\n", "Epoch 2239, Loss 2.940933\n", "Epoch 2240, Loss 2.940890\n", "Epoch 2241, Loss 2.940844\n", "Epoch 2242, Loss 2.940798\n", "Epoch 2243, Loss 2.940753\n", "Epoch 2244, Loss 2.940711\n", "Epoch 2245, Loss 2.940666\n", "Epoch 2246, Loss 2.940621\n", "Epoch 2247, Loss 2.940576\n", "Epoch 2248, Loss 2.940533\n", "Epoch 2249, Loss 2.940489\n", "Epoch 2250, Loss 2.940446\n", "Epoch 2251, Loss 2.940403\n", "Epoch 2252, Loss 2.940358\n", "Epoch 2253, Loss 2.940316\n", "Epoch 2254, Loss 2.940274\n", "Epoch 2255, Loss 2.940229\n", "Epoch 2256, Loss 2.940187\n", "Epoch 2257, Loss 2.940144\n", "Epoch 2258, Loss 2.940102\n", "Epoch 2259, Loss 2.940060\n", "Epoch 2260, Loss 2.940018\n", "Epoch 2261, Loss 2.939977\n", "Epoch 2262, Loss 2.939934\n", "Epoch 2263, Loss 2.939892\n", "Epoch 2264, Loss 2.939851\n", "Epoch 2265, Loss 2.939809\n", "Epoch 2266, Loss 2.939769\n", "Epoch 2267, Loss 2.939727\n", "Epoch 2268, Loss 2.939686\n", "Epoch 2269, Loss 2.939646\n", "Epoch 2270, Loss 2.939605\n", "Epoch 2271, Loss 2.939565\n", "Epoch 2272, Loss 2.939522\n", "Epoch 2273, Loss 2.939483\n", "Epoch 2274, Loss 2.939443\n", "Epoch 2275, Loss 2.939403\n", "Epoch 2276, Loss 2.939361\n", "Epoch 2277, Loss 2.939323\n", "Epoch 2278, Loss 2.939282\n", "Epoch 2279, Loss 2.939243\n", "Epoch 2280, Loss 2.939205\n", "Epoch 2281, Loss 2.939165\n", "Epoch 
2282, Loss 2.939127\n", "Epoch 2283, Loss 2.939087\n", "Epoch 2284, Loss 2.939049\n", "Epoch 2285, Loss 2.939011\n", "Epoch 2286, Loss 2.938971\n", "Epoch 2287, Loss 2.938933\n", "Epoch 2288, Loss 2.938893\n", "Epoch 2289, Loss 2.938857\n", "Epoch 2290, Loss 2.938820\n", "Epoch 2291, Loss 2.938779\n", "Epoch 2292, Loss 2.938743\n", "Epoch 2293, Loss 2.938705\n", "Epoch 2294, Loss 2.938667\n", "Epoch 2295, Loss 2.938629\n", "Epoch 2296, Loss 2.938593\n", "Epoch 2297, Loss 2.938555\n", "Epoch 2298, Loss 2.938519\n", "Epoch 2299, Loss 2.938481\n", "Epoch 2300, Loss 2.938444\n", "Epoch 2301, Loss 2.938408\n", "Epoch 2302, Loss 2.938371\n", "Epoch 2303, Loss 2.938335\n", "Epoch 2304, Loss 2.938299\n", "Epoch 2305, Loss 2.938262\n", "Epoch 2306, Loss 2.938227\n", "Epoch 2307, Loss 2.938191\n", "Epoch 2308, Loss 2.938155\n", "Epoch 2309, Loss 2.938118\n", "Epoch 2310, Loss 2.938084\n", "Epoch 2311, Loss 2.938049\n", "Epoch 2312, Loss 2.938014\n", "Epoch 2313, Loss 2.937977\n", "Epoch 2314, Loss 2.937943\n", "Epoch 2315, Loss 2.937908\n", "Epoch 2316, Loss 2.937872\n", "Epoch 2317, Loss 2.937839\n", "Epoch 2318, Loss 2.937804\n", "Epoch 2319, Loss 2.937768\n", "Epoch 2320, Loss 2.937734\n", "Epoch 2321, Loss 2.937700\n", "Epoch 2322, Loss 2.937665\n", "Epoch 2323, Loss 2.937632\n", "Epoch 2324, Loss 2.937598\n", "Epoch 2325, Loss 2.937565\n", "Epoch 2326, Loss 2.937531\n", "Epoch 2327, Loss 2.937499\n", "Epoch 2328, Loss 2.937464\n", "Epoch 2329, Loss 2.937430\n", "Epoch 2330, Loss 2.937398\n", "Epoch 2331, Loss 2.937364\n", "Epoch 2332, Loss 2.937332\n", "Epoch 2333, Loss 2.937299\n", "Epoch 2334, Loss 2.937265\n", "Epoch 2335, Loss 2.937232\n", "Epoch 2336, Loss 2.937201\n", "Epoch 2337, Loss 2.937168\n", "Epoch 2338, Loss 2.937134\n", "Epoch 2339, Loss 2.937104\n", "Epoch 2340, Loss 2.937071\n", "Epoch 2341, Loss 2.937039\n", "Epoch 2342, Loss 2.937008\n", "Epoch 2343, Loss 2.936976\n", "Epoch 2344, Loss 2.936945\n", "Epoch 2345, Loss 2.936912\n", "Epoch 2346, Loss 2.936882\n", "Epoch 2347, Loss 2.936851\n", "Epoch 2348, Loss 2.936819\n", "Epoch 2349, Loss 2.936788\n", "Epoch 2350, Loss 2.936757\n", "Epoch 2351, Loss 2.936725\n", "Epoch 2352, Loss 2.936694\n", "Epoch 2353, Loss 2.936665\n", "Epoch 2354, Loss 2.936633\n", "Epoch 2355, Loss 2.936602\n", "Epoch 2356, Loss 2.936572\n", "Epoch 2357, Loss 2.936542\n", "Epoch 2358, Loss 2.936511\n", "Epoch 2359, Loss 2.936482\n", "Epoch 2360, Loss 2.936451\n", "Epoch 2361, Loss 2.936421\n", "Epoch 2362, Loss 2.936392\n", "Epoch 2363, Loss 2.936362\n", "Epoch 2364, Loss 2.936332\n", "Epoch 2365, Loss 2.936304\n", "Epoch 2366, Loss 2.936274\n", "Epoch 2367, Loss 2.936244\n", "Epoch 2368, Loss 2.936215\n", "Epoch 2369, Loss 2.936188\n", "Epoch 2370, Loss 2.936156\n", "Epoch 2371, Loss 2.936128\n", "Epoch 2372, Loss 2.936100\n", "Epoch 2373, Loss 2.936071\n", "Epoch 2374, Loss 2.936043\n", "Epoch 2375, Loss 2.936014\n", "Epoch 2376, Loss 2.935986\n", "Epoch 2377, Loss 2.935957\n", "Epoch 2378, Loss 2.935928\n", "Epoch 2379, Loss 2.935901\n", "Epoch 2380, Loss 2.935873\n", "Epoch 2381, Loss 2.935845\n", "Epoch 2382, Loss 2.935817\n", "Epoch 2383, Loss 2.935789\n", "Epoch 2384, Loss 2.935761\n", "Epoch 2385, Loss 2.935734\n", "Epoch 2386, Loss 2.935707\n", "Epoch 2387, Loss 2.935679\n", "Epoch 2388, Loss 2.935650\n", "Epoch 2389, Loss 2.935626\n", "Epoch 2390, Loss 2.935596\n", "Epoch 2391, Loss 2.935571\n", "Epoch 2392, Loss 2.935544\n", "Epoch 2393, Loss 2.935516\n", "Epoch 2394, Loss 2.935489\n", "Epoch 2395, Loss 2.935464\n", "Epoch 2396, Loss 
2.935436\n", "Epoch 2397, Loss 2.935412\n", "Epoch 2398, Loss 2.935385\n", "Epoch 2399, Loss 2.935357\n", "Epoch 2400, Loss 2.935332\n", "Epoch 2401, Loss 2.935304\n", "Epoch 2402, Loss 2.935281\n", "Epoch 2403, Loss 2.935252\n", "Epoch 2404, Loss 2.935229\n", "Epoch 2405, Loss 2.935203\n", "Epoch 2406, Loss 2.935177\n", "Epoch 2407, Loss 2.935152\n", "Epoch 2408, Loss 2.935126\n", "Epoch 2409, Loss 2.935099\n", "Epoch 2410, Loss 2.935075\n", "Epoch 2411, Loss 2.935049\n", "Epoch 2412, Loss 2.935024\n", "Epoch 2413, Loss 2.935001\n", "Epoch 2414, Loss 2.934973\n", "Epoch 2415, Loss 2.934949\n", "Epoch 2416, Loss 2.934925\n", "Epoch 2417, Loss 2.934899\n", "Epoch 2418, Loss 2.934876\n", "Epoch 2419, Loss 2.934852\n", "Epoch 2420, Loss 2.934827\n", "Epoch 2421, Loss 2.934802\n", "Epoch 2422, Loss 2.934777\n", "Epoch 2423, Loss 2.934753\n", "Epoch 2424, Loss 2.934730\n", "Epoch 2425, Loss 2.934705\n", "Epoch 2426, Loss 2.934681\n", "Epoch 2427, Loss 2.934658\n", "Epoch 2428, Loss 2.934635\n", "Epoch 2429, Loss 2.934609\n", "Epoch 2430, Loss 2.934585\n", "Epoch 2431, Loss 2.934564\n", "Epoch 2432, Loss 2.934541\n", "Epoch 2433, Loss 2.934516\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 2434, Loss 2.934493\n", "Epoch 2435, Loss 2.934469\n", "Epoch 2436, Loss 2.934446\n", "Epoch 2437, Loss 2.934423\n", "Epoch 2438, Loss 2.934400\n", "Epoch 2439, Loss 2.934377\n", "Epoch 2440, Loss 2.934355\n", "Epoch 2441, Loss 2.934331\n", "Epoch 2442, Loss 2.934309\n", "Epoch 2443, Loss 2.934287\n", "Epoch 2444, Loss 2.934264\n", "Epoch 2445, Loss 2.934242\n", "Epoch 2446, Loss 2.934219\n", "Epoch 2447, Loss 2.934198\n", "Epoch 2448, Loss 2.934175\n", "Epoch 2449, Loss 2.934151\n", "Epoch 2450, Loss 2.934129\n", "Epoch 2451, Loss 2.934108\n", "Epoch 2452, Loss 2.934084\n", "Epoch 2453, Loss 2.934064\n", "Epoch 2454, Loss 2.934043\n", "Epoch 2455, Loss 2.934020\n", "Epoch 2456, Loss 2.934000\n", "Epoch 2457, Loss 2.933978\n", "Epoch 2458, Loss 2.933956\n", "Epoch 2459, Loss 2.933935\n", "Epoch 2460, Loss 2.933913\n", "Epoch 2461, Loss 2.933893\n", "Epoch 2462, Loss 2.933871\n", "Epoch 2463, Loss 2.933849\n", "Epoch 2464, Loss 2.933828\n", "Epoch 2465, Loss 2.933807\n", "Epoch 2466, Loss 2.933787\n", "Epoch 2467, Loss 2.933766\n", "Epoch 2468, Loss 2.933745\n", "Epoch 2469, Loss 2.933723\n", "Epoch 2470, Loss 2.933704\n", "Epoch 2471, Loss 2.933682\n", "Epoch 2472, Loss 2.933662\n", "Epoch 2473, Loss 2.933643\n", "Epoch 2474, Loss 2.933622\n", "Epoch 2475, Loss 2.933602\n", "Epoch 2476, Loss 2.933583\n", "Epoch 2477, Loss 2.933561\n", "Epoch 2478, Loss 2.933541\n", "Epoch 2479, Loss 2.933521\n", "Epoch 2480, Loss 2.933501\n", "Epoch 2481, Loss 2.933480\n", "Epoch 2482, Loss 2.933463\n", "Epoch 2483, Loss 2.933442\n", "Epoch 2484, Loss 2.933423\n", "Epoch 2485, Loss 2.933403\n", "Epoch 2486, Loss 2.933382\n", "Epoch 2487, Loss 2.933365\n", "Epoch 2488, Loss 2.933345\n", "Epoch 2489, Loss 2.933325\n", "Epoch 2490, Loss 2.933306\n", "Epoch 2491, Loss 2.933287\n", "Epoch 2492, Loss 2.933266\n", "Epoch 2493, Loss 2.933249\n", "Epoch 2494, Loss 2.933228\n", "Epoch 2495, Loss 2.933209\n", "Epoch 2496, Loss 2.933190\n", "Epoch 2497, Loss 2.933172\n", "Epoch 2498, Loss 2.933154\n", "Epoch 2499, Loss 2.933134\n", "Epoch 2500, Loss 2.933116\n", "Epoch 2501, Loss 2.933097\n", "Epoch 2502, Loss 2.933079\n", "Epoch 2503, Loss 2.933060\n", "Epoch 2504, Loss 2.933043\n", "Epoch 2505, Loss 2.933025\n", "Epoch 2506, Loss 2.933007\n", "Epoch 2507, Loss 2.932988\n", "Epoch 2508, Loss 2.932970\n", "Epoch 
2509, Loss 2.932953\n", "Epoch 2510, Loss 2.932932\n", "Epoch 2511, Loss 2.932915\n", "Epoch 2512, Loss 2.932898\n", "Epoch 2513, Loss 2.932880\n", "Epoch 2514, Loss 2.932862\n", "Epoch 2515, Loss 2.932846\n", "Epoch 2516, Loss 2.932826\n", "Epoch 2517, Loss 2.932810\n", "Epoch 2518, Loss 2.932791\n", "Epoch 2519, Loss 2.932774\n", "Epoch 2520, Loss 2.932758\n", "Epoch 2521, Loss 2.932739\n", "Epoch 2522, Loss 2.932723\n", "Epoch 2523, Loss 2.932706\n", "Epoch 2524, Loss 2.932689\n", "Epoch 2525, Loss 2.932671\n", "Epoch 2526, Loss 2.932654\n", "Epoch 2527, Loss 2.932637\n", "Epoch 2528, Loss 2.932619\n", "Epoch 2529, Loss 2.932603\n", "Epoch 2530, Loss 2.932585\n", "Epoch 2531, Loss 2.932569\n", "Epoch 2532, Loss 2.932553\n", "Epoch 2533, Loss 2.932535\n", "Epoch 2534, Loss 2.932520\n", "Epoch 2535, Loss 2.932502\n", "Epoch 2536, Loss 2.932487\n", "Epoch 2537, Loss 2.932469\n", "Epoch 2538, Loss 2.932455\n", "Epoch 2539, Loss 2.932438\n", "Epoch 2540, Loss 2.932421\n", "Epoch 2541, Loss 2.932404\n", "Epoch 2542, Loss 2.932387\n", "Epoch 2543, Loss 2.932370\n", "Epoch 2544, Loss 2.932358\n", "Epoch 2545, Loss 2.932340\n", "Epoch 2546, Loss 2.932324\n", "Epoch 2547, Loss 2.932310\n", "Epoch 2548, Loss 2.932293\n", "Epoch 2549, Loss 2.932278\n", "Epoch 2550, Loss 2.932261\n", "Epoch 2551, Loss 2.932246\n", "Epoch 2552, Loss 2.932229\n", "Epoch 2553, Loss 2.932215\n", "Epoch 2554, Loss 2.932198\n", "Epoch 2555, Loss 2.932184\n", "Epoch 2556, Loss 2.932168\n", "Epoch 2557, Loss 2.932153\n", "Epoch 2558, Loss 2.932137\n", "Epoch 2559, Loss 2.932122\n", "Epoch 2560, Loss 2.932107\n", "Epoch 2561, Loss 2.932092\n", "Epoch 2562, Loss 2.932076\n", "Epoch 2563, Loss 2.932061\n", "Epoch 2564, Loss 2.932047\n", "Epoch 2565, Loss 2.932031\n", "Epoch 2566, Loss 2.932017\n", "Epoch 2567, Loss 2.932002\n", "Epoch 2568, Loss 2.931986\n", "Epoch 2569, Loss 2.931972\n", "Epoch 2570, Loss 2.931957\n", "Epoch 2571, Loss 2.931941\n", "Epoch 2572, Loss 2.931929\n", "Epoch 2573, Loss 2.931914\n", "Epoch 2574, Loss 2.931900\n", "Epoch 2575, Loss 2.931885\n", "Epoch 2576, Loss 2.931870\n", "Epoch 2577, Loss 2.931855\n", "Epoch 2578, Loss 2.931843\n", "Epoch 2579, Loss 2.931828\n", "Epoch 2580, Loss 2.931813\n", "Epoch 2581, Loss 2.931799\n", "Epoch 2582, Loss 2.931786\n", "Epoch 2583, Loss 2.931771\n", "Epoch 2584, Loss 2.931759\n", "Epoch 2585, Loss 2.931742\n", "Epoch 2586, Loss 2.931729\n", "Epoch 2587, Loss 2.931717\n", "Epoch 2588, Loss 2.931701\n", "Epoch 2589, Loss 2.931687\n", "Epoch 2590, Loss 2.931674\n", "Epoch 2591, Loss 2.931660\n", "Epoch 2592, Loss 2.931647\n", "Epoch 2593, Loss 2.931632\n", "Epoch 2594, Loss 2.931619\n", "Epoch 2595, Loss 2.931606\n", "Epoch 2596, Loss 2.931594\n", "Epoch 2597, Loss 2.931580\n", "Epoch 2598, Loss 2.931566\n", "Epoch 2599, Loss 2.931554\n", "Epoch 2600, Loss 2.931539\n", "Epoch 2601, Loss 2.931526\n", "Epoch 2602, Loss 2.931512\n", "Epoch 2603, Loss 2.931500\n", "Epoch 2604, Loss 2.931488\n", "Epoch 2605, Loss 2.931474\n", "Epoch 2606, Loss 2.931462\n", "Epoch 2607, Loss 2.931448\n", "Epoch 2608, Loss 2.931436\n", "Epoch 2609, Loss 2.931422\n", "Epoch 2610, Loss 2.931411\n", "Epoch 2611, Loss 2.931398\n", "Epoch 2612, Loss 2.931384\n", "Epoch 2613, Loss 2.931370\n", "Epoch 2614, Loss 2.931358\n", "Epoch 2615, Loss 2.931346\n", "Epoch 2616, Loss 2.931334\n", "Epoch 2617, Loss 2.931322\n", "Epoch 2618, Loss 2.931309\n", "Epoch 2619, Loss 2.931296\n", "Epoch 2620, Loss 2.931282\n", "Epoch 2621, Loss 2.931272\n", "Epoch 2622, Loss 2.931258\n", "Epoch 2623, Loss 
2.931245\n", "Epoch 2624, Loss 2.931235\n", "Epoch 2625, Loss 2.931222\n", "Epoch 2626, Loss 2.931211\n", "Epoch 2627, Loss 2.931196\n", "Epoch 2628, Loss 2.931185\n", "Epoch 2629, Loss 2.931173\n", "Epoch 2630, Loss 2.931162\n", "Epoch 2631, Loss 2.931149\n", "Epoch 2632, Loss 2.931139\n", "Epoch 2633, Loss 2.931126\n", "Epoch 2634, Loss 2.931114\n", "Epoch 2635, Loss 2.931101\n", "Epoch 2636, Loss 2.931090\n", "Epoch 2637, Loss 2.931079\n", "Epoch 2638, Loss 2.931067\n", "Epoch 2639, Loss 2.931054\n", "Epoch 2640, Loss 2.931044\n", "Epoch 2641, Loss 2.931034\n", "Epoch 2642, Loss 2.931021\n", "Epoch 2643, Loss 2.931010\n", "Epoch 2644, Loss 2.930999\n", "Epoch 2645, Loss 2.930987\n", "Epoch 2646, Loss 2.930976\n", "Epoch 2647, Loss 2.930964\n", "Epoch 2648, Loss 2.930953\n", "Epoch 2649, Loss 2.930941\n", "Epoch 2650, Loss 2.930932\n", "Epoch 2651, Loss 2.930920\n", "Epoch 2652, Loss 2.930908\n", "Epoch 2653, Loss 2.930899\n", "Epoch 2654, Loss 2.930885\n", "Epoch 2655, Loss 2.930876\n", "Epoch 2656, Loss 2.930864\n", "Epoch 2657, Loss 2.930854\n", "Epoch 2658, Loss 2.930841\n", "Epoch 2659, Loss 2.930833\n", "Epoch 2660, Loss 2.930821\n", "Epoch 2661, Loss 2.930811\n", "Epoch 2662, Loss 2.930801\n", "Epoch 2663, Loss 2.930788\n", "Epoch 2664, Loss 2.930778\n", "Epoch 2665, Loss 2.930766\n", "Epoch 2666, Loss 2.930757\n", "Epoch 2667, Loss 2.930746\n", "Epoch 2668, Loss 2.930735\n", "Epoch 2669, Loss 2.930724\n", "Epoch 2670, Loss 2.930715\n", "Epoch 2671, Loss 2.930704\n", "Epoch 2672, Loss 2.930694\n", "Epoch 2673, Loss 2.930685\n", "Epoch 2674, Loss 2.930674\n", "Epoch 2675, Loss 2.930663\n", "Epoch 2676, Loss 2.930654\n", "Epoch 2677, Loss 2.930644\n", "Epoch 2678, Loss 2.930631\n", "Epoch 2679, Loss 2.930622\n", "Epoch 2680, Loss 2.930614\n", "Epoch 2681, Loss 2.930603\n", "Epoch 2682, Loss 2.930592\n", "Epoch 2683, Loss 2.930582\n", "Epoch 2684, Loss 2.930572\n", "Epoch 2685, Loss 2.930562\n", "Epoch 2686, Loss 2.930552\n", "Epoch 2687, Loss 2.930543\n", "Epoch 2688, Loss 2.930534\n", "Epoch 2689, Loss 2.930524\n", "Epoch 2690, Loss 2.930514\n", "Epoch 2691, Loss 2.930502\n", "Epoch 2692, Loss 2.930493\n", "Epoch 2693, Loss 2.930482\n", "Epoch 2694, Loss 2.930474\n", "Epoch 2695, Loss 2.930465\n", "Epoch 2696, Loss 2.930454\n", "Epoch 2697, Loss 2.930446\n", "Epoch 2698, Loss 2.930436\n", "Epoch 2699, Loss 2.930426\n", "Epoch 2700, Loss 2.930417\n", "Epoch 2701, Loss 2.930408\n", "Epoch 2702, Loss 2.930398\n", "Epoch 2703, Loss 2.930388\n", "Epoch 2704, Loss 2.930380\n", "Epoch 2705, Loss 2.930370\n", "Epoch 2706, Loss 2.930360\n", "Epoch 2707, Loss 2.930352\n", "Epoch 2708, Loss 2.930342\n", "Epoch 2709, Loss 2.930334\n", "Epoch 2710, Loss 2.930325\n", "Epoch 2711, Loss 2.930315\n", "Epoch 2712, Loss 2.930306\n", "Epoch 2713, Loss 2.930298\n", "Epoch 2714, Loss 2.930288\n", "Epoch 2715, Loss 2.930279\n", "Epoch 2716, Loss 2.930270\n", "Epoch 2717, Loss 2.930262\n", "Epoch 2718, Loss 2.930254\n", "Epoch 2719, Loss 2.930244\n", "Epoch 2720, Loss 2.930235\n", "Epoch 2721, Loss 2.930226\n", "Epoch 2722, Loss 2.930218\n", "Epoch 2723, Loss 2.930209\n", "Epoch 2724, Loss 2.930201\n", "Epoch 2725, Loss 2.930190\n", "Epoch 2726, Loss 2.930182\n", "Epoch 2727, Loss 2.930173\n", "Epoch 2728, Loss 2.930167\n", "Epoch 2729, Loss 2.930156\n", "Epoch 2730, Loss 2.930149\n", "Epoch 2731, Loss 2.930139\n", "Epoch 2732, Loss 2.930131\n", "Epoch 2733, Loss 2.930123\n", "Epoch 2734, Loss 2.930113\n", "Epoch 2735, Loss 2.930107\n", "Epoch 2736, Loss 2.930099\n", "Epoch 2737, Loss 2.930090\n", "Epoch 
2738, Loss 2.930081\n", "Epoch 2739, Loss 2.930073\n", "Epoch 2740, Loss 2.930064\n", "Epoch 2741, Loss 2.930056\n", "Epoch 2742, Loss 2.930048\n", "Epoch 2743, Loss 2.930041\n", "Epoch 2744, Loss 2.930032\n", "Epoch 2745, Loss 2.930022\n", "Epoch 2746, Loss 2.930016\n", "Epoch 2747, Loss 2.930008\n", "Epoch 2748, Loss 2.930000\n", "Epoch 2749, Loss 2.929992\n", "Epoch 2750, Loss 2.929983\n", "Epoch 2751, Loss 2.929975\n", "Epoch 2752, Loss 2.929968\n", "Epoch 2753, Loss 2.929960\n", "Epoch 2754, Loss 2.929953\n", "Epoch 2755, Loss 2.929945\n", "Epoch 2756, Loss 2.929937\n", "Epoch 2757, Loss 2.929929\n", "Epoch 2758, Loss 2.929921\n", "Epoch 2759, Loss 2.929914\n", "Epoch 2760, Loss 2.929905\n", "Epoch 2761, Loss 2.929896\n", "Epoch 2762, Loss 2.929891\n", "Epoch 2763, Loss 2.929882\n", "Epoch 2764, Loss 2.929874\n", "Epoch 2765, Loss 2.929869\n", "Epoch 2766, Loss 2.929859\n", "Epoch 2767, Loss 2.929852\n", "Epoch 2768, Loss 2.929845\n", "Epoch 2769, Loss 2.929838\n", "Epoch 2770, Loss 2.929830\n", "Epoch 2771, Loss 2.929822\n", "Epoch 2772, Loss 2.929816\n", "Epoch 2773, Loss 2.929806\n", "Epoch 2774, Loss 2.929799\n", "Epoch 2775, Loss 2.929793\n", "Epoch 2776, Loss 2.929786\n", "Epoch 2777, Loss 2.929778\n", "Epoch 2778, Loss 2.929771\n", "Epoch 2779, Loss 2.929765\n", "Epoch 2780, Loss 2.929757\n", "Epoch 2781, Loss 2.929750\n", "Epoch 2782, Loss 2.929743\n", "Epoch 2783, Loss 2.929735\n", "Epoch 2784, Loss 2.929729\n", "Epoch 2785, Loss 2.929722\n", "Epoch 2786, Loss 2.929714\n", "Epoch 2787, Loss 2.929707\n", "Epoch 2788, Loss 2.929701\n", "Epoch 2789, Loss 2.929692\n", "Epoch 2790, Loss 2.929685\n", "Epoch 2791, Loss 2.929680\n", "Epoch 2792, Loss 2.929672\n", "Epoch 2793, Loss 2.929666\n", "Epoch 2794, Loss 2.929658\n", "Epoch 2795, Loss 2.929652\n", "Epoch 2796, Loss 2.929646\n", "Epoch 2797, Loss 2.929638\n", "Epoch 2798, Loss 2.929632\n", "Epoch 2799, Loss 2.929626\n", "Epoch 2800, Loss 2.929620\n", "Epoch 2801, Loss 2.929611\n", "Epoch 2802, Loss 2.929605\n", "Epoch 2803, Loss 2.929600\n", "Epoch 2804, Loss 2.929593\n", "Epoch 2805, Loss 2.929586\n", "Epoch 2806, Loss 2.929579\n", "Epoch 2807, Loss 2.929572\n", "Epoch 2808, Loss 2.929566\n", "Epoch 2809, Loss 2.929559\n", "Epoch 2810, Loss 2.929552\n", "Epoch 2811, Loss 2.929545\n", "Epoch 2812, Loss 2.929540\n", "Epoch 2813, Loss 2.929533\n", "Epoch 2814, Loss 2.929527\n", "Epoch 2815, Loss 2.929520\n", "Epoch 2816, Loss 2.929513\n", "Epoch 2817, Loss 2.929507\n", "Epoch 2818, Loss 2.929501\n", "Epoch 2819, Loss 2.929496\n", "Epoch 2820, Loss 2.929489\n", "Epoch 2821, Loss 2.929482\n", "Epoch 2822, Loss 2.929476\n", "Epoch 2823, Loss 2.929471\n", "Epoch 2824, Loss 2.929463\n", "Epoch 2825, Loss 2.929457\n", "Epoch 2826, Loss 2.929452\n", "Epoch 2827, Loss 2.929445\n", "Epoch 2828, Loss 2.929439\n", "Epoch 2829, Loss 2.929433\n", "Epoch 2830, Loss 2.929427\n", "Epoch 2831, Loss 2.929421\n", "Epoch 2832, Loss 2.929415\n", "Epoch 2833, Loss 2.929409\n", "Epoch 2834, Loss 2.929404\n", "Epoch 2835, Loss 2.929396\n", "Epoch 2836, Loss 2.929391\n", "Epoch 2837, Loss 2.929383\n", "Epoch 2838, Loss 2.929380\n", "Epoch 2839, Loss 2.929373\n", "Epoch 2840, Loss 2.929368\n", "Epoch 2841, Loss 2.929362\n", "Epoch 2842, Loss 2.929356\n", "Epoch 2843, Loss 2.929351\n", "Epoch 2844, Loss 2.929344\n", "Epoch 2845, Loss 2.929338\n", "Epoch 2846, Loss 2.929332\n", "Epoch 2847, Loss 2.929328\n", "Epoch 2848, Loss 2.929321\n", "Epoch 2849, Loss 2.929316\n", "Epoch 2850, Loss 2.929309\n", "Epoch 2851, Loss 2.929304\n", "Epoch 2852, Loss 
2.929300\n", "Epoch 2853, Loss 2.929293\n", "Epoch 2854, Loss 2.929288\n", "Epoch 2855, Loss 2.929282\n", "Epoch 2856, Loss 2.929277\n", "Epoch 2857, Loss 2.929271\n", "Epoch 2858, Loss 2.929266\n", "Epoch 2859, Loss 2.929260\n", "Epoch 2860, Loss 2.929255\n", "Epoch 2861, Loss 2.929250\n", "Epoch 2862, Loss 2.929244\n", "Epoch 2863, Loss 2.929237\n", "Epoch 2864, Loss 2.929234\n", "Epoch 2865, Loss 2.929227\n", "Epoch 2866, Loss 2.929222\n", "Epoch 2867, Loss 2.929216\n", "Epoch 2868, Loss 2.929212\n", "Epoch 2869, Loss 2.929207\n", "Epoch 2870, Loss 2.929202\n", "Epoch 2871, Loss 2.929195\n", "Epoch 2872, Loss 2.929191\n", "Epoch 2873, Loss 2.929184\n", "Epoch 2874, Loss 2.929180\n", "Epoch 2875, Loss 2.929175\n", "Epoch 2876, Loss 2.929170\n", "Epoch 2877, Loss 2.929166\n", "Epoch 2878, Loss 2.929160\n", "Epoch 2879, Loss 2.929155\n", "Epoch 2880, Loss 2.929150\n", "Epoch 2881, Loss 2.929144\n", "Epoch 2882, Loss 2.929138\n", "Epoch 2883, Loss 2.929133\n", "Epoch 2884, Loss 2.929127\n", "Epoch 2885, Loss 2.929122\n", "Epoch 2886, Loss 2.929118\n", "Epoch 2887, Loss 2.929113\n", "Epoch 2888, Loss 2.929108\n", "Epoch 2889, Loss 2.929103\n", "Epoch 2890, Loss 2.929099\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 2891, Loss 2.929093\n", "Epoch 2892, Loss 2.929090\n", "Epoch 2893, Loss 2.929084\n", "Epoch 2894, Loss 2.929079\n", "Epoch 2895, Loss 2.929075\n", "Epoch 2896, Loss 2.929069\n", "Epoch 2897, Loss 2.929065\n", "Epoch 2898, Loss 2.929059\n", "Epoch 2899, Loss 2.929054\n", "Epoch 2900, Loss 2.929050\n", "Epoch 2901, Loss 2.929044\n", "Epoch 2902, Loss 2.929040\n", "Epoch 2903, Loss 2.929036\n", "Epoch 2904, Loss 2.929031\n", "Epoch 2905, Loss 2.929025\n", "Epoch 2906, Loss 2.929021\n", "Epoch 2907, Loss 2.929017\n", "Epoch 2908, Loss 2.929012\n", "Epoch 2909, Loss 2.929006\n", "Epoch 2910, Loss 2.929002\n", "Epoch 2911, Loss 2.928999\n", "Epoch 2912, Loss 2.928994\n", "Epoch 2913, Loss 2.928988\n", "Epoch 2914, Loss 2.928984\n", "Epoch 2915, Loss 2.928980\n", "Epoch 2916, Loss 2.928976\n", "Epoch 2917, Loss 2.928971\n", "Epoch 2918, Loss 2.928967\n", "Epoch 2919, Loss 2.928962\n", "Epoch 2920, Loss 2.928958\n", "Epoch 2921, Loss 2.928953\n", "Epoch 2922, Loss 2.928947\n", "Epoch 2923, Loss 2.928946\n", "Epoch 2924, Loss 2.928941\n", "Epoch 2925, Loss 2.928935\n", "Epoch 2926, Loss 2.928932\n", "Epoch 2927, Loss 2.928926\n", "Epoch 2928, Loss 2.928923\n", "Epoch 2929, Loss 2.928919\n", "Epoch 2930, Loss 2.928915\n", "Epoch 2931, Loss 2.928909\n", "Epoch 2932, Loss 2.928904\n", "Epoch 2933, Loss 2.928902\n", "Epoch 2934, Loss 2.928897\n", "Epoch 2935, Loss 2.928893\n", "Epoch 2936, Loss 2.928887\n", "Epoch 2937, Loss 2.928883\n", "Epoch 2938, Loss 2.928880\n", "Epoch 2939, Loss 2.928877\n", "Epoch 2940, Loss 2.928871\n", "Epoch 2941, Loss 2.928867\n", "Epoch 2942, Loss 2.928864\n", "Epoch 2943, Loss 2.928860\n", "Epoch 2944, Loss 2.928855\n", "Epoch 2945, Loss 2.928850\n", "Epoch 2946, Loss 2.928845\n", "Epoch 2947, Loss 2.928843\n", "Epoch 2948, Loss 2.928838\n", "Epoch 2949, Loss 2.928833\n", "Epoch 2950, Loss 2.928830\n", "Epoch 2951, Loss 2.928826\n", "Epoch 2952, Loss 2.928822\n", "Epoch 2953, Loss 2.928818\n", "Epoch 2954, Loss 2.928815\n", "Epoch 2955, Loss 2.928811\n", "Epoch 2956, Loss 2.928805\n", "Epoch 2957, Loss 2.928801\n", "Epoch 2958, Loss 2.928799\n", "Epoch 2959, Loss 2.928795\n", "Epoch 2960, Loss 2.928789\n", "Epoch 2961, Loss 2.928789\n", "Epoch 2962, Loss 2.928783\n", "Epoch 2963, Loss 2.928779\n", "Epoch 2964, Loss 2.928775\n", "Epoch 
2965, Loss 2.928771\n", "Epoch 2966, Loss 2.928767\n", "Epoch 2967, Loss 2.928765\n", "Epoch 2968, Loss 2.928761\n", "Epoch 2969, Loss 2.928758\n", "Epoch 2970, Loss 2.928752\n", "Epoch 2971, Loss 2.928750\n", "Epoch 2972, Loss 2.928745\n", "Epoch 2973, Loss 2.928741\n", "Epoch 2974, Loss 2.928737\n", "Epoch 2975, Loss 2.928735\n", "Epoch 2976, Loss 2.928730\n", "Epoch 2977, Loss 2.928727\n", "Epoch 2978, Loss 2.928723\n", "Epoch 2979, Loss 2.928719\n", "Epoch 2980, Loss 2.928716\n", "Epoch 2981, Loss 2.928712\n", "Epoch 2982, Loss 2.928708\n", "Epoch 2983, Loss 2.928705\n", "Epoch 2984, Loss 2.928700\n", "Epoch 2985, Loss 2.928698\n", "Epoch 2986, Loss 2.928695\n", "Epoch 2987, Loss 2.928690\n", "Epoch 2988, Loss 2.928687\n", "Epoch 2989, Loss 2.928684\n", "Epoch 2990, Loss 2.928679\n", "Epoch 2991, Loss 2.928677\n", "Epoch 2992, Loss 2.928673\n", "Epoch 2993, Loss 2.928669\n", "Epoch 2994, Loss 2.928666\n", "Epoch 2995, Loss 2.928662\n", "Epoch 2996, Loss 2.928660\n", "Epoch 2997, Loss 2.928656\n", "Epoch 2998, Loss 2.928651\n", "Epoch 2999, Loss 2.928648\n", "Epoch 3000, Loss 2.928646\n", "Epoch 3001, Loss 2.928643\n", "Epoch 3002, Loss 2.928638\n", "Epoch 3003, Loss 2.928635\n", "Epoch 3004, Loss 2.928632\n", "Epoch 3005, Loss 2.928629\n", "Epoch 3006, Loss 2.928625\n", "Epoch 3007, Loss 2.928621\n", "Epoch 3008, Loss 2.928617\n", "Epoch 3009, Loss 2.928616\n", "Epoch 3010, Loss 2.928612\n", "Epoch 3011, Loss 2.928608\n", "Epoch 3012, Loss 2.928604\n", "Epoch 3013, Loss 2.928601\n", "Epoch 3014, Loss 2.928599\n", "Epoch 3015, Loss 2.928595\n", "Epoch 3016, Loss 2.928592\n", "Epoch 3017, Loss 2.928588\n", "Epoch 3018, Loss 2.928586\n", "Epoch 3019, Loss 2.928583\n", "Epoch 3020, Loss 2.928580\n", "Epoch 3021, Loss 2.928576\n", "Epoch 3022, Loss 2.928574\n", "Epoch 3023, Loss 2.928569\n", "Epoch 3024, Loss 2.928567\n", "Epoch 3025, Loss 2.928564\n", "Epoch 3026, Loss 2.928560\n", "Epoch 3027, Loss 2.928557\n", "Epoch 3028, Loss 2.928555\n", "Epoch 3029, Loss 2.928551\n", "Epoch 3030, Loss 2.928548\n", "Epoch 3031, Loss 2.928545\n", "Epoch 3032, Loss 2.928543\n", "Epoch 3033, Loss 2.928539\n", "Epoch 3034, Loss 2.928536\n", "Epoch 3035, Loss 2.928532\n", "Epoch 3036, Loss 2.928531\n", "Epoch 3037, Loss 2.928528\n", "Epoch 3038, Loss 2.928524\n", "Epoch 3039, Loss 2.928521\n", "Epoch 3040, Loss 2.928519\n", "Epoch 3041, Loss 2.928514\n", "Epoch 3042, Loss 2.928512\n", "Epoch 3043, Loss 2.928509\n", "Epoch 3044, Loss 2.928505\n", "Epoch 3045, Loss 2.928503\n", "Epoch 3046, Loss 2.928500\n", "Epoch 3047, Loss 2.928498\n", "Epoch 3048, Loss 2.928495\n", "Epoch 3049, Loss 2.928491\n", "Epoch 3050, Loss 2.928488\n", "Epoch 3051, Loss 2.928486\n", "Epoch 3052, Loss 2.928484\n", "Epoch 3053, Loss 2.928480\n", "Epoch 3054, Loss 2.928477\n", "Epoch 3055, Loss 2.928475\n", "Epoch 3056, Loss 2.928473\n", "Epoch 3057, Loss 2.928469\n", "Epoch 3058, Loss 2.928468\n", "Epoch 3059, Loss 2.928463\n", "Epoch 3060, Loss 2.928460\n", "Epoch 3061, Loss 2.928458\n", "Epoch 3062, Loss 2.928456\n", "Epoch 3063, Loss 2.928452\n", "Epoch 3064, Loss 2.928450\n", "Epoch 3065, Loss 2.928447\n", "Epoch 3066, Loss 2.928443\n", "Epoch 3067, Loss 2.928443\n", "Epoch 3068, Loss 2.928440\n", "Epoch 3069, Loss 2.928435\n", "Epoch 3070, Loss 2.928436\n", "Epoch 3071, Loss 2.928430\n", "Epoch 3072, Loss 2.928428\n", "Epoch 3073, Loss 2.928426\n", "Epoch 3074, Loss 2.928423\n", "Epoch 3075, Loss 2.928421\n", "Epoch 3076, Loss 2.928417\n", "Epoch 3077, Loss 2.928415\n", "Epoch 3078, Loss 2.928411\n", "Epoch 3079, Loss 
2.928410\n", "Epoch 3080, Loss 2.928407\n", "Epoch 3081, Loss 2.928404\n", "Epoch 3082, Loss 2.928402\n", "Epoch 3083, Loss 2.928399\n", "Epoch 3084, Loss 2.928396\n", "Epoch 3085, Loss 2.928396\n", "Epoch 3086, Loss 2.928392\n", "Epoch 3087, Loss 2.928389\n", "Epoch 3088, Loss 2.928386\n", "Epoch 3089, Loss 2.928383\n", "Epoch 3090, Loss 2.928383\n", "Epoch 3091, Loss 2.928379\n", "Epoch 3092, Loss 2.928378\n", "Epoch 3093, Loss 2.928375\n", "Epoch 3094, Loss 2.928372\n", "Epoch 3095, Loss 2.928370\n", "Epoch 3096, Loss 2.928368\n", "Epoch 3097, Loss 2.928364\n", "Epoch 3098, Loss 2.928362\n", "Epoch 3099, Loss 2.928361\n", "Epoch 3100, Loss 2.928357\n", "Epoch 3101, Loss 2.928355\n", "Epoch 3102, Loss 2.928353\n", "Epoch 3103, Loss 2.928349\n", "Epoch 3104, Loss 2.928348\n", "Epoch 3105, Loss 2.928345\n", "Epoch 3106, Loss 2.928343\n", "Epoch 3107, Loss 2.928340\n", "Epoch 3108, Loss 2.928339\n", "Epoch 3109, Loss 2.928337\n", "Epoch 3110, Loss 2.928333\n", "Epoch 3111, Loss 2.928332\n", "Epoch 3112, Loss 2.928328\n", "Epoch 3113, Loss 2.928329\n", "Epoch 3114, Loss 2.928324\n", "Epoch 3115, Loss 2.928323\n", "Epoch 3116, Loss 2.928320\n", "Epoch 3117, Loss 2.928318\n", "Epoch 3118, Loss 2.928315\n", "Epoch 3119, Loss 2.928313\n", "Epoch 3120, Loss 2.928311\n", "Epoch 3121, Loss 2.928308\n", "Epoch 3122, Loss 2.928306\n", "Epoch 3123, Loss 2.928304\n", "Epoch 3124, Loss 2.928303\n", "Epoch 3125, Loss 2.928300\n", "Epoch 3126, Loss 2.928296\n", "Epoch 3127, Loss 2.928295\n", "Epoch 3128, Loss 2.928292\n", "Epoch 3129, Loss 2.928292\n", "Epoch 3130, Loss 2.928288\n", "Epoch 3131, Loss 2.928287\n", "Epoch 3132, Loss 2.928285\n", "Epoch 3133, Loss 2.928282\n", "Epoch 3134, Loss 2.928279\n", "Epoch 3135, Loss 2.928276\n", "Epoch 3136, Loss 2.928275\n", "Epoch 3137, Loss 2.928273\n", "Epoch 3138, Loss 2.928272\n", "Epoch 3139, Loss 2.928268\n", "Epoch 3140, Loss 2.928267\n", "Epoch 3141, Loss 2.928265\n", "Epoch 3142, Loss 2.928263\n", "Epoch 3143, Loss 2.928260\n", "Epoch 3144, Loss 2.928259\n", "Epoch 3145, Loss 2.928256\n", "Epoch 3146, Loss 2.928255\n", "Epoch 3147, Loss 2.928252\n", "Epoch 3148, Loss 2.928251\n", "Epoch 3149, Loss 2.928248\n", "Epoch 3150, Loss 2.928246\n", "Epoch 3151, Loss 2.928245\n", "Epoch 3152, Loss 2.928242\n", "Epoch 3153, Loss 2.928240\n", "Epoch 3154, Loss 2.928236\n", "Epoch 3155, Loss 2.928236\n", "Epoch 3156, Loss 2.928233\n", "Epoch 3157, Loss 2.928231\n", "Epoch 3158, Loss 2.928230\n", "Epoch 3159, Loss 2.928227\n", "Epoch 3160, Loss 2.928226\n", "Epoch 3161, Loss 2.928225\n", "Epoch 3162, Loss 2.928223\n", "Epoch 3163, Loss 2.928219\n", "Epoch 3164, Loss 2.928218\n", "Epoch 3165, Loss 2.928216\n", "Epoch 3166, Loss 2.928215\n", "Epoch 3167, Loss 2.928212\n", "Epoch 3168, Loss 2.928211\n", "Epoch 3169, Loss 2.928210\n", "Epoch 3170, Loss 2.928206\n", "Epoch 3171, Loss 2.928205\n", "Epoch 3172, Loss 2.928204\n", "Epoch 3173, Loss 2.928202\n", "Epoch 3174, Loss 2.928200\n", "Epoch 3175, Loss 2.928196\n", "Epoch 3176, Loss 2.928195\n", "Epoch 3177, Loss 2.928195\n", "Epoch 3178, Loss 2.928192\n", "Epoch 3179, Loss 2.928190\n", "Epoch 3180, Loss 2.928188\n", "Epoch 3181, Loss 2.928186\n", "Epoch 3182, Loss 2.928185\n", "Epoch 3183, Loss 2.928184\n", "Epoch 3184, Loss 2.928182\n", "Epoch 3185, Loss 2.928180\n", "Epoch 3186, Loss 2.928178\n", "Epoch 3187, Loss 2.928175\n", "Epoch 3188, Loss 2.928172\n", "Epoch 3189, Loss 2.928170\n", "Epoch 3190, Loss 2.928170\n", "Epoch 3191, Loss 2.928169\n", "Epoch 3192, Loss 2.928167\n", "Epoch 3193, Loss 2.928164\n", "Epoch 
3194, Loss 2.928164\n", "Epoch 3195, Loss 2.928162\n", "Epoch 3196, Loss 2.928160\n", "Epoch 3197, Loss 2.928158\n", "Epoch 3198, Loss 2.928158\n", "Epoch 3199, Loss 2.928154\n", "Epoch 3200, Loss 2.928152\n", "Epoch 3201, Loss 2.928149\n", "Epoch 3202, Loss 2.928150\n", "Epoch 3203, Loss 2.928147\n", "Epoch 3204, Loss 2.928146\n", "Epoch 3205, Loss 2.928144\n", "Epoch 3206, Loss 2.928142\n", "Epoch 3207, Loss 2.928140\n", "Epoch 3208, Loss 2.928139\n", "Epoch 3209, Loss 2.928137\n", "Epoch 3210, Loss 2.928135\n", "Epoch 3211, Loss 2.928135\n", "Epoch 3212, Loss 2.928133\n", "Epoch 3213, Loss 2.928131\n", "Epoch 3214, Loss 2.928130\n", "Epoch 3215, Loss 2.928125\n", "Epoch 3216, Loss 2.928125\n", "Epoch 3217, Loss 2.928124\n", "Epoch 3218, Loss 2.928121\n", "Epoch 3219, Loss 2.928121\n", "Epoch 3220, Loss 2.928120\n", "Epoch 3221, Loss 2.928118\n", "Epoch 3222, Loss 2.928117\n", "Epoch 3223, Loss 2.928115\n", "Epoch 3224, Loss 2.928113\n", "Epoch 3225, Loss 2.928110\n", "Epoch 3226, Loss 2.928109\n", "Epoch 3227, Loss 2.928108\n", "Epoch 3228, Loss 2.928104\n", "Epoch 3229, Loss 2.928105\n", "Epoch 3230, Loss 2.928104\n", "Epoch 3231, Loss 2.928102\n", "Epoch 3232, Loss 2.928101\n", "Epoch 3233, Loss 2.928098\n", "Epoch 3234, Loss 2.928097\n", "Epoch 3235, Loss 2.928095\n", "Epoch 3236, Loss 2.928094\n", "Epoch 3237, Loss 2.928093\n", "Epoch 3238, Loss 2.928091\n", "Epoch 3239, Loss 2.928090\n", "Epoch 3240, Loss 2.928088\n", "Epoch 3241, Loss 2.928086\n", "Epoch 3242, Loss 2.928085\n", "Epoch 3243, Loss 2.928084\n", "Epoch 3244, Loss 2.928082\n", "Epoch 3245, Loss 2.928080\n", "Epoch 3246, Loss 2.928079\n", "Epoch 3247, Loss 2.928076\n", "Epoch 3248, Loss 2.928076\n", "Epoch 3249, Loss 2.928075\n", "Epoch 3250, Loss 2.928072\n", "Epoch 3251, Loss 2.928072\n", "Epoch 3252, Loss 2.928071\n", "Epoch 3253, Loss 2.928068\n", "Epoch 3254, Loss 2.928068\n", "Epoch 3255, Loss 2.928066\n", "Epoch 3256, Loss 2.928065\n", "Epoch 3257, Loss 2.928063\n", "Epoch 3258, Loss 2.928061\n", "Epoch 3259, Loss 2.928061\n", "Epoch 3260, Loss 2.928057\n", "Epoch 3261, Loss 2.928058\n", "Epoch 3262, Loss 2.928056\n", "Epoch 3263, Loss 2.928055\n", "Epoch 3264, Loss 2.928052\n", "Epoch 3265, Loss 2.928053\n", "Epoch 3266, Loss 2.928051\n", "Epoch 3267, Loss 2.928050\n", "Epoch 3268, Loss 2.928047\n", "Epoch 3269, Loss 2.928046\n", "Epoch 3270, Loss 2.928046\n", "Epoch 3271, Loss 2.928044\n", "Epoch 3272, Loss 2.928042\n", "Epoch 3273, Loss 2.928040\n", "Epoch 3274, Loss 2.928040\n", "Epoch 3275, Loss 2.928037\n", "Epoch 3276, Loss 2.928036\n", "Epoch 3277, Loss 2.928037\n", "Epoch 3278, Loss 2.928034\n", "Epoch 3279, Loss 2.928034\n", "Epoch 3280, Loss 2.928031\n", "Epoch 3281, Loss 2.928032\n", "Epoch 3282, Loss 2.928029\n", "Epoch 3283, Loss 2.928027\n", "Epoch 3284, Loss 2.928026\n", "Epoch 3285, Loss 2.928025\n", "Epoch 3286, Loss 2.928024\n", "Epoch 3287, Loss 2.928023\n", "Epoch 3288, Loss 2.928022\n", "Epoch 3289, Loss 2.928021\n", "Epoch 3290, Loss 2.928019\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 3291, Loss 2.928018\n", "Epoch 3292, Loss 2.928017\n", "Epoch 3293, Loss 2.928015\n", "Epoch 3294, Loss 2.928013\n", "Epoch 3295, Loss 2.928012\n", "Epoch 3296, Loss 2.928011\n", "Epoch 3297, Loss 2.928009\n", "Epoch 3298, Loss 2.928009\n", "Epoch 3299, Loss 2.928006\n", "Epoch 3300, Loss 2.928007\n", "Epoch 3301, Loss 2.928007\n", "Epoch 3302, Loss 2.928004\n", "Epoch 3303, Loss 2.928002\n", "Epoch 3304, Loss 2.928001\n", "Epoch 3305, Loss 2.928000\n", "Epoch 3306, Loss 2.928000\n", 
"Epoch 3307, Loss 2.927998\n", "Epoch 3308, Loss 2.927995\n", "Epoch 3309, Loss 2.927995\n", "Epoch 3310, Loss 2.927994\n", "Epoch 3311, Loss 2.927994\n", "Epoch 3312, Loss 2.927992\n", "Epoch 3313, Loss 2.927992\n", "Epoch 3314, Loss 2.927990\n", "Epoch 3315, Loss 2.927989\n", "Epoch 3316, Loss 2.927987\n", "Epoch 3317, Loss 2.927986\n", "Epoch 3318, Loss 2.927985\n", "Epoch 3319, Loss 2.927983\n", "Epoch 3320, Loss 2.927983\n", "Epoch 3321, Loss 2.927981\n", "Epoch 3322, Loss 2.927980\n", "Epoch 3323, Loss 2.927979\n", "Epoch 3324, Loss 2.927978\n", "Epoch 3325, Loss 2.927977\n", "Epoch 3326, Loss 2.927975\n", "Epoch 3327, Loss 2.927973\n", "Epoch 3328, Loss 2.927973\n", "Epoch 3329, Loss 2.927974\n", "Epoch 3330, Loss 2.927971\n", "Epoch 3331, Loss 2.927972\n", "Epoch 3332, Loss 2.927969\n", "Epoch 3333, Loss 2.927969\n", "Epoch 3334, Loss 2.927967\n", "Epoch 3335, Loss 2.927967\n", "Epoch 3336, Loss 2.927963\n", "Epoch 3337, Loss 2.927963\n", "Epoch 3338, Loss 2.927962\n", "Epoch 3339, Loss 2.927962\n", "Epoch 3340, Loss 2.927961\n", "Epoch 3341, Loss 2.927960\n", "Epoch 3342, Loss 2.927959\n", "Epoch 3343, Loss 2.927958\n", "Epoch 3344, Loss 2.927956\n", "Epoch 3345, Loss 2.927955\n", "Epoch 3346, Loss 2.927954\n", "Epoch 3347, Loss 2.927953\n", "Epoch 3348, Loss 2.927953\n", "Epoch 3349, Loss 2.927950\n", "Epoch 3350, Loss 2.927950\n", "Epoch 3351, Loss 2.927948\n", "Epoch 3352, Loss 2.927947\n", "Epoch 3353, Loss 2.927948\n", "Epoch 3354, Loss 2.927945\n", "Epoch 3355, Loss 2.927944\n", "Epoch 3356, Loss 2.927944\n", "Epoch 3357, Loss 2.927944\n", "Epoch 3358, Loss 2.927942\n", "Epoch 3359, Loss 2.927941\n", "Epoch 3360, Loss 2.927940\n", "Epoch 3361, Loss 2.927938\n", "Epoch 3362, Loss 2.927938\n", "Epoch 3363, Loss 2.927936\n", "Epoch 3364, Loss 2.927936\n", "Epoch 3365, Loss 2.927937\n", "Epoch 3366, Loss 2.927934\n", "Epoch 3367, Loss 2.927934\n", "Epoch 3368, Loss 2.927933\n", "Epoch 3369, Loss 2.927930\n", "Epoch 3370, Loss 2.927929\n", "Epoch 3371, Loss 2.927931\n", "Epoch 3372, Loss 2.927929\n", "Epoch 3373, Loss 2.927927\n", "Epoch 3374, Loss 2.927926\n", "Epoch 3375, Loss 2.927925\n", "Epoch 3376, Loss 2.927924\n", "Epoch 3377, Loss 2.927922\n", "Epoch 3378, Loss 2.927924\n", "Epoch 3379, Loss 2.927922\n", "Epoch 3380, Loss 2.927921\n", "Epoch 3381, Loss 2.927920\n", "Epoch 3382, Loss 2.927918\n", "Epoch 3383, Loss 2.927917\n", "Epoch 3384, Loss 2.927917\n", "Epoch 3385, Loss 2.927915\n", "Epoch 3386, Loss 2.927916\n", "Epoch 3387, Loss 2.927914\n", "Epoch 3388, Loss 2.927914\n", "Epoch 3389, Loss 2.927912\n", "Epoch 3390, Loss 2.927913\n", "Epoch 3391, Loss 2.927911\n", "Epoch 3392, Loss 2.927910\n", "Epoch 3393, Loss 2.927909\n", "Epoch 3394, Loss 2.927908\n", "Epoch 3395, Loss 2.927907\n", "Epoch 3396, Loss 2.927906\n", "Epoch 3397, Loss 2.927905\n", "Epoch 3398, Loss 2.927905\n", "Epoch 3399, Loss 2.927904\n", "Epoch 3400, Loss 2.927902\n", "Epoch 3401, Loss 2.927902\n", "Epoch 3402, Loss 2.927902\n", "Epoch 3403, Loss 2.927899\n", "Epoch 3404, Loss 2.927899\n", "Epoch 3405, Loss 2.927898\n", "Epoch 3406, Loss 2.927899\n", "Epoch 3407, Loss 2.927896\n", "Epoch 3408, Loss 2.927895\n", "Epoch 3409, Loss 2.927896\n", "Epoch 3410, Loss 2.927894\n", "Epoch 3411, Loss 2.927892\n", "Epoch 3412, Loss 2.927893\n", "Epoch 3413, Loss 2.927891\n", "Epoch 3414, Loss 2.927891\n", "Epoch 3415, Loss 2.927890\n", "Epoch 3416, Loss 2.927891\n", "Epoch 3417, Loss 2.927888\n", "Epoch 3418, Loss 2.927888\n", "Epoch 3419, Loss 2.927886\n", "Epoch 3420, Loss 2.927887\n", "Epoch 3421, Loss 
2.927885\n", "Epoch 3422, Loss 2.927884\n", "Epoch 3423, Loss 2.927883\n", "Epoch 3424, Loss 2.927881\n", "Epoch 3425, Loss 2.927881\n", "Epoch 3426, Loss 2.927880\n", "Epoch 3427, Loss 2.927880\n", "Epoch 3428, Loss 2.927879\n", "Epoch 3429, Loss 2.927878\n", "Epoch 3430, Loss 2.927877\n", "Epoch 3431, Loss 2.927876\n", "Epoch 3432, Loss 2.927876\n", "Epoch 3433, Loss 2.927875\n", "Epoch 3434, Loss 2.927875\n", "Epoch 3435, Loss 2.927875\n", "Epoch 3436, Loss 2.927873\n", "Epoch 3437, Loss 2.927872\n", "Epoch 3438, Loss 2.927870\n", "Epoch 3439, Loss 2.927871\n", "Epoch 3440, Loss 2.927871\n", "Epoch 3441, Loss 2.927869\n", "Epoch 3442, Loss 2.927869\n", "Epoch 3443, Loss 2.927866\n", "Epoch 3444, Loss 2.927865\n", "Epoch 3445, Loss 2.927866\n", "Epoch 3446, Loss 2.927866\n", "Epoch 3447, Loss 2.927864\n", "Epoch 3448, Loss 2.927863\n", "Epoch 3449, Loss 2.927863\n", "Epoch 3450, Loss 2.927862\n", "Epoch 3451, Loss 2.927863\n", "Epoch 3452, Loss 2.927860\n", "Epoch 3453, Loss 2.927860\n", "Epoch 3454, Loss 2.927860\n", "Epoch 3455, Loss 2.927859\n", "Epoch 3456, Loss 2.927858\n", "Epoch 3457, Loss 2.927858\n", "Epoch 3458, Loss 2.927855\n", "Epoch 3459, Loss 2.927857\n", "Epoch 3460, Loss 2.927854\n", "Epoch 3461, Loss 2.927855\n", "Epoch 3462, Loss 2.927854\n", "Epoch 3463, Loss 2.927854\n", "Epoch 3464, Loss 2.927851\n", "Epoch 3465, Loss 2.927853\n", "Epoch 3466, Loss 2.927852\n", "Epoch 3467, Loss 2.927850\n", "Epoch 3468, Loss 2.927849\n", "Epoch 3469, Loss 2.927849\n", "Epoch 3470, Loss 2.927848\n", "Epoch 3471, Loss 2.927848\n", "Epoch 3472, Loss 2.927846\n", "Epoch 3473, Loss 2.927846\n", "Epoch 3474, Loss 2.927845\n", "Epoch 3475, Loss 2.927844\n", "Epoch 3476, Loss 2.927844\n", "Epoch 3477, Loss 2.927844\n", "Epoch 3478, Loss 2.927843\n", "Epoch 3479, Loss 2.927842\n", "Epoch 3480, Loss 2.927842\n", "Epoch 3481, Loss 2.927840\n", "Epoch 3482, Loss 2.927841\n", "Epoch 3483, Loss 2.927839\n", "Epoch 3484, Loss 2.927838\n", "Epoch 3485, Loss 2.927839\n", "Epoch 3486, Loss 2.927839\n", "Epoch 3487, Loss 2.927837\n", "Epoch 3488, Loss 2.927835\n", "Epoch 3489, Loss 2.927837\n", "Epoch 3490, Loss 2.927835\n", "Epoch 3491, Loss 2.927834\n", "Epoch 3492, Loss 2.927833\n", "Epoch 3493, Loss 2.927833\n", "Epoch 3494, Loss 2.927833\n", "Epoch 3495, Loss 2.927832\n", "Epoch 3496, Loss 2.927831\n", "Epoch 3497, Loss 2.927830\n", "Epoch 3498, Loss 2.927830\n", "Epoch 3499, Loss 2.927830\n", "Epoch 3500, Loss 2.927830\n", "Epoch 3501, Loss 2.927828\n", "Epoch 3502, Loss 2.927828\n", "Epoch 3503, Loss 2.927827\n", "Epoch 3504, Loss 2.927825\n", "Epoch 3505, Loss 2.927827\n", "Epoch 3506, Loss 2.927825\n", "Epoch 3507, Loss 2.927824\n", "Epoch 3508, Loss 2.927824\n", "Epoch 3509, Loss 2.927824\n", "Epoch 3510, Loss 2.927822\n", "Epoch 3511, Loss 2.927822\n", "Epoch 3512, Loss 2.927821\n", "Epoch 3513, Loss 2.927820\n", "Epoch 3514, Loss 2.927819\n", "Epoch 3515, Loss 2.927821\n", "Epoch 3516, Loss 2.927819\n", "Epoch 3517, Loss 2.927819\n", "Epoch 3518, Loss 2.927818\n", "Epoch 3519, Loss 2.927818\n", "Epoch 3520, Loss 2.927817\n", "Epoch 3521, Loss 2.927816\n", "Epoch 3522, Loss 2.927815\n", "Epoch 3523, Loss 2.927816\n", "Epoch 3524, Loss 2.927814\n", "Epoch 3525, Loss 2.927813\n", "Epoch 3526, Loss 2.927813\n", "Epoch 3527, Loss 2.927812\n", "Epoch 3528, Loss 2.927811\n", "Epoch 3529, Loss 2.927811\n", "Epoch 3530, Loss 2.927811\n", "Epoch 3531, Loss 2.927810\n", "Epoch 3532, Loss 2.927809\n", "Epoch 3533, Loss 2.927810\n", "Epoch 3534, Loss 2.927809\n", "Epoch 3535, Loss 2.927808\n", "Epoch 
3536, Loss 2.927809\n", "Epoch 3537, Loss 2.927806\n", "Epoch 3538, Loss 2.927806\n", "Epoch 3539, Loss 2.927805\n", "Epoch 3540, Loss 2.927804\n", "Epoch 3541, Loss 2.927804\n", "Epoch 3542, Loss 2.927804\n", "Epoch 3543, Loss 2.927805\n", "Epoch 3544, Loss 2.927804\n", "Epoch 3545, Loss 2.927804\n", "Epoch 3546, Loss 2.927802\n", "Epoch 3547, Loss 2.927802\n", "Epoch 3548, Loss 2.927801\n", "Epoch 3549, Loss 2.927801\n", "Epoch 3550, Loss 2.927799\n", "Epoch 3551, Loss 2.927801\n", "Epoch 3552, Loss 2.927798\n", "Epoch 3553, Loss 2.927798\n", "Epoch 3554, Loss 2.927798\n", "Epoch 3555, Loss 2.927798\n", "Epoch 3556, Loss 2.927798\n", "Epoch 3557, Loss 2.927796\n", "Epoch 3558, Loss 2.927796\n", "Epoch 3559, Loss 2.927796\n", "Epoch 3560, Loss 2.927794\n", "Epoch 3561, Loss 2.927796\n", "Epoch 3562, Loss 2.927795\n", "Epoch 3563, Loss 2.927794\n", "Epoch 3564, Loss 2.927794\n", "Epoch 3565, Loss 2.927791\n", "Epoch 3566, Loss 2.927792\n", "Epoch 3567, Loss 2.927792\n", "Epoch 3568, Loss 2.927790\n", "Epoch 3569, Loss 2.927790\n", "Epoch 3570, Loss 2.927789\n", "Epoch 3571, Loss 2.927790\n", "Epoch 3572, Loss 2.927789\n", "Epoch 3573, Loss 2.927790\n", "Epoch 3574, Loss 2.927789\n", "Epoch 3575, Loss 2.927787\n", "Epoch 3576, Loss 2.927786\n", "Epoch 3577, Loss 2.927788\n", "Epoch 3578, Loss 2.927785\n", "Epoch 3579, Loss 2.927785\n", "Epoch 3580, Loss 2.927786\n", "Epoch 3581, Loss 2.927785\n", "Epoch 3582, Loss 2.927785\n", "Epoch 3583, Loss 2.927784\n", "Epoch 3584, Loss 2.927783\n", "Epoch 3585, Loss 2.927783\n", "Epoch 3586, Loss 2.927781\n", "Epoch 3587, Loss 2.927782\n", "Epoch 3588, Loss 2.927780\n", "Epoch 3589, Loss 2.927781\n", "Epoch 3590, Loss 2.927781\n", "Epoch 3591, Loss 2.927780\n", "Epoch 3592, Loss 2.927780\n", "Epoch 3593, Loss 2.927778\n", "Epoch 3594, Loss 2.927779\n", "Epoch 3595, Loss 2.927778\n", "Epoch 3596, Loss 2.927778\n", "Epoch 3597, Loss 2.927778\n", "Epoch 3598, Loss 2.927777\n", "Epoch 3599, Loss 2.927776\n", "Epoch 3600, Loss 2.927775\n", "Epoch 3601, Loss 2.927775\n", "Epoch 3602, Loss 2.927773\n", "Epoch 3603, Loss 2.927775\n", "Epoch 3604, Loss 2.927775\n", "Epoch 3605, Loss 2.927774\n", "Epoch 3606, Loss 2.927773\n", "Epoch 3607, Loss 2.927773\n", "Epoch 3608, Loss 2.927772\n", "Epoch 3609, Loss 2.927772\n", "Epoch 3610, Loss 2.927772\n", "Epoch 3611, Loss 2.927770\n", "Epoch 3612, Loss 2.927772\n", "Epoch 3613, Loss 2.927772\n", "Epoch 3614, Loss 2.927770\n", "Epoch 3615, Loss 2.927770\n", "Epoch 3616, Loss 2.927769\n", "Epoch 3617, Loss 2.927768\n", "Epoch 3618, Loss 2.927769\n", "Epoch 3619, Loss 2.927768\n", "Epoch 3620, Loss 2.927766\n", "Epoch 3621, Loss 2.927767\n", "Epoch 3622, Loss 2.927767\n", "Epoch 3623, Loss 2.927765\n", "Epoch 3624, Loss 2.927766\n", "Epoch 3625, Loss 2.927765\n", "Epoch 3626, Loss 2.927766\n", "Epoch 3627, Loss 2.927764\n", "Epoch 3628, Loss 2.927764\n", "Epoch 3629, Loss 2.927764\n", "Epoch 3630, Loss 2.927762\n", "Epoch 3631, Loss 2.927763\n", "Epoch 3632, Loss 2.927763\n", "Epoch 3633, Loss 2.927762\n", "Epoch 3634, Loss 2.927761\n", "Epoch 3635, Loss 2.927762\n", "Epoch 3636, Loss 2.927759\n", "Epoch 3637, Loss 2.927761\n", "Epoch 3638, Loss 2.927761\n", "Epoch 3639, Loss 2.927760\n", "Epoch 3640, Loss 2.927759\n", "Epoch 3641, Loss 2.927758\n", "Epoch 3642, Loss 2.927759\n", "Epoch 3643, Loss 2.927757\n", "Epoch 3644, Loss 2.927758\n", "Epoch 3645, Loss 2.927757\n", "Epoch 3646, Loss 2.927757\n", "Epoch 3647, Loss 2.927757\n", "Epoch 3648, Loss 2.927756\n", "Epoch 3649, Loss 2.927758\n", "Epoch 3650, Loss 
2.927756\n", "Epoch 3651, Loss 2.927756\n", "Epoch 3652, Loss 2.927755\n", "Epoch 3653, Loss 2.927755\n", "Epoch 3654, Loss 2.927754\n", "Epoch 3655, Loss 2.927754\n", "Epoch 3656, Loss 2.927755\n", "Epoch 3657, Loss 2.927753\n", "Epoch 3658, Loss 2.927752\n", "Epoch 3659, Loss 2.927754\n", "Epoch 3660, Loss 2.927752\n", "Epoch 3661, Loss 2.927751\n", "Epoch 3662, Loss 2.927752\n", "Epoch 3663, Loss 2.927750\n", "Epoch 3664, Loss 2.927750\n", "Epoch 3665, Loss 2.927752\n", "Epoch 3666, Loss 2.927750\n", "Epoch 3667, Loss 2.927750\n", "Epoch 3668, Loss 2.927747\n", "Epoch 3669, Loss 2.927749\n", "Epoch 3670, Loss 2.927748\n", "Epoch 3671, Loss 2.927748\n", "Epoch 3672, Loss 2.927749\n", "Epoch 3673, Loss 2.927747\n", "Epoch 3674, Loss 2.927747\n", "Epoch 3675, Loss 2.927748\n", "Epoch 3676, Loss 2.927747\n", "Epoch 3677, Loss 2.927746\n", "Epoch 3678, Loss 2.927746\n", "Epoch 3679, Loss 2.927745\n", "Epoch 3680, Loss 2.927745\n", "Epoch 3681, Loss 2.927744\n", "Epoch 3682, Loss 2.927744\n", "Epoch 3683, Loss 2.927743\n", "Epoch 3684, Loss 2.927742\n", "Epoch 3685, Loss 2.927743\n", "Epoch 3686, Loss 2.927743\n", "Epoch 3687, Loss 2.927744\n", "Epoch 3688, Loss 2.927743\n", "Epoch 3689, Loss 2.927742\n", "Epoch 3690, Loss 2.927742\n", "Epoch 3691, Loss 2.927742\n", "Epoch 3692, Loss 2.927741\n", "Epoch 3693, Loss 2.927741\n", "Epoch 3694, Loss 2.927741\n", "Epoch 3695, Loss 2.927742\n", "Epoch 3696, Loss 2.927741\n", "Epoch 3697, Loss 2.927740\n", "Epoch 3698, Loss 2.927740\n", "Epoch 3699, Loss 2.927738\n", "Epoch 3700, Loss 2.927738\n", "Epoch 3701, Loss 2.927738\n", "Epoch 3702, Loss 2.927737\n", "Epoch 3703, Loss 2.927737\n", "Epoch 3704, Loss 2.927738\n", "Epoch 3705, Loss 2.927738\n", "Epoch 3706, Loss 2.927737\n", "Epoch 3707, Loss 2.927737\n", "Epoch 3708, Loss 2.927736\n", "Epoch 3709, Loss 2.927735\n", "Epoch 3710, Loss 2.927734\n", "Epoch 3711, Loss 2.927735\n", "Epoch 3712, Loss 2.927736\n", "Epoch 3713, Loss 2.927734\n", "Epoch 3714, Loss 2.927734\n", "Epoch 3715, Loss 2.927733\n", "Epoch 3716, Loss 2.927734\n", "Epoch 3717, Loss 2.927733\n", "Epoch 3718, Loss 2.927733\n", "Epoch 3719, Loss 2.927733\n", "Epoch 3720, Loss 2.927733\n", "Epoch 3721, Loss 2.927731\n", "Epoch 3722, Loss 2.927731\n", "Epoch 3723, Loss 2.927732\n", "Epoch 3724, Loss 2.927730\n", "Epoch 3725, Loss 2.927730\n", "Epoch 3726, Loss 2.927731\n", "Epoch 3727, Loss 2.927730\n", "Epoch 3728, Loss 2.927732\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 3729, Loss 2.927732\n", "Epoch 3730, Loss 2.927730\n", "Epoch 3731, Loss 2.927728\n", "Epoch 3732, Loss 2.927729\n", "Epoch 3733, Loss 2.927730\n", "Epoch 3734, Loss 2.927729\n", "Epoch 3735, Loss 2.927728\n", "Epoch 3736, Loss 2.927728\n", "Epoch 3737, Loss 2.927728\n", "Epoch 3738, Loss 2.927727\n", "Epoch 3739, Loss 2.927728\n", "Epoch 3740, Loss 2.927728\n", "Epoch 3741, Loss 2.927727\n", "Epoch 3742, Loss 2.927727\n", "Epoch 3743, Loss 2.927726\n", "Epoch 3744, Loss 2.927726\n", "Epoch 3745, Loss 2.927725\n", "Epoch 3746, Loss 2.927725\n", "Epoch 3747, Loss 2.927725\n", "Epoch 3748, Loss 2.927724\n", "Epoch 3749, Loss 2.927724\n", "Epoch 3750, Loss 2.927724\n", "Epoch 3751, Loss 2.927725\n", "Epoch 3752, Loss 2.927724\n", "Epoch 3753, Loss 2.927724\n", "Epoch 3754, Loss 2.927723\n", "Epoch 3755, Loss 2.927723\n", "Epoch 3756, Loss 2.927722\n", "Epoch 3757, Loss 2.927722\n", "Epoch 3758, Loss 2.927723\n", "Epoch 3759, Loss 2.927722\n", "Epoch 3760, Loss 2.927723\n", "Epoch 3761, Loss 2.927721\n", "Epoch 3762, Loss 2.927721\n", "Epoch 
3763, Loss 2.927720\n", "Epoch 3764, Loss 2.927720\n", "Epoch 3765, Loss 2.927719\n", "Epoch 3766, Loss 2.927721\n", "Epoch 3767, Loss 2.927719\n", "Epoch 3768, Loss 2.927719\n", "Epoch 3769, Loss 2.927719\n", "Epoch 3770, Loss 2.927719\n", "Epoch 3771, Loss 2.927718\n", "Epoch 3772, Loss 2.927720\n", "Epoch 3773, Loss 2.927718\n", "Epoch 3774, Loss 2.927718\n", "Epoch 3775, Loss 2.927717\n", "Epoch 3776, Loss 2.927718\n", "Epoch 3777, Loss 2.927717\n", "Epoch 3778, Loss 2.927717\n", "Epoch 3779, Loss 2.927716\n", "Epoch 3780, Loss 2.927716\n", "Epoch 3781, Loss 2.927717\n", "Epoch 3782, Loss 2.927717\n", "Epoch 3783, Loss 2.927716\n", "Epoch 3784, Loss 2.927715\n", "Epoch 3785, Loss 2.927715\n", "Epoch 3786, Loss 2.927715\n", "Epoch 3787, Loss 2.927715\n", "Epoch 3788, Loss 2.927715\n", "Epoch 3789, Loss 2.927715\n", "Epoch 3790, Loss 2.927714\n", "Epoch 3791, Loss 2.927714\n", "Epoch 3792, Loss 2.927714\n", "Epoch 3793, Loss 2.927713\n", "Epoch 3794, Loss 2.927713\n", "Epoch 3795, Loss 2.927714\n", "Epoch 3796, Loss 2.927713\n", "Epoch 3797, Loss 2.927712\n", "Epoch 3798, Loss 2.927712\n", "Epoch 3799, Loss 2.927712\n", "Epoch 3800, Loss 2.927711\n", "Epoch 3801, Loss 2.927711\n", "Epoch 3802, Loss 2.927713\n", "Epoch 3803, Loss 2.927711\n", "Epoch 3804, Loss 2.927712\n", "Epoch 3805, Loss 2.927711\n", "Epoch 3806, Loss 2.927711\n", "Epoch 3807, Loss 2.927711\n", "Epoch 3808, Loss 2.927709\n", "Epoch 3809, Loss 2.927711\n", "Epoch 3810, Loss 2.927710\n", "Epoch 3811, Loss 2.927708\n", "Epoch 3812, Loss 2.927708\n", "Epoch 3813, Loss 2.927709\n", "Epoch 3814, Loss 2.927709\n", "Epoch 3815, Loss 2.927710\n", "Epoch 3816, Loss 2.927708\n", "Epoch 3817, Loss 2.927708\n", "Epoch 3818, Loss 2.927706\n", "Epoch 3819, Loss 2.927707\n", "Epoch 3820, Loss 2.927708\n", "Epoch 3821, Loss 2.927707\n", "Epoch 3822, Loss 2.927707\n", "Epoch 3823, Loss 2.927707\n", "Epoch 3824, Loss 2.927708\n", "Epoch 3825, Loss 2.927708\n", "Epoch 3826, Loss 2.927706\n", "Epoch 3827, Loss 2.927707\n", "Epoch 3828, Loss 2.927706\n", "Epoch 3829, Loss 2.927706\n", "Epoch 3830, Loss 2.927706\n", "Epoch 3831, Loss 2.927705\n", "Epoch 3832, Loss 2.927705\n", "Epoch 3833, Loss 2.927705\n", "Epoch 3834, Loss 2.927705\n", "Epoch 3835, Loss 2.927705\n", "Epoch 3836, Loss 2.927704\n", "Epoch 3837, Loss 2.927703\n", "Epoch 3838, Loss 2.927704\n", "Epoch 3839, Loss 2.927704\n", "Epoch 3840, Loss 2.927703\n", "Epoch 3841, Loss 2.927702\n", "Epoch 3842, Loss 2.927703\n", "Epoch 3843, Loss 2.927702\n", "Epoch 3844, Loss 2.927704\n", "Epoch 3845, Loss 2.927702\n", "Epoch 3846, Loss 2.927701\n", "Epoch 3847, Loss 2.927703\n", "Epoch 3848, Loss 2.927702\n", "Epoch 3849, Loss 2.927701\n", "Epoch 3850, Loss 2.927701\n", "Epoch 3851, Loss 2.927703\n", "Epoch 3852, Loss 2.927700\n", "Epoch 3853, Loss 2.927701\n", "Epoch 3854, Loss 2.927701\n", "Epoch 3855, Loss 2.927700\n", "Epoch 3856, Loss 2.927700\n", "Epoch 3857, Loss 2.927700\n", "Epoch 3858, Loss 2.927701\n", "Epoch 3859, Loss 2.927700\n", "Epoch 3860, Loss 2.927700\n", "Epoch 3861, Loss 2.927700\n", "Epoch 3862, Loss 2.927699\n", "Epoch 3863, Loss 2.927698\n", "Epoch 3864, Loss 2.927700\n", "Epoch 3865, Loss 2.927697\n", "Epoch 3866, Loss 2.927700\n", "Epoch 3867, Loss 2.927700\n", "Epoch 3868, Loss 2.927698\n", "Epoch 3869, Loss 2.927697\n", "Epoch 3870, Loss 2.927698\n", "Epoch 3871, Loss 2.927696\n", "Epoch 3872, Loss 2.927699\n", "Epoch 3873, Loss 2.927697\n", "Epoch 3874, Loss 2.927696\n", "Epoch 3875, Loss 2.927699\n", "Epoch 3876, Loss 2.927697\n", "Epoch 3877, Loss 
2.927696\n", "Epoch 3878, Loss 2.927697\n", "Epoch 3879, Loss 2.927696\n", "Epoch 3880, Loss 2.927696\n", "Epoch 3881, Loss 2.927696\n", "Epoch 3882, Loss 2.927696\n", "Epoch 3883, Loss 2.927695\n", "Epoch 3884, Loss 2.927695\n", "Epoch 3885, Loss 2.927696\n", "Epoch 3886, Loss 2.927696\n", "Epoch 3887, Loss 2.927695\n", "Epoch 3888, Loss 2.927694\n", "Epoch 3889, Loss 2.927694\n", "Epoch 3890, Loss 2.927694\n", "Epoch 3891, Loss 2.927693\n", "Epoch 3892, Loss 2.927695\n", "Epoch 3893, Loss 2.927695\n", "Epoch 3894, Loss 2.927694\n", "Epoch 3895, Loss 2.927695\n", "Epoch 3896, Loss 2.927693\n", "Epoch 3897, Loss 2.927693\n", "Epoch 3898, Loss 2.927695\n", "Epoch 3899, Loss 2.927693\n", "Epoch 3900, Loss 2.927692\n", "Epoch 3901, Loss 2.927694\n", "Epoch 3902, Loss 2.927692\n", "Epoch 3903, Loss 2.927693\n", "Epoch 3904, Loss 2.927691\n", "Epoch 3905, Loss 2.927692\n", "Epoch 3906, Loss 2.927692\n", "Epoch 3907, Loss 2.927692\n", "Epoch 3908, Loss 2.927692\n", "Epoch 3909, Loss 2.927692\n", "Epoch 3910, Loss 2.927690\n", "Epoch 3911, Loss 2.927692\n", "Epoch 3912, Loss 2.927691\n", "Epoch 3913, Loss 2.927691\n", "Epoch 3914, Loss 2.927689\n", "Epoch 3915, Loss 2.927691\n", "Epoch 3916, Loss 2.927691\n", "Epoch 3917, Loss 2.927689\n", "Epoch 3918, Loss 2.927690\n", "Epoch 3919, Loss 2.927690\n", "Epoch 3920, Loss 2.927690\n", "Epoch 3921, Loss 2.927690\n", "Epoch 3922, Loss 2.927689\n", "Epoch 3923, Loss 2.927688\n", "Epoch 3924, Loss 2.927689\n", "Epoch 3925, Loss 2.927688\n", "Epoch 3926, Loss 2.927689\n", "Epoch 3927, Loss 2.927689\n", "Epoch 3928, Loss 2.927689\n", "Epoch 3929, Loss 2.927688\n", "Epoch 3930, Loss 2.927688\n", "Epoch 3931, Loss 2.927688\n", "Epoch 3932, Loss 2.927687\n", "Epoch 3933, Loss 2.927689\n", "Epoch 3934, Loss 2.927688\n", "Epoch 3935, Loss 2.927687\n", "Epoch 3936, Loss 2.927688\n", "Epoch 3937, Loss 2.927686\n", "Epoch 3938, Loss 2.927686\n", "Epoch 3939, Loss 2.927686\n", "Epoch 3940, Loss 2.927687\n", "Epoch 3941, Loss 2.927687\n", "Epoch 3942, Loss 2.927686\n", "Epoch 3943, Loss 2.927687\n", "Epoch 3944, Loss 2.927686\n", "Epoch 3945, Loss 2.927685\n", "Epoch 3946, Loss 2.927685\n", "Epoch 3947, Loss 2.927686\n", "Epoch 3948, Loss 2.927685\n", "Epoch 3949, Loss 2.927686\n", "Epoch 3950, Loss 2.927686\n", "Epoch 3951, Loss 2.927686\n", "Epoch 3952, Loss 2.927685\n", "Epoch 3953, Loss 2.927686\n", "Epoch 3954, Loss 2.927685\n", "Epoch 3955, Loss 2.927685\n", "Epoch 3956, Loss 2.927683\n", "Epoch 3957, Loss 2.927684\n", "Epoch 3958, Loss 2.927685\n", "Epoch 3959, Loss 2.927684\n", "Epoch 3960, Loss 2.927684\n", "Epoch 3961, Loss 2.927684\n", "Epoch 3962, Loss 2.927685\n", "Epoch 3963, Loss 2.927683\n", "Epoch 3964, Loss 2.927685\n", "Epoch 3965, Loss 2.927684\n", "Epoch 3966, Loss 2.927683\n", "Epoch 3967, Loss 2.927683\n", "Epoch 3968, Loss 2.927683\n", "Epoch 3969, Loss 2.927682\n", "Epoch 3970, Loss 2.927682\n", "Epoch 3971, Loss 2.927684\n", "Epoch 3972, Loss 2.927683\n", "Epoch 3973, Loss 2.927684\n", "Epoch 3974, Loss 2.927683\n", "Epoch 3975, Loss 2.927682\n", "Epoch 3976, Loss 2.927682\n", "Epoch 3977, Loss 2.927682\n", "Epoch 3978, Loss 2.927682\n", "Epoch 3979, Loss 2.927681\n", "Epoch 3980, Loss 2.927682\n", "Epoch 3981, Loss 2.927681\n", "Epoch 3982, Loss 2.927681\n", "Epoch 3983, Loss 2.927682\n", "Epoch 3984, Loss 2.927681\n", "Epoch 3985, Loss 2.927681\n", "Epoch 3986, Loss 2.927681\n", "Epoch 3987, Loss 2.927680\n", "Epoch 3988, Loss 2.927681\n", "Epoch 3989, Loss 2.927681\n", "Epoch 3990, Loss 2.927680\n", "Epoch 3991, Loss 2.927682\n", "Epoch 
3992, Loss 2.927681\n", "Epoch 3993, Loss 2.927680\n", "Epoch 3994, Loss 2.927679\n", "Epoch 3995, Loss 2.927680\n", "Epoch 3996, Loss 2.927679\n", "Epoch 3997, Loss 2.927680\n", "Epoch 3998, Loss 2.927680\n", "Epoch 3999, Loss 2.927679\n", "Epoch 4000, Loss 2.927679\n", "Epoch 4001, Loss 2.927679\n", "Epoch 4002, Loss 2.927679\n", "Epoch 4003, Loss 2.927679\n", "Epoch 4004, Loss 2.927680\n", "Epoch 4005, Loss 2.927681\n", "Epoch 4006, Loss 2.927679\n", "Epoch 4007, Loss 2.927679\n", "Epoch 4008, Loss 2.927679\n", "Epoch 4009, Loss 2.927679\n", "Epoch 4010, Loss 2.927678\n", "Epoch 4011, Loss 2.927679\n", "Epoch 4012, Loss 2.927679\n", "Epoch 4013, Loss 2.927677\n", "Epoch 4014, Loss 2.927678\n", "Epoch 4015, Loss 2.927677\n", "Epoch 4016, Loss 2.927678\n", "Epoch 4017, Loss 2.927677\n", "Epoch 4018, Loss 2.927677\n", "Epoch 4019, Loss 2.927676\n", "Epoch 4020, Loss 2.927678\n", "Epoch 4021, Loss 2.927677\n", "Epoch 4022, Loss 2.927677\n", "Epoch 4023, Loss 2.927678\n", "Epoch 4024, Loss 2.927678\n", "Epoch 4025, Loss 2.927677\n", "Epoch 4026, Loss 2.927676\n", "Epoch 4027, Loss 2.927676\n", "Epoch 4028, Loss 2.927677\n", "Epoch 4029, Loss 2.927676\n", "Epoch 4030, Loss 2.927675\n", "Epoch 4031, Loss 2.927676\n", "Epoch 4032, Loss 2.927676\n", "Epoch 4033, Loss 2.927675\n", "Epoch 4034, Loss 2.927676\n", "Epoch 4035, Loss 2.927676\n", "Epoch 4036, Loss 2.927676\n", "Epoch 4037, Loss 2.927677\n", "Epoch 4038, Loss 2.927676\n", "Epoch 4039, Loss 2.927675\n", "Epoch 4040, Loss 2.927676\n", "Epoch 4041, Loss 2.927675\n", "Epoch 4042, Loss 2.927675\n", "Epoch 4043, Loss 2.927675\n", "Epoch 4044, Loss 2.927675\n", "Epoch 4045, Loss 2.927675\n", "Epoch 4046, Loss 2.927675\n", "Epoch 4047, Loss 2.927675\n", "Epoch 4048, Loss 2.927675\n", "Epoch 4049, Loss 2.927673\n", "Epoch 4050, Loss 2.927675\n", "Epoch 4051, Loss 2.927675\n", "Epoch 4052, Loss 2.927675\n", "Epoch 4053, Loss 2.927673\n", "Epoch 4054, Loss 2.927674\n", "Epoch 4055, Loss 2.927675\n", "Epoch 4056, Loss 2.927673\n", "Epoch 4057, Loss 2.927673\n", "Epoch 4058, Loss 2.927673\n", "Epoch 4059, Loss 2.927675\n", "Epoch 4060, Loss 2.927673\n", "Epoch 4061, Loss 2.927673\n", "Epoch 4062, Loss 2.927673\n", "Epoch 4063, Loss 2.927673\n", "Epoch 4064, Loss 2.927673\n", "Epoch 4065, Loss 2.927672\n", "Epoch 4066, Loss 2.927673\n", "Epoch 4067, Loss 2.927672\n", "Epoch 4068, Loss 2.927672\n", "Epoch 4069, Loss 2.927673\n", "Epoch 4070, Loss 2.927672\n", "Epoch 4071, Loss 2.927672\n", "Epoch 4072, Loss 2.927672\n", "Epoch 4073, Loss 2.927672\n", "Epoch 4074, Loss 2.927672\n", "Epoch 4075, Loss 2.927672\n", "Epoch 4076, Loss 2.927671\n", "Epoch 4077, Loss 2.927671\n", "Epoch 4078, Loss 2.927673\n", "Epoch 4079, Loss 2.927671\n", "Epoch 4080, Loss 2.927670\n", "Epoch 4081, Loss 2.927672\n", "Epoch 4082, Loss 2.927671\n", "Epoch 4083, Loss 2.927673\n", "Epoch 4084, Loss 2.927670\n", "Epoch 4085, Loss 2.927670\n", "Epoch 4086, Loss 2.927671\n", "Epoch 4087, Loss 2.927672\n", "Epoch 4088, Loss 2.927670\n", "Epoch 4089, Loss 2.927670\n", "Epoch 4090, Loss 2.927670\n", "Epoch 4091, Loss 2.927671\n", "Epoch 4092, Loss 2.927670\n", "Epoch 4093, Loss 2.927671\n", "Epoch 4094, Loss 2.927670\n", "Epoch 4095, Loss 2.927670\n", "Epoch 4096, Loss 2.927670\n", "Epoch 4097, Loss 2.927670\n", "Epoch 4098, Loss 2.927671\n", "Epoch 4099, Loss 2.927670\n", "Epoch 4100, Loss 2.927669\n", "Epoch 4101, Loss 2.927669\n", "Epoch 4102, Loss 2.927671\n", "Epoch 4103, Loss 2.927670\n", "Epoch 4104, Loss 2.927670\n", "Epoch 4105, Loss 2.927670\n", "Epoch 4106, Loss 
2.927670\n", "Epoch 4107, Loss 2.927670\n", "Epoch 4108, Loss 2.927669\n", "Epoch 4109, Loss 2.927668\n", "Epoch 4110, Loss 2.927670\n", "Epoch 4111, Loss 2.927669\n", "Epoch 4112, Loss 2.927669\n", "Epoch 4113, Loss 2.927669\n", "Epoch 4114, Loss 2.927670\n", "Epoch 4115, Loss 2.927669\n", "Epoch 4116, Loss 2.927668\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 4117, Loss 2.927667\n", "Epoch 4118, Loss 2.927669\n", "Epoch 4119, Loss 2.927668\n", "Epoch 4120, Loss 2.927668\n", "Epoch 4121, Loss 2.927669\n", "Epoch 4122, Loss 2.927669\n", "Epoch 4123, Loss 2.927668\n", "Epoch 4124, Loss 2.927668\n", "Epoch 4125, Loss 2.927668\n", "Epoch 4126, Loss 2.927668\n", "Epoch 4127, Loss 2.927667\n", "Epoch 4128, Loss 2.927668\n", "Epoch 4129, Loss 2.927667\n", "Epoch 4130, Loss 2.927667\n", "Epoch 4131, Loss 2.927667\n", "Epoch 4132, Loss 2.927668\n", "Epoch 4133, Loss 2.927666\n", "Epoch 4134, Loss 2.927667\n", "Epoch 4135, Loss 2.927667\n", "Epoch 4136, Loss 2.927667\n", "Epoch 4137, Loss 2.927667\n", "Epoch 4138, Loss 2.927666\n", "Epoch 4139, Loss 2.927669\n", "Epoch 4140, Loss 2.927667\n", "Epoch 4141, Loss 2.927666\n", "Epoch 4142, Loss 2.927667\n", "Epoch 4143, Loss 2.927665\n", "Epoch 4144, Loss 2.927667\n", "Epoch 4145, Loss 2.927666\n", "Epoch 4146, Loss 2.927666\n", "Epoch 4147, Loss 2.927667\n", "Epoch 4148, Loss 2.927666\n", "Epoch 4149, Loss 2.927667\n", "Epoch 4150, Loss 2.927666\n", "Epoch 4151, Loss 2.927666\n", "Epoch 4152, Loss 2.927667\n", "Epoch 4153, Loss 2.927666\n", "Epoch 4154, Loss 2.927666\n", "Epoch 4155, Loss 2.927666\n", "Epoch 4156, Loss 2.927666\n", "Epoch 4157, Loss 2.927666\n", "Epoch 4158, Loss 2.927666\n", "Epoch 4159, Loss 2.927665\n", "Epoch 4160, Loss 2.927666\n", "Epoch 4161, Loss 2.927665\n", "Epoch 4162, Loss 2.927665\n", "Epoch 4163, Loss 2.927666\n", "Epoch 4164, Loss 2.927666\n", "Epoch 4165, Loss 2.927665\n", "Epoch 4166, Loss 2.927664\n", "Epoch 4167, Loss 2.927666\n", "Epoch 4168, Loss 2.927664\n", "Epoch 4169, Loss 2.927665\n", "Epoch 4170, Loss 2.927665\n", "Epoch 4171, Loss 2.927664\n", "Epoch 4172, Loss 2.927666\n", "Epoch 4173, Loss 2.927665\n", "Epoch 4174, Loss 2.927664\n", "Epoch 4175, Loss 2.927665\n", "Epoch 4176, Loss 2.927665\n", "Epoch 4177, Loss 2.927665\n", "Epoch 4178, Loss 2.927663\n", "Epoch 4179, Loss 2.927665\n", "Epoch 4180, Loss 2.927664\n", "Epoch 4181, Loss 2.927664\n", "Epoch 4182, Loss 2.927664\n", "Epoch 4183, Loss 2.927663\n", "Epoch 4184, Loss 2.927663\n", "Epoch 4185, Loss 2.927665\n", "Epoch 4186, Loss 2.927664\n", "Epoch 4187, Loss 2.927663\n", "Epoch 4188, Loss 2.927664\n", "Epoch 4189, Loss 2.927664\n", "Epoch 4190, Loss 2.927663\n", "Epoch 4191, Loss 2.927662\n", "Epoch 4192, Loss 2.927663\n", "Epoch 4193, Loss 2.927663\n", "Epoch 4194, Loss 2.927663\n", "Epoch 4195, Loss 2.927663\n", "Epoch 4196, Loss 2.927662\n", "Epoch 4197, Loss 2.927663\n", "Epoch 4198, Loss 2.927664\n", "Epoch 4199, Loss 2.927663\n", "Epoch 4200, Loss 2.927664\n", "Epoch 4201, Loss 2.927663\n", "Epoch 4202, Loss 2.927664\n", "Epoch 4203, Loss 2.927663\n", "Epoch 4204, Loss 2.927662\n", "Epoch 4205, Loss 2.927662\n", "Epoch 4206, Loss 2.927662\n", "Epoch 4207, Loss 2.927663\n", "Epoch 4208, Loss 2.927662\n", "Epoch 4209, Loss 2.927663\n", "Epoch 4210, Loss 2.927662\n", "Epoch 4211, Loss 2.927664\n", "Epoch 4212, Loss 2.927663\n", "Epoch 4213, Loss 2.927662\n", "Epoch 4214, Loss 2.927662\n", "Epoch 4215, Loss 2.927664\n", "Epoch 4216, Loss 2.927662\n", "Epoch 4217, Loss 2.927660\n", "Epoch 4218, Loss 2.927663\n", "Epoch 
4219, Loss 2.927662\n", "Epoch 4220, Loss 2.927663\n", "Epoch 4221, Loss 2.927662\n", "Epoch 4222, Loss 2.927661\n", "Epoch 4223, Loss 2.927660\n", "Epoch 4224, Loss 2.927662\n", "Epoch 4225, Loss 2.927663\n", "Epoch 4226, Loss 2.927661\n", "Epoch 4227, Loss 2.927661\n", "Epoch 4228, Loss 2.927663\n", "Epoch 4229, Loss 2.927662\n", "Epoch 4230, Loss 2.927662\n", "Epoch 4231, Loss 2.927661\n", "Epoch 4232, Loss 2.927660\n", "Epoch 4233, Loss 2.927661\n", "Epoch 4234, Loss 2.927662\n", "Epoch 4235, Loss 2.927661\n", "Epoch 4236, Loss 2.927660\n", "Epoch 4237, Loss 2.927662\n", "Epoch 4238, Loss 2.927661\n", "Epoch 4239, Loss 2.927661\n", "Epoch 4240, Loss 2.927660\n", "Epoch 4241, Loss 2.927660\n", "Epoch 4242, Loss 2.927662\n", "Epoch 4243, Loss 2.927662\n", "Epoch 4244, Loss 2.927661\n", "Epoch 4245, Loss 2.927661\n", "Epoch 4246, Loss 2.927661\n", "Epoch 4247, Loss 2.927661\n", "Epoch 4248, Loss 2.927660\n", "Epoch 4249, Loss 2.927661\n", "Epoch 4250, Loss 2.927660\n", "Epoch 4251, Loss 2.927660\n", "Epoch 4252, Loss 2.927660\n", "Epoch 4253, Loss 2.927659\n", "Epoch 4254, Loss 2.927659\n", "Epoch 4255, Loss 2.927661\n", "Epoch 4256, Loss 2.927662\n", "Epoch 4257, Loss 2.927661\n", "Epoch 4258, Loss 2.927660\n", "Epoch 4259, Loss 2.927660\n", "Epoch 4260, Loss 2.927660\n", "Epoch 4261, Loss 2.927658\n", "Epoch 4262, Loss 2.927658\n", "Epoch 4263, Loss 2.927660\n", "Epoch 4264, Loss 2.927659\n", "Epoch 4265, Loss 2.927660\n", "Epoch 4266, Loss 2.927660\n", "Epoch 4267, Loss 2.927660\n", "Epoch 4268, Loss 2.927660\n", "Epoch 4269, Loss 2.927660\n", "Epoch 4270, Loss 2.927660\n", "Epoch 4271, Loss 2.927660\n", "Epoch 4272, Loss 2.927659\n", "Epoch 4273, Loss 2.927659\n", "Epoch 4274, Loss 2.927660\n", "Epoch 4275, Loss 2.927660\n", "Epoch 4276, Loss 2.927660\n", "Epoch 4277, Loss 2.927659\n", "Epoch 4278, Loss 2.927658\n", "Epoch 4279, Loss 2.927659\n", "Epoch 4280, Loss 2.927659\n", "Epoch 4281, Loss 2.927660\n", "Epoch 4282, Loss 2.927658\n", "Epoch 4283, Loss 2.927659\n", "Epoch 4284, Loss 2.927658\n", "Epoch 4285, Loss 2.927657\n", "Epoch 4286, Loss 2.927659\n", "Epoch 4287, Loss 2.927659\n", "Epoch 4288, Loss 2.927660\n", "Epoch 4289, Loss 2.927658\n", "Epoch 4290, Loss 2.927658\n", "Epoch 4291, Loss 2.927658\n", "Epoch 4292, Loss 2.927658\n", "Epoch 4293, Loss 2.927657\n", "Epoch 4294, Loss 2.927658\n", "Epoch 4295, Loss 2.927659\n", "Epoch 4296, Loss 2.927660\n", "Epoch 4297, Loss 2.927658\n", "Epoch 4298, Loss 2.927659\n", "Epoch 4299, Loss 2.927658\n", "Epoch 4300, Loss 2.927657\n", "Epoch 4301, Loss 2.927658\n", "Epoch 4302, Loss 2.927658\n", "Epoch 4303, Loss 2.927658\n", "Epoch 4304, Loss 2.927657\n", "Epoch 4305, Loss 2.927659\n", "Epoch 4306, Loss 2.927657\n", "Epoch 4307, Loss 2.927658\n", "Epoch 4308, Loss 2.927658\n", "Epoch 4309, Loss 2.927658\n", "Epoch 4310, Loss 2.927658\n", "Epoch 4311, Loss 2.927657\n", "Epoch 4312, Loss 2.927657\n", "Epoch 4313, Loss 2.927657\n", "Epoch 4314, Loss 2.927656\n", "Epoch 4315, Loss 2.927657\n", "Epoch 4316, Loss 2.927657\n", "Epoch 4317, Loss 2.927657\n", "Epoch 4318, Loss 2.927656\n", "Epoch 4319, Loss 2.927657\n", "Epoch 4320, Loss 2.927657\n", "Epoch 4321, Loss 2.927656\n", "Epoch 4322, Loss 2.927658\n", "Epoch 4323, Loss 2.927658\n", "Epoch 4324, Loss 2.927657\n", "Epoch 4325, Loss 2.927656\n", "Epoch 4326, Loss 2.927657\n", "Epoch 4327, Loss 2.927658\n", "Epoch 4328, Loss 2.927657\n", "Epoch 4329, Loss 2.927657\n", "Epoch 4330, Loss 2.927657\n", "Epoch 4331, Loss 2.927658\n", "Epoch 4332, Loss 2.927658\n", "Epoch 4333, Loss 
2.927657\n", "Epoch 4334, Loss 2.927658\n", "Epoch 4335, Loss 2.927657\n", "Epoch 4336, Loss 2.927657\n", "Epoch 4337, Loss 2.927657\n", "Epoch 4338, Loss 2.927657\n", "Epoch 4339, Loss 2.927657\n", "Epoch 4340, Loss 2.927656\n", "Epoch 4341, Loss 2.927657\n", "Epoch 4342, Loss 2.927655\n", "Epoch 4343, Loss 2.927656\n", "Epoch 4344, Loss 2.927656\n", "Epoch 4345, Loss 2.927657\n", "Epoch 4346, Loss 2.927656\n", "Epoch 4347, Loss 2.927657\n", "Epoch 4348, Loss 2.927655\n", "Epoch 4349, Loss 2.927656\n", "Epoch 4350, Loss 2.927656\n", "Epoch 4351, Loss 2.927655\n", "Epoch 4352, Loss 2.927656\n", "Epoch 4353, Loss 2.927656\n", "Epoch 4354, Loss 2.927655\n", "Epoch 4355, Loss 2.927655\n", "Epoch 4356, Loss 2.927656\n", "Epoch 4357, Loss 2.927655\n", "Epoch 4358, Loss 2.927657\n", "Epoch 4359, Loss 2.927656\n", "Epoch 4360, Loss 2.927655\n", "Epoch 4361, Loss 2.927656\n", "Epoch 4362, Loss 2.927655\n", "Epoch 4363, Loss 2.927656\n", "Epoch 4364, Loss 2.927656\n", "Epoch 4365, Loss 2.927656\n", "Epoch 4366, Loss 2.927656\n", "Epoch 4367, Loss 2.927655\n", "Epoch 4368, Loss 2.927654\n", "Epoch 4369, Loss 2.927655\n", "Epoch 4370, Loss 2.927656\n", "Epoch 4371, Loss 2.927655\n", "Epoch 4372, Loss 2.927656\n", "Epoch 4373, Loss 2.927656\n", "Epoch 4374, Loss 2.927655\n", "Epoch 4375, Loss 2.927656\n", "Epoch 4376, Loss 2.927655\n", "Epoch 4377, Loss 2.927656\n", "Epoch 4378, Loss 2.927655\n", "Epoch 4379, Loss 2.927655\n", "Epoch 4380, Loss 2.927654\n", "Epoch 4381, Loss 2.927656\n", "Epoch 4382, Loss 2.927655\n", "Epoch 4383, Loss 2.927656\n", "Epoch 4384, Loss 2.927656\n", "Epoch 4385, Loss 2.927655\n", "Epoch 4386, Loss 2.927656\n", "Epoch 4387, Loss 2.927654\n", "Epoch 4388, Loss 2.927656\n", "Epoch 4389, Loss 2.927654\n", "Epoch 4390, Loss 2.927655\n", "Epoch 4391, Loss 2.927654\n", "Epoch 4392, Loss 2.927655\n", "Epoch 4393, Loss 2.927655\n", "Epoch 4394, Loss 2.927654\n", "Epoch 4395, Loss 2.927654\n", "Epoch 4396, Loss 2.927655\n", "Epoch 4397, Loss 2.927655\n", "Epoch 4398, Loss 2.927654\n", "Epoch 4399, Loss 2.927655\n", "Epoch 4400, Loss 2.927654\n", "Epoch 4401, Loss 2.927655\n", "Epoch 4402, Loss 2.927654\n", "Epoch 4403, Loss 2.927654\n", "Epoch 4404, Loss 2.927654\n", "Epoch 4405, Loss 2.927654\n", "Epoch 4406, Loss 2.927655\n", "Epoch 4407, Loss 2.927654\n", "Epoch 4408, Loss 2.927654\n", "Epoch 4409, Loss 2.927654\n", "Epoch 4410, Loss 2.927655\n", "Epoch 4411, Loss 2.927655\n", "Epoch 4412, Loss 2.927656\n", "Epoch 4413, Loss 2.927654\n", "Epoch 4414, Loss 2.927655\n", "Epoch 4415, Loss 2.927654\n", "Epoch 4416, Loss 2.927653\n", "Epoch 4417, Loss 2.927655\n", "Epoch 4418, Loss 2.927653\n", "Epoch 4419, Loss 2.927655\n", "Epoch 4420, Loss 2.927653\n", "Epoch 4421, Loss 2.927654\n", "Epoch 4422, Loss 2.927653\n", "Epoch 4423, Loss 2.927655\n", "Epoch 4424, Loss 2.927654\n", "Epoch 4425, Loss 2.927655\n", "Epoch 4426, Loss 2.927653\n", "Epoch 4427, Loss 2.927654\n", "Epoch 4428, Loss 2.927655\n", "Epoch 4429, Loss 2.927654\n", "Epoch 4430, Loss 2.927654\n", "Epoch 4431, Loss 2.927653\n", "Epoch 4432, Loss 2.927654\n", "Epoch 4433, Loss 2.927654\n", "Epoch 4434, Loss 2.927654\n", "Epoch 4435, Loss 2.927655\n", "Epoch 4436, Loss 2.927653\n", "Epoch 4437, Loss 2.927652\n", "Epoch 4438, Loss 2.927653\n", "Epoch 4439, Loss 2.927654\n", "Epoch 4440, Loss 2.927655\n", "Epoch 4441, Loss 2.927655\n", "Epoch 4442, Loss 2.927652\n", "Epoch 4443, Loss 2.927653\n", "Epoch 4444, Loss 2.927651\n", "Epoch 4445, Loss 2.927654\n", "Epoch 4446, Loss 2.927654\n", "Epoch 4447, Loss 2.927653\n", "Epoch 
4448, Loss 2.927654\n", "Epoch 4449, Loss 2.927655\n", "Epoch 4450, Loss 2.927654\n", "Epoch 4451, Loss 2.927654\n", "Epoch 4452, Loss 2.927653\n", "Epoch 4453, Loss 2.927652\n", "Epoch 4454, Loss 2.927653\n", "Epoch 4455, Loss 2.927653\n", "Epoch 4456, Loss 2.927654\n", "Epoch 4457, Loss 2.927653\n", "Epoch 4458, Loss 2.927652\n", "Epoch 4459, Loss 2.927653\n", "Epoch 4460, Loss 2.927652\n", "Epoch 4461, Loss 2.927654\n", "Epoch 4462, Loss 2.927654\n", "Epoch 4463, Loss 2.927654\n", "Epoch 4464, Loss 2.927653\n", "Epoch 4465, Loss 2.927653\n", "Epoch 4466, Loss 2.927652\n", "Epoch 4467, Loss 2.927654\n", "Epoch 4468, Loss 2.927653\n", "Epoch 4469, Loss 2.927653\n", "Epoch 4470, Loss 2.927653\n", "Epoch 4471, Loss 2.927653\n", "Epoch 4472, Loss 2.927653\n", "Epoch 4473, Loss 2.927654\n", "Epoch 4474, Loss 2.927653\n", "Epoch 4475, Loss 2.927653\n", "Epoch 4476, Loss 2.927652\n", "Epoch 4477, Loss 2.927653\n", "Epoch 4478, Loss 2.927654\n", "Epoch 4479, Loss 2.927653\n", "Epoch 4480, Loss 2.927651\n", "Epoch 4481, Loss 2.927653\n", "Epoch 4482, Loss 2.927653\n", "Epoch 4483, Loss 2.927654\n", "Epoch 4484, Loss 2.927653\n", "Epoch 4485, Loss 2.927653\n", "Epoch 4486, Loss 2.927652\n", "Epoch 4487, Loss 2.927651\n", "Epoch 4488, Loss 2.927652\n", "Epoch 4489, Loss 2.927653\n", "Epoch 4490, Loss 2.927654\n", "Epoch 4491, Loss 2.927653\n", "Epoch 4492, Loss 2.927652\n", "Epoch 4493, Loss 2.927652\n", "Epoch 4494, Loss 2.927651\n", "Epoch 4495, Loss 2.927652\n", "Epoch 4496, Loss 2.927653\n", "Epoch 4497, Loss 2.927653\n", "Epoch 4498, Loss 2.927652\n", "Epoch 4499, Loss 2.927652\n", "Epoch 4500, Loss 2.927652\n", "Epoch 4501, Loss 2.927652\n", "Epoch 4502, Loss 2.927654\n", "Epoch 4503, Loss 2.927651\n", "Epoch 4504, Loss 2.927652\n", "Epoch 4505, Loss 2.927653\n", "Epoch 4506, Loss 2.927653\n", "Epoch 4507, Loss 2.927651\n", "Epoch 4508, Loss 2.927651\n", "Epoch 4509, Loss 2.927653\n", "Epoch 4510, Loss 2.927653\n", "Epoch 4511, Loss 2.927651\n", "Epoch 4512, Loss 2.927651\n", "Epoch 4513, Loss 2.927653\n", "Epoch 4514, Loss 2.927653\n", "Epoch 4515, Loss 2.927652\n", "Epoch 4516, Loss 2.927653\n", "Epoch 4517, Loss 2.927652\n", "Epoch 4518, Loss 2.927652\n", "Epoch 4519, Loss 2.927653\n", "Epoch 4520, Loss 2.927652\n", "Epoch 4521, Loss 2.927652\n", "Epoch 4522, Loss 2.927651\n", "Epoch 4523, Loss 2.927651\n", "Epoch 4524, Loss 2.927652\n", "Epoch 4525, Loss 2.927652\n", "Epoch 4526, Loss 2.927651\n", "Epoch 4527, Loss 2.927651\n", "Epoch 4528, Loss 2.927650\n", "Epoch 4529, Loss 2.927651\n", "Epoch 4530, Loss 2.927653\n", "Epoch 4531, Loss 2.927651\n", "Epoch 4532, Loss 2.927651\n", "Epoch 4533, Loss 2.927652\n", "Epoch 4534, Loss 2.927653\n", "Epoch 4535, Loss 2.927651\n", "Epoch 4536, Loss 2.927651\n", "Epoch 4537, Loss 2.927650\n", "Epoch 4538, Loss 2.927651\n", "Epoch 4539, Loss 2.927650\n", "Epoch 4540, Loss 2.927652\n", "Epoch 4541, Loss 2.927652\n", "Epoch 4542, Loss 2.927651\n", "Epoch 4543, Loss 2.927652\n", "Epoch 4544, Loss 2.927652\n", "Epoch 4545, Loss 2.927651\n", "Epoch 4546, Loss 2.927650\n", "Epoch 4547, Loss 2.927651\n", "Epoch 4548, Loss 2.927652\n", "Epoch 4549, Loss 2.927651\n", "Epoch 4550, Loss 2.927653\n", "Epoch 4551, Loss 2.927651\n", "Epoch 4552, Loss 2.927652\n", "Epoch 4553, Loss 2.927651\n", "Epoch 4554, Loss 2.927652\n", "Epoch 4555, Loss 2.927650\n", "Epoch 4556, Loss 2.927650\n", "Epoch 4557, Loss 2.927650\n", "Epoch 4558, Loss 2.927652\n", "Epoch 4559, Loss 2.927650\n", "Epoch 4560, Loss 2.927650\n", "Epoch 4561, Loss 2.927651\n", "Epoch 4562, Loss 
2.927652\n", "Epoch 4563, Loss 2.927650\n", "Epoch 4564, Loss 2.927651\n", "Epoch 4565, Loss 2.927650\n", "Epoch 4566, Loss 2.927651\n", "Epoch 4567, Loss 2.927650\n", "Epoch 4568, Loss 2.927651\n", "Epoch 4569, Loss 2.927650\n", "Epoch 4570, Loss 2.927650\n", "Epoch 4571, Loss 2.927650\n", "Epoch 4572, Loss 2.927651\n", "Epoch 4573, Loss 2.927652\n", "Epoch 4574, Loss 2.927650\n", "Epoch 4575, Loss 2.927651\n", "Epoch 4576, Loss 2.927651\n", "Epoch 4577, Loss 2.927651\n", "Epoch 4578, Loss 2.927652\n", "Epoch 4579, Loss 2.927650\n", "Epoch 4580, Loss 2.927650\n", "Epoch 4581, Loss 2.927650\n", "Epoch 4582, Loss 2.927651\n", "Epoch 4583, Loss 2.927650\n", "Epoch 4584, Loss 2.927650\n", "Epoch 4585, Loss 2.927652\n", "Epoch 4586, Loss 2.927650\n", "Epoch 4587, Loss 2.927651\n", "Epoch 4588, Loss 2.927650\n", "Epoch 4589, Loss 2.927650\n", "Epoch 4590, Loss 2.927652\n", "Epoch 4591, Loss 2.927650\n", "Epoch 4592, Loss 2.927651\n", "Epoch 4593, Loss 2.927651\n", "Epoch 4594, Loss 2.927650\n", "Epoch 4595, Loss 2.927650\n", "Epoch 4596, Loss 2.927651\n", "Epoch 4597, Loss 2.927651\n", "Epoch 4598, Loss 2.927652\n", "Epoch 4599, Loss 2.927649\n", "Epoch 4600, Loss 2.927650\n", "Epoch 4601, Loss 2.927650\n", "Epoch 4602, Loss 2.927649\n", "Epoch 4603, Loss 2.927649\n", "Epoch 4604, Loss 2.927649\n", "Epoch 4605, Loss 2.927650\n", "Epoch 4606, Loss 2.927650\n", "Epoch 4607, Loss 2.927650\n", "Epoch 4608, Loss 2.927651\n", "Epoch 4609, Loss 2.927650\n", "Epoch 4610, Loss 2.927651\n", "Epoch 4611, Loss 2.927650\n", "Epoch 4612, Loss 2.927650\n", "Epoch 4613, Loss 2.927650\n", "Epoch 4614, Loss 2.927649\n", "Epoch 4615, Loss 2.927650\n", "Epoch 4616, Loss 2.927651\n", "Epoch 4617, Loss 2.927650\n", "Epoch 4618, Loss 2.927651\n", "Epoch 4619, Loss 2.927649\n", "Epoch 4620, Loss 2.927650\n", "Epoch 4621, Loss 2.927650\n", "Epoch 4622, Loss 2.927650\n", "Epoch 4623, Loss 2.927651\n", "Epoch 4624, Loss 2.927651\n", "Epoch 4625, Loss 2.927649\n", "Epoch 4626, Loss 2.927650\n", "Epoch 4627, Loss 2.927650\n", "Epoch 4628, Loss 2.927651\n", "Epoch 4629, Loss 2.927650\n", "Epoch 4630, Loss 2.927647\n", "Epoch 4631, Loss 2.927648\n", "Epoch 4632, Loss 2.927649\n", "Epoch 4633, Loss 2.927649\n", "Epoch 4634, Loss 2.927650\n", "Epoch 4635, Loss 2.927649\n", "Epoch 4636, Loss 2.927650\n", "Epoch 4637, Loss 2.927650\n", "Epoch 4638, Loss 2.927651\n", "Epoch 4639, Loss 2.927649\n", "Epoch 4640, Loss 2.927649\n", "Epoch 4641, Loss 2.927650\n", "Epoch 4642, Loss 2.927650\n", "Epoch 4643, Loss 2.927649\n", "Epoch 4644, Loss 2.927650\n", "Epoch 4645, Loss 2.927650\n", "Epoch 4646, Loss 2.927650\n", "Epoch 4647, Loss 2.927651\n", "Epoch 4648, Loss 2.927650\n", "Epoch 4649, Loss 2.927650\n", "Epoch 4650, Loss 2.927649\n", "Epoch 4651, Loss 2.927650\n", "Epoch 4652, Loss 2.927651\n", "Epoch 4653, Loss 2.927650\n", "Epoch 4654, Loss 2.927651\n", "Epoch 4655, Loss 2.927651\n", "Epoch 4656, Loss 2.927650\n", "Epoch 4657, Loss 2.927649\n", "Epoch 4658, Loss 2.927649\n", "Epoch 4659, Loss 2.927649\n", "Epoch 4660, Loss 2.927649\n", "Epoch 4661, Loss 2.927648\n", "Epoch 4662, Loss 2.927650\n", "Epoch 4663, Loss 2.927649\n", "Epoch 4664, Loss 2.927648\n", "Epoch 4665, Loss 2.927649\n", "Epoch 4666, Loss 2.927649\n", "Epoch 4667, Loss 2.927649\n", "Epoch 4668, Loss 2.927649\n", "Epoch 4669, Loss 2.927649\n", "Epoch 4670, Loss 2.927649\n", "Epoch 4671, Loss 2.927649\n", "Epoch 4672, Loss 2.927649\n", "Epoch 4673, Loss 2.927650\n", "Epoch 4674, Loss 2.927650\n", "Epoch 4675, Loss 2.927650\n", "Epoch 4676, Loss 2.927650\n", "Epoch 
4677, Loss 2.927650\n", "Epoch 4678, Loss 2.927650\n", "Epoch 4679, Loss 2.927649\n", "Epoch 4680, Loss 2.927648\n", "Epoch 4681, Loss 2.927649\n", "Epoch 4682, Loss 2.927649\n", "Epoch 4683, Loss 2.927648\n", "Epoch 4684, Loss 2.927648\n", "Epoch 4685, Loss 2.927649\n", "Epoch 4686, Loss 2.927651\n", "Epoch 4687, Loss 2.927649\n", "Epoch 4688, Loss 2.927650\n", "Epoch 4689, Loss 2.927649\n", "Epoch 4690, Loss 2.927650\n", "Epoch 4691, Loss 2.927648\n", "Epoch 4692, Loss 2.927649\n", "Epoch 4693, Loss 2.927649\n", "Epoch 4694, Loss 2.927650\n", "Epoch 4695, Loss 2.927648\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 4696, Loss 2.927649\n", "Epoch 4697, Loss 2.927648\n", "Epoch 4698, Loss 2.927649\n", "Epoch 4699, Loss 2.927649\n", "Epoch 4700, Loss 2.927650\n", "Epoch 4701, Loss 2.927650\n", "Epoch 4702, Loss 2.927650\n", "Epoch 4703, Loss 2.927648\n", "Epoch 4704, Loss 2.927649\n", "Epoch 4705, Loss 2.927649\n", "Epoch 4706, Loss 2.927649\n", "Epoch 4707, Loss 2.927649\n", "Epoch 4708, Loss 2.927648\n", "Epoch 4709, Loss 2.927650\n", "Epoch 4710, Loss 2.927650\n", "Epoch 4711, Loss 2.927649\n", "Epoch 4712, Loss 2.927649\n", "Epoch 4713, Loss 2.927649\n", "Epoch 4714, Loss 2.927649\n", "Epoch 4715, Loss 2.927650\n", "Epoch 4716, Loss 2.927649\n", "Epoch 4717, Loss 2.927649\n", "Epoch 4718, Loss 2.927649\n", "Epoch 4719, Loss 2.927648\n", "Epoch 4720, Loss 2.927649\n", "Epoch 4721, Loss 2.927649\n", "Epoch 4722, Loss 2.927650\n", "Epoch 4723, Loss 2.927650\n", "Epoch 4724, Loss 2.927649\n", "Epoch 4725, Loss 2.927650\n", "Epoch 4726, Loss 2.927649\n", "Epoch 4727, Loss 2.927648\n", "Epoch 4728, Loss 2.927647\n", "Epoch 4729, Loss 2.927649\n", "Epoch 4730, Loss 2.927648\n", "Epoch 4731, Loss 2.927649\n", "Epoch 4732, Loss 2.927648\n", "Epoch 4733, Loss 2.927649\n", "Epoch 4734, Loss 2.927647\n", "Epoch 4735, Loss 2.927650\n", "Epoch 4736, Loss 2.927650\n", "Epoch 4737, Loss 2.927650\n", "Epoch 4738, Loss 2.927649\n", "Epoch 4739, Loss 2.927648\n", "Epoch 4740, Loss 2.927648\n", "Epoch 4741, Loss 2.927648\n", "Epoch 4742, Loss 2.927647\n", "Epoch 4743, Loss 2.927648\n", "Epoch 4744, Loss 2.927649\n", "Epoch 4745, Loss 2.927648\n", "Epoch 4746, Loss 2.927648\n", "Epoch 4747, Loss 2.927649\n", "Epoch 4748, Loss 2.927648\n", "Epoch 4749, Loss 2.927649\n", "Epoch 4750, Loss 2.927649\n", "Epoch 4751, Loss 2.927649\n", "Epoch 4752, Loss 2.927648\n", "Epoch 4753, Loss 2.927648\n", "Epoch 4754, Loss 2.927647\n", "Epoch 4755, Loss 2.927648\n", "Epoch 4756, Loss 2.927648\n", "Epoch 4757, Loss 2.927648\n", "Epoch 4758, Loss 2.927648\n", "Epoch 4759, Loss 2.927649\n", "Epoch 4760, Loss 2.927649\n", "Epoch 4761, Loss 2.927649\n", "Epoch 4762, Loss 2.927648\n", "Epoch 4763, Loss 2.927647\n", "Epoch 4764, Loss 2.927648\n", "Epoch 4765, Loss 2.927648\n", "Epoch 4766, Loss 2.927649\n", "Epoch 4767, Loss 2.927648\n", "Epoch 4768, Loss 2.927649\n", "Epoch 4769, Loss 2.927648\n", "Epoch 4770, Loss 2.927649\n", "Epoch 4771, Loss 2.927648\n", "Epoch 4772, Loss 2.927648\n", "Epoch 4773, Loss 2.927649\n", "Epoch 4774, Loss 2.927648\n", "Epoch 4775, Loss 2.927647\n", "Epoch 4776, Loss 2.927648\n", "Epoch 4777, Loss 2.927648\n", "Epoch 4778, Loss 2.927648\n", "Epoch 4779, Loss 2.927649\n", "Epoch 4780, Loss 2.927649\n", "Epoch 4781, Loss 2.927649\n", "Epoch 4782, Loss 2.927649\n", "Epoch 4783, Loss 2.927649\n", "Epoch 4784, Loss 2.927648\n", "Epoch 4785, Loss 2.927648\n", "Epoch 4786, Loss 2.927647\n", "Epoch 4787, Loss 2.927647\n", "Epoch 4788, Loss 2.927648\n", "Epoch 4789, Loss 2.927648\n", 
"Epoch 4790, Loss 2.927648\n", "Epoch 4791, Loss 2.927648\n", "Epoch 4792, Loss 2.927648\n", "Epoch 4793, Loss 2.927648\n", "Epoch 4794, Loss 2.927650\n", "Epoch 4795, Loss 2.927648\n", "Epoch 4796, Loss 2.927649\n", "Epoch 4797, Loss 2.927649\n", "Epoch 4798, Loss 2.927648\n", "Epoch 4799, Loss 2.927648\n", "Epoch 4800, Loss 2.927650\n", "Epoch 4801, Loss 2.927647\n", "Epoch 4802, Loss 2.927648\n", "Epoch 4803, Loss 2.927649\n", "Epoch 4804, Loss 2.927647\n", "Epoch 4805, Loss 2.927649\n", "Epoch 4806, Loss 2.927648\n", "Epoch 4807, Loss 2.927649\n", "Epoch 4808, Loss 2.927648\n", "Epoch 4809, Loss 2.927649\n", "Epoch 4810, Loss 2.927649\n", "Epoch 4811, Loss 2.927647\n", "Epoch 4812, Loss 2.927649\n", "Epoch 4813, Loss 2.927648\n", "Epoch 4814, Loss 2.927647\n", "Epoch 4815, Loss 2.927649\n", "Epoch 4816, Loss 2.927647\n", "Epoch 4817, Loss 2.927648\n", "Epoch 4818, Loss 2.927646\n", "Epoch 4819, Loss 2.927649\n", "Epoch 4820, Loss 2.927647\n", "Epoch 4821, Loss 2.927649\n", "Epoch 4822, Loss 2.927649\n", "Epoch 4823, Loss 2.927648\n", "Epoch 4824, Loss 2.927648\n", "Epoch 4825, Loss 2.927649\n", "Epoch 4826, Loss 2.927648\n", "Epoch 4827, Loss 2.927649\n", "Epoch 4828, Loss 2.927649\n", "Epoch 4829, Loss 2.927648\n", "Epoch 4830, Loss 2.927648\n", "Epoch 4831, Loss 2.927646\n", "Epoch 4832, Loss 2.927648\n", "Epoch 4833, Loss 2.927647\n", "Epoch 4834, Loss 2.927648\n", "Epoch 4835, Loss 2.927649\n", "Epoch 4836, Loss 2.927647\n", "Epoch 4837, Loss 2.927648\n", "Epoch 4838, Loss 2.927648\n", "Epoch 4839, Loss 2.927648\n", "Epoch 4840, Loss 2.927648\n", "Epoch 4841, Loss 2.927648\n", "Epoch 4842, Loss 2.927649\n", "Epoch 4843, Loss 2.927647\n", "Epoch 4844, Loss 2.927648\n", "Epoch 4845, Loss 2.927647\n", "Epoch 4846, Loss 2.927647\n", "Epoch 4847, Loss 2.927648\n", "Epoch 4848, Loss 2.927648\n", "Epoch 4849, Loss 2.927649\n", "Epoch 4850, Loss 2.927647\n", "Epoch 4851, Loss 2.927649\n", "Epoch 4852, Loss 2.927648\n", "Epoch 4853, Loss 2.927647\n", "Epoch 4854, Loss 2.927649\n", "Epoch 4855, Loss 2.927648\n", "Epoch 4856, Loss 2.927649\n", "Epoch 4857, Loss 2.927649\n", "Epoch 4858, Loss 2.927647\n", "Epoch 4859, Loss 2.927648\n", "Epoch 4860, Loss 2.927648\n", "Epoch 4861, Loss 2.927649\n", "Epoch 4862, Loss 2.927647\n", "Epoch 4863, Loss 2.927647\n", "Epoch 4864, Loss 2.927648\n", "Epoch 4865, Loss 2.927646\n", "Epoch 4866, Loss 2.927648\n", "Epoch 4867, Loss 2.927648\n", "Epoch 4868, Loss 2.927648\n", "Epoch 4869, Loss 2.927648\n", "Epoch 4870, Loss 2.927647\n", "Epoch 4871, Loss 2.927649\n", "Epoch 4872, Loss 2.927648\n", "Epoch 4873, Loss 2.927647\n", "Epoch 4874, Loss 2.927648\n", "Epoch 4875, Loss 2.927648\n", "Epoch 4876, Loss 2.927649\n", "Epoch 4877, Loss 2.927648\n", "Epoch 4878, Loss 2.927648\n", "Epoch 4879, Loss 2.927647\n", "Epoch 4880, Loss 2.927648\n", "Epoch 4881, Loss 2.927648\n", "Epoch 4882, Loss 2.927647\n", "Epoch 4883, Loss 2.927649\n", "Epoch 4884, Loss 2.927648\n", "Epoch 4885, Loss 2.927647\n", "Epoch 4886, Loss 2.927649\n", "Epoch 4887, Loss 2.927648\n", "Epoch 4888, Loss 2.927647\n", "Epoch 4889, Loss 2.927646\n", "Epoch 4890, Loss 2.927647\n", "Epoch 4891, Loss 2.927648\n", "Epoch 4892, Loss 2.927646\n", "Epoch 4893, Loss 2.927649\n", "Epoch 4894, Loss 2.927648\n", "Epoch 4895, Loss 2.927648\n", "Epoch 4896, Loss 2.927649\n", "Epoch 4897, Loss 2.927647\n", "Epoch 4898, Loss 2.927648\n", "Epoch 4899, Loss 2.927648\n", "Epoch 4900, Loss 2.927648\n", "Epoch 4901, Loss 2.927649\n", "Epoch 4902, Loss 2.927647\n", "Epoch 4903, Loss 2.927647\n", "Epoch 4904, Loss 
2.927646\n", "Epoch 4905, Loss 2.927647\n", "Epoch 4906, Loss 2.927647\n", "Epoch 4907, Loss 2.927647\n", "Epoch 4908, Loss 2.927647\n", "Epoch 4909, Loss 2.927648\n", "Epoch 4910, Loss 2.927649\n", "Epoch 4911, Loss 2.927646\n", "Epoch 4912, Loss 2.927648\n", "Epoch 4913, Loss 2.927648\n", "Epoch 4914, Loss 2.927647\n", "Epoch 4915, Loss 2.927649\n", "Epoch 4916, Loss 2.927648\n", "Epoch 4917, Loss 2.927647\n", "Epoch 4918, Loss 2.927647\n", "Epoch 4919, Loss 2.927647\n", "Epoch 4920, Loss 2.927647\n", "Epoch 4921, Loss 2.927646\n", "Epoch 4922, Loss 2.927649\n", "Epoch 4923, Loss 2.927647\n", "Epoch 4924, Loss 2.927646\n", "Epoch 4925, Loss 2.927647\n", "Epoch 4926, Loss 2.927648\n", "Epoch 4927, Loss 2.927647\n", "Epoch 4928, Loss 2.927649\n", "Epoch 4929, Loss 2.927648\n", "Epoch 4930, Loss 2.927648\n", "Epoch 4931, Loss 2.927646\n", "Epoch 4932, Loss 2.927648\n", "Epoch 4933, Loss 2.927646\n", "Epoch 4934, Loss 2.927646\n", "Epoch 4935, Loss 2.927649\n", "Epoch 4936, Loss 2.927647\n", "Epoch 4937, Loss 2.927647\n", "Epoch 4938, Loss 2.927647\n", "Epoch 4939, Loss 2.927647\n", "Epoch 4940, Loss 2.927647\n", "Epoch 4941, Loss 2.927646\n", "Epoch 4942, Loss 2.927649\n", "Epoch 4943, Loss 2.927647\n", "Epoch 4944, Loss 2.927648\n", "Epoch 4945, Loss 2.927647\n", "Epoch 4946, Loss 2.927649\n", "Epoch 4947, Loss 2.927646\n", "Epoch 4948, Loss 2.927649\n", "Epoch 4949, Loss 2.927648\n", "Epoch 4950, Loss 2.927647\n", "Epoch 4951, Loss 2.927648\n", "Epoch 4952, Loss 2.927647\n", "Epoch 4953, Loss 2.927647\n", "Epoch 4954, Loss 2.927647\n", "Epoch 4955, Loss 2.927648\n", "Epoch 4956, Loss 2.927646\n", "Epoch 4957, Loss 2.927648\n", "Epoch 4958, Loss 2.927647\n", "Epoch 4959, Loss 2.927647\n", "Epoch 4960, Loss 2.927647\n", "Epoch 4961, Loss 2.927648\n", "Epoch 4962, Loss 2.927647\n", "Epoch 4963, Loss 2.927648\n", "Epoch 4964, Loss 2.927648\n", "Epoch 4965, Loss 2.927648\n", "Epoch 4966, Loss 2.927648\n", "Epoch 4967, Loss 2.927648\n", "Epoch 4968, Loss 2.927647\n", "Epoch 4969, Loss 2.927647\n", "Epoch 4970, Loss 2.927646\n", "Epoch 4971, Loss 2.927647\n", "Epoch 4972, Loss 2.927647\n", "Epoch 4973, Loss 2.927647\n", "Epoch 4974, Loss 2.927647\n", "Epoch 4975, Loss 2.927647\n", "Epoch 4976, Loss 2.927647\n", "Epoch 4977, Loss 2.927648\n", "Epoch 4978, Loss 2.927647\n", "Epoch 4979, Loss 2.927648\n", "Epoch 4980, Loss 2.927647\n", "Epoch 4981, Loss 2.927648\n", "Epoch 4982, Loss 2.927648\n", "Epoch 4983, Loss 2.927646\n", "Epoch 4984, Loss 2.927648\n", "Epoch 4985, Loss 2.927647\n", "Epoch 4986, Loss 2.927648\n", "Epoch 4987, Loss 2.927648\n", "Epoch 4988, Loss 2.927648\n", "Epoch 4989, Loss 2.927646\n", "Epoch 4990, Loss 2.927648\n", "Epoch 4991, Loss 2.927647\n", "Epoch 4992, Loss 2.927647\n", "Epoch 4993, Loss 2.927647\n", "Epoch 4994, Loss 2.927648\n", "Epoch 4995, Loss 2.927647\n", "Epoch 4996, Loss 2.927646\n", "Epoch 4997, Loss 2.927647\n", "Epoch 4998, Loss 2.927648\n", "Epoch 4999, Loss 2.927647\n" ] }, { "data": { "text/plain": [ "tensor([ 5.3671, -17.3012], requires_grad=True)" ] }, "execution_count": 101, "metadata": {}, "output_type": "execute_result" } ], "source": [ "def model(t_u, w, b):\n", " return w * t_u + b\n", "\n", "def loss_fn(t_p, t_c):\n", " sq_diffs = (t_p - t_c)**2\n", " return sq_diffs.mean()\n", "\n", "params = torch.tensor([1.0, 0.0], requires_grad=True)\n", "\n", "nepochs = 5000\n", "learning_rate = 1e-2\n", "\n", "optimizer = optim.SGD([params], lr=learning_rate)\n", "\n", "for epoch in range(nepochs):\n", " \n", " # forward pass\n", " t_p = model(t_un, 
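"    # *params unpacks the parameter tensor into (w, b); t_un is the input tensor prepared in an earlier cell\n", "    t_p = model(t_un, 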
*params)\n", " loss = loss_fn(t_p, t_c)\n", "\n", " print('Epoch %d, Loss %f' % (epoch, float(loss)))\n", " \n", " # backward pass\n", " optimizer.zero_grad()\n", " loss.backward() \n", " optimizer.step()\n", "\n", "t_p = model(t_un, *params)\n", "\n", "params" ] }, { "cell_type": "code", "execution_count": 29, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Epoch 0, Loss 1763.884644\n", "Epoch 1, Loss 1334.349121\n", "Epoch 2, Loss 967.815857\n", "Epoch 3, Loss 664.756348\n", "Epoch 4, Loss 424.630096\n", "Epoch 5, Loss 245.539536\n", "Epoch 6, Loss 123.854927\n", "Epoch 7, Loss 53.885216\n", "Epoch 8, Loss 27.729158\n", "Epoch 9, Loss 35.472927\n", "Epoch 10, Loss 65.865623\n", "Epoch 11, Loss 107.448486\n", "Epoch 12, Loss 149.893204\n", "Epoch 13, Loss 185.160660\n", "Epoch 14, Loss 208.152191\n", "Epoch 15, Loss 216.751862\n", "Epoch 16, Loss 211.386810\n", "Epoch 17, Loss 194.333359\n", "Epoch 18, Loss 168.973526\n", "Epoch 19, Loss 139.126602\n", "Epoch 20, Loss 108.509003\n", "Epoch 21, Loss 80.331757\n", "Epoch 22, Loss 57.027817\n", "Epoch 23, Loss 40.099060\n", "Epoch 24, Loss 30.076361\n", "Epoch 25, Loss 26.585762\n", "Epoch 26, Loss 28.510445\n", "Epoch 27, Loss 34.227798\n", "Epoch 28, Loss 41.889477\n", "Epoch 29, Loss 49.703087\n", "Epoch 30, Loss 56.173435\n", "Epoch 31, Loss 60.269627\n", "Epoch 32, Loss 61.500473\n", "Epoch 33, Loss 59.899094\n", "Epoch 34, Loss 55.932739\n", "Epoch 35, Loss 50.363575\n", "Epoch 36, Loss 44.088673\n", "Epoch 37, Loss 37.985172\n", "Epoch 38, Loss 32.781734\n", "Epoch 39, Loss 28.970186\n", "Epoch 40, Loss 26.764654\n", "Epoch 41, Loss 26.108040\n", "Epoch 42, Loss 26.719383\n", "Epoch 43, Loss 28.170134\n", "Epoch 44, Loss 29.973921\n", "Epoch 45, Loss 31.673698\n", "Epoch 46, Loss 32.912014\n", "Epoch 47, Loss 33.474720\n", "Epoch 48, Loss 33.304337\n", "Epoch 49, Loss 32.484787\n", "Epoch 50, Loss 31.204374\n", "Epoch 51, Loss 29.706341\n", "Epoch 52, Loss 28.237404\n", "Epoch 53, Loss 27.003424\n", "Epoch 54, Loss 26.138813\n", "Epoch 55, Loss 25.693235\n", "Epoch 56, Loss 25.635466\n", "Epoch 57, Loss 25.871410\n", "Epoch 58, Loss 26.270973\n", "Epoch 59, Loss 26.697638\n", "Epoch 60, Loss 27.034904\n", "Epoch 61, Loss 27.204889\n", "Epoch 62, Loss 27.176851\n", "Epoch 63, Loss 26.965380\n", "Epoch 64, Loss 26.620049\n", "Epoch 65, Loss 26.209972\n", "Epoch 66, Loss 25.806860\n", "Epoch 67, Loss 25.470259\n", "Epoch 68, Loss 25.237463\n", "Epoch 69, Loss 25.119513\n", "Epoch 70, Loss 25.103111\n", "Epoch 71, Loss 25.157188\n", "Epoch 72, Loss 25.242144\n", "Epoch 73, Loss 25.319366\n", "Epoch 74, Loss 25.359070\n", "Epoch 75, Loss 25.345070\n", "Epoch 76, Loss 25.275896\n", "Epoch 77, Loss 25.162645\n", "Epoch 78, Loss 25.024433\n", "Epoch 79, Loss 24.882936\n", "Epoch 80, Loss 24.757172\n", "Epoch 81, Loss 24.659727\n", "Epoch 82, Loss 24.595060\n", "Epoch 83, Loss 24.559795\n", "Epoch 84, Loss 24.544792\n", "Epoch 85, Loss 24.538088\n", "Epoch 86, Loss 24.528118\n", "Epoch 87, Loss 24.506195\n", "Epoch 88, Loss 24.468050\n", "Epoch 89, Loss 24.414021\n", "Epoch 90, Loss 24.348192\n", "Epoch 91, Loss 24.276789\n", "Epoch 92, Loss 24.206377\n", "Epoch 93, Loss 24.142193\n", "Epoch 94, Loss 24.087170\n", "Epoch 95, Loss 24.041588\n", "Epoch 96, Loss 24.003447\n", "Epoch 97, Loss 23.969360\n", "Epoch 98, Loss 23.935572\n", "Epoch 99, Loss 23.898945\n", "Epoch 100, Loss 23.857599\n", "Epoch 101, Loss 23.811182\n", "Epoch 102, Loss 23.760670\n", "Epoch 103, Loss 23.707891\n", "Epoch 104, Loss 
23.654949\n", "...\n", "Epoch 200, Loss 19.033220\n", "Epoch 300, Loss 14.354903\n", "Epoch 400, Loss 10.474223\n", "Epoch 500, Loss 7.588877\n", "Epoch 600, Loss 5.625725\n", "Epoch 700, Loss 4.391865\n",
"Epoch 800, Loss 3.672278\n", "Epoch 900, Loss 3.282125\n", "Epoch 1000, Loss 3.085363\n", "Epoch 1100, Loss 2.993109\n", "Epoch 1200, Loss 2.952935\n", "Epoch 1300, Loss 2.936714\n", "...\n", "Epoch 
1377, Loss 2.931553\n", "Epoch 1378, Loss 2.931509\n", "Epoch 1379, Loss 2.931465\n", "Epoch 1380, Loss 2.931424\n", "Epoch 1381, Loss 2.931381\n", "Epoch 1382, Loss 2.931339\n", "Epoch 1383, Loss 2.931297\n", "Epoch 1384, Loss 2.931256\n", "Epoch 1385, Loss 2.931216\n", "Epoch 1386, Loss 2.931175\n", "Epoch 1387, Loss 2.931137\n", "Epoch 1388, Loss 2.931098\n", "Epoch 1389, Loss 2.931058\n", "Epoch 1390, Loss 2.931020\n", "Epoch 1391, Loss 2.930982\n", "Epoch 1392, Loss 2.930944\n", "Epoch 1393, Loss 2.930907\n", "Epoch 1394, Loss 2.930869\n", "Epoch 1395, Loss 2.930833\n", "Epoch 1396, Loss 2.930798\n", "Epoch 1397, Loss 2.930761\n", "Epoch 1398, Loss 2.930725\n", "Epoch 1399, Loss 2.930691\n", "Epoch 1400, Loss 2.930657\n", "Epoch 1401, Loss 2.930623\n", "Epoch 1402, Loss 2.930589\n", "Epoch 1403, Loss 2.930555\n", "Epoch 1404, Loss 2.930523\n", "Epoch 1405, Loss 2.930490\n", "Epoch 1406, Loss 2.930457\n", "Epoch 1407, Loss 2.930425\n", "Epoch 1408, Loss 2.930394\n", "Epoch 1409, Loss 2.930363\n", "Epoch 1410, Loss 2.930331\n", "Epoch 1411, Loss 2.930300\n", "Epoch 1412, Loss 2.930270\n", "Epoch 1413, Loss 2.930240\n", "Epoch 1414, Loss 2.930210\n", "Epoch 1415, Loss 2.930180\n", "Epoch 1416, Loss 2.930151\n", "Epoch 1417, Loss 2.930123\n", "Epoch 1418, Loss 2.930093\n", "Epoch 1419, Loss 2.930066\n", "Epoch 1420, Loss 2.930036\n", "Epoch 1421, Loss 2.930010\n", "Epoch 1422, Loss 2.929983\n", "Epoch 1423, Loss 2.929956\n", "Epoch 1424, Loss 2.929930\n", "Epoch 1425, Loss 2.929904\n", "Epoch 1426, Loss 2.929878\n", "Epoch 1427, Loss 2.929852\n", "Epoch 1428, Loss 2.929826\n", "Epoch 1429, Loss 2.929800\n", "Epoch 1430, Loss 2.929775\n", "Epoch 1431, Loss 2.929751\n", "Epoch 1432, Loss 2.929725\n", "Epoch 1433, Loss 2.929703\n", "Epoch 1434, Loss 2.929678\n", "Epoch 1435, Loss 2.929654\n", "Epoch 1436, Loss 2.929631\n", "Epoch 1437, Loss 2.929608\n", "Epoch 1438, Loss 2.929584\n", "Epoch 1439, Loss 2.929563\n", "Epoch 1440, Loss 2.929539\n", "Epoch 1441, Loss 2.929517\n", "Epoch 1442, Loss 2.929496\n", "Epoch 1443, Loss 2.929475\n", "Epoch 1444, Loss 2.929452\n", "Epoch 1445, Loss 2.929430\n", "Epoch 1446, Loss 2.929411\n", "Epoch 1447, Loss 2.929388\n", "Epoch 1448, Loss 2.929370\n", "Epoch 1449, Loss 2.929350\n", "Epoch 1450, Loss 2.929330\n", "Epoch 1451, Loss 2.929310\n", "Epoch 1452, Loss 2.929289\n", "Epoch 1453, Loss 2.929272\n", "Epoch 1454, Loss 2.929250\n", "Epoch 1455, Loss 2.929233\n", "Epoch 1456, Loss 2.929214\n", "Epoch 1457, Loss 2.929195\n", "Epoch 1458, Loss 2.929177\n", "Epoch 1459, Loss 2.929159\n", "Epoch 1460, Loss 2.929141\n", "Epoch 1461, Loss 2.929122\n", "Epoch 1462, Loss 2.929105\n", "Epoch 1463, Loss 2.929090\n", "Epoch 1464, Loss 2.929070\n", "Epoch 1465, Loss 2.929054\n", "Epoch 1466, Loss 2.929038\n", "Epoch 1467, Loss 2.929021\n", "Epoch 1468, Loss 2.929005\n", "Epoch 1469, Loss 2.928989\n", "Epoch 1470, Loss 2.928973\n", "Epoch 1471, Loss 2.928956\n", "Epoch 1472, Loss 2.928942\n", "Epoch 1473, Loss 2.928924\n", "Epoch 1474, Loss 2.928910\n", "Epoch 1475, Loss 2.928895\n", "Epoch 1476, Loss 2.928879\n", "Epoch 1477, Loss 2.928866\n", "Epoch 1478, Loss 2.928851\n", "Epoch 1479, Loss 2.928837\n", "Epoch 1480, Loss 2.928822\n", "Epoch 1481, Loss 2.928807\n", "Epoch 1482, Loss 2.928794\n", "Epoch 1483, Loss 2.928780\n", "Epoch 1484, Loss 2.928767\n", "Epoch 1485, Loss 2.928753\n", "Epoch 1486, Loss 2.928738\n", "Epoch 1487, Loss 2.928728\n", "Epoch 1488, Loss 2.928713\n", "Epoch 1489, Loss 2.928700\n", "Epoch 1490, Loss 2.928688\n", "Epoch 1491, Loss 
2.928675\n", "Epoch 1492, Loss 2.928662\n", "Epoch 1493, Loss 2.928651\n", "Epoch 1494, Loss 2.928637\n", "Epoch 1495, Loss 2.928625\n", "Epoch 1496, Loss 2.928614\n", "Epoch 1497, Loss 2.928602\n", "Epoch 1498, Loss 2.928590\n", "Epoch 1499, Loss 2.928578\n", "Epoch 1500, Loss 2.928566\n", "Epoch 1501, Loss 2.928558\n", "Epoch 1502, Loss 2.928544\n", "Epoch 1503, Loss 2.928533\n", "Epoch 1504, Loss 2.928524\n", "Epoch 1505, Loss 2.928514\n", "Epoch 1506, Loss 2.928502\n", "Epoch 1507, Loss 2.928493\n", "Epoch 1508, Loss 2.928481\n", "Epoch 1509, Loss 2.928470\n", "Epoch 1510, Loss 2.928461\n", "Epoch 1511, Loss 2.928453\n", "Epoch 1512, Loss 2.928442\n", "Epoch 1513, Loss 2.928432\n", "Epoch 1514, Loss 2.928421\n", "Epoch 1515, Loss 2.928411\n", "Epoch 1516, Loss 2.928403\n", "Epoch 1517, Loss 2.928394\n", "Epoch 1518, Loss 2.928385\n", "Epoch 1519, Loss 2.928377\n", "Epoch 1520, Loss 2.928365\n", "Epoch 1521, Loss 2.928358\n", "Epoch 1522, Loss 2.928349\n", "Epoch 1523, Loss 2.928339\n", "Epoch 1524, Loss 2.928331\n", "Epoch 1525, Loss 2.928324\n", "Epoch 1526, Loss 2.928316\n", "Epoch 1527, Loss 2.928306\n", "Epoch 1528, Loss 2.928299\n", "Epoch 1529, Loss 2.928290\n", "Epoch 1530, Loss 2.928282\n", "Epoch 1531, Loss 2.928274\n", "Epoch 1532, Loss 2.928265\n", "Epoch 1533, Loss 2.928259\n", "Epoch 1534, Loss 2.928251\n", "Epoch 1535, Loss 2.928242\n", "Epoch 1536, Loss 2.928236\n", "Epoch 1537, Loss 2.928229\n", "Epoch 1538, Loss 2.928223\n", "Epoch 1539, Loss 2.928215\n", "Epoch 1540, Loss 2.928207\n", "Epoch 1541, Loss 2.928200\n", "Epoch 1542, Loss 2.928193\n", "Epoch 1543, Loss 2.928188\n", "Epoch 1544, Loss 2.928181\n", "Epoch 1545, Loss 2.928172\n", "Epoch 1546, Loss 2.928166\n", "Epoch 1547, Loss 2.928159\n", "Epoch 1548, Loss 2.928153\n", "Epoch 1549, Loss 2.928146\n", "Epoch 1550, Loss 2.928141\n", "Epoch 1551, Loss 2.928134\n", "Epoch 1552, Loss 2.928127\n", "Epoch 1553, Loss 2.928123\n", "Epoch 1554, Loss 2.928117\n", "Epoch 1555, Loss 2.928111\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1556, Loss 2.928102\n", "Epoch 1557, Loss 2.928098\n", "Epoch 1558, Loss 2.928092\n", "Epoch 1559, Loss 2.928087\n", "Epoch 1560, Loss 2.928081\n", "Epoch 1561, Loss 2.928075\n", "Epoch 1562, Loss 2.928070\n", "Epoch 1563, Loss 2.928066\n", "Epoch 1564, Loss 2.928058\n", "Epoch 1565, Loss 2.928053\n", "Epoch 1566, Loss 2.928050\n", "Epoch 1567, Loss 2.928045\n", "Epoch 1568, Loss 2.928038\n", "Epoch 1569, Loss 2.928034\n", "Epoch 1570, Loss 2.928027\n", "Epoch 1571, Loss 2.928024\n", "Epoch 1572, Loss 2.928018\n", "Epoch 1573, Loss 2.928015\n", "Epoch 1574, Loss 2.928009\n", "Epoch 1575, Loss 2.928005\n", "Epoch 1576, Loss 2.928001\n", "Epoch 1577, Loss 2.927997\n", "Epoch 1578, Loss 2.927992\n", "Epoch 1579, Loss 2.927988\n", "Epoch 1580, Loss 2.927984\n", "Epoch 1581, Loss 2.927979\n", "Epoch 1582, Loss 2.927974\n", "Epoch 1583, Loss 2.927970\n", "Epoch 1584, Loss 2.927965\n", "Epoch 1585, Loss 2.927961\n", "Epoch 1586, Loss 2.927956\n", "Epoch 1587, Loss 2.927953\n", "Epoch 1588, Loss 2.927949\n", "Epoch 1589, Loss 2.927946\n", "Epoch 1590, Loss 2.927942\n", "Epoch 1591, Loss 2.927937\n", "Epoch 1592, Loss 2.927933\n", "Epoch 1593, Loss 2.927930\n", "Epoch 1594, Loss 2.927928\n", "Epoch 1595, Loss 2.927923\n", "Epoch 1596, Loss 2.927921\n", "Epoch 1597, Loss 2.927914\n", "Epoch 1598, Loss 2.927912\n", "Epoch 1599, Loss 2.927908\n", "Epoch 1600, Loss 2.927906\n", "Epoch 1601, Loss 2.927902\n", "Epoch 1602, Loss 2.927898\n", "Epoch 1603, Loss 2.927896\n", "Epoch 
1604, Loss 2.927892\n", "Epoch 1605, Loss 2.927888\n", "Epoch 1606, Loss 2.927885\n", "Epoch 1607, Loss 2.927884\n", "Epoch 1608, Loss 2.927880\n", "Epoch 1609, Loss 2.927877\n", "Epoch 1610, Loss 2.927873\n", "Epoch 1611, Loss 2.927870\n", "Epoch 1612, Loss 2.927868\n", "Epoch 1613, Loss 2.927864\n", "Epoch 1614, Loss 2.927863\n", "Epoch 1615, Loss 2.927859\n", "Epoch 1616, Loss 2.927856\n", "Epoch 1617, Loss 2.927852\n", "Epoch 1618, Loss 2.927850\n", "Epoch 1619, Loss 2.927848\n", "Epoch 1620, Loss 2.927845\n", "Epoch 1621, Loss 2.927842\n", "Epoch 1622, Loss 2.927839\n", "Epoch 1623, Loss 2.927837\n", "Epoch 1624, Loss 2.927833\n", "Epoch 1625, Loss 2.927831\n", "Epoch 1626, Loss 2.927829\n", "Epoch 1627, Loss 2.927827\n", "Epoch 1628, Loss 2.927825\n", "Epoch 1629, Loss 2.927824\n", "Epoch 1630, Loss 2.927819\n", "Epoch 1631, Loss 2.927818\n", "Epoch 1632, Loss 2.927814\n", "Epoch 1633, Loss 2.927813\n", "Epoch 1634, Loss 2.927811\n", "Epoch 1635, Loss 2.927810\n", "Epoch 1636, Loss 2.927807\n", "Epoch 1637, Loss 2.927805\n", "Epoch 1638, Loss 2.927802\n", "Epoch 1639, Loss 2.927803\n", "Epoch 1640, Loss 2.927799\n", "Epoch 1641, Loss 2.927797\n", "Epoch 1642, Loss 2.927794\n", "Epoch 1643, Loss 2.927792\n", "Epoch 1644, Loss 2.927790\n", "Epoch 1645, Loss 2.927789\n", "Epoch 1646, Loss 2.927785\n", "Epoch 1647, Loss 2.927785\n", "Epoch 1648, Loss 2.927782\n", "Epoch 1649, Loss 2.927781\n", "Epoch 1650, Loss 2.927779\n", "Epoch 1651, Loss 2.927778\n", "Epoch 1652, Loss 2.927775\n", "Epoch 1653, Loss 2.927773\n", "Epoch 1654, Loss 2.927772\n", "Epoch 1655, Loss 2.927772\n", "Epoch 1656, Loss 2.927769\n", "Epoch 1657, Loss 2.927766\n", "Epoch 1658, Loss 2.927763\n", "Epoch 1659, Loss 2.927763\n", "Epoch 1660, Loss 2.927763\n", "Epoch 1661, Loss 2.927760\n", "Epoch 1662, Loss 2.927761\n", "Epoch 1663, Loss 2.927757\n", "Epoch 1664, Loss 2.927754\n", "Epoch 1665, Loss 2.927753\n", "Epoch 1666, Loss 2.927752\n", "Epoch 1667, Loss 2.927751\n", "Epoch 1668, Loss 2.927750\n", "Epoch 1669, Loss 2.927750\n", "Epoch 1670, Loss 2.927747\n", "Epoch 1671, Loss 2.927746\n", "Epoch 1672, Loss 2.927744\n", "Epoch 1673, Loss 2.927743\n", "Epoch 1674, Loss 2.927741\n", "Epoch 1675, Loss 2.927741\n", "Epoch 1676, Loss 2.927739\n", "Epoch 1677, Loss 2.927737\n", "Epoch 1678, Loss 2.927737\n", "Epoch 1679, Loss 2.927736\n", "Epoch 1680, Loss 2.927735\n", "Epoch 1681, Loss 2.927731\n", "Epoch 1682, Loss 2.927733\n", "Epoch 1683, Loss 2.927732\n", "Epoch 1684, Loss 2.927730\n", "Epoch 1685, Loss 2.927728\n", "Epoch 1686, Loss 2.927726\n", "Epoch 1687, Loss 2.927725\n", "Epoch 1688, Loss 2.927724\n", "Epoch 1689, Loss 2.927725\n", "Epoch 1690, Loss 2.927722\n", "Epoch 1691, Loss 2.927721\n", "Epoch 1692, Loss 2.927721\n", "Epoch 1693, Loss 2.927718\n", "Epoch 1694, Loss 2.927718\n", "Epoch 1695, Loss 2.927717\n", "Epoch 1696, Loss 2.927717\n", "Epoch 1697, Loss 2.927715\n", "Epoch 1698, Loss 2.927715\n", "Epoch 1699, Loss 2.927713\n", "Epoch 1700, Loss 2.927712\n", "Epoch 1701, Loss 2.927711\n", "Epoch 1702, Loss 2.927711\n", "Epoch 1703, Loss 2.927710\n", "Epoch 1704, Loss 2.927709\n", "Epoch 1705, Loss 2.927707\n", "Epoch 1706, Loss 2.927708\n", "Epoch 1707, Loss 2.927706\n", "Epoch 1708, Loss 2.927706\n", "Epoch 1709, Loss 2.927704\n", "Epoch 1710, Loss 2.927704\n", "Epoch 1711, Loss 2.927703\n", "Epoch 1712, Loss 2.927702\n", "Epoch 1713, Loss 2.927701\n", "Epoch 1714, Loss 2.927701\n", "Epoch 1715, Loss 2.927700\n", "Epoch 1716, Loss 2.927699\n", "Epoch 1717, Loss 2.927697\n", "Epoch 1718, Loss 
2.927697\n", "Epoch 1719, Loss 2.927697\n", "Epoch 1720, Loss 2.927696\n", "Epoch 1721, Loss 2.927695\n", "Epoch 1722, Loss 2.927695\n", "Epoch 1723, Loss 2.927693\n", "Epoch 1724, Loss 2.927692\n", "Epoch 1725, Loss 2.927692\n", "Epoch 1726, Loss 2.927692\n", "Epoch 1727, Loss 2.927690\n", "Epoch 1728, Loss 2.927692\n", "Epoch 1729, Loss 2.927690\n", "Epoch 1730, Loss 2.927689\n", "Epoch 1731, Loss 2.927689\n", "Epoch 1732, Loss 2.927688\n", "Epoch 1733, Loss 2.927688\n", "Epoch 1734, Loss 2.927688\n", "Epoch 1735, Loss 2.927686\n", "Epoch 1736, Loss 2.927686\n", "Epoch 1737, Loss 2.927685\n", "Epoch 1738, Loss 2.927683\n", "Epoch 1739, Loss 2.927685\n", "Epoch 1740, Loss 2.927684\n", "Epoch 1741, Loss 2.927683\n", "Epoch 1742, Loss 2.927683\n", "Epoch 1743, Loss 2.927682\n", "Epoch 1744, Loss 2.927681\n", "Epoch 1745, Loss 2.927680\n", "Epoch 1746, Loss 2.927681\n", "Epoch 1747, Loss 2.927680\n", "Epoch 1748, Loss 2.927679\n", "Epoch 1749, Loss 2.927678\n", "Epoch 1750, Loss 2.927677\n", "Epoch 1751, Loss 2.927678\n", "Epoch 1752, Loss 2.927678\n", "Epoch 1753, Loss 2.927677\n", "Epoch 1754, Loss 2.927677\n", "Epoch 1755, Loss 2.927675\n", "Epoch 1756, Loss 2.927677\n", "Epoch 1757, Loss 2.927675\n", "Epoch 1758, Loss 2.927674\n", "Epoch 1759, Loss 2.927674\n", "Epoch 1760, Loss 2.927675\n", "Epoch 1761, Loss 2.927674\n", "Epoch 1762, Loss 2.927672\n", "Epoch 1763, Loss 2.927673\n", "Epoch 1764, Loss 2.927672\n", "Epoch 1765, Loss 2.927673\n", "Epoch 1766, Loss 2.927672\n", "Epoch 1767, Loss 2.927672\n", "Epoch 1768, Loss 2.927672\n", "Epoch 1769, Loss 2.927671\n", "Epoch 1770, Loss 2.927671\n", "Epoch 1771, Loss 2.927670\n", "Epoch 1772, Loss 2.927671\n", "Epoch 1773, Loss 2.927670\n", "Epoch 1774, Loss 2.927670\n", "Epoch 1775, Loss 2.927669\n", "Epoch 1776, Loss 2.927668\n", "Epoch 1777, Loss 2.927668\n", "Epoch 1778, Loss 2.927668\n", "Epoch 1779, Loss 2.927667\n", "Epoch 1780, Loss 2.927666\n", "Epoch 1781, Loss 2.927667\n", "Epoch 1782, Loss 2.927666\n", "Epoch 1783, Loss 2.927665\n", "Epoch 1784, Loss 2.927664\n", "Epoch 1785, Loss 2.927665\n", "Epoch 1786, Loss 2.927665\n", "Epoch 1787, Loss 2.927665\n", "Epoch 1788, Loss 2.927664\n", "Epoch 1789, Loss 2.927663\n", "Epoch 1790, Loss 2.927662\n", "Epoch 1791, Loss 2.927663\n", "Epoch 1792, Loss 2.927663\n", "Epoch 1793, Loss 2.927663\n", "Epoch 1794, Loss 2.927662\n", "Epoch 1795, Loss 2.927664\n", "Epoch 1796, Loss 2.927662\n", "Epoch 1797, Loss 2.927661\n", "Epoch 1798, Loss 2.927662\n", "Epoch 1799, Loss 2.927662\n", "Epoch 1800, Loss 2.927661\n", "Epoch 1801, Loss 2.927660\n", "Epoch 1802, Loss 2.927661\n", "Epoch 1803, Loss 2.927660\n", "Epoch 1804, Loss 2.927662\n", "Epoch 1805, Loss 2.927660\n", "Epoch 1806, Loss 2.927659\n", "Epoch 1807, Loss 2.927660\n", "Epoch 1808, Loss 2.927658\n", "Epoch 1809, Loss 2.927660\n", "Epoch 1810, Loss 2.927659\n", "Epoch 1811, Loss 2.927657\n", "Epoch 1812, Loss 2.927660\n", "Epoch 1813, Loss 2.927658\n", "Epoch 1814, Loss 2.927659\n", "Epoch 1815, Loss 2.927657\n", "Epoch 1816, Loss 2.927658\n", "Epoch 1817, Loss 2.927656\n", "Epoch 1818, Loss 2.927658\n", "Epoch 1819, Loss 2.927657\n", "Epoch 1820, Loss 2.927657\n", "Epoch 1821, Loss 2.927657\n", "Epoch 1822, Loss 2.927657\n", "Epoch 1823, Loss 2.927657\n", "Epoch 1824, Loss 2.927657\n", "Epoch 1825, Loss 2.927656\n", "Epoch 1826, Loss 2.927655\n", "Epoch 1827, Loss 2.927656\n", "Epoch 1828, Loss 2.927656\n", "Epoch 1829, Loss 2.927654\n", "Epoch 1830, Loss 2.927655\n", "Epoch 1831, Loss 2.927654\n", "Epoch 1832, Loss 2.927654\n", "Epoch 
1833, Loss 2.927655\n", "Epoch 1834, Loss 2.927654\n", "Epoch 1835, Loss 2.927656\n", "Epoch 1836, Loss 2.927654\n", "Epoch 1837, Loss 2.927655\n", "Epoch 1838, Loss 2.927653\n", "Epoch 1839, Loss 2.927654\n", "Epoch 1840, Loss 2.927655\n", "Epoch 1841, Loss 2.927654\n", "Epoch 1842, Loss 2.927654\n", "Epoch 1843, Loss 2.927654\n", "Epoch 1844, Loss 2.927655\n", "Epoch 1845, Loss 2.927654\n", "Epoch 1846, Loss 2.927653\n", "Epoch 1847, Loss 2.927651\n", "Epoch 1848, Loss 2.927654\n", "Epoch 1849, Loss 2.927653\n", "Epoch 1850, Loss 2.927653\n", "Epoch 1851, Loss 2.927651\n", "Epoch 1852, Loss 2.927653\n", "Epoch 1853, Loss 2.927653\n", "Epoch 1854, Loss 2.927653\n", "Epoch 1855, Loss 2.927652\n", "Epoch 1856, Loss 2.927652\n", "Epoch 1857, Loss 2.927651\n", "Epoch 1858, Loss 2.927651\n", "Epoch 1859, Loss 2.927651\n", "Epoch 1860, Loss 2.927652\n", "Epoch 1861, Loss 2.927652\n", "Epoch 1862, Loss 2.927652\n", "Epoch 1863, Loss 2.927652\n", "Epoch 1864, Loss 2.927653\n", "Epoch 1865, Loss 2.927651\n", "Epoch 1866, Loss 2.927653\n", "Epoch 1867, Loss 2.927651\n", "Epoch 1868, Loss 2.927650\n", "Epoch 1869, Loss 2.927651\n", "Epoch 1870, Loss 2.927650\n", "Epoch 1871, Loss 2.927651\n", "Epoch 1872, Loss 2.927651\n", "Epoch 1873, Loss 2.927651\n", "Epoch 1874, Loss 2.927650\n", "Epoch 1875, Loss 2.927650\n", "Epoch 1876, Loss 2.927650\n", "Epoch 1877, Loss 2.927651\n", "Epoch 1878, Loss 2.927650\n", "Epoch 1879, Loss 2.927650\n", "Epoch 1880, Loss 2.927651\n", "Epoch 1881, Loss 2.927650\n", "Epoch 1882, Loss 2.927650\n", "Epoch 1883, Loss 2.927649\n", "Epoch 1884, Loss 2.927650\n", "Epoch 1885, Loss 2.927649\n", "Epoch 1886, Loss 2.927649\n", "Epoch 1887, Loss 2.927650\n", "Epoch 1888, Loss 2.927650\n", "Epoch 1889, Loss 2.927649\n", "Epoch 1890, Loss 2.927650\n", "Epoch 1891, Loss 2.927649\n", "Epoch 1892, Loss 2.927649\n", "Epoch 1893, Loss 2.927650\n", "Epoch 1894, Loss 2.927648\n", "Epoch 1895, Loss 2.927649\n", "Epoch 1896, Loss 2.927647\n", "Epoch 1897, Loss 2.927648\n", "Epoch 1898, Loss 2.927649\n", "Epoch 1899, Loss 2.927650\n", "Epoch 1900, Loss 2.927648\n", "Epoch 1901, Loss 2.927648\n", "Epoch 1902, Loss 2.927648\n", "Epoch 1903, Loss 2.927649\n", "Epoch 1904, Loss 2.927648\n", "Epoch 1905, Loss 2.927649\n", "Epoch 1906, Loss 2.927649\n", "Epoch 1907, Loss 2.927647\n", "Epoch 1908, Loss 2.927647\n", "Epoch 1909, Loss 2.927648\n", "Epoch 1910, Loss 2.927648\n", "Epoch 1911, Loss 2.927648\n", "Epoch 1912, Loss 2.927648\n", "Epoch 1913, Loss 2.927649\n", "Epoch 1914, Loss 2.927648\n", "Epoch 1915, Loss 2.927648\n", "Epoch 1916, Loss 2.927648\n", "Epoch 1917, Loss 2.927648\n", "Epoch 1918, Loss 2.927648\n", "Epoch 1919, Loss 2.927646\n", "Epoch 1920, Loss 2.927647\n", "Epoch 1921, Loss 2.927649\n", "Epoch 1922, Loss 2.927648\n", "Epoch 1923, Loss 2.927647\n", "Epoch 1924, Loss 2.927648\n", "Epoch 1925, Loss 2.927646\n", "Epoch 1926, Loss 2.927646\n", "Epoch 1927, Loss 2.927648\n", "Epoch 1928, Loss 2.927648\n", "Epoch 1929, Loss 2.927648\n", "Epoch 1930, Loss 2.927647\n", "Epoch 1931, Loss 2.927647\n", "Epoch 1932, Loss 2.927647\n", "Epoch 1933, Loss 2.927648\n", "Epoch 1934, Loss 2.927648\n", "Epoch 1935, Loss 2.927647\n", "Epoch 1936, Loss 2.927647\n", "Epoch 1937, Loss 2.927648\n", "Epoch 1938, Loss 2.927647\n", "Epoch 1939, Loss 2.927647\n", "Epoch 1940, Loss 2.927647\n", "Epoch 1941, Loss 2.927647\n", "Epoch 1942, Loss 2.927647\n", "Epoch 1943, Loss 2.927649\n", "Epoch 1944, Loss 2.927647\n", "Epoch 1945, Loss 2.927648\n", "Epoch 1946, Loss 2.927647\n", "Epoch 1947, Loss 
2.927645\n", "Epoch 1948, Loss 2.927648\n", "Epoch 1949, Loss 2.927647\n", "Epoch 1950, Loss 2.927645\n", "Epoch 1951, Loss 2.927647\n", "Epoch 1952, Loss 2.927647\n", "Epoch 1953, Loss 2.927647\n", "Epoch 1954, Loss 2.927647\n", "Epoch 1955, Loss 2.927647\n", "Epoch 1956, Loss 2.927647\n", "Epoch 1957, Loss 2.927646\n", "Epoch 1958, Loss 2.927647\n", "Epoch 1959, Loss 2.927647\n", "Epoch 1960, Loss 2.927647\n", "Epoch 1961, Loss 2.927647\n", "Epoch 1962, Loss 2.927647\n", "Epoch 1963, Loss 2.927646\n", "Epoch 1964, Loss 2.927647\n", "Epoch 1965, Loss 2.927648\n", "Epoch 1966, Loss 2.927646\n", "Epoch 1967, Loss 2.927647\n", "Epoch 1968, Loss 2.927646\n", "Epoch 1969, Loss 2.927647\n", "Epoch 1970, Loss 2.927646\n", "Epoch 1971, Loss 2.927646\n", "Epoch 1972, Loss 2.927645\n", "Epoch 1973, Loss 2.927648\n", "Epoch 1974, Loss 2.927646\n", "Epoch 1975, Loss 2.927647\n", "Epoch 1976, Loss 2.927646\n", "Epoch 1977, Loss 2.927647\n", "Epoch 1978, Loss 2.927646\n", "Epoch 1979, Loss 2.927646\n", "Epoch 1980, Loss 2.927646\n", "Epoch 1981, Loss 2.927647\n", "Epoch 1982, Loss 2.927647\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1983, Loss 2.927647\n", "Epoch 1984, Loss 2.927648\n", "Epoch 1985, Loss 2.927647\n", "Epoch 1986, Loss 2.927647\n", "Epoch 1987, Loss 2.927646\n", "Epoch 1988, Loss 2.927647\n", "Epoch 1989, Loss 2.927646\n", "Epoch 1990, Loss 2.927646\n", "Epoch 1991, Loss 2.927648\n", "Epoch 1992, Loss 2.927646\n", "Epoch 1993, Loss 2.927647\n", "Epoch 1994, Loss 2.927645\n", "Epoch 1995, Loss 2.927647\n", "Epoch 1996, Loss 2.927647\n", "Epoch 1997, Loss 2.927647\n", "Epoch 1998, Loss 2.927645\n", "Epoch 1999, Loss 2.927646\n", "Epoch 2000, Loss 2.927646\n", "Epoch 2001, Loss 2.927647\n", "Epoch 2002, Loss 2.927646\n", "Epoch 2003, Loss 2.927645\n", "Epoch 2004, Loss 2.927647\n", "Epoch 2005, Loss 2.927646\n", "Epoch 2006, Loss 2.927647\n", "Epoch 2007, Loss 2.927646\n", "Epoch 2008, Loss 2.927646\n", "Epoch 2009, Loss 2.927647\n", "Epoch 2010, Loss 2.927644\n", "Epoch 2011, Loss 2.927646\n", "Epoch 2012, Loss 2.927646\n", "Epoch 2013, Loss 2.927647\n", "Epoch 2014, Loss 2.927645\n", "Epoch 2015, Loss 2.927647\n", "Epoch 2016, Loss 2.927645\n", "Epoch 2017, Loss 2.927646\n", "Epoch 2018, Loss 2.927647\n", "Epoch 2019, Loss 2.927645\n", "Epoch 2020, Loss 2.927647\n", "Epoch 2021, Loss 2.927647\n", "Epoch 2022, Loss 2.927646\n", "Epoch 2023, Loss 2.927647\n", "Epoch 2024, Loss 2.927646\n", "Epoch 2025, Loss 2.927645\n", "Epoch 2026, Loss 2.927646\n", "Epoch 2027, Loss 2.927647\n", "Epoch 2028, Loss 2.927646\n", "Epoch 2029, Loss 2.927646\n", "Epoch 2030, Loss 2.927647\n", "Epoch 2031, Loss 2.927644\n", "Epoch 2032, Loss 2.927646\n", "Epoch 2033, Loss 2.927647\n", "Epoch 2034, Loss 2.927646\n", "Epoch 2035, Loss 2.927645\n", "Epoch 2036, Loss 2.927646\n", "Epoch 2037, Loss 2.927646\n", "Epoch 2038, Loss 2.927644\n", "Epoch 2039, Loss 2.927646\n", "Epoch 2040, Loss 2.927647\n", "Epoch 2041, Loss 2.927644\n", "Epoch 2042, Loss 2.927645\n", "Epoch 2043, Loss 2.927645\n", "Epoch 2044, Loss 2.927647\n", "Epoch 2045, Loss 2.927644\n", "Epoch 2046, Loss 2.927644\n", "Epoch 2047, Loss 2.927646\n", "Epoch 2048, Loss 2.927646\n", "Epoch 2049, Loss 2.927644\n", "Epoch 2050, Loss 2.927646\n", "Epoch 2051, Loss 2.927646\n", "Epoch 2052, Loss 2.927647\n", "Epoch 2053, Loss 2.927644\n", "Epoch 2054, Loss 2.927645\n", "Epoch 2055, Loss 2.927646\n", "Epoch 2056, Loss 2.927646\n", "Epoch 2057, Loss 2.927646\n", "Epoch 2058, Loss 2.927646\n", "Epoch 2059, Loss 2.927644\n", "Epoch 
2060, Loss 2.927645\n", "Epoch 2061, Loss 2.927645\n", "Epoch 2062, Loss 2.927645\n", "Epoch 2063, Loss 2.927646\n", "Epoch 2064, Loss 2.927647\n", "Epoch 2065, Loss 2.927647\n", "Epoch 2066, Loss 2.927645\n", "Epoch 2067, Loss 2.927647\n", "Epoch 2068, Loss 2.927646\n", "Epoch 2069, Loss 2.927646\n", "Epoch 2070, Loss 2.927645\n", "Epoch 2071, Loss 2.927646\n", "Epoch 2072, Loss 2.927646\n", "Epoch 2073, Loss 2.927646\n", "Epoch 2074, Loss 2.927646\n", "Epoch 2075, Loss 2.927645\n", "Epoch 2076, Loss 2.927645\n", "Epoch 2077, Loss 2.927646\n", "Epoch 2078, Loss 2.927646\n", "Epoch 2079, Loss 2.927645\n", "Epoch 2080, Loss 2.927646\n", "Epoch 2081, Loss 2.927646\n", "Epoch 2082, Loss 2.927645\n", "Epoch 2083, Loss 2.927645\n", "Epoch 2084, Loss 2.927647\n", "Epoch 2085, Loss 2.927645\n", "Epoch 2086, Loss 2.927646\n", "Epoch 2087, Loss 2.927646\n", "Epoch 2088, Loss 2.927646\n", "Epoch 2089, Loss 2.927647\n", "Epoch 2090, Loss 2.927646\n", "Epoch 2091, Loss 2.927645\n", "Epoch 2092, Loss 2.927646\n", "Epoch 2093, Loss 2.927646\n", "Epoch 2094, Loss 2.927646\n", "Epoch 2095, Loss 2.927644\n", "Epoch 2096, Loss 2.927644\n", "Epoch 2097, Loss 2.927646\n", "Epoch 2098, Loss 2.927646\n", "Epoch 2099, Loss 2.927646\n", "Epoch 2100, Loss 2.927647\n", "Epoch 2101, Loss 2.927646\n", "Epoch 2102, Loss 2.927646\n", "Epoch 2103, Loss 2.927645\n", "Epoch 2104, Loss 2.927646\n", "Epoch 2105, Loss 2.927645\n", "Epoch 2106, Loss 2.927647\n", "Epoch 2107, Loss 2.927646\n", "Epoch 2108, Loss 2.927645\n", "Epoch 2109, Loss 2.927645\n", "Epoch 2110, Loss 2.927646\n", "Epoch 2111, Loss 2.927647\n", "Epoch 2112, Loss 2.927646\n", "Epoch 2113, Loss 2.927646\n", "Epoch 2114, Loss 2.927643\n", "Epoch 2115, Loss 2.927645\n", "Epoch 2116, Loss 2.927645\n", "Epoch 2117, Loss 2.927646\n", "Epoch 2118, Loss 2.927648\n", "Epoch 2119, Loss 2.927644\n", "Epoch 2120, Loss 2.927644\n", "Epoch 2121, Loss 2.927646\n", "Epoch 2122, Loss 2.927646\n", "Epoch 2123, Loss 2.927646\n", "Epoch 2124, Loss 2.927646\n", "Epoch 2125, Loss 2.927646\n", "Epoch 2126, Loss 2.927645\n", "Epoch 2127, Loss 2.927644\n", "Epoch 2128, Loss 2.927644\n", "Epoch 2129, Loss 2.927645\n", "Epoch 2130, Loss 2.927645\n", "Epoch 2131, Loss 2.927646\n", "Epoch 2132, Loss 2.927646\n", "Epoch 2133, Loss 2.927646\n", "Epoch 2134, Loss 2.927646\n", "Epoch 2135, Loss 2.927644\n", "Epoch 2136, Loss 2.927646\n", "Epoch 2137, Loss 2.927645\n", "Epoch 2138, Loss 2.927646\n", "Epoch 2139, Loss 2.927646\n", "Epoch 2140, Loss 2.927646\n", "Epoch 2141, Loss 2.927646\n", "Epoch 2142, Loss 2.927645\n", "Epoch 2143, Loss 2.927645\n", "Epoch 2144, Loss 2.927645\n", "Epoch 2145, Loss 2.927644\n", "Epoch 2146, Loss 2.927645\n", "Epoch 2147, Loss 2.927646\n", "Epoch 2148, Loss 2.927646\n", "Epoch 2149, Loss 2.927646\n", "Epoch 2150, Loss 2.927646\n", "Epoch 2151, Loss 2.927644\n", "Epoch 2152, Loss 2.927645\n", "Epoch 2153, Loss 2.927645\n", "Epoch 2154, Loss 2.927646\n", "Epoch 2155, Loss 2.927647\n", "Epoch 2156, Loss 2.927645\n", "Epoch 2157, Loss 2.927647\n", "Epoch 2158, Loss 2.927646\n", "Epoch 2159, Loss 2.927646\n", "Epoch 2160, Loss 2.927646\n", "Epoch 2161, Loss 2.927645\n", "Epoch 2162, Loss 2.927645\n", "Epoch 2163, Loss 2.927646\n", "Epoch 2164, Loss 2.927645\n", "Epoch 2165, Loss 2.927646\n", "Epoch 2166, Loss 2.927646\n", "Epoch 2167, Loss 2.927646\n", "Epoch 2168, Loss 2.927645\n", "Epoch 2169, Loss 2.927646\n", "Epoch 2170, Loss 2.927646\n", "Epoch 2171, Loss 2.927644\n", "Epoch 2172, Loss 2.927646\n", "Epoch 2173, Loss 2.927645\n", "Epoch 2174, Loss 
2.927646\n", "Epoch 2175, Loss 2.927646\n", "Epoch 2176, Loss 2.927645\n", "Epoch 2177, Loss 2.927646\n", "Epoch 2178, Loss 2.927646\n", "Epoch 2179, Loss 2.927647\n", "Epoch 2180, Loss 2.927646\n", "Epoch 2181, Loss 2.927646\n", "Epoch 2182, Loss 2.927646\n", "Epoch 2183, Loss 2.927646\n", "Epoch 2184, Loss 2.927645\n", "Epoch 2185, Loss 2.927644\n", "Epoch 2186, Loss 2.927646\n", "Epoch 2187, Loss 2.927645\n", "Epoch 2188, Loss 2.927645\n", "Epoch 2189, Loss 2.927645\n", "Epoch 2190, Loss 2.927646\n", "Epoch 2191, Loss 2.927646\n", "Epoch 2192, Loss 2.927647\n", "Epoch 2193, Loss 2.927646\n", "Epoch 2194, Loss 2.927646\n", "Epoch 2195, Loss 2.927645\n", "Epoch 2196, Loss 2.927645\n", "Epoch 2197, Loss 2.927644\n", "Epoch 2198, Loss 2.927644\n", "Epoch 2199, Loss 2.927645\n", "Epoch 2200, Loss 2.927645\n", "Epoch 2201, Loss 2.927646\n", "Epoch 2202, Loss 2.927645\n", "Epoch 2203, Loss 2.927646\n", "Epoch 2204, Loss 2.927647\n", "Epoch 2205, Loss 2.927647\n", "Epoch 2206, Loss 2.927646\n", "Epoch 2207, Loss 2.927646\n", "Epoch 2208, Loss 2.927647\n", "Epoch 2209, Loss 2.927645\n", "Epoch 2210, Loss 2.927646\n", "Epoch 2211, Loss 2.927644\n", "Epoch 2212, Loss 2.927645\n", "Epoch 2213, Loss 2.927645\n", "Epoch 2214, Loss 2.927646\n", "Epoch 2215, Loss 2.927646\n", "Epoch 2216, Loss 2.927646\n", "Epoch 2217, Loss 2.927645\n", "Epoch 2218, Loss 2.927646\n", "Epoch 2219, Loss 2.927646\n", "Epoch 2220, Loss 2.927646\n", "Epoch 2221, Loss 2.927646\n", "Epoch 2222, Loss 2.927645\n", "Epoch 2223, Loss 2.927645\n", "Epoch 2224, Loss 2.927646\n", "Epoch 2225, Loss 2.927646\n", "Epoch 2226, Loss 2.927645\n", "Epoch 2227, Loss 2.927645\n", "Epoch 2228, Loss 2.927644\n", "Epoch 2229, Loss 2.927646\n", "Epoch 2230, Loss 2.927646\n", "Epoch 2231, Loss 2.927647\n", "Epoch 2232, Loss 2.927644\n", "Epoch 2233, Loss 2.927646\n", "Epoch 2234, Loss 2.927647\n", "Epoch 2235, Loss 2.927646\n", "Epoch 2236, Loss 2.927645\n", "Epoch 2237, Loss 2.927647\n", "Epoch 2238, Loss 2.927646\n", "Epoch 2239, Loss 2.927646\n", "Epoch 2240, Loss 2.927646\n", "Epoch 2241, Loss 2.927647\n", "Epoch 2242, Loss 2.927645\n", "Epoch 2243, Loss 2.927645\n", "Epoch 2244, Loss 2.927646\n", "Epoch 2245, Loss 2.927646\n", "Epoch 2246, Loss 2.927646\n", "Epoch 2247, Loss 2.927646\n", "Epoch 2248, Loss 2.927646\n", "Epoch 2249, Loss 2.927646\n", "Epoch 2250, Loss 2.927646\n", "Epoch 2251, Loss 2.927646\n", "Epoch 2252, Loss 2.927644\n", "Epoch 2253, Loss 2.927645\n", "Epoch 2254, Loss 2.927646\n", "Epoch 2255, Loss 2.927645\n", "Epoch 2256, Loss 2.927644\n", "Epoch 2257, Loss 2.927644\n", "Epoch 2258, Loss 2.927644\n", "Epoch 2259, Loss 2.927644\n", "Epoch 2260, Loss 2.927645\n", "Epoch 2261, Loss 2.927646\n", "Epoch 2262, Loss 2.927646\n", "Epoch 2263, Loss 2.927647\n", "Epoch 2264, Loss 2.927646\n", "Epoch 2265, Loss 2.927647\n", "Epoch 2266, Loss 2.927646\n", "Epoch 2267, Loss 2.927646\n", "Epoch 2268, Loss 2.927646\n", "Epoch 2269, Loss 2.927646\n", "Epoch 2270, Loss 2.927647\n", "Epoch 2271, Loss 2.927644\n", "Epoch 2272, Loss 2.927646\n", "Epoch 2273, Loss 2.927646\n", "Epoch 2274, Loss 2.927646\n", "Epoch 2275, Loss 2.927646\n", "Epoch 2276, Loss 2.927646\n", "Epoch 2277, Loss 2.927644\n", "Epoch 2278, Loss 2.927644\n", "Epoch 2279, Loss 2.927644\n", "Epoch 2280, Loss 2.927644\n", "Epoch 2281, Loss 2.927644\n", "Epoch 2282, Loss 2.927644\n", "Epoch 2283, Loss 2.927644\n", "Epoch 2284, Loss 2.927644\n", "Epoch 2285, Loss 2.927645\n", "Epoch 2286, Loss 2.927644\n", "Epoch 2287, Loss 2.927646\n", "Epoch 2288, Loss 2.927646\n", "Epoch 
2289, Loss 2.927646\n", "Epoch 2290, Loss 2.927645\n", "Epoch 2291, Loss 2.927646\n", "Epoch 2292, Loss 2.927646\n", "Epoch 2293, Loss 2.927646\n", "Epoch 2294, Loss 2.927645\n", "Epoch 2295, Loss 2.927646\n", "Epoch 2296, Loss 2.927646\n", "Epoch 2297, Loss 2.927647\n", "Epoch 2298, Loss 2.927647\n", "Epoch 2299, Loss 2.927647\n", "Epoch 2300, Loss 2.927647\n", "Epoch 2301, Loss 2.927647\n", "Epoch 2302, Loss 2.927647\n", "Epoch 2303, Loss 2.927646\n", "Epoch 2304, Loss 2.927645\n", "Epoch 2305, Loss 2.927646\n", "Epoch 2306, Loss 2.927646\n", "Epoch 2307, Loss 2.927645\n", "Epoch 2308, Loss 2.927645\n", "Epoch 2309, Loss 2.927646\n", "Epoch 2310, Loss 2.927644\n", "Epoch 2311, Loss 2.927644\n", "Epoch 2312, Loss 2.927645\n", "Epoch 2313, Loss 2.927645\n", "Epoch 2314, Loss 2.927645\n", "Epoch 2315, Loss 2.927645\n", "Epoch 2316, Loss 2.927645\n", "Epoch 2317, Loss 2.927645\n", "Epoch 2318, Loss 2.927645\n", "Epoch 2319, Loss 2.927646\n", "Epoch 2320, Loss 2.927646\n", "Epoch 2321, Loss 2.927646\n", "Epoch 2322, Loss 2.927647\n", "Epoch 2323, Loss 2.927646\n", "Epoch 2324, Loss 2.927646\n", "Epoch 2325, Loss 2.927646\n", "Epoch 2326, Loss 2.927646\n", "Epoch 2327, Loss 2.927646\n", "Epoch 2328, Loss 2.927646\n", "Epoch 2329, Loss 2.927646\n", "Epoch 2330, Loss 2.927646\n", "Epoch 2331, Loss 2.927646\n", "Epoch 2332, Loss 2.927646\n", "Epoch 2333, Loss 2.927647\n", "Epoch 2334, Loss 2.927646\n", "Epoch 2335, Loss 2.927646\n", "Epoch 2336, Loss 2.927645\n", "Epoch 2337, Loss 2.927645\n", "Epoch 2338, Loss 2.927644\n", "Epoch 2339, Loss 2.927644\n", "Epoch 2340, Loss 2.927644\n", "Epoch 2341, Loss 2.927644\n", "Epoch 2342, Loss 2.927646\n", "Epoch 2343, Loss 2.927646\n", "Epoch 2344, Loss 2.927646\n", "Epoch 2345, Loss 2.927645\n", "Epoch 2346, Loss 2.927645\n", "Epoch 2347, Loss 2.927646\n", "Epoch 2348, Loss 2.927645\n", "Epoch 2349, Loss 2.927645\n", "Epoch 2350, Loss 2.927645\n", "Epoch 2351, Loss 2.927645\n", "Epoch 2352, Loss 2.927646\n", "Epoch 2353, Loss 2.927646\n", "Epoch 2354, Loss 2.927646\n", "Epoch 2355, Loss 2.927646\n", "Epoch 2356, Loss 2.927645\n", "Epoch 2357, Loss 2.927645\n", "Epoch 2358, Loss 2.927645\n", "Epoch 2359, Loss 2.927645\n", "Epoch 2360, Loss 2.927645\n", "Epoch 2361, Loss 2.927646\n", "Epoch 2362, Loss 2.927646\n", "Epoch 2363, Loss 2.927646\n", "Epoch 2364, Loss 2.927645\n", "Epoch 2365, Loss 2.927645\n", "Epoch 2366, Loss 2.927645\n", "Epoch 2367, Loss 2.927645\n", "Epoch 2368, Loss 2.927645\n", "Epoch 2369, Loss 2.927646\n", "Epoch 2370, Loss 2.927646\n", "Epoch 2371, Loss 2.927646\n", "Epoch 2372, Loss 2.927646\n", "Epoch 2373, Loss 2.927645\n", "Epoch 2374, Loss 2.927645\n", "Epoch 2375, Loss 2.927645\n", "Epoch 2376, Loss 2.927645\n", "Epoch 2377, Loss 2.927645\n", "Epoch 2378, Loss 2.927646\n", "Epoch 2379, Loss 2.927646\n", "Epoch 2380, Loss 2.927646\n", "Epoch 2381, Loss 2.927645\n", "Epoch 2382, Loss 2.927645\n", "Epoch 2383, Loss 2.927645\n", "Epoch 2384, Loss 2.927645\n", "Epoch 2385, Loss 2.927645\n", "Epoch 2386, Loss 2.927646\n", "Epoch 2387, Loss 2.927646\n", "Epoch 2388, Loss 2.927646\n", "Epoch 2389, Loss 2.927646\n", "Epoch 2390, Loss 2.927645\n", "Epoch 2391, Loss 2.927645\n", "Epoch 2392, Loss 2.927645\n", "Epoch 2393, Loss 2.927645\n", "Epoch 2394, Loss 2.927645\n", "Epoch 2395, Loss 2.927646\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 2396, Loss 2.927646\n", "Epoch 2397, Loss 2.927646\n", "Epoch 2398, Loss 2.927645\n", "Epoch 2399, Loss 2.927645\n", "Epoch 2400, Loss 2.927645\n", "Epoch 2401, Loss 2.927645\n", 
"Epoch 2402, Loss 2.927646\n", "Epoch 2403, Loss 2.927646\n", "Epoch 2404, Loss 2.927646\n", "Epoch 2405, Loss 2.927645\n", "Epoch 2406, Loss 2.927645\n", "Epoch 2407, Loss 2.927645\n", "Epoch 2408, Loss 2.927645\n", "Epoch 2409, Loss 2.927645\n", "Epoch 2410, Loss 2.927646\n", "Epoch 2411, Loss 2.927646\n", "Epoch 2412, Loss 2.927646\n", "Epoch 2413, Loss 2.927646\n", "Epoch 2414, Loss 2.927645\n", "Epoch 2415, Loss 2.927645\n", "Epoch 2416, Loss 2.927645\n", "Epoch 2417, Loss 2.927645\n", "Epoch 2418, Loss 2.927645\n", "Epoch 2419, Loss 2.927646\n", "Epoch 2420, Loss 2.927646\n", "Epoch 2421, Loss 2.927646\n", "Epoch 2422, Loss 2.927645\n", "Epoch 2423, Loss 2.927645\n", "Epoch 2424, Loss 2.927645\n", "Epoch 2425, Loss 2.927645\n", "Epoch 2426, Loss 2.927646\n", "Epoch 2427, Loss 2.927646\n", "Epoch 2428, Loss 2.927646\n", "Epoch 2429, Loss 2.927645\n", "Epoch 2430, Loss 2.927645\n", "Epoch 2431, Loss 2.927645\n", "Epoch 2432, Loss 2.927645\n", "Epoch 2433, Loss 2.927645\n", "Epoch 2434, Loss 2.927646\n", "Epoch 2435, Loss 2.927646\n", "Epoch 2436, Loss 2.927646\n", "Epoch 2437, Loss 2.927646\n", "Epoch 2438, Loss 2.927645\n", "Epoch 2439, Loss 2.927645\n", "Epoch 2440, Loss 2.927645\n", "Epoch 2441, Loss 2.927645\n", "Epoch 2442, Loss 2.927645\n", "Epoch 2443, Loss 2.927646\n", "Epoch 2444, Loss 2.927646\n", "Epoch 2445, Loss 2.927646\n", "Epoch 2446, Loss 2.927645\n", "Epoch 2447, Loss 2.927645\n", "Epoch 2448, Loss 2.927645\n", "Epoch 2449, Loss 2.927645\n", "Epoch 2450, Loss 2.927646\n", "Epoch 2451, Loss 2.927646\n", "Epoch 2452, Loss 2.927646\n", "Epoch 2453, Loss 2.927645\n", "Epoch 2454, Loss 2.927645\n", "Epoch 2455, Loss 2.927645\n", "Epoch 2456, Loss 2.927645\n", "Epoch 2457, Loss 2.927645\n", "Epoch 2458, Loss 2.927646\n", "Epoch 2459, Loss 2.927646\n", "Epoch 2460, Loss 2.927646\n", "Epoch 2461, Loss 2.927646\n", "Epoch 2462, Loss 2.927645\n", "Epoch 2463, Loss 2.927645\n", "Epoch 2464, Loss 2.927645\n", "Epoch 2465, Loss 2.927645\n", "Epoch 2466, Loss 2.927645\n", "Epoch 2467, Loss 2.927646\n", "Epoch 2468, Loss 2.927646\n", "Epoch 2469, Loss 2.927646\n", "Epoch 2470, Loss 2.927645\n", "Epoch 2471, Loss 2.927645\n", "Epoch 2472, Loss 2.927645\n", "Epoch 2473, Loss 2.927645\n", "Epoch 2474, Loss 2.927646\n", "Epoch 2475, Loss 2.927646\n", "Epoch 2476, Loss 2.927646\n", "Epoch 2477, Loss 2.927645\n", "Epoch 2478, Loss 2.927645\n", "Epoch 2479, Loss 2.927645\n", "Epoch 2480, Loss 2.927645\n", "Epoch 2481, Loss 2.927645\n", "Epoch 2482, Loss 2.927646\n", "Epoch 2483, Loss 2.927646\n", "Epoch 2484, Loss 2.927646\n", "Epoch 2485, Loss 2.927646\n", "Epoch 2486, Loss 2.927645\n", "Epoch 2487, Loss 2.927645\n", "Epoch 2488, Loss 2.927645\n", "Epoch 2489, Loss 2.927645\n", "Epoch 2490, Loss 2.927645\n", "Epoch 2491, Loss 2.927646\n", "Epoch 2492, Loss 2.927646\n", "Epoch 2493, Loss 2.927646\n", "Epoch 2494, Loss 2.927645\n", "Epoch 2495, Loss 2.927645\n", "Epoch 2496, Loss 2.927645\n", "Epoch 2497, Loss 2.927645\n", "Epoch 2498, Loss 2.927646\n", "Epoch 2499, Loss 2.927646\n", "Epoch 2500, Loss 2.927646\n", "Epoch 2501, Loss 2.927645\n", "Epoch 2502, Loss 2.927645\n", "Epoch 2503, Loss 2.927645\n", "Epoch 2504, Loss 2.927645\n", "Epoch 2505, Loss 2.927645\n", "Epoch 2506, Loss 2.927646\n", "Epoch 2507, Loss 2.927646\n", "Epoch 2508, Loss 2.927646\n", "Epoch 2509, Loss 2.927646\n", "Epoch 2510, Loss 2.927645\n", "Epoch 2511, Loss 2.927645\n", "Epoch 2512, Loss 2.927645\n", "Epoch 2513, Loss 2.927645\n", "Epoch 2514, Loss 2.927645\n", "Epoch 2515, Loss 2.927646\n", "Epoch 2516, Loss 
2.927646\n", "Epoch 2517, Loss 2.927646\n", "Epoch 2518, Loss 2.927645\n", "Epoch 2519, Loss 2.927645\n", "Epoch 2520, Loss 2.927645\n", "Epoch 2521, Loss 2.927645\n", "Epoch 2522, Loss 2.927646\n", "Epoch 2523, Loss 2.927646\n", "Epoch 2524, Loss 2.927646\n", "Epoch 2525, Loss 2.927645\n", "Epoch 2526, Loss 2.927645\n", "Epoch 2527, Loss 2.927645\n", "Epoch 2528, Loss 2.927645\n", "Epoch 2529, Loss 2.927646\n", "Epoch 2530, Loss 2.927646\n", "Epoch 2531, Loss 2.927646\n", "Epoch 2532, Loss 2.927645\n", "Epoch 2533, Loss 2.927645\n", "Epoch 2534, Loss 2.927645\n", "Epoch 2535, Loss 2.927645\n", "Epoch 2536, Loss 2.927645\n", "Epoch 2537, Loss 2.927646\n", "Epoch 2538, Loss 2.927646\n", "Epoch 2539, Loss 2.927646\n", "Epoch 2540, Loss 2.927645\n", "Epoch 2541, Loss 2.927645\n", "Epoch 2542, Loss 2.927645\n", "Epoch 2543, Loss 2.927645\n", "Epoch 2544, Loss 2.927646\n", "Epoch 2545, Loss 2.927646\n", "Epoch 2546, Loss 2.927646\n", "Epoch 2547, Loss 2.927645\n", "Epoch 2548, Loss 2.927645\n", "Epoch 2549, Loss 2.927645\n", "Epoch 2550, Loss 2.927645\n", "Epoch 2551, Loss 2.927646\n", "Epoch 2552, Loss 2.927646\n", "Epoch 2553, Loss 2.927646\n", "Epoch 2554, Loss 2.927645\n", "Epoch 2555, Loss 2.927645\n", "Epoch 2556, Loss 2.927645\n", "Epoch 2557, Loss 2.927645\n", "Epoch 2558, Loss 2.927646\n", "Epoch 2559, Loss 2.927646\n", "Epoch 2560, Loss 2.927646\n", "Epoch 2561, Loss 2.927645\n", "Epoch 2562, Loss 2.927645\n", "Epoch 2563, Loss 2.927645\n", "Epoch 2564, Loss 2.927645\n", "Epoch 2565, Loss 2.927645\n", "Epoch 2566, Loss 2.927646\n", "Epoch 2567, Loss 2.927646\n", "Epoch 2568, Loss 2.927646\n", "Epoch 2569, Loss 2.927645\n", "Epoch 2570, Loss 2.927645\n", "Epoch 2571, Loss 2.927645\n", "Epoch 2572, Loss 2.927645\n", "Epoch 2573, Loss 2.927646\n", "Epoch 2574, Loss 2.927646\n", "Epoch 2575, Loss 2.927646\n", "Epoch 2576, Loss 2.927645\n", "Epoch 2577, Loss 2.927645\n", "Epoch 2578, Loss 2.927645\n", "Epoch 2579, Loss 2.927645\n", "Epoch 2580, Loss 2.927646\n", "Epoch 2581, Loss 2.927646\n", "Epoch 2582, Loss 2.927646\n", "Epoch 2583, Loss 2.927645\n", "Epoch 2584, Loss 2.927645\n", "Epoch 2585, Loss 2.927645\n", "Epoch 2586, Loss 2.927645\n", "Epoch 2587, Loss 2.927646\n", "Epoch 2588, Loss 2.927646\n", "Epoch 2589, Loss 2.927646\n", "Epoch 2590, Loss 2.927645\n", "Epoch 2591, Loss 2.927645\n", "Epoch 2592, Loss 2.927645\n", "Epoch 2593, Loss 2.927645\n", "Epoch 2594, Loss 2.927645\n", "Epoch 2595, Loss 2.927646\n", "Epoch 2596, Loss 2.927646\n", "Epoch 2597, Loss 2.927646\n", "Epoch 2598, Loss 2.927645\n", "Epoch 2599, Loss 2.927645\n", "Epoch 2600, Loss 2.927645\n", "Epoch 2601, Loss 2.927645\n", "Epoch 2602, Loss 2.927646\n", "Epoch 2603, Loss 2.927646\n", "Epoch 2604, Loss 2.927646\n", "Epoch 2605, Loss 2.927645\n", "Epoch 2606, Loss 2.927645\n", "Epoch 2607, Loss 2.927645\n", "Epoch 2608, Loss 2.927645\n", "Epoch 2609, Loss 2.927646\n", "Epoch 2610, Loss 2.927646\n", "Epoch 2611, Loss 2.927646\n", "Epoch 2612, Loss 2.927645\n", "Epoch 2613, Loss 2.927645\n", "Epoch 2614, Loss 2.927645\n", "Epoch 2615, Loss 2.927645\n", "Epoch 2616, Loss 2.927646\n", "Epoch 2617, Loss 2.927646\n", "Epoch 2618, Loss 2.927646\n", "Epoch 2619, Loss 2.927645\n", "Epoch 2620, Loss 2.927645\n", "Epoch 2621, Loss 2.927645\n", "Epoch 2622, Loss 2.927645\n", "Epoch 2623, Loss 2.927646\n", "Epoch 2624, Loss 2.927646\n", "Epoch 2625, Loss 2.927646\n", "Epoch 2626, Loss 2.927645\n", "Epoch 2627, Loss 2.927645\n", "Epoch 2628, Loss 2.927646\n", "Epoch 2629, Loss 2.927646\n", "Epoch 2630, Loss 2.927646\n", "Epoch 
2631, Loss 2.927646\n", "Epoch 2632, Loss 2.927645\n", "Epoch 2633, Loss 2.927645\n", "Epoch 2634, Loss 2.927646\n", "Epoch 2635, Loss 2.927646\n", "Epoch 2636, Loss 2.927646\n", "Epoch 2637, Loss 2.927646\n", "Epoch 2638, Loss 2.927646\n", "Epoch 2639, Loss 2.927646\n", "Epoch 2640, Loss 2.927646\n", "Epoch 2641, Loss 2.927646\n", "Epoch 2642, Loss 2.927646\n", "Epoch 2643, Loss 2.927646\n", "Epoch 2644, Loss 2.927646\n", "Epoch 2645, Loss 2.927646\n", "Epoch 2646, Loss 2.927646\n", "Epoch 2647, Loss 2.927646\n", "Epoch 2648, Loss 2.927646\n", "Epoch 2649, Loss 2.927646\n", "Epoch 2650, Loss 2.927646\n", "Epoch 2651, Loss 2.927646\n", "Epoch 2652, Loss 2.927646\n", "Epoch 2653, Loss 2.927646\n", "Epoch 2654, Loss 2.927646\n", "Epoch 2655, Loss 2.927646\n", "Epoch 2656, Loss 2.927646\n", "Epoch 2657, Loss 2.927646\n", "Epoch 2658, Loss 2.927646\n", "Epoch 2659, Loss 2.927646\n", "Epoch 2660, Loss 2.927646\n", "Epoch 2661, Loss 2.927646\n", "Epoch 2662, Loss 2.927646\n", "Epoch 2663, Loss 2.927646\n", "Epoch 2664, Loss 2.927646\n", "Epoch 2665, Loss 2.927646\n", "Epoch 2666, Loss 2.927646\n", "Epoch 2667, Loss 2.927646\n", "Epoch 2668, Loss 2.927646\n", "Epoch 2669, Loss 2.927646\n", "Epoch 2670, Loss 2.927646\n", "Epoch 2671, Loss 2.927646\n", "Epoch 2672, Loss 2.927646\n", "Epoch 2673, Loss 2.927646\n", "Epoch 2674, Loss 2.927646\n", "Epoch 2675, Loss 2.927646\n", "Epoch 2676, Loss 2.927646\n", "Epoch 2677, Loss 2.927646\n", "Epoch 2678, Loss 2.927646\n", "Epoch 2679, Loss 2.927646\n", "Epoch 2680, Loss 2.927646\n", "Epoch 2681, Loss 2.927646\n", "Epoch 2682, Loss 2.927646\n", "Epoch 2683, Loss 2.927646\n", "Epoch 2684, Loss 2.927646\n", "Epoch 2685, Loss 2.927646\n", "Epoch 2686, Loss 2.927646\n", "Epoch 2687, Loss 2.927646\n", "Epoch 2688, Loss 2.927646\n", "Epoch 2689, Loss 2.927646\n", "Epoch 2690, Loss 2.927646\n", "Epoch 2691, Loss 2.927646\n", "Epoch 2692, Loss 2.927646\n", "Epoch 2693, Loss 2.927646\n", "Epoch 2694, Loss 2.927646\n", "Epoch 2695, Loss 2.927646\n", "Epoch 2696, Loss 2.927646\n", "Epoch 2697, Loss 2.927646\n", "Epoch 2698, Loss 2.927646\n", "Epoch 2699, Loss 2.927646\n", "Epoch 2700, Loss 2.927646\n", "Epoch 2701, Loss 2.927646\n", "Epoch 2702, Loss 2.927646\n", "Epoch 2703, Loss 2.927646\n", "Epoch 2704, Loss 2.927646\n", "Epoch 2705, Loss 2.927646\n", "Epoch 2706, Loss 2.927646\n", "Epoch 2707, Loss 2.927646\n", "Epoch 2708, Loss 2.927646\n", "Epoch 2709, Loss 2.927646\n", "Epoch 2710, Loss 2.927646\n", "Epoch 2711, Loss 2.927646\n", "Epoch 2712, Loss 2.927646\n", "Epoch 2713, Loss 2.927646\n", "Epoch 2714, Loss 2.927646\n", "Epoch 2715, Loss 2.927646\n", "Epoch 2716, Loss 2.927646\n", "Epoch 2717, Loss 2.927646\n", "Epoch 2718, Loss 2.927646\n", "Epoch 2719, Loss 2.927646\n", "Epoch 2720, Loss 2.927646\n", "Epoch 2721, Loss 2.927646\n", "Epoch 2722, Loss 2.927646\n", "Epoch 2723, Loss 2.927646\n", "Epoch 2724, Loss 2.927646\n", "Epoch 2725, Loss 2.927646\n", "Epoch 2726, Loss 2.927646\n", "Epoch 2727, Loss 2.927646\n", "Epoch 2728, Loss 2.927646\n", "Epoch 2729, Loss 2.927646\n", "Epoch 2730, Loss 2.927646\n", "Epoch 2731, Loss 2.927646\n", "Epoch 2732, Loss 2.927646\n", "Epoch 2733, Loss 2.927646\n", "Epoch 2734, Loss 2.927646\n", "Epoch 2735, Loss 2.927646\n", "Epoch 2736, Loss 2.927646\n", "Epoch 2737, Loss 2.927646\n", "Epoch 2738, Loss 2.927646\n", "Epoch 2739, Loss 2.927646\n", "Epoch 2740, Loss 2.927646\n", "Epoch 2741, Loss 2.927646\n", "Epoch 2742, Loss 2.927646\n", "Epoch 2743, Loss 2.927646\n", "Epoch 2744, Loss 2.927646\n", "Epoch 2745, Loss 
2.927646\n", "Epoch 2746, Loss 2.927646\n", "Epoch 2747, Loss 2.927646\n", "Epoch 2748, Loss 2.927646\n", "Epoch 2749, Loss 2.927646\n", "Epoch 2750, Loss 2.927646\n", "Epoch 2751, Loss 2.927646\n", "Epoch 2752, Loss 2.927646\n", "Epoch 2753, Loss 2.927646\n", "Epoch 2754, Loss 2.927646\n", "Epoch 2755, Loss 2.927646\n", "Epoch 2756, Loss 2.927646\n", "Epoch 2757, Loss 2.927646\n", "Epoch 2758, Loss 2.927646\n", "Epoch 2759, Loss 2.927646\n", "Epoch 2760, Loss 2.927646\n", "Epoch 2761, Loss 2.927646\n", "Epoch 2762, Loss 2.927646\n", "Epoch 2763, Loss 2.927646\n", "Epoch 2764, Loss 2.927646\n", "Epoch 2765, Loss 2.927646\n", "Epoch 2766, Loss 2.927645\n", "Epoch 2767, Loss 2.927645\n", "Epoch 2768, Loss 2.927646\n", "Epoch 2769, Loss 2.927645\n", "Epoch 2770, Loss 2.927645\n", "Epoch 2771, Loss 2.927645\n", "Epoch 2772, Loss 2.927645\n", "Epoch 2773, Loss 2.927645\n", "Epoch 2774, Loss 2.927645\n", "Epoch 2775, Loss 2.927645\n", "Epoch 2776, Loss 2.927646\n", "Epoch 2777, Loss 2.927646\n", "Epoch 2778, Loss 2.927645\n", "Epoch 2779, Loss 2.927645\n", "Epoch 2780, Loss 2.927645\n", "Epoch 2781, Loss 2.927645\n", "Epoch 2782, Loss 2.927646\n", "Epoch 2783, Loss 2.927646\n", "Epoch 2784, Loss 2.927646\n", "Epoch 2785, Loss 2.927645\n", "Epoch 2786, Loss 2.927645\n", "Epoch 2787, Loss 2.927645\n", "Epoch 2788, Loss 2.927645\n", "Epoch 2789, Loss 2.927645\n", "Epoch 2790, Loss 2.927645\n", "Epoch 2791, Loss 2.927646\n", "Epoch 2792, Loss 2.927646\n", "Epoch 2793, Loss 2.927645\n", "Epoch 2794, Loss 2.927645\n", "Epoch 2795, Loss 2.927645\n", "Epoch 2796, Loss 2.927645\n", "Epoch 2797, Loss 2.927645\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 2798, Loss 2.927646\n", "Epoch 2799, Loss 2.927646\n", "Epoch 2800, Loss 2.927645\n", "Epoch 2801, Loss 2.927645\n", "Epoch 2802, Loss 2.927645\n", "Epoch 2803, Loss 2.927645\n", "Epoch 2804, Loss 2.927645\n", "Epoch 2805, Loss 2.927646\n", "Epoch 2806, Loss 2.927646\n", "Epoch 2807, Loss 2.927645\n", "Epoch 2808, Loss 2.927645\n", "Epoch 2809, Loss 2.927645\n", "Epoch 2810, Loss 2.927645\n", "Epoch 2811, Loss 2.927645\n", "Epoch 2812, Loss 2.927646\n", "Epoch 2813, Loss 2.927646\n", "Epoch 2814, Loss 2.927645\n", "Epoch 2815, Loss 2.927645\n", "Epoch 2816, Loss 2.927645\n", "Epoch 2817, Loss 2.927645\n", "Epoch 2818, Loss 2.927645\n", "Epoch 2819, Loss 2.927646\n", "Epoch 2820, Loss 2.927646\n", "Epoch 2821, Loss 2.927645\n", "Epoch 2822, Loss 2.927645\n", "Epoch 2823, Loss 2.927645\n", "Epoch 2824, Loss 2.927645\n", "Epoch 2825, Loss 2.927645\n", "Epoch 2826, Loss 2.927646\n", "Epoch 2827, Loss 2.927646\n", "Epoch 2828, Loss 2.927645\n", "Epoch 2829, Loss 2.927645\n", "Epoch 2830, Loss 2.927645\n", "Epoch 2831, Loss 2.927645\n", "Epoch 2832, Loss 2.927645\n", "Epoch 2833, Loss 2.927646\n", "Epoch 2834, Loss 2.927646\n", "Epoch 2835, Loss 2.927645\n", "Epoch 2836, Loss 2.927645\n", "Epoch 2837, Loss 2.927645\n", "Epoch 2838, Loss 2.927645\n", "Epoch 2839, Loss 2.927645\n", "Epoch 2840, Loss 2.927646\n", "Epoch 2841, Loss 2.927646\n", "Epoch 2842, Loss 2.927645\n", "Epoch 2843, Loss 2.927645\n", "Epoch 2844, Loss 2.927645\n", "Epoch 2845, Loss 2.927645\n", "Epoch 2846, Loss 2.927645\n", "Epoch 2847, Loss 2.927646\n", "Epoch 2848, Loss 2.927646\n", "Epoch 2849, Loss 2.927645\n", "Epoch 2850, Loss 2.927645\n", "Epoch 2851, Loss 2.927645\n", "Epoch 2852, Loss 2.927645\n", "Epoch 2853, Loss 2.927645\n", "Epoch 2854, Loss 2.927646\n", "Epoch 2855, Loss 2.927646\n", "Epoch 2856, Loss 2.927645\n", "Epoch 2857, Loss 2.927645\n", "Epoch 
2858, Loss 2.927645\n", "Epoch 2859, Loss 2.927645\n", "Epoch 2860, Loss 2.927645\n", "Epoch 2861, Loss 2.927646\n", "Epoch 2862, Loss 2.927646\n", "Epoch 2863, Loss 2.927645\n", "Epoch 2864, Loss 2.927645\n", "Epoch 2865, Loss 2.927645\n", "Epoch 2866, Loss 2.927645\n", "Epoch 2867, Loss 2.927645\n", "Epoch 2868, Loss 2.927646\n", "Epoch 2869, Loss 2.927646\n", "Epoch 2870, Loss 2.927645\n", "Epoch 2871, Loss 2.927645\n", "Epoch 2872, Loss 2.927645\n", "Epoch 2873, Loss 2.927645\n", "Epoch 2874, Loss 2.927645\n", "Epoch 2875, Loss 2.927646\n", "Epoch 2876, Loss 2.927646\n", "Epoch 2877, Loss 2.927645\n", "Epoch 2878, Loss 2.927645\n", "Epoch 2879, Loss 2.927645\n", "Epoch 2880, Loss 2.927645\n", "Epoch 2881, Loss 2.927645\n", "Epoch 2882, Loss 2.927646\n", "Epoch 2883, Loss 2.927646\n", "Epoch 2884, Loss 2.927645\n", "Epoch 2885, Loss 2.927645\n", "Epoch 2886, Loss 2.927645\n", "Epoch 2887, Loss 2.927645\n", "Epoch 2888, Loss 2.927645\n", "Epoch 2889, Loss 2.927646\n", "Epoch 2890, Loss 2.927646\n", "Epoch 2891, Loss 2.927645\n", "Epoch 2892, Loss 2.927645\n", "Epoch 2893, Loss 2.927645\n", "Epoch 2894, Loss 2.927645\n", "Epoch 2895, Loss 2.927645\n", "Epoch 2896, Loss 2.927646\n", "Epoch 2897, Loss 2.927646\n", "Epoch 2898, Loss 2.927645\n", "Epoch 2899, Loss 2.927645\n", "Epoch 2900, Loss 2.927645\n", "Epoch 2901, Loss 2.927645\n", "Epoch 2902, Loss 2.927645\n", "Epoch 2903, Loss 2.927646\n", "Epoch 2904, Loss 2.927646\n", "Epoch 2905, Loss 2.927645\n", "Epoch 2906, Loss 2.927645\n", "Epoch 2907, Loss 2.927645\n", "Epoch 2908, Loss 2.927645\n", "Epoch 2909, Loss 2.927645\n", "Epoch 2910, Loss 2.927646\n", "Epoch 2911, Loss 2.927646\n", "Epoch 2912, Loss 2.927645\n", "Epoch 2913, Loss 2.927645\n", "Epoch 2914, Loss 2.927645\n", "Epoch 2915, Loss 2.927645\n", "Epoch 2916, Loss 2.927645\n", "Epoch 2917, Loss 2.927646\n", "Epoch 2918, Loss 2.927646\n", "Epoch 2919, Loss 2.927645\n", "Epoch 2920, Loss 2.927645\n", "Epoch 2921, Loss 2.927645\n", "Epoch 2922, Loss 2.927645\n", "Epoch 2923, Loss 2.927645\n", "Epoch 2924, Loss 2.927646\n", "Epoch 2925, Loss 2.927646\n", "Epoch 2926, Loss 2.927645\n", "Epoch 2927, Loss 2.927645\n", "Epoch 2928, Loss 2.927645\n", "Epoch 2929, Loss 2.927645\n", "Epoch 2930, Loss 2.927645\n", "Epoch 2931, Loss 2.927646\n", "Epoch 2932, Loss 2.927646\n", "Epoch 2933, Loss 2.927645\n", "Epoch 2934, Loss 2.927645\n", "Epoch 2935, Loss 2.927645\n", "Epoch 2936, Loss 2.927645\n", "Epoch 2937, Loss 2.927645\n", "Epoch 2938, Loss 2.927646\n", "Epoch 2939, Loss 2.927646\n", "Epoch 2940, Loss 2.927645\n", "Epoch 2941, Loss 2.927645\n", "Epoch 2942, Loss 2.927645\n", "Epoch 2943, Loss 2.927645\n", "Epoch 2944, Loss 2.927645\n", "Epoch 2945, Loss 2.927646\n", "Epoch 2946, Loss 2.927646\n", "Epoch 2947, Loss 2.927645\n", "Epoch 2948, Loss 2.927645\n", "Epoch 2949, Loss 2.927645\n", "Epoch 2950, Loss 2.927645\n", "Epoch 2951, Loss 2.927645\n", "Epoch 2952, Loss 2.927646\n", "Epoch 2953, Loss 2.927646\n", "Epoch 2954, Loss 2.927645\n", "Epoch 2955, Loss 2.927645\n", "Epoch 2956, Loss 2.927645\n", "Epoch 2957, Loss 2.927645\n", "Epoch 2958, Loss 2.927645\n", "Epoch 2959, Loss 2.927646\n", "Epoch 2960, Loss 2.927646\n", "Epoch 2961, Loss 2.927645\n", "Epoch 2962, Loss 2.927645\n", "Epoch 2963, Loss 2.927645\n", "Epoch 2964, Loss 2.927645\n", "Epoch 2965, Loss 2.927645\n", "Epoch 2966, Loss 2.927646\n", "Epoch 2967, Loss 2.927646\n", "Epoch 2968, Loss 2.927645\n", "Epoch 2969, Loss 2.927645\n", "Epoch 2970, Loss 2.927645\n", "Epoch 2971, Loss 2.927645\n", "Epoch 2972, Loss 
2.927645\n", "Epoch 2973, Loss 2.927646\n", "Epoch 2974, Loss 2.927646\n", "Epoch 2975, Loss 2.927645\n", "Epoch 2976, Loss 2.927645\n", "Epoch 2977, Loss 2.927645\n", "Epoch 2978, Loss 2.927645\n", "Epoch 2979, Loss 2.927645\n", "Epoch 2980, Loss 2.927646\n", "Epoch 2981, Loss 2.927646\n", "Epoch 2982, Loss 2.927645\n", "Epoch 2983, Loss 2.927645\n", "Epoch 2984, Loss 2.927645\n", "Epoch 2985, Loss 2.927645\n", "Epoch 2986, Loss 2.927645\n", "Epoch 2987, Loss 2.927646\n", "Epoch 2988, Loss 2.927646\n", "Epoch 2989, Loss 2.927645\n", "Epoch 2990, Loss 2.927645\n", "Epoch 2991, Loss 2.927645\n", "Epoch 2992, Loss 2.927645\n", "Epoch 2993, Loss 2.927645\n", "Epoch 2994, Loss 2.927646\n", "Epoch 2995, Loss 2.927646\n", "Epoch 2996, Loss 2.927645\n", "Epoch 2997, Loss 2.927645\n", "Epoch 2998, Loss 2.927645\n", "Epoch 2999, Loss 2.927645\n", "Epoch 3000, Loss 2.927645\n", "Epoch 3001, Loss 2.927646\n", "Epoch 3002, Loss 2.927646\n", "Epoch 3003, Loss 2.927645\n", "Epoch 3004, Loss 2.927645\n", "Epoch 3005, Loss 2.927645\n", "Epoch 3006, Loss 2.927645\n", "Epoch 3007, Loss 2.927645\n", "Epoch 3008, Loss 2.927646\n", "Epoch 3009, Loss 2.927646\n", "Epoch 3010, Loss 2.927645\n", "Epoch 3011, Loss 2.927645\n", "Epoch 3012, Loss 2.927646\n", "Epoch 3013, Loss 2.927645\n", "Epoch 3014, Loss 2.927645\n", "Epoch 3015, Loss 2.927646\n", "Epoch 3016, Loss 2.927644\n", "Epoch 3017, Loss 2.927646\n", "Epoch 3018, Loss 2.927646\n", "Epoch 3019, Loss 2.927646\n", "Epoch 3020, Loss 2.927646\n", "Epoch 3021, Loss 2.927646\n", "Epoch 3022, Loss 2.927645\n", "Epoch 3023, Loss 2.927646\n", "Epoch 3024, Loss 2.927646\n", "Epoch 3025, Loss 2.927646\n", "Epoch 3026, Loss 2.927646\n", "Epoch 3027, Loss 2.927646\n", "Epoch 3028, Loss 2.927645\n", "Epoch 3029, Loss 2.927645\n", "Epoch 3030, Loss 2.927646\n", "Epoch 3031, Loss 2.927646\n", "Epoch 3032, Loss 2.927646\n", "Epoch 3033, Loss 2.927646\n", "Epoch 3034, Loss 2.927645\n", "Epoch 3035, Loss 2.927645\n", "Epoch 3036, Loss 2.927646\n", "Epoch 3037, Loss 2.927646\n", "Epoch 3038, Loss 2.927646\n", "Epoch 3039, Loss 2.927646\n", "Epoch 3040, Loss 2.927646\n", "Epoch 3041, Loss 2.927646\n", "Epoch 3042, Loss 2.927646\n", "Epoch 3043, Loss 2.927646\n", "Epoch 3044, Loss 2.927646\n", "Epoch 3045, Loss 2.927646\n", "Epoch 3046, Loss 2.927646\n", "Epoch 3047, Loss 2.927646\n", "Epoch 3048, Loss 2.927646\n", "Epoch 3049, Loss 2.927646\n", "Epoch 3050, Loss 2.927646\n", "Epoch 3051, Loss 2.927646\n", "Epoch 3052, Loss 2.927646\n", "Epoch 3053, Loss 2.927646\n", "Epoch 3054, Loss 2.927646\n", "Epoch 3055, Loss 2.927646\n", "Epoch 3056, Loss 2.927646\n", "Epoch 3057, Loss 2.927646\n", "Epoch 3058, Loss 2.927646\n", "Epoch 3059, Loss 2.927646\n", "Epoch 3060, Loss 2.927646\n", "Epoch 3061, Loss 2.927646\n", "Epoch 3062, Loss 2.927646\n", "Epoch 3063, Loss 2.927646\n", "Epoch 3064, Loss 2.927646\n", "Epoch 3065, Loss 2.927646\n", "Epoch 3066, Loss 2.927646\n", "Epoch 3067, Loss 2.927645\n", "Epoch 3068, Loss 2.927645\n", "Epoch 3069, Loss 2.927646\n", "Epoch 3070, Loss 2.927646\n", "Epoch 3071, Loss 2.927646\n", "Epoch 3072, Loss 2.927646\n", "Epoch 3073, Loss 2.927645\n", "Epoch 3074, Loss 2.927645\n", "Epoch 3075, Loss 2.927646\n", "Epoch 3076, Loss 2.927646\n", "Epoch 3077, Loss 2.927646\n", "Epoch 3078, Loss 2.927646\n", "Epoch 3079, Loss 2.927646\n", "Epoch 3080, Loss 2.927646\n", "Epoch 3081, Loss 2.927646\n", "Epoch 3082, Loss 2.927646\n", "Epoch 3083, Loss 2.927646\n", "Epoch 3084, Loss 2.927646\n", "Epoch 3085, Loss 2.927646\n", "Epoch 3086, Loss 2.927646\n", "Epoch 
3087, Loss 2.927646\n", "Epoch 3088, Loss 2.927646\n", "Epoch 3089, Loss 2.927646\n", "Epoch 3090, Loss 2.927646\n", "Epoch 3091, Loss 2.927646\n", "Epoch 3092, Loss 2.927646\n", "Epoch 3093, Loss 2.927646\n", "Epoch 3094, Loss 2.927646\n", "Epoch 3095, Loss 2.927646\n", "Epoch 3096, Loss 2.927646\n", "Epoch 3097, Loss 2.927646\n", "Epoch 3098, Loss 2.927646\n", "Epoch 3099, Loss 2.927646\n", "Epoch 3100, Loss 2.927646\n", "Epoch 3101, Loss 2.927646\n", "Epoch 3102, Loss 2.927646\n", "Epoch 3103, Loss 2.927646\n", "Epoch 3104, Loss 2.927646\n", "Epoch 3105, Loss 2.927645\n", "Epoch 3106, Loss 2.927645\n", "Epoch 3107, Loss 2.927646\n", "Epoch 3108, Loss 2.927646\n", "Epoch 3109, Loss 2.927646\n", "Epoch 3110, Loss 2.927646\n", "Epoch 3111, Loss 2.927645\n", "Epoch 3112, Loss 2.927645\n", "Epoch 3113, Loss 2.927646\n", "Epoch 3114, Loss 2.927646\n", "Epoch 3115, Loss 2.927646\n", "Epoch 3116, Loss 2.927646\n", "Epoch 3117, Loss 2.927646\n", "Epoch 3118, Loss 2.927646\n", "Epoch 3119, Loss 2.927646\n", "Epoch 3120, Loss 2.927646\n", "Epoch 3121, Loss 2.927646\n", "Epoch 3122, Loss 2.927646\n", "Epoch 3123, Loss 2.927646\n", "Epoch 3124, Loss 2.927646\n", "Epoch 3125, Loss 2.927646\n", "Epoch 3126, Loss 2.927646\n", "Epoch 3127, Loss 2.927646\n", "Epoch 3128, Loss 2.927646\n", "Epoch 3129, Loss 2.927646\n", "Epoch 3130, Loss 2.927646\n", "Epoch 3131, Loss 2.927646\n", "Epoch 3132, Loss 2.927646\n", "Epoch 3133, Loss 2.927646\n", "Epoch 3134, Loss 2.927646\n", "Epoch 3135, Loss 2.927646\n", "Epoch 3136, Loss 2.927646\n", "Epoch 3137, Loss 2.927646\n", "Epoch 3138, Loss 2.927646\n", "Epoch 3139, Loss 2.927646\n", "Epoch 3140, Loss 2.927646\n", "Epoch 3141, Loss 2.927646\n", "Epoch 3142, Loss 2.927645\n", "Epoch 3143, Loss 2.927645\n", "Epoch 3144, Loss 2.927646\n", "Epoch 3145, Loss 2.927646\n", "Epoch 3146, Loss 2.927646\n", "Epoch 3147, Loss 2.927646\n", "Epoch 3148, Loss 2.927645\n", "Epoch 3149, Loss 2.927645\n", "Epoch 3150, Loss 2.927646\n", "Epoch 3151, Loss 2.927646\n", "Epoch 3152, Loss 2.927646\n", "Epoch 3153, Loss 2.927646\n", "Epoch 3154, Loss 2.927646\n", "Epoch 3155, Loss 2.927646\n", "Epoch 3156, Loss 2.927646\n", "Epoch 3157, Loss 2.927646\n", "Epoch 3158, Loss 2.927646\n", "Epoch 3159, Loss 2.927646\n", "Epoch 3160, Loss 2.927646\n", "Epoch 3161, Loss 2.927646\n", "Epoch 3162, Loss 2.927646\n", "Epoch 3163, Loss 2.927646\n", "Epoch 3164, Loss 2.927646\n", "Epoch 3165, Loss 2.927646\n", "Epoch 3166, Loss 2.927646\n", "Epoch 3167, Loss 2.927646\n", "Epoch 3168, Loss 2.927646\n", "Epoch 3169, Loss 2.927646\n", "Epoch 3170, Loss 2.927646\n", "Epoch 3171, Loss 2.927646\n", "Epoch 3172, Loss 2.927646\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 3173, Loss 2.927646\n", "Epoch 3174, Loss 2.927646\n", "Epoch 3175, Loss 2.927646\n", "Epoch 3176, Loss 2.927646\n", "Epoch 3177, Loss 2.927646\n", "Epoch 3178, Loss 2.927645\n", "Epoch 3179, Loss 2.927645\n", "Epoch 3180, Loss 2.927646\n", "Epoch 3181, Loss 2.927646\n", "Epoch 3182, Loss 2.927646\n", "Epoch 3183, Loss 2.927646\n", "Epoch 3184, Loss 2.927645\n", "Epoch 3185, Loss 2.927645\n", "Epoch 3186, Loss 2.927646\n", "Epoch 3187, Loss 2.927646\n", "Epoch 3188, Loss 2.927646\n", "Epoch 3189, Loss 2.927646\n", "Epoch 3190, Loss 2.927646\n", "Epoch 3191, Loss 2.927646\n", "Epoch 3192, Loss 2.927646\n", "Epoch 3193, Loss 2.927646\n", "Epoch 3194, Loss 2.927646\n", "Epoch 3195, Loss 2.927646\n", "Epoch 3196, Loss 2.927646\n", "Epoch 3197, Loss 2.927646\n", "Epoch 3198, Loss 2.927646\n", "Epoch 3199, Loss 2.927646\n", 
"Epoch 3200, Loss 2.927646\n", "Epoch 3201, Loss 2.927646\n", "Epoch 3202, Loss 2.927646\n", "Epoch 3203, Loss 2.927646\n", "Epoch 3204, Loss 2.927646\n", "Epoch 3205, Loss 2.927646\n", "Epoch 3206, Loss 2.927646\n", "Epoch 3207, Loss 2.927646\n", "Epoch 3208, Loss 2.927646\n", "Epoch 3209, Loss 2.927646\n", "Epoch 3210, Loss 2.927646\n", "Epoch 3211, Loss 2.927646\n", "Epoch 3212, Loss 2.927646\n", "Epoch 3213, Loss 2.927645\n", "Epoch 3214, Loss 2.927645\n", "Epoch 3215, Loss 2.927646\n", "Epoch 3216, Loss 2.927646\n", "Epoch 3217, Loss 2.927646\n", "Epoch 3218, Loss 2.927646\n", "Epoch 3219, Loss 2.927645\n", "Epoch 3220, Loss 2.927645\n", "Epoch 3221, Loss 2.927646\n", "Epoch 3222, Loss 2.927646\n", "Epoch 3223, Loss 2.927646\n", "Epoch 3224, Loss 2.927646\n", "Epoch 3225, Loss 2.927646\n", "Epoch 3226, Loss 2.927646\n", "Epoch 3227, Loss 2.927646\n", "Epoch 3228, Loss 2.927646\n", "Epoch 3229, Loss 2.927646\n", "Epoch 3230, Loss 2.927646\n", "Epoch 3231, Loss 2.927646\n", "Epoch 3232, Loss 2.927646\n", "Epoch 3233, Loss 2.927646\n", "Epoch 3234, Loss 2.927646\n", "Epoch 3235, Loss 2.927646\n", "Epoch 3236, Loss 2.927646\n", "Epoch 3237, Loss 2.927646\n", "Epoch 3238, Loss 2.927646\n", "Epoch 3239, Loss 2.927646\n", "Epoch 3240, Loss 2.927646\n", "Epoch 3241, Loss 2.927646\n", "Epoch 3242, Loss 2.927646\n", "Epoch 3243, Loss 2.927646\n", "Epoch 3244, Loss 2.927646\n", "Epoch 3245, Loss 2.927646\n", "Epoch 3246, Loss 2.927646\n", "Epoch 3247, Loss 2.927646\n", "Epoch 3248, Loss 2.927645\n", "Epoch 3249, Loss 2.927646\n", "Epoch 3250, Loss 2.927647\n", "Epoch 3251, Loss 2.927647\n", "Epoch 3252, Loss 2.927646\n", "Epoch 3253, Loss 2.927646\n", "Epoch 3254, Loss 2.927646\n", "Epoch 3255, Loss 2.927646\n", "Epoch 3256, Loss 2.927647\n", "Epoch 3257, Loss 2.927645\n", "Epoch 3258, Loss 2.927647\n", "Epoch 3259, Loss 2.927646\n", "Epoch 3260, Loss 2.927646\n", "Epoch 3261, Loss 2.927647\n", "Epoch 3262, Loss 2.927645\n", "Epoch 3263, Loss 2.927645\n", "Epoch 3264, Loss 2.927646\n", "Epoch 3265, Loss 2.927646\n", "Epoch 3266, Loss 2.927646\n", "Epoch 3267, Loss 2.927645\n", "Epoch 3268, Loss 2.927645\n", "Epoch 3269, Loss 2.927647\n", "Epoch 3270, Loss 2.927646\n", "Epoch 3271, Loss 2.927646\n", "Epoch 3272, Loss 2.927647\n", "Epoch 3273, Loss 2.927645\n", "Epoch 3274, Loss 2.927645\n", "Epoch 3275, Loss 2.927647\n", "Epoch 3276, Loss 2.927645\n", "Epoch 3277, Loss 2.927646\n", "Epoch 3278, Loss 2.927645\n", "Epoch 3279, Loss 2.927645\n", "Epoch 3280, Loss 2.927645\n", "Epoch 3281, Loss 2.927647\n", "Epoch 3282, Loss 2.927646\n", "Epoch 3283, Loss 2.927646\n", "Epoch 3284, Loss 2.927645\n", "Epoch 3285, Loss 2.927645\n", "Epoch 3286, Loss 2.927645\n", "Epoch 3287, Loss 2.927647\n", "Epoch 3288, Loss 2.927646\n", "Epoch 3289, Loss 2.927647\n", "Epoch 3290, Loss 2.927645\n", "Epoch 3291, Loss 2.927645\n", "Epoch 3292, Loss 2.927645\n", "Epoch 3293, Loss 2.927647\n", "Epoch 3294, Loss 2.927646\n", "Epoch 3295, Loss 2.927647\n", "Epoch 3296, Loss 2.927645\n", "Epoch 3297, Loss 2.927645\n", "Epoch 3298, Loss 2.927645\n", "Epoch 3299, Loss 2.927647\n", "Epoch 3300, Loss 2.927647\n", "Epoch 3301, Loss 2.927647\n", "Epoch 3302, Loss 2.927645\n", "Epoch 3303, Loss 2.927645\n", "Epoch 3304, Loss 2.927647\n", "Epoch 3305, Loss 2.927647\n", "Epoch 3306, Loss 2.927647\n", "Epoch 3307, Loss 2.927645\n", "Epoch 3308, Loss 2.927645\n", "Epoch 3309, Loss 2.927645\n", "Epoch 3310, Loss 2.927647\n", "Epoch 3311, Loss 2.927647\n", "Epoch 3312, Loss 2.927647\n", "Epoch 3313, Loss 2.927645\n", "Epoch 3314, Loss 
2.927645\n", "Epoch 3315, Loss 2.927647\n", "Epoch 3316, Loss 2.927647\n", "Epoch 3317, Loss 2.927647\n", "Epoch 3318, Loss 2.927645\n", "Epoch 3319, Loss 2.927645\n", "Epoch 3320, Loss 2.927645\n", "Epoch 3321, Loss 2.927647\n", "Epoch 3322, Loss 2.927647\n", "Epoch 3323, Loss 2.927647\n", "Epoch 3324, Loss 2.927645\n", "Epoch 3325, Loss 2.927645\n", "Epoch 3326, Loss 2.927647\n", "Epoch 3327, Loss 2.927647\n", "Epoch 3328, Loss 2.927647\n", "Epoch 3329, Loss 2.927645\n", "Epoch 3330, Loss 2.927645\n", "Epoch 3331, Loss 2.927645\n", "Epoch 3332, Loss 2.927647\n", "Epoch 3333, Loss 2.927647\n", "Epoch 3334, Loss 2.927647\n", "Epoch 3335, Loss 2.927645\n", "Epoch 3336, Loss 2.927645\n", "Epoch 3337, Loss 2.927647\n", "Epoch 3338, Loss 2.927647\n", "Epoch 3339, Loss 2.927647\n", "Epoch 3340, Loss 2.927645\n", "Epoch 3341, Loss 2.927645\n", "Epoch 3342, Loss 2.927645\n", "Epoch 3343, Loss 2.927647\n", "Epoch 3344, Loss 2.927647\n", "Epoch 3345, Loss 2.927647\n", "Epoch 3346, Loss 2.927645\n", "Epoch 3347, Loss 2.927645\n", "Epoch 3348, Loss 2.927647\n", "Epoch 3349, Loss 2.927647\n", "Epoch 3350, Loss 2.927647\n", "Epoch 3351, Loss 2.927645\n", "Epoch 3352, Loss 2.927645\n", "Epoch 3353, Loss 2.927645\n", "Epoch 3354, Loss 2.927647\n", "Epoch 3355, Loss 2.927647\n", "Epoch 3356, Loss 2.927647\n", "Epoch 3357, Loss 2.927645\n", "Epoch 3358, Loss 2.927645\n", "Epoch 3359, Loss 2.927647\n", "Epoch 3360, Loss 2.927647\n", "Epoch 3361, Loss 2.927647\n", "Epoch 3362, Loss 2.927645\n", "Epoch 3363, Loss 2.927645\n", "Epoch 3364, Loss 2.927645\n", "Epoch 3365, Loss 2.927647\n", "Epoch 3366, Loss 2.927647\n", "Epoch 3367, Loss 2.927647\n", "Epoch 3368, Loss 2.927645\n", "Epoch 3369, Loss 2.927645\n", "Epoch 3370, Loss 2.927647\n", "Epoch 3371, Loss 2.927647\n", "Epoch 3372, Loss 2.927647\n", "Epoch 3373, Loss 2.927645\n", "Epoch 3374, Loss 2.927645\n", "Epoch 3375, Loss 2.927645\n", "Epoch 3376, Loss 2.927647\n", "Epoch 3377, Loss 2.927647\n", "Epoch 3378, Loss 2.927647\n", "Epoch 3379, Loss 2.927645\n", "Epoch 3380, Loss 2.927645\n", "Epoch 3381, Loss 2.927647\n", "Epoch 3382, Loss 2.927647\n", "Epoch 3383, Loss 2.927647\n", "Epoch 3384, Loss 2.927645\n", "Epoch 3385, Loss 2.927645\n", "Epoch 3386, Loss 2.927645\n", "Epoch 3387, Loss 2.927647\n", "Epoch 3388, Loss 2.927647\n", "Epoch 3389, Loss 2.927647\n", "Epoch 3390, Loss 2.927645\n", "Epoch 3391, Loss 2.927645\n", "Epoch 3392, Loss 2.927647\n", "Epoch 3393, Loss 2.927647\n", "Epoch 3394, Loss 2.927647\n", "Epoch 3395, Loss 2.927645\n", "Epoch 3396, Loss 2.927645\n", "Epoch 3397, Loss 2.927645\n", "Epoch 3398, Loss 2.927647\n", "Epoch 3399, Loss 2.927647\n", "Epoch 3400, Loss 2.927647\n", "Epoch 3401, Loss 2.927645\n", "Epoch 3402, Loss 2.927645\n", "Epoch 3403, Loss 2.927647\n", "Epoch 3404, Loss 2.927647\n", "Epoch 3405, Loss 2.927647\n", "Epoch 3406, Loss 2.927645\n", "Epoch 3407, Loss 2.927645\n", "Epoch 3408, Loss 2.927645\n", "Epoch 3409, Loss 2.927647\n", "Epoch 3410, Loss 2.927647\n", "Epoch 3411, Loss 2.927647\n", "Epoch 3412, Loss 2.927645\n", "Epoch 3413, Loss 2.927645\n", "Epoch 3414, Loss 2.927647\n", "Epoch 3415, Loss 2.927647\n", "Epoch 3416, Loss 2.927647\n", "Epoch 3417, Loss 2.927645\n", "Epoch 3418, Loss 2.927645\n", "Epoch 3419, Loss 2.927645\n", "Epoch 3420, Loss 2.927647\n", "Epoch 3421, Loss 2.927647\n", "Epoch 3422, Loss 2.927647\n", "Epoch 3423, Loss 2.927645\n", "Epoch 3424, Loss 2.927645\n", "Epoch 3425, Loss 2.927647\n", "Epoch 3426, Loss 2.927647\n", "Epoch 3427, Loss 2.927647\n", "Epoch 3428, Loss 2.927645\n", "Epoch 
3429, Loss 2.927645\n", "Epoch 3430, Loss 2.927645\n", "Epoch 3431, Loss 2.927647\n", "Epoch 3432, Loss 2.927647\n", "Epoch 3433, Loss 2.927647\n", "Epoch 3434, Loss 2.927645\n", "Epoch 3435, Loss 2.927645\n", "Epoch 3436, Loss 2.927647\n", "Epoch 3437, Loss 2.927647\n", "Epoch 3438, Loss 2.927647\n", "Epoch 3439, Loss 2.927645\n", "Epoch 3440, Loss 2.927645\n", "Epoch 3441, Loss 2.927647\n", "Epoch 3442, Loss 2.927647\n", "Epoch 3443, Loss 2.927645\n", "Epoch 3444, Loss 2.927645\n", "Epoch 3445, Loss 2.927647\n", "Epoch 3446, Loss 2.927647\n", "Epoch 3447, Loss 2.927647\n", "Epoch 3448, Loss 2.927645\n", "Epoch 3449, Loss 2.927645\n", "Epoch 3450, Loss 2.927647\n", "Epoch 3451, Loss 2.927647\n", "Epoch 3452, Loss 2.927645\n", "Epoch 3453, Loss 2.927645\n", "Epoch 3454, Loss 2.927647\n", "Epoch 3455, Loss 2.927647\n", "Epoch 3456, Loss 2.927647\n", "Epoch 3457, Loss 2.927645\n", "Epoch 3458, Loss 2.927645\n", "Epoch 3459, Loss 2.927647\n", "Epoch 3460, Loss 2.927647\n", "Epoch 3461, Loss 2.927645\n", "Epoch 3462, Loss 2.927645\n", "Epoch 3463, Loss 2.927647\n", "Epoch 3464, Loss 2.927647\n", "Epoch 3465, Loss 2.927647\n", "Epoch 3466, Loss 2.927645\n", "Epoch 3467, Loss 2.927645\n", "Epoch 3468, Loss 2.927647\n", "Epoch 3469, Loss 2.927647\n", "Epoch 3470, Loss 2.927645\n", "Epoch 3471, Loss 2.927645\n", "Epoch 3472, Loss 2.927647\n", "Epoch 3473, Loss 2.927647\n", "Epoch 3474, Loss 2.927647\n", "Epoch 3475, Loss 2.927645\n", "Epoch 3476, Loss 2.927645\n", "Epoch 3477, Loss 2.927647\n", "Epoch 3478, Loss 2.927647\n", "Epoch 3479, Loss 2.927645\n", "Epoch 3480, Loss 2.927645\n", "Epoch 3481, Loss 2.927647\n", "Epoch 3482, Loss 2.927647\n", "Epoch 3483, Loss 2.927647\n", "Epoch 3484, Loss 2.927645\n", "Epoch 3485, Loss 2.927645\n", "Epoch 3486, Loss 2.927647\n", "Epoch 3487, Loss 2.927647\n", "Epoch 3488, Loss 2.927645\n", "Epoch 3489, Loss 2.927645\n", "Epoch 3490, Loss 2.927647\n", "Epoch 3491, Loss 2.927647\n", "Epoch 3492, Loss 2.927647\n", "Epoch 3493, Loss 2.927645\n", "Epoch 3494, Loss 2.927645\n", "Epoch 3495, Loss 2.927647\n", "Epoch 3496, Loss 2.927647\n", "Epoch 3497, Loss 2.927645\n", "Epoch 3498, Loss 2.927645\n", "Epoch 3499, Loss 2.927647\n", "Epoch 3500, Loss 2.927647\n", "Epoch 3501, Loss 2.927647\n", "Epoch 3502, Loss 2.927645\n", "Epoch 3503, Loss 2.927645\n", "Epoch 3504, Loss 2.927647\n", "Epoch 3505, Loss 2.927647\n", "Epoch 3506, Loss 2.927645\n", "Epoch 3507, Loss 2.927645\n", "Epoch 3508, Loss 2.927647\n", "Epoch 3509, Loss 2.927647\n", "Epoch 3510, Loss 2.927647\n", "Epoch 3511, Loss 2.927645\n", "Epoch 3512, Loss 2.927645\n", "Epoch 3513, Loss 2.927647\n", "Epoch 3514, Loss 2.927647\n", "Epoch 3515, Loss 2.927645\n", "Epoch 3516, Loss 2.927645\n", "Epoch 3517, Loss 2.927647\n", "Epoch 3518, Loss 2.927647\n", "Epoch 3519, Loss 2.927647\n", "Epoch 3520, Loss 2.927645\n", "Epoch 3521, Loss 2.927645\n", "Epoch 3522, Loss 2.927647\n", "Epoch 3523, Loss 2.927647\n", "Epoch 3524, Loss 2.927645\n", "Epoch 3525, Loss 2.927645\n", "Epoch 3526, Loss 2.927647\n", "Epoch 3527, Loss 2.927647\n", "Epoch 3528, Loss 2.927647\n", "Epoch 3529, Loss 2.927645\n", "Epoch 3530, Loss 2.927645\n", "Epoch 3531, Loss 2.927647\n", "Epoch 3532, Loss 2.927647\n", "Epoch 3533, Loss 2.927645\n", "Epoch 3534, Loss 2.927645\n", "Epoch 3535, Loss 2.927647\n", "Epoch 3536, Loss 2.927647\n", "Epoch 3537, Loss 2.927647\n", "Epoch 3538, Loss 2.927645\n", "Epoch 3539, Loss 2.927645\n", "Epoch 3540, Loss 2.927647\n", "Epoch 3541, Loss 2.927647\n", "Epoch 3542, Loss 2.927645\n", "Epoch 3543, Loss 
2.927645\n", "Epoch 3544, Loss 2.927647\n", "Epoch 3545, Loss 2.927647\n", "Epoch 3546, Loss 2.927645\n", "Epoch 3547, Loss 2.927645\n", "Epoch 3548, Loss 2.927644\n", "Epoch 3549, Loss 2.927647\n", "Epoch 3550, Loss 2.927644\n", "Epoch 3551, Loss 2.927644\n", "Epoch 3552, Loss 2.927644\n", "Epoch 3553, Loss 2.927644\n", "Epoch 3554, Loss 2.927644\n", "Epoch 3555, Loss 2.927644\n", "Epoch 3556, Loss 2.927644\n", "Epoch 3557, Loss 2.927644\n", "Epoch 3558, Loss 2.927644\n", "Epoch 3559, Loss 2.927644\n", "Epoch 3560, Loss 2.927644\n", "Epoch 3561, Loss 2.927644\n", "Epoch 3562, Loss 2.927644\n", "Epoch 3563, Loss 2.927644\n", "Epoch 3564, Loss 2.927644\n", "Epoch 3565, Loss 2.927644\n", "Epoch 3566, Loss 2.927644\n", "Epoch 3567, Loss 2.927644\n", "Epoch 3568, Loss 2.927644\n", "Epoch 3569, Loss 2.927644\n", "Epoch 3570, Loss 2.927644\n", "Epoch 3571, Loss 2.927644\n", "Epoch 3572, Loss 2.927644\n", "Epoch 3573, Loss 2.927644\n", "Epoch 3574, Loss 2.927644\n", "Epoch 3575, Loss 2.927644\n", "Epoch 3576, Loss 2.927644\n", "Epoch 3577, Loss 2.927644\n", "Epoch 3578, Loss 2.927644\n", "Epoch 3579, Loss 2.927644\n", "Epoch 3580, Loss 2.927644\n", "Epoch 3581, Loss 2.927644\n", "Epoch 3582, Loss 2.927644\n", "Epoch 3583, Loss 2.927644\n", "Epoch 3584, Loss 2.927644\n", "Epoch 3585, Loss 2.927644\n", "Epoch 3586, Loss 2.927644\n", "Epoch 3587, Loss 2.927644\n", "Epoch 3588, Loss 2.927644\n", "Epoch 3589, Loss 2.927644\n", "Epoch 3590, Loss 2.927644\n", "Epoch 3591, Loss 2.927644\n", "Epoch 3592, Loss 2.927644\n", "Epoch 3593, Loss 2.927644\n", "Epoch 3594, Loss 2.927644\n", "Epoch 3595, Loss 2.927644\n", "Epoch 3596, Loss 2.927644\n", "Epoch 3597, Loss 2.927644\n", "Epoch 3598, Loss 2.927644\n", "Epoch 3599, Loss 2.927644\n", "Epoch 3600, Loss 2.927644\n", "Epoch 3601, Loss 2.927644\n", "Epoch 3602, Loss 2.927644\n", "Epoch 3603, Loss 2.927644\n", "Epoch 3604, Loss 2.927644\n", "Epoch 3605, Loss 2.927644\n", "Epoch 3606, Loss 2.927644\n", "Epoch 3607, Loss 2.927644\n", "Epoch 3608, Loss 2.927644\n", "Epoch 3609, Loss 2.927644\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 3610, Loss 2.927644\n", "Epoch 3611, Loss 2.927644\n", "Epoch 3612, Loss 2.927644\n", "Epoch 3613, Loss 2.927644\n", "Epoch 3614, Loss 2.927644\n", "Epoch 3615, Loss 2.927644\n", "Epoch 3616, Loss 2.927644\n", "Epoch 3617, Loss 2.927644\n", "Epoch 3618, Loss 2.927644\n", "Epoch 3619, Loss 2.927644\n", "Epoch 3620, Loss 2.927644\n", "Epoch 3621, Loss 2.927644\n", "Epoch 3622, Loss 2.927644\n", "Epoch 3623, Loss 2.927644\n", "Epoch 3624, Loss 2.927644\n", "Epoch 3625, Loss 2.927644\n", "Epoch 3626, Loss 2.927644\n", "Epoch 3627, Loss 2.927644\n", "Epoch 3628, Loss 2.927644\n", "Epoch 3629, Loss 2.927644\n", "Epoch 3630, Loss 2.927644\n", "Epoch 3631, Loss 2.927644\n", "Epoch 3632, Loss 2.927644\n", "Epoch 3633, Loss 2.927644\n", "Epoch 3634, Loss 2.927644\n", "Epoch 3635, Loss 2.927644\n", "Epoch 3636, Loss 2.927644\n", "Epoch 3637, Loss 2.927644\n", "Epoch 3638, Loss 2.927644\n", "Epoch 3639, Loss 2.927644\n", "Epoch 3640, Loss 2.927644\n", "Epoch 3641, Loss 2.927644\n", "Epoch 3642, Loss 2.927644\n", "Epoch 3643, Loss 2.927644\n", "Epoch 3644, Loss 2.927644\n", "Epoch 3645, Loss 2.927644\n", "Epoch 3646, Loss 2.927644\n", "Epoch 3647, Loss 2.927644\n", "Epoch 3648, Loss 2.927644\n", "Epoch 3649, Loss 2.927644\n", "Epoch 3650, Loss 2.927644\n", "Epoch 3651, Loss 2.927644\n", "Epoch 3652, Loss 2.927644\n", "Epoch 3653, Loss 2.927644\n", "Epoch 3654, Loss 2.927644\n", "Epoch 3655, Loss 2.927644\n", "Epoch 
3656, Loss 2.927644\n", "Epoch 3657, Loss 2.927644\n", "Epoch 3658, Loss 2.927644\n", "Epoch 3659, Loss 2.927644\n", "Epoch 3660, Loss 2.927644\n", "Epoch 3661, Loss 2.927644\n", "Epoch 3662, Loss 2.927644\n", "Epoch 3663, Loss 2.927644\n", "Epoch 3664, Loss 2.927644\n", "Epoch 3665, Loss 2.927644\n", "Epoch 3666, Loss 2.927644\n", "Epoch 3667, Loss 2.927644\n", "Epoch 3668, Loss 2.927644\n", "Epoch 3669, Loss 2.927644\n", "Epoch 3670, Loss 2.927644\n", "Epoch 3671, Loss 2.927644\n", "Epoch 3672, Loss 2.927644\n", "Epoch 3673, Loss 2.927644\n", "Epoch 3674, Loss 2.927644\n", "Epoch 3675, Loss 2.927644\n", "Epoch 3676, Loss 2.927644\n", "Epoch 3677, Loss 2.927644\n", "Epoch 3678, Loss 2.927644\n", "Epoch 3679, Loss 2.927644\n", "Epoch 3680, Loss 2.927644\n", "Epoch 3681, Loss 2.927644\n", "Epoch 3682, Loss 2.927644\n", "Epoch 3683, Loss 2.927644\n", "Epoch 3684, Loss 2.927644\n", "Epoch 3685, Loss 2.927644\n", "Epoch 3686, Loss 2.927644\n", "Epoch 3687, Loss 2.927644\n", "Epoch 3688, Loss 2.927644\n", "Epoch 3689, Loss 2.927644\n", "Epoch 3690, Loss 2.927644\n", "Epoch 3691, Loss 2.927644\n", "Epoch 3692, Loss 2.927644\n", "Epoch 3693, Loss 2.927644\n", "Epoch 3694, Loss 2.927644\n", "Epoch 3695, Loss 2.927644\n", "Epoch 3696, Loss 2.927644\n", "Epoch 3697, Loss 2.927644\n", "Epoch 3698, Loss 2.927644\n", "Epoch 3699, Loss 2.927644\n", "Epoch 3700, Loss 2.927644\n", "Epoch 3701, Loss 2.927644\n", "Epoch 3702, Loss 2.927644\n", "Epoch 3703, Loss 2.927644\n", "Epoch 3704, Loss 2.927644\n", "Epoch 3705, Loss 2.927644\n", "Epoch 3706, Loss 2.927644\n", "Epoch 3707, Loss 2.927644\n", "Epoch 3708, Loss 2.927644\n", "Epoch 3709, Loss 2.927644\n", "Epoch 3710, Loss 2.927644\n", "Epoch 3711, Loss 2.927644\n", "Epoch 3712, Loss 2.927644\n", "Epoch 3713, Loss 2.927644\n", "Epoch 3714, Loss 2.927644\n", "Epoch 3715, Loss 2.927644\n", "Epoch 3716, Loss 2.927644\n", "Epoch 3717, Loss 2.927644\n", "Epoch 3718, Loss 2.927644\n", "Epoch 3719, Loss 2.927644\n", "Epoch 3720, Loss 2.927644\n", "Epoch 3721, Loss 2.927644\n", "Epoch 3722, Loss 2.927644\n", "Epoch 3723, Loss 2.927644\n", "Epoch 3724, Loss 2.927644\n", "Epoch 3725, Loss 2.927644\n", "Epoch 3726, Loss 2.927644\n", "Epoch 3727, Loss 2.927644\n", "Epoch 3728, Loss 2.927644\n", "Epoch 3729, Loss 2.927644\n", "Epoch 3730, Loss 2.927644\n", "Epoch 3731, Loss 2.927644\n", "Epoch 3732, Loss 2.927644\n", "Epoch 3733, Loss 2.927644\n", "Epoch 3734, Loss 2.927644\n", "Epoch 3735, Loss 2.927645\n", "Epoch 3736, Loss 2.927645\n", "Epoch 3737, Loss 2.927645\n", "Epoch 3738, Loss 2.927645\n", "Epoch 3739, Loss 2.927645\n", "Epoch 3740, Loss 2.927645\n", "Epoch 3741, Loss 2.927645\n", "Epoch 3742, Loss 2.927645\n", "Epoch 3743, Loss 2.927645\n", "Epoch 3744, Loss 2.927645\n", "Epoch 3745, Loss 2.927645\n", "Epoch 3746, Loss 2.927645\n", "Epoch 3747, Loss 2.927645\n", "Epoch 3748, Loss 2.927645\n", "Epoch 3749, Loss 2.927645\n", "Epoch 3750, Loss 2.927645\n", "Epoch 3751, Loss 2.927645\n", "Epoch 3752, Loss 2.927645\n", "Epoch 3753, Loss 2.927645\n", "Epoch 3754, Loss 2.927645\n", "Epoch 3755, Loss 2.927645\n", "Epoch 3756, Loss 2.927645\n", "Epoch 3757, Loss 2.927645\n", "Epoch 3758, Loss 2.927645\n", "Epoch 3759, Loss 2.927645\n", "Epoch 3760, Loss 2.927645\n", "Epoch 3761, Loss 2.927645\n", "Epoch 3762, Loss 2.927645\n", "Epoch 3763, Loss 2.927645\n", "Epoch 3764, Loss 2.927645\n", "Epoch 3765, Loss 2.927645\n", "Epoch 3766, Loss 2.927645\n", "Epoch 3767, Loss 2.927645\n", "Epoch 3768, Loss 2.927645\n", "Epoch 3769, Loss 2.927645\n", "Epoch 3770, Loss 
2.927645\n", "Epoch 3771, Loss 2.927645\n", "Epoch 3772, Loss 2.927645\n", "Epoch 3773, Loss 2.927646\n", "Epoch 3774, Loss 2.927645\n", "Epoch 3775, Loss 2.927645\n", "Epoch 3776, Loss 2.927645\n", "Epoch 3777, Loss 2.927645\n", "Epoch 3778, Loss 2.927645\n", "Epoch 3779, Loss 2.927645\n", "Epoch 3780, Loss 2.927645\n", "Epoch 3781, Loss 2.927645\n", "Epoch 3782, Loss 2.927645\n", "Epoch 3783, Loss 2.927645\n", "Epoch 3784, Loss 2.927645\n", "Epoch 3785, Loss 2.927645\n", "Epoch 3786, Loss 2.927645\n", "Epoch 3787, Loss 2.927645\n", "Epoch 3788, Loss 2.927645\n", "Epoch 3789, Loss 2.927645\n", "Epoch 3790, Loss 2.927645\n", "Epoch 3791, Loss 2.927645\n", "Epoch 3792, Loss 2.927645\n", "Epoch 3793, Loss 2.927645\n", "Epoch 3794, Loss 2.927645\n", "Epoch 3795, Loss 2.927645\n", "Epoch 3796, Loss 2.927645\n", "Epoch 3797, Loss 2.927645\n", "Epoch 3798, Loss 2.927645\n", "Epoch 3799, Loss 2.927645\n", "Epoch 3800, Loss 2.927645\n", "Epoch 3801, Loss 2.927645\n", "Epoch 3802, Loss 2.927645\n", "Epoch 3803, Loss 2.927645\n", "Epoch 3804, Loss 2.927645\n", "Epoch 3805, Loss 2.927645\n", "Epoch 3806, Loss 2.927645\n", "Epoch 3807, Loss 2.927645\n", "Epoch 3808, Loss 2.927645\n", "Epoch 3809, Loss 2.927645\n", "Epoch 3810, Loss 2.927645\n", "Epoch 3811, Loss 2.927645\n", "Epoch 3812, Loss 2.927645\n", "Epoch 3813, Loss 2.927645\n", "Epoch 3814, Loss 2.927645\n", "Epoch 3815, Loss 2.927645\n", "Epoch 3816, Loss 2.927645\n", "Epoch 3817, Loss 2.927645\n", "Epoch 3818, Loss 2.927645\n", "Epoch 3819, Loss 2.927645\n", "Epoch 3820, Loss 2.927645\n", "Epoch 3821, Loss 2.927645\n", "Epoch 3822, Loss 2.927645\n", "Epoch 3823, Loss 2.927645\n", "Epoch 3824, Loss 2.927645\n", "Epoch 3825, Loss 2.927645\n", "Epoch 3826, Loss 2.927645\n", "Epoch 3827, Loss 2.927645\n", "Epoch 3828, Loss 2.927645\n", "Epoch 3829, Loss 2.927645\n", "Epoch 3830, Loss 2.927645\n", "Epoch 3831, Loss 2.927645\n", "Epoch 3832, Loss 2.927645\n", "Epoch 3833, Loss 2.927645\n", "Epoch 3834, Loss 2.927645\n", "Epoch 3835, Loss 2.927645\n", "Epoch 3836, Loss 2.927645\n", "Epoch 3837, Loss 2.927645\n", "Epoch 3838, Loss 2.927645\n", "Epoch 3839, Loss 2.927645\n", "Epoch 3840, Loss 2.927645\n", "Epoch 3841, Loss 2.927645\n", "Epoch 3842, Loss 2.927645\n", "Epoch 3843, Loss 2.927645\n", "Epoch 3844, Loss 2.927645\n", "Epoch 3845, Loss 2.927645\n", "Epoch 3846, Loss 2.927645\n", "Epoch 3847, Loss 2.927645\n", "Epoch 3848, Loss 2.927645\n", "Epoch 3849, Loss 2.927645\n", "Epoch 3850, Loss 2.927645\n", "Epoch 3851, Loss 2.927645\n", "Epoch 3852, Loss 2.927645\n", "Epoch 3853, Loss 2.927645\n", "Epoch 3854, Loss 2.927645\n", "Epoch 3855, Loss 2.927645\n", "Epoch 3856, Loss 2.927645\n", "Epoch 3857, Loss 2.927645\n", "Epoch 3858, Loss 2.927645\n", "Epoch 3859, Loss 2.927645\n", "Epoch 3860, Loss 2.927645\n", "Epoch 3861, Loss 2.927645\n", "Epoch 3862, Loss 2.927645\n", "Epoch 3863, Loss 2.927645\n", "Epoch 3864, Loss 2.927645\n", "Epoch 3865, Loss 2.927645\n", "Epoch 3866, Loss 2.927645\n", "Epoch 3867, Loss 2.927645\n", "Epoch 3868, Loss 2.927645\n", "Epoch 3869, Loss 2.927645\n", "Epoch 3870, Loss 2.927645\n", "Epoch 3871, Loss 2.927645\n", "Epoch 3872, Loss 2.927645\n", "Epoch 3873, Loss 2.927645\n", "Epoch 3874, Loss 2.927645\n", "Epoch 3875, Loss 2.927645\n", "Epoch 3876, Loss 2.927645\n", "Epoch 3877, Loss 2.927645\n", "Epoch 3878, Loss 2.927645\n", "Epoch 3879, Loss 2.927645\n", "Epoch 3880, Loss 2.927645\n", "Epoch 3881, Loss 2.927645\n", "Epoch 3882, Loss 2.927645\n", "Epoch 3883, Loss 2.927645\n", "Epoch 3884, Loss 2.927645\n", "Epoch 
3885, Loss 2.927645\n", "Epoch 3886, Loss 2.927645\n", "Epoch 3887, Loss 2.927645\n", "Epoch 3888, Loss 2.927645\n", "Epoch 3889, Loss 2.927645\n", "Epoch 3890, Loss 2.927645\n", "Epoch 3891, Loss 2.927645\n", "Epoch 3892, Loss 2.927645\n", "Epoch 3893, Loss 2.927645\n", "Epoch 3894, Loss 2.927645\n", "Epoch 3895, Loss 2.927645\n", "Epoch 3896, Loss 2.927645\n", "Epoch 3897, Loss 2.927645\n", "Epoch 3898, Loss 2.927645\n", "Epoch 3899, Loss 2.927645\n", "Epoch 3900, Loss 2.927645\n", "Epoch 3901, Loss 2.927645\n", "Epoch 3902, Loss 2.927645\n", "Epoch 3903, Loss 2.927645\n", "Epoch 3904, Loss 2.927645\n", "Epoch 3905, Loss 2.927645\n", "Epoch 3906, Loss 2.927645\n", "Epoch 3907, Loss 2.927645\n", "Epoch 3908, Loss 2.927645\n", "Epoch 3909, Loss 2.927645\n", "Epoch 3910, Loss 2.927645\n", "Epoch 3911, Loss 2.927645\n", "Epoch 3912, Loss 2.927645\n", "Epoch 3913, Loss 2.927645\n", "Epoch 3914, Loss 2.927645\n", "Epoch 3915, Loss 2.927645\n", "Epoch 3916, Loss 2.927645\n", "Epoch 3917, Loss 2.927645\n", "Epoch 3918, Loss 2.927645\n", "Epoch 3919, Loss 2.927645\n", "Epoch 3920, Loss 2.927645\n", "Epoch 3921, Loss 2.927645\n", "Epoch 3922, Loss 2.927645\n", "Epoch 3923, Loss 2.927645\n", "Epoch 3924, Loss 2.927645\n", "Epoch 3925, Loss 2.927645\n", "Epoch 3926, Loss 2.927645\n", "Epoch 3927, Loss 2.927645\n", "Epoch 3928, Loss 2.927645\n", "Epoch 3929, Loss 2.927645\n", "Epoch 3930, Loss 2.927645\n", "Epoch 3931, Loss 2.927645\n", "Epoch 3932, Loss 2.927645\n", "Epoch 3933, Loss 2.927645\n", "Epoch 3934, Loss 2.927645\n", "Epoch 3935, Loss 2.927645\n", "Epoch 3936, Loss 2.927645\n", "Epoch 3937, Loss 2.927645\n", "Epoch 3938, Loss 2.927645\n", "Epoch 3939, Loss 2.927645\n", "Epoch 3940, Loss 2.927645\n", "Epoch 3941, Loss 2.927645\n", "Epoch 3942, Loss 2.927645\n", "Epoch 3943, Loss 2.927645\n", "Epoch 3944, Loss 2.927645\n", "Epoch 3945, Loss 2.927645\n", "Epoch 3946, Loss 2.927645\n", "Epoch 3947, Loss 2.927645\n", "Epoch 3948, Loss 2.927645\n", "Epoch 3949, Loss 2.927645\n", "Epoch 3950, Loss 2.927645\n", "Epoch 3951, Loss 2.927645\n", "Epoch 3952, Loss 2.927645\n", "Epoch 3953, Loss 2.927645\n", "Epoch 3954, Loss 2.927645\n", "Epoch 3955, Loss 2.927645\n", "Epoch 3956, Loss 2.927645\n", "Epoch 3957, Loss 2.927645\n", "Epoch 3958, Loss 2.927645\n", "Epoch 3959, Loss 2.927645\n", "Epoch 3960, Loss 2.927645\n", "Epoch 3961, Loss 2.927645\n", "Epoch 3962, Loss 2.927645\n", "Epoch 3963, Loss 2.927645\n", "Epoch 3964, Loss 2.927645\n", "Epoch 3965, Loss 2.927645\n", "Epoch 3966, Loss 2.927645\n", "Epoch 3967, Loss 2.927645\n", "Epoch 3968, Loss 2.927645\n", "Epoch 3969, Loss 2.927645\n", "Epoch 3970, Loss 2.927645\n", "Epoch 3971, Loss 2.927645\n", "Epoch 3972, Loss 2.927645\n", "Epoch 3973, Loss 2.927645\n", "Epoch 3974, Loss 2.927645\n", "Epoch 3975, Loss 2.927645\n", "Epoch 3976, Loss 2.927645\n", "Epoch 3977, Loss 2.927645\n", "Epoch 3978, Loss 2.927645\n", "Epoch 3979, Loss 2.927645\n", "Epoch 3980, Loss 2.927645\n", "Epoch 3981, Loss 2.927645\n", "Epoch 3982, Loss 2.927645\n", "Epoch 3983, Loss 2.927645\n", "Epoch 3984, Loss 2.927645\n", "Epoch 3985, Loss 2.927645\n", "Epoch 3986, Loss 2.927645\n", "Epoch 3987, Loss 2.927645\n", "Epoch 3988, Loss 2.927645\n", "Epoch 3989, Loss 2.927645\n", "Epoch 3990, Loss 2.927645\n", "Epoch 3991, Loss 2.927645\n", "Epoch 3992, Loss 2.927645\n", "Epoch 3993, Loss 2.927645\n", "Epoch 3994, Loss 2.927645\n", "Epoch 3995, Loss 2.927645\n", "Epoch 3996, Loss 2.927645\n", "Epoch 3997, Loss 2.927645\n", "Epoch 3998, Loss 2.927645\n", "Epoch 3999, Loss 
2.927645\n", "Epoch 4000, Loss 2.927645\n", "Epoch 4001, Loss 2.927645\n", "Epoch 4002, Loss 2.927645\n", "Epoch 4003, Loss 2.927645\n", "Epoch 4004, Loss 2.927645\n", "Epoch 4005, Loss 2.927645\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 4006, Loss 2.927645\n", "Epoch 4007, Loss 2.927645\n", "Epoch 4008, Loss 2.927645\n", "Epoch 4009, Loss 2.927645\n", "Epoch 4010, Loss 2.927645\n", "Epoch 4011, Loss 2.927645\n", "Epoch 4012, Loss 2.927645\n", "Epoch 4013, Loss 2.927645\n", "Epoch 4014, Loss 2.927645\n", "Epoch 4015, Loss 2.927645\n", "Epoch 4016, Loss 2.927645\n", "Epoch 4017, Loss 2.927645\n", "Epoch 4018, Loss 2.927645\n", "Epoch 4019, Loss 2.927645\n", "Epoch 4020, Loss 2.927645\n", "Epoch 4021, Loss 2.927645\n", "Epoch 4022, Loss 2.927645\n", "Epoch 4023, Loss 2.927645\n", "Epoch 4024, Loss 2.927645\n", "Epoch 4025, Loss 2.927645\n", "Epoch 4026, Loss 2.927645\n", "Epoch 4027, Loss 2.927645\n", "Epoch 4028, Loss 2.927645\n", "Epoch 4029, Loss 2.927645\n", "Epoch 4030, Loss 2.927645\n", "Epoch 4031, Loss 2.927645\n", "Epoch 4032, Loss 2.927645\n", "Epoch 4033, Loss 2.927645\n", "Epoch 4034, Loss 2.927645\n", "Epoch 4035, Loss 2.927645\n", "Epoch 4036, Loss 2.927645\n", "Epoch 4037, Loss 2.927645\n", "Epoch 4038, Loss 2.927645\n", "Epoch 4039, Loss 2.927645\n", "Epoch 4040, Loss 2.927645\n", "Epoch 4041, Loss 2.927645\n", "Epoch 4042, Loss 2.927645\n", "Epoch 4043, Loss 2.927645\n", "Epoch 4044, Loss 2.927645\n", "Epoch 4045, Loss 2.927645\n", "Epoch 4046, Loss 2.927645\n", "Epoch 4047, Loss 2.927645\n", "Epoch 4048, Loss 2.927645\n", "Epoch 4049, Loss 2.927645\n", "Epoch 4050, Loss 2.927645\n", "Epoch 4051, Loss 2.927645\n", "Epoch 4052, Loss 2.927645\n", "Epoch 4053, Loss 2.927645\n", "Epoch 4054, Loss 2.927645\n", "Epoch 4055, Loss 2.927645\n", "Epoch 4056, Loss 2.927645\n", "Epoch 4057, Loss 2.927645\n", "Epoch 4058, Loss 2.927645\n", "Epoch 4059, Loss 2.927645\n", "Epoch 4060, Loss 2.927645\n", "Epoch 4061, Loss 2.927645\n", "Epoch 4062, Loss 2.927645\n", "Epoch 4063, Loss 2.927645\n", "Epoch 4064, Loss 2.927645\n", "Epoch 4065, Loss 2.927645\n", "Epoch 4066, Loss 2.927645\n", "Epoch 4067, Loss 2.927645\n", "Epoch 4068, Loss 2.927645\n", "Epoch 4069, Loss 2.927645\n", "Epoch 4070, Loss 2.927645\n", "Epoch 4071, Loss 2.927645\n", "Epoch 4072, Loss 2.927645\n", "Epoch 4073, Loss 2.927645\n", "Epoch 4074, Loss 2.927645\n", "Epoch 4075, Loss 2.927646\n", "Epoch 4076, Loss 2.927646\n", "Epoch 4077, Loss 2.927644\n", "Epoch 4078, Loss 2.927644\n", "Epoch 4079, Loss 2.927646\n", "Epoch 4080, Loss 2.927646\n", "Epoch 4081, Loss 2.927644\n", "Epoch 4082, Loss 2.927644\n", "Epoch 4083, Loss 2.927645\n", "Epoch 4084, Loss 2.927645\n", "Epoch 4085, Loss 2.927646\n", "Epoch 4086, Loss 2.927646\n", "Epoch 4087, Loss 2.927646\n", "Epoch 4088, Loss 2.927645\n", "Epoch 4089, Loss 2.927645\n", "Epoch 4090, Loss 2.927645\n", "Epoch 4091, Loss 2.927646\n", "Epoch 4092, Loss 2.927646\n", "Epoch 4093, Loss 2.927645\n", "Epoch 4094, Loss 2.927645\n", "Epoch 4095, Loss 2.927646\n", "Epoch 4096, Loss 2.927646\n", "Epoch 4097, Loss 2.927645\n", "Epoch 4098, Loss 2.927645\n", "Epoch 4099, Loss 2.927645\n", "Epoch 4100, Loss 2.927646\n", "Epoch 4101, Loss 2.927646\n", "Epoch 4102, Loss 2.927645\n", "Epoch 4103, Loss 2.927645\n", "Epoch 4104, Loss 2.927646\n", "Epoch 4105, Loss 2.927646\n", "Epoch 4106, Loss 2.927645\n", "Epoch 4107, Loss 2.927645\n", "Epoch 4108, Loss 2.927645\n", "Epoch 4109, Loss 2.927646\n", "Epoch 4110, Loss 2.927646\n", "Epoch 4111, Loss 2.927645\n", "Epoch 
4112, Loss 2.927645\n", "Epoch 4113, Loss 2.927646\n", "Epoch 4114, Loss 2.927646\n", "Epoch 4115, Loss 2.927645\n", "Epoch 4116, Loss 2.927645\n", "Epoch 4117, Loss 2.927645\n", "Epoch 4118, Loss 2.927646\n", "Epoch 4119, Loss 2.927646\n", "Epoch 4120, Loss 2.927645\n", "Epoch 4121, Loss 2.927645\n", "Epoch 4122, Loss 2.927646\n", "Epoch 4123, Loss 2.927646\n", "Epoch 4124, Loss 2.927645\n", "Epoch 4125, Loss 2.927645\n", "Epoch 4126, Loss 2.927645\n", "Epoch 4127, Loss 2.927646\n", "Epoch 4128, Loss 2.927646\n", "Epoch 4129, Loss 2.927645\n", "Epoch 4130, Loss 2.927645\n", "Epoch 4131, Loss 2.927646\n", "Epoch 4132, Loss 2.927646\n", "Epoch 4133, Loss 2.927645\n", "Epoch 4134, Loss 2.927645\n", "Epoch 4135, Loss 2.927645\n", "Epoch 4136, Loss 2.927646\n", "Epoch 4137, Loss 2.927646\n", "Epoch 4138, Loss 2.927645\n", "Epoch 4139, Loss 2.927645\n", "Epoch 4140, Loss 2.927646\n", "Epoch 4141, Loss 2.927646\n", "Epoch 4142, Loss 2.927645\n", "Epoch 4143, Loss 2.927645\n", "Epoch 4144, Loss 2.927645\n", "Epoch 4145, Loss 2.927646\n", "Epoch 4146, Loss 2.927646\n", "Epoch 4147, Loss 2.927645\n", "Epoch 4148, Loss 2.927645\n", "Epoch 4149, Loss 2.927646\n", "Epoch 4150, Loss 2.927646\n", "Epoch 4151, Loss 2.927645\n", "Epoch 4152, Loss 2.927645\n", "Epoch 4153, Loss 2.927645\n", "Epoch 4154, Loss 2.927646\n", "Epoch 4155, Loss 2.927646\n", "Epoch 4156, Loss 2.927645\n", "Epoch 4157, Loss 2.927645\n", "Epoch 4158, Loss 2.927646\n", "Epoch 4159, Loss 2.927646\n", "Epoch 4160, Loss 2.927645\n", "Epoch 4161, Loss 2.927645\n", "Epoch 4162, Loss 2.927645\n", "Epoch 4163, Loss 2.927646\n", "Epoch 4164, Loss 2.927646\n", "Epoch 4165, Loss 2.927645\n", "Epoch 4166, Loss 2.927645\n", "Epoch 4167, Loss 2.927646\n", "Epoch 4168, Loss 2.927646\n", "Epoch 4169, Loss 2.927645\n", "Epoch 4170, Loss 2.927645\n", "Epoch 4171, Loss 2.927645\n", "Epoch 4172, Loss 2.927646\n", "Epoch 4173, Loss 2.927646\n", "Epoch 4174, Loss 2.927645\n", "Epoch 4175, Loss 2.927645\n", "Epoch 4176, Loss 2.927646\n", "Epoch 4177, Loss 2.927646\n", "Epoch 4178, Loss 2.927645\n", "Epoch 4179, Loss 2.927645\n", "Epoch 4180, Loss 2.927645\n", "Epoch 4181, Loss 2.927646\n", "Epoch 4182, Loss 2.927646\n", "Epoch 4183, Loss 2.927645\n", "Epoch 4184, Loss 2.927645\n", "Epoch 4185, Loss 2.927646\n", "Epoch 4186, Loss 2.927646\n", "Epoch 4187, Loss 2.927645\n", "Epoch 4188, Loss 2.927645\n", "Epoch 4189, Loss 2.927645\n", "Epoch 4190, Loss 2.927646\n", "Epoch 4191, Loss 2.927646\n", "Epoch 4192, Loss 2.927645\n", "Epoch 4193, Loss 2.927645\n", "Epoch 4194, Loss 2.927646\n", "Epoch 4195, Loss 2.927646\n", "Epoch 4196, Loss 2.927645\n", "Epoch 4197, Loss 2.927645\n", "Epoch 4198, Loss 2.927645\n", "Epoch 4199, Loss 2.927646\n", "Epoch 4200, Loss 2.927646\n", "Epoch 4201, Loss 2.927645\n", "Epoch 4202, Loss 2.927645\n", "Epoch 4203, Loss 2.927646\n", "Epoch 4204, Loss 2.927646\n", "Epoch 4205, Loss 2.927645\n", "Epoch 4206, Loss 2.927645\n", "Epoch 4207, Loss 2.927645\n", "Epoch 4208, Loss 2.927646\n", "Epoch 4209, Loss 2.927646\n", "Epoch 4210, Loss 2.927645\n", "Epoch 4211, Loss 2.927645\n", "Epoch 4212, Loss 2.927646\n", "Epoch 4213, Loss 2.927646\n", "Epoch 4214, Loss 2.927645\n", "Epoch 4215, Loss 2.927645\n", "Epoch 4216, Loss 2.927645\n", "Epoch 4217, Loss 2.927646\n", "Epoch 4218, Loss 2.927646\n", "Epoch 4219, Loss 2.927645\n", "Epoch 4220, Loss 2.927645\n", "Epoch 4221, Loss 2.927646\n", "Epoch 4222, Loss 2.927646\n", "Epoch 4223, Loss 2.927645\n", "Epoch 4224, Loss 2.927645\n", "Epoch 4225, Loss 2.927645\n", "Epoch 4226, Loss 
2.927646\n", "Epoch 4227, Loss 2.927646\n", "Epoch 4228, Loss 2.927645\n", "Epoch 4229, Loss 2.927645\n", "Epoch 4230, Loss 2.927646\n", "Epoch 4231, Loss 2.927646\n", "Epoch 4232, Loss 2.927645\n", "Epoch 4233, Loss 2.927645\n", "Epoch 4234, Loss 2.927645\n", "Epoch 4235, Loss 2.927646\n", "Epoch 4236, Loss 2.927646\n", "Epoch 4237, Loss 2.927645\n", "Epoch 4238, Loss 2.927645\n", "Epoch 4239, Loss 2.927646\n", "Epoch 4240, Loss 2.927646\n", "Epoch 4241, Loss 2.927645\n", "Epoch 4242, Loss 2.927645\n", "Epoch 4243, Loss 2.927645\n", "Epoch 4244, Loss 2.927646\n", "Epoch 4245, Loss 2.927646\n", "Epoch 4246, Loss 2.927645\n", "Epoch 4247, Loss 2.927645\n", "Epoch 4248, Loss 2.927646\n", "Epoch 4249, Loss 2.927646\n", "Epoch 4250, Loss 2.927645\n", "Epoch 4251, Loss 2.927645\n", "Epoch 4252, Loss 2.927645\n", "Epoch 4253, Loss 2.927646\n", "Epoch 4254, Loss 2.927646\n", "Epoch 4255, Loss 2.927645\n", "Epoch 4256, Loss 2.927645\n", "Epoch 4257, Loss 2.927646\n", "Epoch 4258, Loss 2.927646\n", "Epoch 4259, Loss 2.927645\n", "Epoch 4260, Loss 2.927645\n", "Epoch 4261, Loss 2.927645\n", "Epoch 4262, Loss 2.927646\n", "Epoch 4263, Loss 2.927646\n", "Epoch 4264, Loss 2.927645\n", "Epoch 4265, Loss 2.927645\n", "Epoch 4266, Loss 2.927646\n", "Epoch 4267, Loss 2.927646\n", "Epoch 4268, Loss 2.927645\n", "Epoch 4269, Loss 2.927645\n", "Epoch 4270, Loss 2.927645\n", "Epoch 4271, Loss 2.927646\n", "Epoch 4272, Loss 2.927646\n", "Epoch 4273, Loss 2.927645\n", "Epoch 4274, Loss 2.927645\n", "Epoch 4275, Loss 2.927646\n", "Epoch 4276, Loss 2.927646\n", "Epoch 4277, Loss 2.927645\n", "Epoch 4278, Loss 2.927645\n", "Epoch 4279, Loss 2.927645\n", "Epoch 4280, Loss 2.927646\n", "Epoch 4281, Loss 2.927645\n", "Epoch 4282, Loss 2.927647\n", "Epoch 4283, Loss 2.927645\n", "Epoch 4284, Loss 2.927645\n", "Epoch 4285, Loss 2.927645\n", "Epoch 4286, Loss 2.927645\n", "Epoch 4287, Loss 2.927647\n", "Epoch 4288, Loss 2.927645\n", "Epoch 4289, Loss 2.927646\n", "Epoch 4290, Loss 2.927645\n", "Epoch 4291, Loss 2.927647\n", "Epoch 4292, Loss 2.927647\n", "Epoch 4293, Loss 2.927646\n", "Epoch 4294, Loss 2.927645\n", "Epoch 4295, Loss 2.927645\n", "Epoch 4296, Loss 2.927645\n", "Epoch 4297, Loss 2.927645\n", "Epoch 4298, Loss 2.927645\n", "Epoch 4299, Loss 2.927645\n", "Epoch 4300, Loss 2.927645\n", "Epoch 4301, Loss 2.927645\n", "Epoch 4302, Loss 2.927646\n", "Epoch 4303, Loss 2.927646\n", "Epoch 4304, Loss 2.927646\n", "Epoch 4305, Loss 2.927646\n", "Epoch 4306, Loss 2.927646\n", "Epoch 4307, Loss 2.927646\n", "Epoch 4308, Loss 2.927646\n", "Epoch 4309, Loss 2.927646\n", "Epoch 4310, Loss 2.927646\n", "Epoch 4311, Loss 2.927646\n", "Epoch 4312, Loss 2.927646\n", "Epoch 4313, Loss 2.927646\n", "Epoch 4314, Loss 2.927646\n", "Epoch 4315, Loss 2.927646\n", "Epoch 4316, Loss 2.927646\n", "Epoch 4317, Loss 2.927646\n", "Epoch 4318, Loss 2.927646\n", "Epoch 4319, Loss 2.927646\n", "Epoch 4320, Loss 2.927646\n", "Epoch 4321, Loss 2.927646\n", "Epoch 4322, Loss 2.927646\n", "Epoch 4323, Loss 2.927646\n", "Epoch 4324, Loss 2.927646\n", "Epoch 4325, Loss 2.927646\n", "Epoch 4326, Loss 2.927646\n", "Epoch 4327, Loss 2.927646\n", "Epoch 4328, Loss 2.927646\n", "Epoch 4329, Loss 2.927646\n", "Epoch 4330, Loss 2.927646\n", "Epoch 4331, Loss 2.927646\n", "Epoch 4332, Loss 2.927646\n", "Epoch 4333, Loss 2.927646\n", "Epoch 4334, Loss 2.927646\n", "Epoch 4335, Loss 2.927646\n", "Epoch 4336, Loss 2.927646\n", "Epoch 4337, Loss 2.927646\n", "Epoch 4338, Loss 2.927646\n", "Epoch 4339, Loss 2.927646\n", "Epoch 4340, Loss 2.927646\n", "Epoch 
4341, Loss 2.927646\n", "Epoch 4342, Loss 2.927646\n", "Epoch 4343, Loss 2.927646\n", "Epoch 4344, Loss 2.927646\n", "Epoch 4345, Loss 2.927646\n", "Epoch 4346, Loss 2.927646\n", "Epoch 4347, Loss 2.927646\n", "Epoch 4348, Loss 2.927646\n", "Epoch 4349, Loss 2.927646\n", "Epoch 4350, Loss 2.927646\n", "Epoch 4351, Loss 2.927646\n", "Epoch 4352, Loss 2.927646\n", "Epoch 4353, Loss 2.927646\n", "Epoch 4354, Loss 2.927646\n", "Epoch 4355, Loss 2.927646\n", "Epoch 4356, Loss 2.927646\n", "Epoch 4357, Loss 2.927646\n", "Epoch 4358, Loss 2.927646\n", "Epoch 4359, Loss 2.927646\n", "Epoch 4360, Loss 2.927646\n", "Epoch 4361, Loss 2.927646\n", "Epoch 4362, Loss 2.927646\n", "Epoch 4363, Loss 2.927646\n", "Epoch 4364, Loss 2.927646\n", "Epoch 4365, Loss 2.927646\n", "Epoch 4366, Loss 2.927646\n", "Epoch 4367, Loss 2.927646\n", "Epoch 4368, Loss 2.927646\n", "Epoch 4369, Loss 2.927646\n", "Epoch 4370, Loss 2.927646\n", "Epoch 4371, Loss 2.927646\n", "Epoch 4372, Loss 2.927646\n", "Epoch 4373, Loss 2.927646\n", "Epoch 4374, Loss 2.927646\n", "Epoch 4375, Loss 2.927646\n", "Epoch 4376, Loss 2.927646\n", "Epoch 4377, Loss 2.927646\n", "Epoch 4378, Loss 2.927646\n", "Epoch 4379, Loss 2.927646\n", "Epoch 4380, Loss 2.927646\n", "Epoch 4381, Loss 2.927646\n", "Epoch 4382, Loss 2.927646\n", "Epoch 4383, Loss 2.927646\n", "Epoch 4384, Loss 2.927646\n", "Epoch 4385, Loss 2.927646\n", "Epoch 4386, Loss 2.927646\n", "Epoch 4387, Loss 2.927646\n", "Epoch 4388, Loss 2.927646\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 4389, Loss 2.927646\n", "Epoch 4390, Loss 2.927646\n", "Epoch 4391, Loss 2.927646\n", "Epoch 4392, Loss 2.927646\n", "Epoch 4393, Loss 2.927646\n", "Epoch 4394, Loss 2.927646\n", "Epoch 4395, Loss 2.927646\n", "Epoch 4396, Loss 2.927646\n", "Epoch 4397, Loss 2.927646\n", "Epoch 4398, Loss 2.927646\n", "Epoch 4399, Loss 2.927646\n", "Epoch 4400, Loss 2.927646\n", "Epoch 4401, Loss 2.927646\n", "Epoch 4402, Loss 2.927646\n", "Epoch 4403, Loss 2.927646\n", "Epoch 4404, Loss 2.927646\n", "Epoch 4405, Loss 2.927646\n", "Epoch 4406, Loss 2.927646\n", "Epoch 4407, Loss 2.927646\n", "Epoch 4408, Loss 2.927646\n", "Epoch 4409, Loss 2.927646\n", "Epoch 4410, Loss 2.927646\n", "Epoch 4411, Loss 2.927646\n", "Epoch 4412, Loss 2.927646\n", "Epoch 4413, Loss 2.927646\n", "Epoch 4414, Loss 2.927646\n", "Epoch 4415, Loss 2.927646\n", "Epoch 4416, Loss 2.927646\n", "Epoch 4417, Loss 2.927646\n", "Epoch 4418, Loss 2.927646\n", "Epoch 4419, Loss 2.927646\n", "Epoch 4420, Loss 2.927646\n", "Epoch 4421, Loss 2.927646\n", "Epoch 4422, Loss 2.927646\n", "Epoch 4423, Loss 2.927646\n", "Epoch 4424, Loss 2.927646\n", "Epoch 4425, Loss 2.927646\n", "Epoch 4426, Loss 2.927646\n", "Epoch 4427, Loss 2.927646\n", "Epoch 4428, Loss 2.927646\n", "Epoch 4429, Loss 2.927646\n", "Epoch 4430, Loss 2.927646\n", "Epoch 4431, Loss 2.927646\n", "Epoch 4432, Loss 2.927646\n", "Epoch 4433, Loss 2.927646\n", "Epoch 4434, Loss 2.927646\n", "Epoch 4435, Loss 2.927646\n", "Epoch 4436, Loss 2.927646\n", "Epoch 4437, Loss 2.927646\n", "Epoch 4438, Loss 2.927646\n", "Epoch 4439, Loss 2.927646\n", "Epoch 4440, Loss 2.927646\n", "Epoch 4441, Loss 2.927646\n", "Epoch 4442, Loss 2.927646\n", "Epoch 4443, Loss 2.927646\n", "Epoch 4444, Loss 2.927646\n", "Epoch 4445, Loss 2.927646\n", "Epoch 4446, Loss 2.927646\n", "Epoch 4447, Loss 2.927646\n", "Epoch 4448, Loss 2.927646\n", "Epoch 4449, Loss 2.927646\n", "Epoch 4450, Loss 2.927646\n", "Epoch 4451, Loss 2.927646\n", "Epoch 4452, Loss 2.927646\n", "Epoch 4453, Loss 2.927646\n", 
"Epoch 4454, Loss 2.927646\n", "Epoch 4455, Loss 2.927646\n", "Epoch 4456, Loss 2.927646\n", "Epoch 4457, Loss 2.927646\n", "Epoch 4458, Loss 2.927646\n", "Epoch 4459, Loss 2.927646\n", "Epoch 4460, Loss 2.927646\n", "Epoch 4461, Loss 2.927646\n", "Epoch 4462, Loss 2.927646\n", "Epoch 4463, Loss 2.927646\n", "Epoch 4464, Loss 2.927646\n", "Epoch 4465, Loss 2.927646\n", "Epoch 4466, Loss 2.927646\n", "Epoch 4467, Loss 2.927646\n", "Epoch 4468, Loss 2.927646\n", "Epoch 4469, Loss 2.927646\n", "Epoch 4470, Loss 2.927646\n", "Epoch 4471, Loss 2.927646\n", "Epoch 4472, Loss 2.927646\n", "Epoch 4473, Loss 2.927646\n", "Epoch 4474, Loss 2.927646\n", "Epoch 4475, Loss 2.927646\n", "Epoch 4476, Loss 2.927646\n", "Epoch 4477, Loss 2.927646\n", "Epoch 4478, Loss 2.927646\n", "Epoch 4479, Loss 2.927646\n", "Epoch 4480, Loss 2.927646\n", "Epoch 4481, Loss 2.927646\n", "Epoch 4482, Loss 2.927646\n", "Epoch 4483, Loss 2.927646\n", "Epoch 4484, Loss 2.927646\n", "Epoch 4485, Loss 2.927646\n", "Epoch 4486, Loss 2.927646\n", "Epoch 4487, Loss 2.927646\n", "Epoch 4488, Loss 2.927646\n", "Epoch 4489, Loss 2.927646\n", "Epoch 4490, Loss 2.927646\n", "Epoch 4491, Loss 2.927646\n", "Epoch 4492, Loss 2.927646\n", "Epoch 4493, Loss 2.927646\n", "Epoch 4494, Loss 2.927646\n", "Epoch 4495, Loss 2.927646\n", "Epoch 4496, Loss 2.927646\n", "Epoch 4497, Loss 2.927646\n", "Epoch 4498, Loss 2.927646\n", "Epoch 4499, Loss 2.927646\n", "Epoch 4500, Loss 2.927646\n", "Epoch 4501, Loss 2.927646\n", "Epoch 4502, Loss 2.927646\n", "Epoch 4503, Loss 2.927646\n", "Epoch 4504, Loss 2.927646\n", "Epoch 4505, Loss 2.927646\n", "Epoch 4506, Loss 2.927646\n", "Epoch 4507, Loss 2.927646\n", "Epoch 4508, Loss 2.927646\n", "Epoch 4509, Loss 2.927646\n", "Epoch 4510, Loss 2.927646\n", "Epoch 4511, Loss 2.927646\n", "Epoch 4512, Loss 2.927646\n", "Epoch 4513, Loss 2.927646\n", "Epoch 4514, Loss 2.927646\n", "Epoch 4515, Loss 2.927646\n", "Epoch 4516, Loss 2.927646\n", "Epoch 4517, Loss 2.927646\n", "Epoch 4518, Loss 2.927646\n", "Epoch 4519, Loss 2.927646\n", "Epoch 4520, Loss 2.927646\n", "Epoch 4521, Loss 2.927646\n", "Epoch 4522, Loss 2.927646\n", "Epoch 4523, Loss 2.927646\n", "Epoch 4524, Loss 2.927646\n", "Epoch 4525, Loss 2.927646\n", "Epoch 4526, Loss 2.927646\n", "Epoch 4527, Loss 2.927646\n", "Epoch 4528, Loss 2.927646\n", "Epoch 4529, Loss 2.927646\n", "Epoch 4530, Loss 2.927646\n", "Epoch 4531, Loss 2.927646\n", "Epoch 4532, Loss 2.927646\n", "Epoch 4533, Loss 2.927646\n", "Epoch 4534, Loss 2.927646\n", "Epoch 4535, Loss 2.927646\n", "Epoch 4536, Loss 2.927646\n", "Epoch 4537, Loss 2.927646\n", "Epoch 4538, Loss 2.927646\n", "Epoch 4539, Loss 2.927646\n", "Epoch 4540, Loss 2.927646\n", "Epoch 4541, Loss 2.927646\n", "Epoch 4542, Loss 2.927646\n", "Epoch 4543, Loss 2.927646\n", "Epoch 4544, Loss 2.927646\n", "Epoch 4545, Loss 2.927646\n", "Epoch 4546, Loss 2.927646\n", "Epoch 4547, Loss 2.927646\n", "Epoch 4548, Loss 2.927646\n", "Epoch 4549, Loss 2.927646\n", "Epoch 4550, Loss 2.927646\n", "Epoch 4551, Loss 2.927646\n", "Epoch 4552, Loss 2.927646\n", "Epoch 4553, Loss 2.927646\n", "Epoch 4554, Loss 2.927646\n", "Epoch 4555, Loss 2.927646\n", "Epoch 4556, Loss 2.927646\n", "Epoch 4557, Loss 2.927646\n", "Epoch 4558, Loss 2.927646\n", "Epoch 4559, Loss 2.927646\n", "Epoch 4560, Loss 2.927646\n", "Epoch 4561, Loss 2.927646\n", "Epoch 4562, Loss 2.927646\n", "Epoch 4563, Loss 2.927646\n", "Epoch 4564, Loss 2.927646\n", "Epoch 4565, Loss 2.927646\n", "Epoch 4566, Loss 2.927646\n", "Epoch 4567, Loss 2.927646\n", "Epoch 4568, Loss 
2.927646\n", "Epoch 4569, Loss 2.927646\n", "Epoch 4570, Loss 2.927646\n", "Epoch 4571, Loss 2.927646\n", "Epoch 4572, Loss 2.927646\n", "Epoch 4573, Loss 2.927646\n", "Epoch 4574, Loss 2.927646\n", "Epoch 4575, Loss 2.927646\n", "Epoch 4576, Loss 2.927646\n", "Epoch 4577, Loss 2.927646\n", "Epoch 4578, Loss 2.927646\n", "Epoch 4579, Loss 2.927646\n", "Epoch 4580, Loss 2.927646\n", "Epoch 4581, Loss 2.927646\n", "Epoch 4582, Loss 2.927646\n", "Epoch 4583, Loss 2.927646\n", "Epoch 4584, Loss 2.927646\n", "Epoch 4585, Loss 2.927646\n", "Epoch 4586, Loss 2.927646\n", "Epoch 4587, Loss 2.927646\n", "Epoch 4588, Loss 2.927646\n", "Epoch 4589, Loss 2.927646\n", "Epoch 4590, Loss 2.927646\n", "Epoch 4591, Loss 2.927646\n", "Epoch 4592, Loss 2.927646\n", "Epoch 4593, Loss 2.927646\n", "Epoch 4594, Loss 2.927646\n", "Epoch 4595, Loss 2.927646\n", "Epoch 4596, Loss 2.927646\n", "Epoch 4597, Loss 2.927646\n", "Epoch 4598, Loss 2.927646\n", "Epoch 4599, Loss 2.927646\n", "Epoch 4600, Loss 2.927646\n", "Epoch 4601, Loss 2.927646\n", "Epoch 4602, Loss 2.927646\n", "Epoch 4603, Loss 2.927646\n", "Epoch 4604, Loss 2.927646\n", "Epoch 4605, Loss 2.927646\n", "Epoch 4606, Loss 2.927646\n", "Epoch 4607, Loss 2.927646\n", "Epoch 4608, Loss 2.927646\n", "Epoch 4609, Loss 2.927646\n", "Epoch 4610, Loss 2.927646\n", "Epoch 4611, Loss 2.927646\n", "Epoch 4612, Loss 2.927646\n", "Epoch 4613, Loss 2.927646\n", "Epoch 4614, Loss 2.927646\n", "Epoch 4615, Loss 2.927646\n", "Epoch 4616, Loss 2.927646\n", "Epoch 4617, Loss 2.927646\n", "Epoch 4618, Loss 2.927646\n", "Epoch 4619, Loss 2.927646\n", "Epoch 4620, Loss 2.927646\n", "Epoch 4621, Loss 2.927646\n", "Epoch 4622, Loss 2.927646\n", "Epoch 4623, Loss 2.927646\n", "Epoch 4624, Loss 2.927646\n", "Epoch 4625, Loss 2.927646\n", "Epoch 4626, Loss 2.927646\n", "Epoch 4627, Loss 2.927646\n", "Epoch 4628, Loss 2.927646\n", "Epoch 4629, Loss 2.927646\n", "Epoch 4630, Loss 2.927646\n", "Epoch 4631, Loss 2.927646\n", "Epoch 4632, Loss 2.927646\n", "Epoch 4633, Loss 2.927646\n", "Epoch 4634, Loss 2.927646\n", "Epoch 4635, Loss 2.927646\n", "Epoch 4636, Loss 2.927646\n", "Epoch 4637, Loss 2.927646\n", "Epoch 4638, Loss 2.927646\n", "Epoch 4639, Loss 2.927646\n", "Epoch 4640, Loss 2.927646\n", "Epoch 4641, Loss 2.927646\n", "Epoch 4642, Loss 2.927646\n", "Epoch 4643, Loss 2.927646\n", "Epoch 4644, Loss 2.927646\n", "Epoch 4645, Loss 2.927646\n", "Epoch 4646, Loss 2.927646\n", "Epoch 4647, Loss 2.927646\n", "Epoch 4648, Loss 2.927646\n", "Epoch 4649, Loss 2.927646\n", "Epoch 4650, Loss 2.927646\n", "Epoch 4651, Loss 2.927646\n", "Epoch 4652, Loss 2.927646\n", "Epoch 4653, Loss 2.927646\n", "Epoch 4654, Loss 2.927646\n", "Epoch 4655, Loss 2.927646\n", "Epoch 4656, Loss 2.927646\n", "Epoch 4657, Loss 2.927646\n", "Epoch 4658, Loss 2.927646\n", "Epoch 4659, Loss 2.927646\n", "Epoch 4660, Loss 2.927646\n", "Epoch 4661, Loss 2.927646\n", "Epoch 4662, Loss 2.927646\n", "Epoch 4663, Loss 2.927646\n", "Epoch 4664, Loss 2.927646\n", "Epoch 4665, Loss 2.927646\n", "Epoch 4666, Loss 2.927646\n", "Epoch 4667, Loss 2.927646\n", "Epoch 4668, Loss 2.927646\n", "Epoch 4669, Loss 2.927646\n", "Epoch 4670, Loss 2.927646\n", "Epoch 4671, Loss 2.927646\n", "Epoch 4672, Loss 2.927646\n", "Epoch 4673, Loss 2.927646\n", "Epoch 4674, Loss 2.927646\n", "Epoch 4675, Loss 2.927646\n", "Epoch 4676, Loss 2.927646\n", "Epoch 4677, Loss 2.927646\n", "Epoch 4678, Loss 2.927646\n", "Epoch 4679, Loss 2.927646\n", "Epoch 4680, Loss 2.927646\n", "Epoch 4681, Loss 2.927646\n", "Epoch 4682, Loss 2.927646\n", "Epoch 
4683, Loss 2.927646\n", "Epoch 4684, Loss 2.927646\n", "Epoch 4685, Loss 2.927646\n", "Epoch 4686, Loss 2.927646\n", "Epoch 4687, Loss 2.927645\n", "Epoch 4688, Loss 2.927646\n", "Epoch 4689, Loss 2.927646\n", "Epoch 4690, Loss 2.927645\n", "Epoch 4691, Loss 2.927645\n", "Epoch 4692, Loss 2.927646\n", "Epoch 4693, Loss 2.927646\n", "Epoch 4694, Loss 2.927645\n", "Epoch 4695, Loss 2.927646\n", "Epoch 4696, Loss 2.927646\n", "Epoch 4697, Loss 2.927645\n", "Epoch 4698, Loss 2.927645\n", "Epoch 4699, Loss 2.927646\n", "Epoch 4700, Loss 2.927646\n", "Epoch 4701, Loss 2.927645\n", "Epoch 4702, Loss 2.927646\n", "Epoch 4703, Loss 2.927646\n", "Epoch 4704, Loss 2.927645\n", "Epoch 4705, Loss 2.927645\n", "Epoch 4706, Loss 2.927646\n", "Epoch 4707, Loss 2.927646\n", "Epoch 4708, Loss 2.927645\n", "Epoch 4709, Loss 2.927646\n", "Epoch 4710, Loss 2.927646\n", "Epoch 4711, Loss 2.927645\n", "Epoch 4712, Loss 2.927645\n", "Epoch 4713, Loss 2.927646\n", "Epoch 4714, Loss 2.927645\n", "Epoch 4715, Loss 2.927645\n", "Epoch 4716, Loss 2.927646\n", "Epoch 4717, Loss 2.927646\n", "Epoch 4718, Loss 2.927645\n", "Epoch 4719, Loss 2.927646\n", "Epoch 4720, Loss 2.927646\n", "Epoch 4721, Loss 2.927645\n", "Epoch 4722, Loss 2.927645\n", "Epoch 4723, Loss 2.927646\n", "Epoch 4724, Loss 2.927646\n", "Epoch 4725, Loss 2.927645\n", "Epoch 4726, Loss 2.927646\n", "Epoch 4727, Loss 2.927646\n", "Epoch 4728, Loss 2.927645\n", "Epoch 4729, Loss 2.927645\n", "Epoch 4730, Loss 2.927646\n", "Epoch 4731, Loss 2.927646\n", "Epoch 4732, Loss 2.927645\n", "Epoch 4733, Loss 2.927646\n", "Epoch 4734, Loss 2.927646\n", "Epoch 4735, Loss 2.927645\n", "Epoch 4736, Loss 2.927645\n", "Epoch 4737, Loss 2.927646\n", "Epoch 4738, Loss 2.927645\n", "Epoch 4739, Loss 2.927645\n", "Epoch 4740, Loss 2.927646\n", "Epoch 4741, Loss 2.927646\n", "Epoch 4742, Loss 2.927645\n", "Epoch 4743, Loss 2.927646\n", "Epoch 4744, Loss 2.927646\n", "Epoch 4745, Loss 2.927645\n", "Epoch 4746, Loss 2.927645\n", "Epoch 4747, Loss 2.927646\n", "Epoch 4748, Loss 2.927646\n", "Epoch 4749, Loss 2.927645\n", "Epoch 4750, Loss 2.927646\n", "Epoch 4751, Loss 2.927646\n", "Epoch 4752, Loss 2.927645\n", "Epoch 4753, Loss 2.927645\n", "Epoch 4754, Loss 2.927646\n", "Epoch 4755, Loss 2.927646\n", "Epoch 4756, Loss 2.927645\n", "Epoch 4757, Loss 2.927646\n", "Epoch 4758, Loss 2.927646\n", "Epoch 4759, Loss 2.927645\n", "Epoch 4760, Loss 2.927645\n", "Epoch 4761, Loss 2.927646\n", "Epoch 4762, Loss 2.927645\n", "Epoch 4763, Loss 2.927645\n", "Epoch 4764, Loss 2.927646\n", "Epoch 4765, Loss 2.927646\n", "Epoch 4766, Loss 2.927645\n", "Epoch 4767, Loss 2.927646\n", "Epoch 4768, Loss 2.927646\n", "Epoch 4769, Loss 2.927645\n", "Epoch 4770, Loss 2.927645\n", "Epoch 4771, Loss 2.927646\n", "Epoch 4772, Loss 2.927646\n", "Epoch 4773, Loss 2.927645\n", "Epoch 4774, Loss 2.927646\n", "Epoch 4775, Loss 2.927646\n", "Epoch 4776, Loss 2.927645\n", "Epoch 4777, Loss 2.927645\n", "Epoch 4778, Loss 2.927646\n", "Epoch 4779, Loss 2.927646\n", "Epoch 4780, Loss 2.927645\n", "Epoch 4781, Loss 2.927646\n", "Epoch 4782, Loss 2.927646\n", "Epoch 4783, Loss 2.927645\n", "Epoch 4784, Loss 2.927645\n", "Epoch 4785, Loss 2.927646\n", "Epoch 4786, Loss 2.927645\n", "Epoch 4787, Loss 2.927645\n", "Epoch 4788, Loss 2.927646\n", "Epoch 4789, Loss 2.927646\n", "Epoch 4790, Loss 2.927645\n", "Epoch 4791, Loss 2.927646\n", "Epoch 4792, Loss 2.927646\n", "Epoch 4793, Loss 2.927645\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 4794, Loss 2.927645\n", "Epoch 4795, Loss 2.927646\n", 
"Epoch 4796, Loss 2.927646\n", "Epoch 4797, Loss 2.927645\n", "Epoch 4798, Loss 2.927646\n", "Epoch 4799, Loss 2.927646\n", "Epoch 4800, Loss 2.927645\n", "Epoch 4801, Loss 2.927645\n", "Epoch 4802, Loss 2.927646\n", "Epoch 4803, Loss 2.927645\n", "Epoch 4804, Loss 2.927645\n", "Epoch 4805, Loss 2.927646\n", "Epoch 4806, Loss 2.927646\n", "Epoch 4807, Loss 2.927645\n", "Epoch 4808, Loss 2.927645\n", "Epoch 4809, Loss 2.927645\n", "Epoch 4810, Loss 2.927645\n", "Epoch 4811, Loss 2.927646\n", "Epoch 4812, Loss 2.927645\n", "Epoch 4813, Loss 2.927646\n", "Epoch 4814, Loss 2.927646\n", "Epoch 4815, Loss 2.927646\n", "Epoch 4816, Loss 2.927645\n", "Epoch 4817, Loss 2.927646\n", "Epoch 4818, Loss 2.927646\n", "Epoch 4819, Loss 2.927646\n", "Epoch 4820, Loss 2.927645\n", "Epoch 4821, Loss 2.927646\n", "Epoch 4822, Loss 2.927645\n", "Epoch 4823, Loss 2.927645\n", "Epoch 4824, Loss 2.927645\n", "Epoch 4825, Loss 2.927645\n", "Epoch 4826, Loss 2.927646\n", "Epoch 4827, Loss 2.927645\n", "Epoch 4828, Loss 2.927646\n", "Epoch 4829, Loss 2.927646\n", "Epoch 4830, Loss 2.927646\n", "Epoch 4831, Loss 2.927645\n", "Epoch 4832, Loss 2.927646\n", "Epoch 4833, Loss 2.927645\n", "Epoch 4834, Loss 2.927645\n", "Epoch 4835, Loss 2.927645\n", "Epoch 4836, Loss 2.927645\n", "Epoch 4837, Loss 2.927646\n", "Epoch 4838, Loss 2.927645\n", "Epoch 4839, Loss 2.927646\n", "Epoch 4840, Loss 2.927646\n", "Epoch 4841, Loss 2.927646\n", "Epoch 4842, Loss 2.927645\n", "Epoch 4843, Loss 2.927646\n", "Epoch 4844, Loss 2.927646\n", "Epoch 4845, Loss 2.927646\n", "Epoch 4846, Loss 2.927645\n", "Epoch 4847, Loss 2.927646\n", "Epoch 4848, Loss 2.927645\n", "Epoch 4849, Loss 2.927645\n", "Epoch 4850, Loss 2.927645\n", "Epoch 4851, Loss 2.927645\n", "Epoch 4852, Loss 2.927646\n", "Epoch 4853, Loss 2.927645\n", "Epoch 4854, Loss 2.927646\n", "Epoch 4855, Loss 2.927646\n", "Epoch 4856, Loss 2.927646\n", "Epoch 4857, Loss 2.927645\n", "Epoch 4858, Loss 2.927646\n", "Epoch 4859, Loss 2.927645\n", "Epoch 4860, Loss 2.927645\n", "Epoch 4861, Loss 2.927645\n", "Epoch 4862, Loss 2.927645\n", "Epoch 4863, Loss 2.927646\n", "Epoch 4864, Loss 2.927645\n", "Epoch 4865, Loss 2.927646\n", "Epoch 4866, Loss 2.927646\n", "Epoch 4867, Loss 2.927646\n", "Epoch 4868, Loss 2.927645\n", "Epoch 4869, Loss 2.927646\n", "Epoch 4870, Loss 2.927645\n", "Epoch 4871, Loss 2.927645\n", "Epoch 4872, Loss 2.927645\n", "Epoch 4873, Loss 2.927645\n", "Epoch 4874, Loss 2.927646\n", "Epoch 4875, Loss 2.927645\n", "Epoch 4876, Loss 2.927646\n", "Epoch 4877, Loss 2.927646\n", "Epoch 4878, Loss 2.927646\n", "Epoch 4879, Loss 2.927645\n", "Epoch 4880, Loss 2.927646\n", "Epoch 4881, Loss 2.927646\n", "Epoch 4882, Loss 2.927646\n", "Epoch 4883, Loss 2.927645\n", "Epoch 4884, Loss 2.927646\n", "Epoch 4885, Loss 2.927645\n", "Epoch 4886, Loss 2.927645\n", "Epoch 4887, Loss 2.927645\n", "Epoch 4888, Loss 2.927645\n", "Epoch 4889, Loss 2.927646\n", "Epoch 4890, Loss 2.927645\n", "Epoch 4891, Loss 2.927646\n", "Epoch 4892, Loss 2.927646\n", "Epoch 4893, Loss 2.927646\n", "Epoch 4894, Loss 2.927645\n", "Epoch 4895, Loss 2.927646\n", "Epoch 4896, Loss 2.927645\n", "Epoch 4897, Loss 2.927645\n", "Epoch 4898, Loss 2.927645\n", "Epoch 4899, Loss 2.927645\n", "Epoch 4900, Loss 2.927646\n", "Epoch 4901, Loss 2.927645\n", "Epoch 4902, Loss 2.927646\n", "Epoch 4903, Loss 2.927646\n", "Epoch 4904, Loss 2.927646\n", "Epoch 4905, Loss 2.927645\n", "Epoch 4906, Loss 2.927646\n", "Epoch 4907, Loss 2.927645\n", "Epoch 4908, Loss 2.927645\n", "Epoch 4909, Loss 2.927645\n", "Epoch 4910, Loss 
2.927645\n", "Epoch 4911, Loss 2.927646\n", "Epoch 4912, Loss 2.927645\n", "Epoch 4913, Loss 2.927646\n", "Epoch 4914, Loss 2.927646\n", "Epoch 4915, Loss 2.927646\n", "Epoch 4916, Loss 2.927645\n", "Epoch 4917, Loss 2.927646\n", "Epoch 4918, Loss 2.927645\n", "Epoch 4919, Loss 2.927645\n", "Epoch 4920, Loss 2.927645\n", "Epoch 4921, Loss 2.927645\n", "Epoch 4922, Loss 2.927646\n", "Epoch 4923, Loss 2.927645\n", "Epoch 4924, Loss 2.927646\n", "Epoch 4925, Loss 2.927646\n", "Epoch 4926, Loss 2.927646\n", "Epoch 4927, Loss 2.927645\n", "Epoch 4928, Loss 2.927646\n", "Epoch 4929, Loss 2.927645\n", "Epoch 4930, Loss 2.927645\n", "Epoch 4931, Loss 2.927645\n", "Epoch 4932, Loss 2.927645\n", "Epoch 4933, Loss 2.927646\n", "Epoch 4934, Loss 2.927645\n", "Epoch 4935, Loss 2.927646\n", "Epoch 4936, Loss 2.927646\n", "Epoch 4937, Loss 2.927646\n", "Epoch 4938, Loss 2.927645\n", "Epoch 4939, Loss 2.927646\n", "Epoch 4940, Loss 2.927645\n", "Epoch 4941, Loss 2.927645\n", "Epoch 4942, Loss 2.927645\n", "Epoch 4943, Loss 2.927645\n", "Epoch 4944, Loss 2.927646\n", "Epoch 4945, Loss 2.927645\n", "Epoch 4946, Loss 2.927646\n", "Epoch 4947, Loss 2.927646\n", "Epoch 4948, Loss 2.927646\n", "Epoch 4949, Loss 2.927645\n", "Epoch 4950, Loss 2.927646\n", "Epoch 4951, Loss 2.927645\n", "Epoch 4952, Loss 2.927645\n", "Epoch 4953, Loss 2.927645\n", "Epoch 4954, Loss 2.927645\n", "Epoch 4955, Loss 2.927646\n", "Epoch 4956, Loss 2.927645\n", "Epoch 4957, Loss 2.927646\n", "Epoch 4958, Loss 2.927646\n", "Epoch 4959, Loss 2.927646\n", "Epoch 4960, Loss 2.927645\n", "Epoch 4961, Loss 2.927646\n", "Epoch 4962, Loss 2.927645\n", "Epoch 4963, Loss 2.927645\n", "Epoch 4964, Loss 2.927645\n", "Epoch 4965, Loss 2.927645\n", "Epoch 4966, Loss 2.927646\n", "Epoch 4967, Loss 2.927645\n", "Epoch 4968, Loss 2.927646\n", "Epoch 4969, Loss 2.927646\n", "Epoch 4970, Loss 2.927646\n", "Epoch 4971, Loss 2.927645\n", "Epoch 4972, Loss 2.927646\n", "Epoch 4973, Loss 2.927645\n", "Epoch 4974, Loss 2.927645\n", "Epoch 4975, Loss 2.927645\n", "Epoch 4976, Loss 2.927645\n", "Epoch 4977, Loss 2.927646\n", "Epoch 4978, Loss 2.927645\n", "Epoch 4979, Loss 2.927646\n", "Epoch 4980, Loss 2.927646\n", "Epoch 4981, Loss 2.927646\n", "Epoch 4982, Loss 2.927645\n", "Epoch 4983, Loss 2.927646\n", "Epoch 4984, Loss 2.927645\n", "Epoch 4985, Loss 2.927645\n", "Epoch 4986, Loss 2.927645\n", "Epoch 4987, Loss 2.927645\n", "Epoch 4988, Loss 2.927646\n", "Epoch 4989, Loss 2.927645\n", "Epoch 4990, Loss 2.927646\n", "Epoch 4991, Loss 2.927646\n", "Epoch 4992, Loss 2.927646\n", "Epoch 4993, Loss 2.927645\n", "Epoch 4994, Loss 2.927646\n", "Epoch 4995, Loss 2.927645\n", "Epoch 4996, Loss 2.927645\n", "Epoch 4997, Loss 2.927645\n", "Epoch 4998, Loss 2.927645\n", "Epoch 4999, Loss 2.927646\n" ] }, { "data": { "text/plain": [ "tensor([ 0.5368, -17.3048], requires_grad=True)" ] }, "execution_count": 29, "metadata": {}, "output_type": "execute_result" } ], "source": [ "def model(t_u, w, b):\n", " return w * t_u + b\n", "\n", "def loss_fn(t_p, t_c):\n", " sq_diffs = (t_p - t_c)**2\n", " return sq_diffs.mean()\n", "\n", "params = torch.tensor([1.0, 0.0], requires_grad=True)\n", "\n", "nepochs = 5000\n", "learning_rate = 1e-1\n", "\n", "optimizer = optim.Adam([params], lr=learning_rate)\n", "\n", "for epoch in range(nepochs):\n", " # forward pass\n", " t_p = model(t_u, *params)\n", " loss = loss_fn(t_p, t_c)\n", "\n", " print('Epoch %d, Loss %f' % (epoch, float(loss)))\n", " \n", " # backward pass\n", " optimizer.zero_grad()\n", " loss.backward()\n", " 
optimizer.step()\n", "\n", "t_p = model(t_u, *params)\n", "\n", "params" ] }, { "cell_type": "code", "execution_count": 102, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "[]" ] }, "execution_count": 102, "metadata": {}, "output_type": "execute_result" }, { "data": { "image/png": "<base64 PNG omitted: matplotlib plot of the fitted line over 0.1 * t_u with the measured t_c points>", "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "from matplotlib import pyplot as plt\n", "\n", "plt.plot(0.1 * t_u.numpy(), t_p.detach().numpy())\n", "plt.plot(0.1 * t_u.numpy(), t_c.numpy(), 'o')" ] }, { "cell_type": "code", "execution_count": 103, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "(tensor([ 7, 2, 1, 3, 8, 0, 5, 6, 10]), tensor([9, 4]))" ] }, "execution_count": 103, "metadata": {}, "output_type": "execute_result" } ], "source": [ "n_samples = t_u.shape[0]\n", "n_val = int(0.2 * n_samples)\n", "\n", "shuffled_indices = torch.randperm(n_samples)\n", "\n", "train_indices = shuffled_indices[:-n_val]\n", "val_indices = shuffled_indices[-n_val:]\n", "\n", "train_indices, val_indices" ] }, { "cell_type": "code", "execution_count": 105, "metadata": {}, "outputs": [], "source": [ "t_u_train = t_u[train_indices]\n", "t_c_train = t_c[train_indices]\n", "\n", "t_u_val = t_u[val_indices]\n", "t_c_val = t_c[val_indices]" ] }, { "cell_type": "code", "execution_count": 106, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Epoch 0, Training loss 89.636589, Validation loss 38.639244\n", "Epoch 1, Training loss 44.926880, Validation loss 2.545443\n", "Epoch 2, Training loss 37.123985, Validation loss 0.556166\n", "Epoch 3, Training loss 35.668991, Validation loss 1.966753\n", "Epoch 4, Training loss 35.306217, Validation loss 2.938725\n", "Epoch 5, Training loss 35.131748, Validation loss 3.410529\n", "Epoch 6, Training loss 34.990128, Validation loss 3.620039\n", "Epoch 7, Training loss 34.854614, Validation loss 3.711267\n", "Epoch 8, Training loss 34.720608, Validation loss 3.751791\n", "Epoch 9, Training loss 34.587326, Validation loss 3.770980\n", "Epoch 10, Training loss 34.454620, Validation loss 3.781280\n", "Epoch 11, Training loss 34.322464, Validation loss 3.787878\n", "Epoch 12, Training loss 34.190853, Validation loss 3.792933\n", "Epoch 13, Training loss 34.059795, Validation loss 3.797347\n", "Epoch 14, Training loss 33.929268, Validation loss 3.801498\n", "Epoch 15, Training loss 33.799282, Validation loss 3.805542\n", "Epoch 16, Training loss 33.669830, Validation loss 3.809525\n", "Epoch 17, Training loss 33.540913, Validation loss 3.813503\n", "Epoch 18, Training loss 33.412533, Validation loss 3.817465\n", "Epoch 19, Training loss 33.284683, Validation loss 3.821422\n", "Epoch 20, Training loss 33.157356, Validation loss 3.825380\n", "Epoch 21, Training loss 33.030552, Validation loss 3.829336\n", "Epoch 22, Training loss 32.904278, Validation loss 3.833289\n", "Epoch 23, Training loss 32.778519, Validation loss 3.837246\n", "Epoch 24, Training loss 32.653286, Validation loss 3.841198\n", "Epoch 25, Training loss 32.528564, Validation loss 3.845149\n", "Epoch 26, Training loss 32.404358, Validation loss 3.849101\n", "Epoch 27, Training loss 32.280670, Validation loss 3.853049\n", "Epoch 28, Training loss 32.157486, Validation loss 3.856997\n", "Epoch 29, Training loss 32.034805, Validation loss 3.860938\n", "Epoch 30, Training loss 31.912642, Validation loss 3.864882\n", "Epoch 31, Training loss 31.790972, Validation loss 3.868822\n", "Epoch 32, Training loss 31.669817, Validation loss 3.872770\n", "Epoch 33, Training loss 31.549154, Validation loss 3.876711\n", "Epoch 34, Training loss 31.428989, Validation loss 3.880648\n", "Epoch 35, Training loss 31.309319, Validation loss 3.884587\n", "Epoch 36, Training loss 31.190151, Validation loss 3.888522\n", "Epoch 37, Training loss 31.071463, Validation loss 
3.892457\n", "Epoch 38, Training loss 30.953268, Validation loss 3.896389\n", "Epoch 39, Training loss 30.835567, Validation loss 3.900322\n", "Epoch 40, Training loss 30.718344, Validation loss 3.904251\n", "Epoch 41, Training loss 30.601610, Validation loss 3.908180\n", "Epoch 42, Training loss 30.485352, Validation loss 3.912106\n", "Epoch 43, Training loss 30.369577, Validation loss 3.916032\n", "Epoch 44, Training loss 30.254290, Validation loss 3.919955\n", "Epoch 45, Training loss 30.139465, Validation loss 3.923877\n", "Epoch 46, Training loss 30.025112, Validation loss 3.927799\n", "Epoch 47, Training loss 29.911245, Validation loss 3.931718\n", "Epoch 48, Training loss 29.797834, Validation loss 3.935637\n", "Epoch 49, Training loss 29.684902, Validation loss 3.939548\n", "Epoch 50, Training loss 29.572428, Validation loss 3.943459\n", "Epoch 51, Training loss 29.460423, Validation loss 3.947375\n", "Epoch 52, Training loss 29.348873, Validation loss 3.951279\n", "Epoch 53, Training loss 29.237793, Validation loss 3.955189\n", "Epoch 54, Training loss 29.127167, Validation loss 3.959096\n", "Epoch 55, Training loss 29.016991, Validation loss 3.963000\n", "Epoch 56, Training loss 28.907280, Validation loss 3.966904\n", "Epoch 57, Training loss 28.798021, Validation loss 3.970800\n", "Epoch 58, Training loss 28.689205, Validation loss 3.974701\n", "Epoch 59, Training loss 28.580841, Validation loss 3.978595\n", "Epoch 60, Training loss 28.472923, Validation loss 3.982491\n", "Epoch 61, Training loss 28.365456, Validation loss 3.986380\n", "Epoch 62, Training loss 28.258430, Validation loss 3.990272\n", "Epoch 63, Training loss 28.151842, Validation loss 3.994159\n", "Epoch 64, Training loss 28.045706, Validation loss 3.998043\n", "Epoch 65, Training loss 27.939995, Validation loss 4.001928\n", "Epoch 66, Training loss 27.834719, Validation loss 4.005810\n", "Epoch 67, Training loss 27.729881, Validation loss 4.009686\n", "Epoch 68, Training loss 27.625481, Validation loss 4.013565\n", "Epoch 69, Training loss 27.521505, Validation loss 4.017441\n", "Epoch 70, Training loss 27.417961, Validation loss 4.021309\n", "Epoch 71, Training loss 27.314848, Validation loss 4.025184\n", "Epoch 72, Training loss 27.212156, Validation loss 4.029050\n", "Epoch 73, Training loss 27.109886, Validation loss 4.032919\n", "Epoch 74, Training loss 27.008038, Validation loss 4.036782\n", "Epoch 75, Training loss 26.906612, Validation loss 4.040641\n", "Epoch 76, Training loss 26.805607, Validation loss 4.044500\n", "Epoch 77, Training loss 26.705017, Validation loss 4.048361\n", "Epoch 78, Training loss 26.604837, Validation loss 4.052212\n", "Epoch 79, Training loss 26.505079, Validation loss 4.056064\n", "Epoch 80, Training loss 26.405731, Validation loss 4.059914\n", "Epoch 81, Training loss 26.306791, Validation loss 4.063765\n", "Epoch 82, Training loss 26.208258, Validation loss 4.067606\n", "Epoch 83, Training loss 26.110134, Validation loss 4.071452\n", "Epoch 84, Training loss 26.012411, Validation loss 4.075290\n", "Epoch 85, Training loss 25.915098, Validation loss 4.079127\n", "Epoch 86, Training loss 25.818184, Validation loss 4.082957\n", "Epoch 87, Training loss 25.721668, Validation loss 4.086793\n", "Epoch 88, Training loss 25.625546, Validation loss 4.090622\n", "Epoch 89, Training loss 25.529825, Validation loss 4.094450\n", "Epoch 90, Training loss 25.434504, Validation loss 4.098275\n", "Epoch 91, Training loss 25.339573, Validation loss 4.102098\n", "Epoch 92, Training loss 
25.245029, Validation loss 4.105922\n", "Epoch 93, Training loss 25.150883, Validation loss 4.109741\n", "Epoch 94, Training loss 25.057116, Validation loss 4.113548\n", "Epoch 95, Training loss 24.963743, Validation loss 4.117363\n", "Epoch 96, Training loss 24.870750, Validation loss 4.121170\n", "Epoch 97, Training loss 24.778149, Validation loss 4.124976\n", "Epoch 98, Training loss 24.685926, Validation loss 4.128781\n", "Epoch 99, Training loss 24.594078, Validation loss 4.132582\n", "Epoch 100, Training loss 24.502617, Validation loss 4.136377\n", "Epoch 101, Training loss 24.411526, Validation loss 4.140173\n", "Epoch 102, Training loss 24.320820, Validation loss 4.143965\n", "Epoch 103, Training loss 24.230476, Validation loss 4.147751\n", "Epoch 104, Training loss 24.140516, Validation loss 4.151541\n", "Epoch 105, Training loss 24.050922, Validation loss 4.155325\n", "Epoch 106, Training loss 23.961700, Validation loss 4.159110\n", "Epoch 107, Training loss 23.872847, Validation loss 4.162889\n", "Epoch 108, Training loss 23.784355, Validation loss 4.166659\n", "Epoch 109, Training loss 23.696232, Validation loss 4.170433\n", "Epoch 110, Training loss 23.608473, Validation loss 4.174201\n", "Epoch 111, Training loss 23.521078, Validation loss 4.177965\n", "Epoch 112, Training loss 23.434036, Validation loss 4.181736\n", "Epoch 113, Training loss 23.347361, Validation loss 4.185493\n", "Epoch 114, Training loss 23.261038, Validation loss 4.189254\n", "Epoch 115, Training loss 23.175079, Validation loss 4.193009\n", "Epoch 116, Training loss 23.089470, Validation loss 4.196764\n", "Epoch 117, Training loss 23.004215, Validation loss 4.200517\n", "Epoch 118, Training loss 22.919310, Validation loss 4.204261\n", "Epoch 119, Training loss 22.834757, Validation loss 4.207996\n", "Epoch 120, Training loss 22.750555, Validation loss 4.211741\n", "Epoch 121, Training loss 22.666693, Validation loss 4.215477\n", "Epoch 122, Training loss 22.583183, Validation loss 4.219209\n", "Epoch 123, Training loss 22.500021, Validation loss 4.222942\n", "Epoch 124, Training loss 22.417192, Validation loss 4.226667\n", "Epoch 125, Training loss 22.334717, Validation loss 4.230398\n", "Epoch 126, Training loss 22.252573, Validation loss 4.234123\n", "Epoch 127, Training loss 22.170769, Validation loss 4.237837\n", "Epoch 128, Training loss 22.089306, Validation loss 4.241556\n", "Epoch 129, Training loss 22.008179, Validation loss 4.245266\n", "Epoch 130, Training loss 21.927383, Validation loss 4.248972\n", "Epoch 131, Training loss 21.846922, Validation loss 4.252682\n", "Epoch 132, Training loss 21.766794, Validation loss 4.256386\n", "Epoch 133, Training loss 21.686996, Validation loss 4.260079\n", "Epoch 134, Training loss 21.607525, Validation loss 4.263781\n", "Epoch 135, Training loss 21.528383, Validation loss 4.267475\n", "Epoch 136, Training loss 21.449570, Validation loss 4.271164\n", "Epoch 137, Training loss 21.371084, Validation loss 4.274851\n", "Epoch 138, Training loss 21.292919, Validation loss 4.278536\n", "Epoch 139, Training loss 21.215076, Validation loss 4.282219\n", "Epoch 140, Training loss 21.137552, Validation loss 4.285896\n", "Epoch 141, Training loss 21.060354, Validation loss 4.289568\n", "Epoch 142, Training loss 20.983467, Validation loss 4.293238\n", "Epoch 143, Training loss 20.906904, Validation loss 4.296906\n", "Epoch 144, Training loss 20.830662, Validation loss 4.300564\n", "Epoch 145, Training loss 20.754723, Validation loss 4.304226\n", "Epoch 146, Training 
loss 20.679100, Validation loss 4.307892\n", "Epoch 147, Training loss 20.603792, Validation loss 4.311543\n", "Epoch 148, Training loss 20.528788, Validation loss 4.315188\n", "Epoch 149, Training loss 20.454100, Validation loss 4.318840\n", "Epoch 150, Training loss 20.379723, Validation loss 4.322480\n", "Epoch 151, Training loss 20.305645, Validation loss 4.326127\n", "Epoch 152, Training loss 20.231880, Validation loss 4.329759\n", "Epoch 153, Training loss 20.158413, Validation loss 4.333397\n", "Epoch 154, Training loss 20.085255, Validation loss 4.337025\n", "Epoch 155, Training loss 20.012390, Validation loss 4.340654\n", "Epoch 156, Training loss 19.939837, Validation loss 4.344274\n", "Epoch 157, Training loss 19.867580, Validation loss 4.347894\n", "Epoch 158, Training loss 19.795616, Validation loss 4.351516\n", "Epoch 159, Training loss 19.723949, Validation loss 4.355124\n", "Epoch 160, Training loss 19.652582, Validation loss 4.358737\n", "Epoch 161, Training loss 19.581507, Validation loss 4.362344\n", "Epoch 162, Training loss 19.510729, Validation loss 4.365940\n", "Epoch 163, Training loss 19.440241, Validation loss 4.369545\n", "Epoch 164, Training loss 19.370041, Validation loss 4.373140\n", "Epoch 165, Training loss 19.300137, Validation loss 4.376733\n", "Epoch 166, Training loss 19.230516, Validation loss 4.380322\n", "Epoch 167, Training loss 19.161184, Validation loss 4.383909\n", "Epoch 168, Training loss 19.092140, Validation loss 4.387490\n", "Epoch 169, Training loss 19.023375, Validation loss 4.391068\n", "Epoch 170, Training loss 18.954903, Validation loss 4.394642\n", "Epoch 171, Training loss 18.886705, Validation loss 4.398210\n", "Epoch 172, Training loss 18.818796, Validation loss 4.401775\n", "Epoch 173, Training loss 18.751160, Validation loss 4.405341\n", "Epoch 174, Training loss 18.683804, Validation loss 4.408897\n", "Epoch 175, Training loss 18.616730, Validation loss 4.412454\n", "Epoch 176, Training loss 18.549932, Validation loss 4.416011\n", "Epoch 177, Training loss 18.483410, Validation loss 4.419563\n", "Epoch 178, Training loss 18.417160, Validation loss 4.423106\n", "Epoch 179, Training loss 18.351185, Validation loss 4.426651\n", "Epoch 180, Training loss 18.285482, Validation loss 4.430189\n", "Epoch 181, Training loss 18.220045, Validation loss 4.433727\n", "Epoch 182, Training loss 18.154884, Validation loss 4.437255\n", "Epoch 183, Training loss 18.089993, Validation loss 4.440779\n", "Epoch 184, Training loss 18.025366, Validation loss 4.444305\n", "Epoch 185, Training loss 17.961008, Validation loss 4.447823\n", "Epoch 186, Training loss 17.896912, Validation loss 4.451342\n", "Epoch 187, Training loss 17.833082, Validation loss 4.454858\n", "Epoch 188, Training loss 17.769518, Validation loss 4.458364\n", "Epoch 189, Training loss 17.706215, Validation loss 4.461869\n", "Epoch 190, Training loss 17.643175, Validation loss 4.465367\n", "Epoch 191, Training loss 17.580393, Validation loss 4.468874\n", "Epoch 192, Training loss 17.517870, Validation loss 4.472365\n", "Epoch 193, Training loss 17.455605, Validation loss 4.475858\n", "Epoch 194, Training loss 17.393593, Validation loss 4.479342\n", "Epoch 195, Training loss 17.331842, Validation loss 4.482827\n", "Epoch 196, Training loss 17.270348, Validation loss 4.486306\n", "Epoch 197, Training loss 17.209105, Validation loss 4.489780\n", "Epoch 198, Training loss 17.148113, Validation loss 4.493252\n", "Epoch 199, Training loss 17.087376, Validation loss 4.496719\n", "Epoch 200, 
Training loss 17.026890, Validation loss 4.500183\n", "Epoch 201, Training loss 16.966648, Validation loss 4.503644\n", "Epoch 202, Training loss 16.906660, Validation loss 4.507097\n", "Epoch 203, Training loss 16.846918, Validation loss 4.510555\n", "Epoch 204, Training loss 16.787416, Validation loss 4.514009\n", "Epoch 205, Training loss 16.728167, Validation loss 4.517443\n", "Epoch 206, Training loss 16.669161, Validation loss 4.520886\n", "Epoch 207, Training loss 16.610401, Validation loss 4.524329\n", "Epoch 208, Training loss 16.551878, Validation loss 4.527760\n", "Epoch 209, Training loss 16.493599, Validation loss 4.531187\n", "Epoch 210, Training loss 16.435560, Validation loss 4.534619\n", "Epoch 211, Training loss 16.377760, Validation loss 4.538035\n", "Epoch 212, Training loss 16.320198, Validation loss 4.541455\n", "Epoch 213, Training loss 16.262877, Validation loss 4.544868\n", "Epoch 214, Training loss 16.205788, Validation loss 4.548281\n", "Epoch 215, Training loss 16.148943, Validation loss 4.551685\n", "Epoch 216, Training loss 16.092325, Validation loss 4.555091\n", "Epoch 217, Training loss 16.035942, Validation loss 4.558488\n", "Epoch 218, Training loss 15.979793, Validation loss 4.561889\n", "Epoch 219, Training loss 15.923872, Validation loss 4.565278\n", "Epoch 220, Training loss 15.868186, Validation loss 4.568663\n", "Epoch 221, Training loss 15.812727, Validation loss 4.572048\n", "Epoch 222, Training loss 15.757503, Validation loss 4.575430\n", "Epoch 223, Training loss 15.702499, Validation loss 4.578803\n", "Epoch 224, Training loss 15.647725, Validation loss 4.582172\n", "Epoch 225, Training loss 15.593177, Validation loss 4.585541\n", "Epoch 226, Training loss 15.538854, Validation loss 4.588906\n", "Epoch 227, Training loss 15.484755, Validation loss 4.592263\n", "Epoch 228, Training loss 15.430880, Validation loss 4.595616\n", "Epoch 229, Training loss 15.377228, Validation loss 4.598977\n", "Epoch 230, Training loss 15.323795, Validation loss 4.602321\n", "Epoch 231, Training loss 15.270584, Validation loss 4.605670\n", "Epoch 232, Training loss 15.217592, Validation loss 4.609002\n", "Epoch 233, Training loss 15.164819, Validation loss 4.612342\n", "Epoch 234, Training loss 15.112264, Validation loss 4.615678\n", "Epoch 235, Training loss 15.059926, Validation loss 4.619005\n", "Epoch 236, Training loss 15.007804, Validation loss 4.622329\n", "Epoch 237, Training loss 14.955897, Validation loss 4.625651\n", "Epoch 238, Training loss 14.904207, Validation loss 4.628970\n", "Epoch 239, Training loss 14.852726, Validation loss 4.632280\n", "Epoch 240, Training loss 14.801461, Validation loss 4.635590\n", "Epoch 241, Training loss 14.750401, Validation loss 4.638896\n", "Epoch 242, Training loss 14.699559, Validation loss 4.642192\n", "Epoch 243, Training loss 14.648926, Validation loss 4.645489\n", "Epoch 244, Training loss 14.598495, Validation loss 4.648781\n", "Epoch 245, Training loss 14.548275, Validation loss 4.652073\n", "Epoch 246, Training loss 14.498265, Validation loss 4.655356\n", "Epoch 247, Training loss 14.448461, Validation loss 4.658635\n", "Epoch 248, Training loss 14.398861, Validation loss 4.661918\n", "Epoch 249, Training loss 14.349472, Validation loss 4.665192\n", "Epoch 250, Training loss 14.300274, Validation loss 4.668457\n", "Epoch 251, Training loss 14.251292, Validation loss 4.671717\n", "Epoch 252, Training loss 14.202505, Validation loss 4.674981\n", "Epoch 253, Training loss 14.153922, Validation loss 4.678236\n", 
"Epoch 254, Training loss 14.105541, Validation loss 4.681488\n", "Epoch 255, Training loss 14.057355, Validation loss 4.684738\n", "Epoch 256, Training loss 14.009371, Validation loss 4.687987\n", "Epoch 257, Training loss 13.961583, Validation loss 4.691224\n", "Epoch 258, Training loss 13.913994, Validation loss 4.694460\n", "Epoch 259, Training loss 13.866596, Validation loss 4.697696\n", "Epoch 260, Training loss 13.819401, Validation loss 4.700926\n", "Epoch 261, Training loss 13.772396, Validation loss 4.704148\n", "Epoch 262, Training loss 13.725590, Validation loss 4.707366\n", "Epoch 263, Training loss 13.678970, Validation loss 4.710582\n", "Epoch 264, Training loss 13.632549, Validation loss 4.713797\n", "Epoch 265, Training loss 13.586314, Validation loss 4.717009\n", "Epoch 266, Training loss 13.540273, Validation loss 4.720211\n", "Epoch 267, Training loss 13.494421, Validation loss 4.723407\n", "Epoch 268, Training loss 13.448759, Validation loss 4.726599\n", "Epoch 269, Training loss 13.403286, Validation loss 4.729795\n", "Epoch 270, Training loss 13.358000, Validation loss 4.732981\n", "Epoch 271, Training loss 13.312900, Validation loss 4.736167\n", "Epoch 272, Training loss 13.267986, Validation loss 4.739351\n", "Epoch 273, Training loss 13.223259, Validation loss 4.742527\n", "Epoch 274, Training loss 13.178716, Validation loss 4.745693\n", "Epoch 275, Training loss 13.134354, Validation loss 4.748863\n", "Epoch 276, Training loss 13.090178, Validation loss 4.752024\n", "Epoch 277, Training loss 13.046182, Validation loss 4.755192\n", "Epoch 278, Training loss 13.002372, Validation loss 4.758347\n", "Epoch 279, Training loss 12.958744, Validation loss 4.761492\n", "Epoch 280, Training loss 12.915287, Validation loss 4.764645\n", "Epoch 281, Training loss 12.872015, Validation loss 4.767788\n", "Epoch 282, Training loss 12.828922, Validation loss 4.770922\n", "Epoch 283, Training loss 12.786005, Validation loss 4.774055\n", "Epoch 284, Training loss 12.743266, Validation loss 4.777192\n", "Epoch 285, Training loss 12.700704, Validation loss 4.780319\n", "Epoch 286, Training loss 12.658318, Validation loss 4.783440\n", "Epoch 287, Training loss 12.616102, Validation loss 4.786556\n", "Epoch 288, Training loss 12.574066, Validation loss 4.789680\n", "Epoch 289, Training loss 12.532202, Validation loss 4.792786\n", "Epoch 290, Training loss 12.490509, Validation loss 4.795896\n", "Epoch 291, Training loss 12.448992, Validation loss 4.798995\n", "Epoch 292, Training loss 12.407642, Validation loss 4.802089\n", "Epoch 293, Training loss 12.366466, Validation loss 4.805191\n", "Epoch 294, Training loss 12.325457, Validation loss 4.808279\n", "Epoch 295, Training loss 12.284618, Validation loss 4.811361\n", "Epoch 296, Training loss 12.243946, Validation loss 4.814450\n", "Epoch 297, Training loss 12.203446, Validation loss 4.817522\n", "Epoch 298, Training loss 12.163113, Validation loss 4.820597\n", "Epoch 299, Training loss 12.122942, Validation loss 4.823666\n", "Epoch 300, Training loss 12.082939, Validation loss 4.826725\n", "Epoch 301, Training loss 12.043100, Validation loss 4.829788\n", "Epoch 302, Training loss 12.003428, Validation loss 4.832850\n", "Epoch 303, Training loss 11.963917, Validation loss 4.835905\n", "Epoch 304, Training loss 11.924572, Validation loss 4.838955\n", "Epoch 305, Training loss 11.885386, Validation loss 4.841996\n", "Epoch 306, Training loss 11.846363, Validation loss 4.845043\n", "Epoch 307, Training loss 11.807503, Validation loss 
4.848077\n", "Epoch 308, Training loss 11.768800, Validation loss 4.851113\n", "Epoch 309, Training loss 11.730259, Validation loss 4.854132\n", "Epoch 310, Training loss 11.691875, Validation loss 4.857157\n", "Epoch 311, Training loss 11.653655, Validation loss 4.860186\n", "Epoch 312, Training loss 11.615588, Validation loss 4.863199\n", "Epoch 313, Training loss 11.577679, Validation loss 4.866208\n", "Epoch 314, Training loss 11.539924, Validation loss 4.869219\n", "Epoch 315, Training loss 11.502328, Validation loss 4.872225\n", "Epoch 316, Training loss 11.464884, Validation loss 4.875220\n", "Epoch 317, Training loss 11.427594, Validation loss 4.878215\n", "Epoch 318, Training loss 11.390463, Validation loss 4.881211\n", "Epoch 319, Training loss 11.353482, Validation loss 4.884194\n", "Epoch 320, Training loss 11.316653, Validation loss 4.887180\n", "Epoch 321, Training loss 11.279977, Validation loss 4.890160\n", "Epoch 322, Training loss 11.243454, Validation loss 4.893133\n", "Epoch 323, Training loss 11.207082, Validation loss 4.896105\n", "Epoch 324, Training loss 11.170859, Validation loss 4.899073\n", "Epoch 325, Training loss 11.134785, Validation loss 4.902037\n", "Epoch 326, Training loss 11.098857, Validation loss 4.904996\n", "Epoch 327, Training loss 11.063084, Validation loss 4.907954\n", "Epoch 328, Training loss 11.027454, Validation loss 4.910897\n", "Epoch 329, Training loss 10.991970, Validation loss 4.913843\n", "Epoch 330, Training loss 10.956630, Validation loss 4.916782\n", "Epoch 331, Training loss 10.921444, Validation loss 4.919725\n", "Epoch 332, Training loss 10.886397, Validation loss 4.922657\n", "Epoch 333, Training loss 10.851497, Validation loss 4.925592\n", "Epoch 334, Training loss 10.816739, Validation loss 4.928509\n", "Epoch 335, Training loss 10.782125, Validation loss 4.931432\n", "Epoch 336, Training loss 10.747659, Validation loss 4.934357\n", "Epoch 337, Training loss 10.713329, Validation loss 4.937264\n", "Epoch 338, Training loss 10.679141, Validation loss 4.940182\n", "Epoch 339, Training loss 10.645098, Validation loss 4.943081\n", "Epoch 340, Training loss 10.611195, Validation loss 4.945982\n", "Epoch 341, Training loss 10.577427, Validation loss 4.948878\n", "Epoch 342, Training loss 10.543803, Validation loss 4.951767\n", "Epoch 343, Training loss 10.510316, Validation loss 4.954663\n", "Epoch 344, Training loss 10.476968, Validation loss 4.957545\n", "Epoch 345, Training loss 10.443753, Validation loss 4.960428\n", "Epoch 346, Training loss 10.410678, Validation loss 4.963306\n", "Epoch 347, Training loss 10.377743, Validation loss 4.966174\n", "Epoch 348, Training loss 10.344941, Validation loss 4.969048\n", "Epoch 349, Training loss 10.312275, Validation loss 4.971908\n", "Epoch 350, Training loss 10.279742, Validation loss 4.974770\n", "Epoch 351, Training loss 10.247344, Validation loss 4.977621\n", "Epoch 352, Training loss 10.215082, Validation loss 4.980474\n", "Epoch 353, Training loss 10.182953, Validation loss 4.983322\n", "Epoch 354, Training loss 10.150954, Validation loss 4.986167\n", "Epoch 355, Training loss 10.119088, Validation loss 4.989003\n", "Epoch 356, Training loss 10.087357, Validation loss 4.991848\n", "Epoch 357, Training loss 10.055753, Validation loss 4.994675\n", "Epoch 358, Training loss 10.024279, Validation loss 4.997504\n", "Epoch 359, Training loss 9.992933, Validation loss 5.000323\n", "Epoch 360, Training loss 9.961721, Validation loss 5.003144\n", "Epoch 361, Training loss 9.930637, 
Validation loss 5.005962\n", "Epoch 362, Training loss 9.899676, Validation loss 5.008775\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 363, Training loss 9.868852, Validation loss 5.011578\n", "Epoch 364, Training loss 9.838148, Validation loss 5.014381\n", "Epoch 365, Training loss 9.807575, Validation loss 5.017183\n", "Epoch 366, Training loss 9.777126, Validation loss 5.019975\n", "Epoch 367, Training loss 9.746805, Validation loss 5.022760\n", "Epoch 368, Training loss 9.716602, Validation loss 5.025547\n", "Epoch 369, Training loss 9.686527, Validation loss 5.028328\n", "Epoch 370, Training loss 9.656579, Validation loss 5.031111\n", "Epoch 371, Training loss 9.626751, Validation loss 5.033892\n", "Epoch 372, Training loss 9.597053, Validation loss 5.036658\n", "Epoch 373, Training loss 9.567469, Validation loss 5.039417\n", "Epoch 374, Training loss 9.538012, Validation loss 5.042183\n", "Epoch 375, Training loss 9.508677, Validation loss 5.044938\n", "Epoch 376, Training loss 9.479460, Validation loss 5.047704\n", "Epoch 377, Training loss 9.450366, Validation loss 5.050450\n", "Epoch 378, Training loss 9.421391, Validation loss 5.053199\n", "Epoch 379, Training loss 9.392537, Validation loss 5.055932\n", "Epoch 380, Training loss 9.363800, Validation loss 5.058668\n", "Epoch 381, Training loss 9.335182, Validation loss 5.061409\n", "Epoch 382, Training loss 9.306680, Validation loss 5.064140\n", "Epoch 383, Training loss 9.278298, Validation loss 5.066869\n", "Epoch 384, Training loss 9.250031, Validation loss 5.069587\n", "Epoch 385, Training loss 9.221884, Validation loss 5.072302\n", "Epoch 386, Training loss 9.193849, Validation loss 5.075024\n", "Epoch 387, Training loss 9.165936, Validation loss 5.077726\n", "Epoch 388, Training loss 9.138136, Validation loss 5.080431\n", "Epoch 389, Training loss 9.110446, Validation loss 5.083138\n", "Epoch 390, Training loss 9.082875, Validation loss 5.085829\n", "Epoch 391, Training loss 9.055415, Validation loss 5.088517\n", "Epoch 392, Training loss 9.028072, Validation loss 5.091216\n", "Epoch 393, Training loss 9.000841, Validation loss 5.093901\n", "Epoch 394, Training loss 8.973721, Validation loss 5.096582\n", "Epoch 395, Training loss 8.946712, Validation loss 5.099257\n", "Epoch 396, Training loss 8.919819, Validation loss 5.101930\n", "Epoch 397, Training loss 8.893032, Validation loss 5.104600\n", "Epoch 398, Training loss 8.866358, Validation loss 5.107272\n", "Epoch 399, Training loss 8.839793, Validation loss 5.109937\n", "Epoch 400, Training loss 8.813338, Validation loss 5.112578\n", "Epoch 401, Training loss 8.786991, Validation loss 5.115243\n", "Epoch 402, Training loss 8.760753, Validation loss 5.117893\n", "Epoch 403, Training loss 8.734624, Validation loss 5.120531\n", "Epoch 404, Training loss 8.708605, Validation loss 5.123167\n", "Epoch 405, Training loss 8.682690, Validation loss 5.125805\n", "Epoch 406, Training loss 8.656881, Validation loss 5.128435\n", "Epoch 407, Training loss 8.631185, Validation loss 5.131072\n", "Epoch 408, Training loss 8.605588, Validation loss 5.133702\n", "Epoch 409, Training loss 8.580096, Validation loss 5.136322\n", "Epoch 410, Training loss 8.554713, Validation loss 5.138938\n", "Epoch 411, Training loss 8.529435, Validation loss 5.141543\n", "Epoch 412, Training loss 8.504261, Validation loss 5.144155\n", "Epoch 413, Training loss 8.479190, Validation loss 5.146764\n", "Epoch 414, Training loss 8.454222, Validation loss 5.149365\n", "Epoch 415, Training 
loss 8.429357, Validation loss 5.151960\n", "Epoch 416, Training loss 8.404596, Validation loss 5.154552\n", "Epoch 417, Training loss 8.379939, Validation loss 5.157147\n", "Epoch 418, Training loss 8.355381, Validation loss 5.159730\n", "Epoch 419, Training loss 8.330922, Validation loss 5.162311\n", "Epoch 420, Training loss 8.306569, Validation loss 5.164884\n", "Epoch 421, Training loss 8.282315, Validation loss 5.167468\n", "Epoch 422, Training loss 8.258158, Validation loss 5.170027\n", "Epoch 423, Training loss 8.234103, Validation loss 5.172597\n", "Epoch 424, Training loss 8.210148, Validation loss 5.175159\n", "Epoch 425, Training loss 8.186290, Validation loss 5.177720\n", "Epoch 426, Training loss 8.162530, Validation loss 5.180268\n", "Epoch 427, Training loss 8.138874, Validation loss 5.182819\n", "Epoch 428, Training loss 8.115314, Validation loss 5.185367\n", "Epoch 429, Training loss 8.091844, Validation loss 5.187916\n", "Epoch 430, Training loss 8.068474, Validation loss 5.190446\n", "Epoch 431, Training loss 8.045202, Validation loss 5.192976\n", "Epoch 432, Training loss 8.022027, Validation loss 5.195514\n", "Epoch 433, Training loss 7.998944, Validation loss 5.198039\n", "Epoch 434, Training loss 7.975959, Validation loss 5.200553\n", "Epoch 435, Training loss 7.953066, Validation loss 5.203073\n", "Epoch 436, Training loss 7.930273, Validation loss 5.205582\n", "Epoch 437, Training loss 7.907571, Validation loss 5.208092\n", "Epoch 438, Training loss 7.884961, Validation loss 5.210608\n", "Epoch 439, Training loss 7.862444, Validation loss 5.213113\n", "Epoch 440, Training loss 7.840023, Validation loss 5.215614\n", "Epoch 441, Training loss 7.817697, Validation loss 5.218105\n", "Epoch 442, Training loss 7.795458, Validation loss 5.220597\n", "Epoch 443, Training loss 7.773314, Validation loss 5.223073\n", "Epoch 444, Training loss 7.751257, Validation loss 5.225564\n", "Epoch 445, Training loss 7.729292, Validation loss 5.228047\n", "Epoch 446, Training loss 7.707419, Validation loss 5.230511\n", "Epoch 447, Training loss 7.685637, Validation loss 5.232988\n", "Epoch 448, Training loss 7.663943, Validation loss 5.235455\n", "Epoch 449, Training loss 7.642342, Validation loss 5.237918\n", "Epoch 450, Training loss 7.620829, Validation loss 5.240374\n", "Epoch 451, Training loss 7.599404, Validation loss 5.242836\n", "Epoch 452, Training loss 7.578066, Validation loss 5.245295\n", "Epoch 453, Training loss 7.556818, Validation loss 5.247738\n", "Epoch 454, Training loss 7.535658, Validation loss 5.250174\n", "Epoch 455, Training loss 7.514582, Validation loss 5.252619\n", "Epoch 456, Training loss 7.493596, Validation loss 5.255050\n", "Epoch 457, Training loss 7.472694, Validation loss 5.257490\n", "Epoch 458, Training loss 7.451881, Validation loss 5.259914\n", "Epoch 459, Training loss 7.431153, Validation loss 5.262340\n", "Epoch 460, Training loss 7.410507, Validation loss 5.264762\n", "Epoch 461, Training loss 7.389952, Validation loss 5.267173\n", "Epoch 462, Training loss 7.369479, Validation loss 5.269589\n", "Epoch 463, Training loss 7.349091, Validation loss 5.272003\n", "Epoch 464, Training loss 7.328790, Validation loss 5.274414\n", "Epoch 465, Training loss 7.308570, Validation loss 5.276814\n", "Epoch 466, Training loss 7.288433, Validation loss 5.279209\n", "Epoch 467, Training loss 7.268380, Validation loss 5.281611\n", "Epoch 468, Training loss 7.248406, Validation loss 5.283993\n", "Epoch 469, Training loss 7.228518, Validation loss 5.286375\n", 
"Epoch 470, Training loss 7.208712, Validation loss 5.288755\n", "Epoch 471, Training loss 7.188987, Validation loss 5.291141\n", "Epoch 472, Training loss 7.169346, Validation loss 5.293518\n", "Epoch 473, Training loss 7.149783, Validation loss 5.295885\n", "Epoch 474, Training loss 7.130302, Validation loss 5.298261\n", "Epoch 475, Training loss 7.110899, Validation loss 5.300621\n", "Epoch 476, Training loss 7.091578, Validation loss 5.302973\n", "Epoch 477, Training loss 7.072335, Validation loss 5.305332\n", "Epoch 478, Training loss 7.053175, Validation loss 5.307674\n", "Epoch 479, Training loss 7.034093, Validation loss 5.310026\n", "Epoch 480, Training loss 7.015085, Validation loss 5.312379\n", "Epoch 481, Training loss 6.996163, Validation loss 5.314721\n", "Epoch 482, Training loss 6.977314, Validation loss 5.317051\n", "Epoch 483, Training loss 6.958544, Validation loss 5.319382\n", "Epoch 484, Training loss 6.939852, Validation loss 5.321718\n", "Epoch 485, Training loss 6.921235, Validation loss 5.324043\n", "Epoch 486, Training loss 6.902696, Validation loss 5.326365\n", "Epoch 487, Training loss 6.884234, Validation loss 5.328679\n", "Epoch 488, Training loss 6.865849, Validation loss 5.330990\n", "Epoch 489, Training loss 6.847540, Validation loss 5.333306\n", "Epoch 490, Training loss 6.829307, Validation loss 5.335607\n", "Epoch 491, Training loss 6.811147, Validation loss 5.337908\n", "Epoch 492, Training loss 6.793063, Validation loss 5.340206\n", "Epoch 493, Training loss 6.775049, Validation loss 5.342505\n", "Epoch 494, Training loss 6.757116, Validation loss 5.344802\n", "Epoch 495, Training loss 6.739259, Validation loss 5.347086\n", "Epoch 496, Training loss 6.721470, Validation loss 5.349380\n", "Epoch 497, Training loss 6.703757, Validation loss 5.351654\n", "Epoch 498, Training loss 6.686114, Validation loss 5.353938\n", "Epoch 499, Training loss 6.668547, Validation loss 5.356209\n", "Epoch 500, Training loss 6.651049, Validation loss 5.358478\n", "Epoch 501, Training loss 6.633627, Validation loss 5.360734\n", "Epoch 502, Training loss 6.616277, Validation loss 5.362996\n", "Epoch 503, Training loss 6.598996, Validation loss 5.365258\n", "Epoch 504, Training loss 6.581785, Validation loss 5.367518\n", "Epoch 505, Training loss 6.564651, Validation loss 5.369761\n", "Epoch 506, Training loss 6.547584, Validation loss 5.372015\n", "Epoch 507, Training loss 6.530587, Validation loss 5.374256\n", "Epoch 508, Training loss 6.513659, Validation loss 5.376493\n", "Epoch 509, Training loss 6.496805, Validation loss 5.378728\n", "Epoch 510, Training loss 6.480013, Validation loss 5.380973\n", "Epoch 511, Training loss 6.463297, Validation loss 5.383192\n", "Epoch 512, Training loss 6.446650, Validation loss 5.385421\n", "Epoch 513, Training loss 6.430068, Validation loss 5.387647\n", "Epoch 514, Training loss 6.413557, Validation loss 5.389861\n", "Epoch 515, Training loss 6.397113, Validation loss 5.392071\n", "Epoch 516, Training loss 6.380738, Validation loss 5.394291\n", "Epoch 517, Training loss 6.364428, Validation loss 5.396486\n", "Epoch 518, Training loss 6.348187, Validation loss 5.398686\n", "Epoch 519, Training loss 6.332014, Validation loss 5.400892\n", "Epoch 520, Training loss 6.315905, Validation loss 5.403090\n", "Epoch 521, Training loss 6.299866, Validation loss 5.405285\n", "Epoch 522, Training loss 6.283893, Validation loss 5.407472\n", "Epoch 523, Training loss 6.267985, Validation loss 5.409660\n", "Epoch 524, Training loss 6.252144, 
Validation loss 5.411836\n", "Epoch 525, Training loss 6.236364, Validation loss 5.414021\n", "Epoch 526, Training loss 6.220651, Validation loss 5.416191\n", "Epoch 527, Training loss 6.205004, Validation loss 5.418357\n", "Epoch 528, Training loss 6.189423, Validation loss 5.420519\n", "Epoch 529, Training loss 6.173902, Validation loss 5.422687\n", "Epoch 530, Training loss 6.158449, Validation loss 5.424847\n", "Epoch 531, Training loss 6.143057, Validation loss 5.427003\n", "Epoch 532, Training loss 6.127729, Validation loss 5.429156\n", "Epoch 533, Training loss 6.112467, Validation loss 5.431301\n", "Epoch 534, Training loss 6.097265, Validation loss 5.433456\n", "Epoch 535, Training loss 6.082128, Validation loss 5.435599\n", "Epoch 536, Training loss 6.067049, Validation loss 5.437738\n", "Epoch 537, Training loss 6.052037, Validation loss 5.439866\n", "Epoch 538, Training loss 6.037083, Validation loss 5.441994\n", "Epoch 539, Training loss 6.022195, Validation loss 5.444127\n", "Epoch 540, Training loss 6.007365, Validation loss 5.446257\n", "Epoch 541, Training loss 5.992599, Validation loss 5.448375\n", "Epoch 542, Training loss 5.977892, Validation loss 5.450489\n", "Epoch 543, Training loss 5.963243, Validation loss 5.452604\n", "Epoch 544, Training loss 5.948661, Validation loss 5.454702\n", "Epoch 545, Training loss 5.934138, Validation loss 5.456815\n", "Epoch 546, Training loss 5.919669, Validation loss 5.458915\n", "Epoch 547, Training loss 5.905266, Validation loss 5.461012\n", "Epoch 548, Training loss 5.890920, Validation loss 5.463105\n", "Epoch 549, Training loss 5.876631, Validation loss 5.465195\n", "Epoch 550, Training loss 5.862403, Validation loss 5.467286\n", "Epoch 551, Training loss 5.848233, Validation loss 5.469368\n", "Epoch 552, Training loss 5.834124, Validation loss 5.471457\n", "Epoch 553, Training loss 5.820074, Validation loss 5.473536\n", "Epoch 554, Training loss 5.806075, Validation loss 5.475613\n", "Epoch 555, Training loss 5.792140, Validation loss 5.477690\n", "Epoch 556, Training loss 5.778259, Validation loss 5.479747\n", "Epoch 557, Training loss 5.764439, Validation loss 5.481812\n", "Epoch 558, Training loss 5.750673, Validation loss 5.483875\n", "Epoch 559, Training loss 5.736965, Validation loss 5.485934\n", "Epoch 560, Training loss 5.723315, Validation loss 5.487985\n", "Epoch 561, Training loss 5.709720, Validation loss 5.490032\n", "Epoch 562, Training loss 5.696179, Validation loss 5.492075\n", "Epoch 563, Training loss 5.682699, Validation loss 5.494129\n", "Epoch 564, Training loss 5.669269, Validation loss 5.496165\n", "Epoch 565, Training loss 5.655893, Validation loss 5.498207\n", "Epoch 566, Training loss 5.642580, Validation loss 5.500237\n", "Epoch 567, Training loss 5.629317, Validation loss 5.502262\n", "Epoch 568, Training loss 5.616109, Validation loss 5.504293\n", "Epoch 569, Training loss 5.602955, Validation loss 5.506316\n", "Epoch 570, Training loss 5.589857, Validation loss 5.508331\n", "Epoch 571, Training loss 5.576812, Validation loss 5.510338\n", "Epoch 572, Training loss 5.563822, Validation loss 5.512355\n", "Epoch 573, Training loss 5.550882, Validation loss 5.514368\n", "Epoch 574, Training loss 5.537999, Validation loss 5.516363\n", "Epoch 575, Training loss 5.525169, Validation loss 5.518370\n", "Epoch 576, Training loss 5.512392, Validation loss 5.520367\n", "Epoch 577, Training loss 5.499668, Validation loss 5.522366\n", "Epoch 578, Training loss 5.486993, Validation loss 5.524360\n", "Epoch 579, 
Training loss 5.474376, Validation loss 5.526342\n", "Epoch 580, Training loss 5.461808, Validation loss 5.528325\n", "Epoch 600, Training loss 5.221059, Validation loss 5.567316\n", "Epoch 650, Training loss 4.699502, Validation loss 5.659041\n", "Epoch 700, Training loss 4.275358, Validation loss 5.743021\n", "Epoch 750, Training loss 3.930434, Validation loss 5.819747\n", "Epoch 771, Training loss 3.805530, Validation loss 5.849965\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ 
"Epoch 772, Training loss 3.799846, Validation loss 5.851369\n", "Epoch 800, Training loss 3.649936, Validation loss 5.889790\n", "Epoch 850, Training loss 3.421826, Validation loss 5.953605\n", "Epoch 900, Training loss 3.236322, Validation loss 6.011709\n", "Epoch 950, Training loss 3.085465, Validation loss 6.064549\n", "Epoch 1000, Training loss 2.962783, Validation loss 6.112566\n", "Epoch 1050, Training loss 2.863018, Validation loss 6.156146\n", "Epoch 1100, Training loss 2.781886, Validation loss 6.195696\n", "Epoch 1134, Training loss 2.735555, Validation loss 6.220469\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ 
"Epoch 1135, Training loss 2.734287, Validation loss 6.221161\n", "Epoch 1150, Training loss 2.715905, Validation loss 6.231552\n", "Epoch 1200, Training loss 2.662248, Validation loss 6.264050\n", "Epoch 1250, Training loss 2.618615, Validation loss 6.293471\n", "Epoch 1300, Training loss 2.583132, Validation loss 6.320120\n", "Epoch 1350, Training loss 2.554275, Validation loss 6.344234\n", "Epoch 1400, Training loss 2.530808, Validation loss 6.366034\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ 
"Epoch 1401, Training loss 2.530386, Validation loss 6.366461\n", "Epoch 1450, Training loss 2.511725, Validation loss 6.385786\n", "Epoch 1500, Training loss 2.496204, Validation loss 6.403609\n", "Epoch 1550, Training loss 2.483582, Validation loss 6.419740\n", "Epoch 1600, Training loss 2.473319, Validation loss 6.434296\n", "Epoch 1650, Training loss 2.464973, Validation loss 6.447459\n", "Epoch 1700, Training loss 2.458186, Validation loss 6.459356\n", "Epoch 1711, Training loss 2.456871, Validation loss 6.461802\n", "Epoch 1712, Training loss 
2.456754, Validation loss 6.462024\n", "Epoch 1713, Training loss 2.456639, Validation loss 6.462260\n", "Epoch 1714, Training loss 2.456524, Validation loss 6.462471\n", "Epoch 1715, Training loss 2.456409, Validation loss 6.462693\n", "Epoch 1716, Training loss 2.456293, Validation loss 6.462919\n", "Epoch 1717, Training loss 2.456179, Validation loss 6.463126\n", "Epoch 1718, Training loss 2.456065, Validation loss 6.463333\n", "Epoch 1719, Training loss 2.455953, Validation loss 6.463564\n", "Epoch 1720, Training loss 2.455840, Validation loss 6.463780\n", "Epoch 1721, Training loss 2.455726, Validation loss 6.463996\n", "Epoch 1722, Training loss 2.455615, Validation loss 6.464218\n", "Epoch 1723, Training loss 2.455506, Validation loss 6.464429\n", "Epoch 1724, Training loss 2.455394, Validation loss 6.464651\n", "Epoch 1725, Training loss 2.455283, Validation loss 6.464867\n", "Epoch 1726, Training loss 2.455172, Validation loss 6.465074\n", "Epoch 1727, Training loss 2.455063, Validation loss 6.465295\n", "Epoch 1728, Training loss 2.454955, Validation loss 6.465507\n", "Epoch 1729, Training loss 2.454845, Validation loss 6.465714\n", "Epoch 1730, Training loss 2.454739, Validation loss 6.465930\n", "Epoch 1731, Training loss 2.454630, Validation loss 6.466151\n", "Epoch 1732, Training loss 2.454523, Validation loss 6.466352\n", "Epoch 1733, Training loss 2.454416, Validation loss 6.466559\n", "Epoch 1734, Training loss 2.454310, Validation loss 6.466775\n", "Epoch 1735, Training loss 2.454203, Validation loss 6.466982\n", "Epoch 1736, Training loss 2.454099, Validation loss 6.467194\n", "Epoch 1737, Training loss 2.453993, Validation loss 6.467400\n", "Epoch 1738, Training loss 2.453888, Validation loss 6.467607\n", "Epoch 1739, Training loss 2.453784, Validation loss 6.467823\n", "Epoch 1740, Training loss 2.453679, Validation loss 6.468035\n", "Epoch 1741, Training loss 2.453577, Validation loss 6.468251\n", "Epoch 1742, Training loss 2.453474, Validation loss 6.468448\n", "Epoch 1743, Training loss 2.453372, Validation loss 6.468645\n", "Epoch 1744, Training loss 2.453269, Validation loss 6.468856\n", "Epoch 1745, Training loss 2.453167, Validation loss 6.469058\n", "Epoch 1746, Training loss 2.453066, Validation loss 6.469269\n", "Epoch 1747, Training loss 2.452966, Validation loss 6.469471\n", "Epoch 1748, Training loss 2.452865, Validation loss 6.469677\n", "Epoch 1749, Training loss 2.452765, Validation loss 6.469889\n", "Epoch 1750, Training loss 2.452667, Validation loss 6.470095\n", "Epoch 1751, Training loss 2.452567, Validation loss 6.470302\n", "Epoch 1752, Training loss 2.452468, Validation loss 6.470508\n", "Epoch 1753, Training loss 2.452370, Validation loss 6.470719\n", "Epoch 1754, Training loss 2.452271, Validation loss 6.470911\n", "Epoch 1755, Training loss 2.452174, Validation loss 6.471107\n", "Epoch 1756, Training loss 2.452076, Validation loss 6.471324\n", "Epoch 1757, Training loss 2.451980, Validation loss 6.471520\n", "Epoch 1758, Training loss 2.451885, Validation loss 6.471712\n", "Epoch 1759, Training loss 2.451787, Validation loss 6.471909\n", "Epoch 1760, Training loss 2.451692, Validation loss 6.472116\n", "Epoch 1761, Training loss 2.451596, Validation loss 6.472312\n", "Epoch 1762, Training loss 2.451503, Validation loss 6.472504\n", "Epoch 1763, Training loss 2.451408, Validation loss 6.472715\n", "Epoch 1764, Training loss 2.451314, Validation loss 6.472907\n", "Epoch 1765, Training loss 2.451219, Validation loss 6.473104\n", "Epoch 1766, 
Training loss 2.451127, Validation loss 6.473295\n", "Epoch 1767, Training loss 2.451035, Validation loss 6.473496\n", "Epoch 1768, Training loss 2.450942, Validation loss 6.473688\n", "Epoch 1769, Training loss 2.450850, Validation loss 6.473895\n", "Epoch 1770, Training loss 2.450757, Validation loss 6.474101\n", "Epoch 1771, Training loss 2.450667, Validation loss 6.474293\n", "Epoch 1772, Training loss 2.450576, Validation loss 6.474494\n", "Epoch 1773, Training loss 2.450485, Validation loss 6.474696\n", "Epoch 1774, Training loss 2.450395, Validation loss 6.474887\n", "Epoch 1775, Training loss 2.450305, Validation loss 6.475074\n", "Epoch 1776, Training loss 2.450216, Validation loss 6.475275\n", "Epoch 1777, Training loss 2.450126, Validation loss 6.475471\n", "Epoch 1778, Training loss 2.450038, Validation loss 6.475658\n", "Epoch 1779, Training loss 2.449950, Validation loss 6.475835\n", "Epoch 1780, Training loss 2.449863, Validation loss 6.476037\n", "Epoch 1781, Training loss 2.449775, Validation loss 6.476218\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1782, Training loss 2.449687, Validation loss 6.476420\n", "Epoch 1783, Training loss 2.449600, Validation loss 6.476597\n", "Epoch 1784, Training loss 2.449513, Validation loss 6.476799\n", "Epoch 1785, Training loss 2.449428, Validation loss 6.476980\n", "Epoch 1786, Training loss 2.449342, Validation loss 6.477171\n", "Epoch 1787, Training loss 2.449258, Validation loss 6.477364\n", "Epoch 1788, Training loss 2.449172, Validation loss 6.477550\n", "Epoch 1789, Training loss 2.449087, Validation loss 6.477751\n", "Epoch 1790, Training loss 2.449003, Validation loss 6.477938\n", "Epoch 1791, Training loss 2.448919, Validation loss 6.478125\n", "Epoch 1792, Training loss 2.448835, Validation loss 6.478292\n", "Epoch 1793, Training loss 2.448751, Validation loss 6.478474\n", "Epoch 1794, Training loss 2.448670, Validation loss 6.478670\n", "Epoch 1795, Training loss 2.448586, Validation loss 6.478861\n", "Epoch 1796, Training loss 2.448503, Validation loss 6.479043\n", "Epoch 1797, Training loss 2.448421, Validation loss 6.479219\n", "Epoch 1798, Training loss 2.448340, Validation loss 6.479412\n", "Epoch 1799, Training loss 2.448258, Validation loss 6.479593\n", "Epoch 1800, Training loss 2.448177, Validation loss 6.479789\n", "Epoch 1801, Training loss 2.448096, Validation loss 6.479961\n", "Epoch 1802, Training loss 2.448017, Validation loss 6.480147\n", "Epoch 1803, Training loss 2.447936, Validation loss 6.480334\n", "Epoch 1804, Training loss 2.447856, Validation loss 6.480521\n", "Epoch 1805, Training loss 2.447778, Validation loss 6.480712\n", "Epoch 1806, Training loss 2.447698, Validation loss 6.480894\n", "Epoch 1807, Training loss 2.447619, Validation loss 6.481071\n", "Epoch 1808, Training loss 2.447541, Validation loss 6.481257\n", "Epoch 1809, Training loss 2.447464, Validation loss 6.481434\n", "Epoch 1810, Training loss 2.447386, Validation loss 6.481616\n", "Epoch 1811, Training loss 2.447309, Validation loss 6.481797\n", "Epoch 1812, Training loss 2.447231, Validation loss 6.481983\n", "Epoch 1813, Training loss 2.447154, Validation loss 6.482160\n", "Epoch 1814, Training loss 2.447078, Validation loss 6.482332\n", "Epoch 1815, Training loss 2.447001, Validation loss 6.482504\n", "Epoch 1816, Training loss 2.446927, Validation loss 6.482680\n", "Epoch 1817, Training loss 2.446851, Validation loss 6.482867\n", "Epoch 1818, Training loss 2.446774, Validation loss 6.483044\n", "Epoch 
1819, Training loss 2.446700, Validation loss 6.483211\n", "Epoch 1820, Training loss 2.446625, Validation loss 6.483392\n", "Epoch 1821, Training loss 2.446552, Validation loss 6.483564\n", "Epoch 1822, Training loss 2.446477, Validation loss 6.483755\n", "Epoch 1823, Training loss 2.446404, Validation loss 6.483932\n", "Epoch 1824, Training loss 2.446330, Validation loss 6.484108\n", "Epoch 1825, Training loss 2.446258, Validation loss 6.484280\n", "Epoch 1826, Training loss 2.446184, Validation loss 6.484462\n", "Epoch 1827, Training loss 2.446113, Validation loss 6.484638\n", "Epoch 1828, Training loss 2.446039, Validation loss 6.484795\n", "Epoch 1829, Training loss 2.445969, Validation loss 6.484972\n", "Epoch 1830, Training loss 2.445896, Validation loss 6.485144\n", "Epoch 1831, Training loss 2.445825, Validation loss 6.485315\n", "Epoch 1832, Training loss 2.445754, Validation loss 6.485487\n", "Epoch 1833, Training loss 2.445683, Validation loss 6.485663\n", "Epoch 1834, Training loss 2.445612, Validation loss 6.485840\n", "Epoch 1835, Training loss 2.445543, Validation loss 6.486017\n", "Epoch 1836, Training loss 2.445473, Validation loss 6.486178\n", "Epoch 1837, Training loss 2.445404, Validation loss 6.486355\n", "Epoch 1838, Training loss 2.445335, Validation loss 6.486517\n", "Epoch 1839, Training loss 2.445265, Validation loss 6.486693\n", "Epoch 1840, Training loss 2.445196, Validation loss 6.486860\n", "Epoch 1841, Training loss 2.445127, Validation loss 6.487032\n", "Epoch 1842, Training loss 2.445061, Validation loss 6.487203\n", "Epoch 1843, Training loss 2.444993, Validation loss 6.487375\n", "Epoch 1844, Training loss 2.444926, Validation loss 6.487546\n", "Epoch 1845, Training loss 2.444859, Validation loss 6.487713\n", "Epoch 1846, Training loss 2.444791, Validation loss 6.487875\n", "Epoch 1847, Training loss 2.444724, Validation loss 6.488042\n", "Epoch 1848, Training loss 2.444659, Validation loss 6.488218\n", "Epoch 1849, Training loss 2.444592, Validation loss 6.488375\n", "Epoch 1850, Training loss 2.444525, Validation loss 6.488546\n", "Epoch 1851, Training loss 2.444461, Validation loss 6.488718\n", "Epoch 1852, Training loss 2.444395, Validation loss 6.488879\n", "Epoch 1853, Training loss 2.444330, Validation loss 6.489041\n", "Epoch 1854, Training loss 2.444267, Validation loss 6.489208\n", "Epoch 1855, Training loss 2.444201, Validation loss 6.489370\n", "Epoch 1856, Training loss 2.444137, Validation loss 6.489536\n", "Epoch 1857, Training loss 2.444074, Validation loss 6.489698\n", "Epoch 1858, Training loss 2.444008, Validation loss 6.489869\n", "Epoch 1859, Training loss 2.443945, Validation loss 6.490031\n", "Epoch 1860, Training loss 2.443882, Validation loss 6.490189\n", "Epoch 1861, Training loss 2.443819, Validation loss 6.490345\n", "Epoch 1862, Training loss 2.443757, Validation loss 6.490507\n", "Epoch 1863, Training loss 2.443694, Validation loss 6.490678\n", "Epoch 1864, Training loss 2.443632, Validation loss 6.490840\n", "Epoch 1865, Training loss 2.443569, Validation loss 6.491011\n", "Epoch 1866, Training loss 2.443508, Validation loss 6.491178\n", "Epoch 1867, Training loss 2.443448, Validation loss 6.491324\n", "Epoch 1868, Training loss 2.443386, Validation loss 6.491491\n", "Epoch 1869, Training loss 2.443324, Validation loss 6.491643\n", "Epoch 1870, Training loss 2.443264, Validation loss 6.491800\n", "Epoch 1871, Training loss 2.443204, Validation loss 6.491961\n", "Epoch 1872, Training loss 2.443145, Validation loss 
6.492123\n", "Epoch 1873, Training loss 2.443084, Validation loss 6.492290\n", "Epoch 1874, Training loss 2.443024, Validation loss 6.492436\n", "Epoch 1875, Training loss 2.442965, Validation loss 6.492598\n", "Epoch 1876, Training loss 2.442906, Validation loss 6.492760\n", "Epoch 1877, Training loss 2.442846, Validation loss 6.492916\n", "Epoch 1878, Training loss 2.442788, Validation loss 6.493073\n", "Epoch 1879, Training loss 2.442730, Validation loss 6.493225\n", "Epoch 1880, Training loss 2.442672, Validation loss 6.493382\n", "Epoch 1881, Training loss 2.442613, Validation loss 6.493543\n", "Epoch 1882, Training loss 2.442557, Validation loss 6.493690\n", "Epoch 1883, Training loss 2.442498, Validation loss 6.493856\n", "Epoch 1884, Training loss 2.442442, Validation loss 6.494003\n", "Epoch 1885, Training loss 2.442384, Validation loss 6.494160\n", "Epoch 1886, Training loss 2.442327, Validation loss 6.494311\n", "Epoch 1887, Training loss 2.442271, Validation loss 6.494473\n", "Epoch 1888, Training loss 2.442214, Validation loss 6.494629\n", "Epoch 1889, Training loss 2.442159, Validation loss 6.494776\n", "Epoch 1890, Training loss 2.442103, Validation loss 6.494933\n", "Epoch 1891, Training loss 2.442048, Validation loss 6.495090\n", "Epoch 1892, Training loss 2.441990, Validation loss 6.495246\n", "Epoch 1893, Training loss 2.441938, Validation loss 6.495402\n", "Epoch 1894, Training loss 2.441883, Validation loss 6.495539\n", "Epoch 1895, Training loss 2.441828, Validation loss 6.495691\n", "Epoch 1896, Training loss 2.441772, Validation loss 6.495848\n", "Epoch 1897, Training loss 2.441720, Validation loss 6.496009\n", "Epoch 1898, Training loss 2.441665, Validation loss 6.496151\n", "Epoch 1899, Training loss 2.441610, Validation loss 6.496303\n", "Epoch 1900, Training loss 2.441557, Validation loss 6.496445\n", "Epoch 1901, Training loss 2.441503, Validation loss 6.496601\n", "Epoch 1902, Training loss 2.441450, Validation loss 6.496748\n", "Epoch 1903, Training loss 2.441398, Validation loss 6.496894\n", "Epoch 1904, Training loss 2.441345, Validation loss 6.497046\n", "Epoch 1905, Training loss 2.441293, Validation loss 6.497193\n", "Epoch 1906, Training loss 2.441241, Validation loss 6.497344\n", "Epoch 1907, Training loss 2.441188, Validation loss 6.497481\n", "Epoch 1908, Training loss 2.441137, Validation loss 6.497628\n", "Epoch 1909, Training loss 2.441086, Validation loss 6.497784\n", "Epoch 1910, Training loss 2.441033, Validation loss 6.497931\n", "Epoch 1911, Training loss 2.440982, Validation loss 6.498087\n", "Epoch 1912, Training loss 2.440930, Validation loss 6.498229\n", "Epoch 1913, Training loss 2.440880, Validation loss 6.498376\n", "Epoch 1914, Training loss 2.440829, Validation loss 6.498528\n", "Epoch 1915, Training loss 2.440779, Validation loss 6.498679\n", "Epoch 1916, Training loss 2.440730, Validation loss 6.498811\n", "Epoch 1917, Training loss 2.440680, Validation loss 6.498962\n", "Epoch 1918, Training loss 2.440630, Validation loss 6.499114\n", "Epoch 1919, Training loss 2.440580, Validation loss 6.499255\n", "Epoch 1920, Training loss 2.440532, Validation loss 6.499393\n", "Epoch 1921, Training loss 2.440482, Validation loss 6.499539\n", "Epoch 1922, Training loss 2.440434, Validation loss 6.499681\n", "Epoch 1923, Training loss 2.440385, Validation loss 6.499832\n", "Epoch 1924, Training loss 2.440336, Validation loss 6.499964\n", "Epoch 1925, Training loss 2.440287, Validation loss 6.500115\n", "Epoch 1926, Training loss 2.440240, 
Validation loss 6.500252\n", "Epoch 1927, Training loss 2.440191, Validation loss 6.500394\n", "Epoch 1928, Training loss 2.440145, Validation loss 6.500531\n", "Epoch 1929, Training loss 2.440097, Validation loss 6.500672\n", "Epoch 1930, Training loss 2.440049, Validation loss 6.500813\n", "Epoch 1931, Training loss 2.440002, Validation loss 6.500975\n", "Epoch 1932, Training loss 2.439955, Validation loss 6.501112\n", "Epoch 1933, Training loss 2.439909, Validation loss 6.501238\n", "Epoch 1934, Training loss 2.439862, Validation loss 6.501385\n", "Epoch 1935, Training loss 2.439816, Validation loss 6.501517\n", "Epoch 1936, Training loss 2.439769, Validation loss 6.501649\n", "Epoch 1937, Training loss 2.439723, Validation loss 6.501800\n", "Epoch 1938, Training loss 2.439676, Validation loss 6.501942\n", "Epoch 1939, Training loss 2.439631, Validation loss 6.502069\n", "Epoch 1940, Training loss 2.439586, Validation loss 6.502220\n", "Epoch 1941, Training loss 2.439542, Validation loss 6.502347\n", "Epoch 1942, Training loss 2.439497, Validation loss 6.502479\n", "Epoch 1943, Training loss 2.439452, Validation loss 6.502630\n", "Epoch 1944, Training loss 2.439407, Validation loss 6.502771\n", "Epoch 1945, Training loss 2.439363, Validation loss 6.502908\n", "Epoch 1946, Training loss 2.439318, Validation loss 6.503040\n", "Epoch 1947, Training loss 2.439274, Validation loss 6.503171\n", "Epoch 1948, Training loss 2.439230, Validation loss 6.503308\n", "Epoch 1949, Training loss 2.439186, Validation loss 6.503440\n", "Epoch 1950, Training loss 2.439143, Validation loss 6.503572\n", "Epoch 1951, Training loss 2.439099, Validation loss 6.503713\n", "Epoch 1952, Training loss 2.439057, Validation loss 6.503859\n", "Epoch 1953, Training loss 2.439015, Validation loss 6.503986\n", "Epoch 1954, Training loss 2.438970, Validation loss 6.504123\n", "Epoch 1955, Training loss 2.438929, Validation loss 6.504254\n", "Epoch 1956, Training loss 2.438886, Validation loss 6.504386\n", "Epoch 1957, Training loss 2.438842, Validation loss 6.504518\n", "Epoch 1958, Training loss 2.438800, Validation loss 6.504654\n", "Epoch 1959, Training loss 2.438758, Validation loss 6.504781\n", "Epoch 1960, Training loss 2.438716, Validation loss 6.504918\n", "Epoch 1961, Training loss 2.438674, Validation loss 6.505054\n", "Epoch 1962, Training loss 2.438634, Validation loss 6.505190\n", "Epoch 1963, Training loss 2.438593, Validation loss 6.505322\n", "Epoch 1964, Training loss 2.438552, Validation loss 6.505459\n", "Epoch 1965, Training loss 2.438511, Validation loss 6.505580\n", "Epoch 1966, Training loss 2.438469, Validation loss 6.505717\n", "Epoch 1967, Training loss 2.438430, Validation loss 6.505838\n", "Epoch 1968, Training loss 2.438388, Validation loss 6.505975\n", "Epoch 1969, Training loss 2.438349, Validation loss 6.506112\n", "Epoch 1970, Training loss 2.438309, Validation loss 6.506244\n", "Epoch 1971, Training loss 2.438270, Validation loss 6.506370\n", "Epoch 1972, Training loss 2.438229, Validation loss 6.506492\n", "Epoch 1973, Training loss 2.438189, Validation loss 6.506628\n", "Epoch 1974, Training loss 2.438150, Validation loss 6.506755\n", "Epoch 1975, Training loss 2.438111, Validation loss 6.506876\n", "Epoch 1976, Training loss 2.438071, Validation loss 6.507013\n", "Epoch 1977, Training loss 2.438032, Validation loss 6.507154\n", "Epoch 1978, Training loss 2.437993, Validation loss 6.507276\n", "Epoch 1979, Training loss 2.437955, Validation loss 6.507393\n", "Epoch 1980, Training loss 
2.437917, Validation loss 6.507519\n", "Epoch 1981, Training loss 2.437880, Validation loss 6.507646\n", "Epoch 1982, Training loss 2.437840, Validation loss 6.507773\n", "Epoch 1983, Training loss 2.437803, Validation loss 6.507904\n", "Epoch 1984, Training loss 2.437764, Validation loss 6.508026\n", "Epoch 1985, Training loss 2.437726, Validation loss 6.508147\n", "Epoch 1986, Training loss 2.437688, Validation loss 6.508288\n", "Epoch 1987, Training loss 2.437654, Validation loss 6.508415\n", "Epoch 1988, Training loss 2.437615, Validation loss 6.508532\n", "Epoch 1989, Training loss 2.437578, Validation loss 6.508658\n", "Epoch 1990, Training loss 2.437540, Validation loss 6.508785\n", "Epoch 1991, Training loss 2.437503, Validation loss 6.508917\n", "Epoch 1992, Training loss 2.437467, Validation loss 6.509033\n", "Epoch 1993, Training loss 2.437432, Validation loss 6.509155\n", "Epoch 1994, Training loss 2.437396, Validation loss 6.509291\n", "Epoch 1995, Training loss 2.437358, Validation loss 6.509398\n", "Epoch 1996, Training loss 2.437321, Validation loss 6.509524\n", "Epoch 1997, Training loss 2.437287, Validation loss 6.509661\n", "Epoch 1998, Training loss 2.437251, Validation loss 6.509778\n", "Epoch 1999, Training loss 2.437215, Validation loss 6.509904\n", "Epoch 2000, Training loss 2.437180, Validation loss 6.510026\n", "Epoch 2001, Training loss 2.437144, Validation loss 6.510133\n", "Epoch 2002, Training loss 2.437109, Validation loss 6.510259\n", "Epoch 2003, Training loss 2.437074, Validation loss 6.510391\n", "Epoch 2004, Training loss 2.437040, Validation loss 6.510502\n", "Epoch 2005, Training loss 2.437004, Validation loss 6.510629\n", "Epoch 2006, Training loss 2.436971, Validation loss 6.510740\n", "Epoch 2007, Training loss 2.436935, Validation loss 6.510862\n", "Epoch 2008, Training loss 2.436900, Validation loss 6.511003\n", "Epoch 2009, Training loss 2.436868, Validation loss 6.511120\n", "Epoch 2010, Training loss 2.436833, Validation loss 6.511227\n", "Epoch 2011, Training loss 2.436800, Validation loss 6.511353\n", "Epoch 2012, Training loss 2.436766, Validation loss 6.511484\n", "Epoch 2013, Training loss 2.436732, Validation loss 6.511591\n", "Epoch 2014, Training loss 2.436698, Validation loss 6.511712\n", "Epoch 2015, Training loss 2.436666, Validation loss 6.511829\n", "Epoch 2016, Training loss 2.436633, Validation loss 6.511950\n", "Epoch 2017, Training loss 2.436599, Validation loss 6.512067\n", "Epoch 2018, Training loss 2.436567, Validation loss 6.512184\n", "Epoch 2019, Training loss 2.436535, Validation loss 6.512300\n", "Epoch 2020, Training loss 2.436501, Validation loss 6.512417\n", "Epoch 2021, Training loss 2.436469, Validation loss 6.512538\n", "Epoch 2022, Training loss 2.436437, Validation loss 6.512650\n", "Epoch 2023, Training loss 2.436404, Validation loss 6.512772\n", "Epoch 2024, Training loss 2.436372, Validation loss 6.512888\n", "Epoch 2025, Training loss 2.436340, Validation loss 6.513004\n", "Epoch 2026, Training loss 2.436308, Validation loss 6.513111\n", "Epoch 2027, Training loss 2.436276, Validation loss 6.513233\n", "Epoch 2028, Training loss 2.436246, Validation loss 6.513349\n", "Epoch 2029, Training loss 2.436213, Validation loss 6.513461\n", "Epoch 2030, Training loss 2.436182, Validation loss 6.513582\n", "Epoch 2031, Training loss 2.436151, Validation loss 6.513694\n", "Epoch 2032, Training loss 2.436121, Validation loss 6.513800\n", "Epoch 2033, Training loss 2.436089, Validation loss 6.513922\n", "Epoch 2034, 
Training loss 2.436059, Validation loss 6.514033\n", "Epoch 2035, Training loss 2.436028, Validation loss 6.514150\n", "Epoch 2036, Training loss 2.435997, Validation loss 6.514266\n", "Epoch 2037, Training loss 2.435966, Validation loss 6.514378\n", "Epoch 2038, Training loss 2.435938, Validation loss 6.514484\n", "Epoch 2039, Training loss 2.435906, Validation loss 6.514596\n", "Epoch 2040, Training loss 2.435876, Validation loss 6.514708\n", "Epoch 2041, Training loss 2.435846, Validation loss 6.514824\n", "Epoch 2042, Training loss 2.435817, Validation loss 6.514940\n", "Epoch 2043, Training loss 2.435788, Validation loss 6.515067\n", "Epoch 2044, Training loss 2.435758, Validation loss 6.515173\n", "Epoch 2045, Training loss 2.435729, Validation loss 6.515295\n", "Epoch 2046, Training loss 2.435699, Validation loss 6.515406\n", "Epoch 2047, Training loss 2.435670, Validation loss 6.515502\n", "Epoch 2048, Training loss 2.435640, Validation loss 6.515615\n", "Epoch 2049, Training loss 2.435613, Validation loss 6.515721\n", "Epoch 2050, Training loss 2.435583, Validation loss 6.515828\n", "Epoch 2051, Training loss 2.435555, Validation loss 6.515939\n", "Epoch 2052, Training loss 2.435525, Validation loss 6.516055\n", "Epoch 2053, Training loss 2.435498, Validation loss 6.516167\n", "Epoch 2054, Training loss 2.435469, Validation loss 6.516278\n", "Epoch 2055, Training loss 2.435440, Validation loss 6.516385\n", "Epoch 2056, Training loss 2.435412, Validation loss 6.516501\n", "Epoch 2057, Training loss 2.435384, Validation loss 6.516613\n", "Epoch 2058, Training loss 2.435357, Validation loss 6.516709\n", "Epoch 2059, Training loss 2.435329, Validation loss 6.516826\n", "Epoch 2060, Training loss 2.435301, Validation loss 6.516927\n", "Epoch 2061, Training loss 2.435274, Validation loss 6.517029\n", "Epoch 2062, Training loss 2.435245, Validation loss 6.517145\n", "Epoch 2063, Training loss 2.435220, Validation loss 6.517251\n", "Epoch 2064, Training loss 2.435192, Validation loss 6.517363\n", "Epoch 2065, Training loss 2.435165, Validation loss 6.517459\n", "Epoch 2066, Training loss 2.435138, Validation loss 6.517566\n", "Epoch 2067, Training loss 2.435111, Validation loss 6.517678\n", "Epoch 2068, Training loss 2.435084, Validation loss 6.517779\n", "Epoch 2069, Training loss 2.435058, Validation loss 6.517885\n", "Epoch 2070, Training loss 2.435032, Validation loss 6.517987\n", "Epoch 2071, Training loss 2.435005, Validation loss 6.518118\n", "Epoch 2072, Training loss 2.434978, Validation loss 6.518210\n", "Epoch 2073, Training loss 2.434953, Validation loss 6.518306\n", "Epoch 2074, Training loss 2.434926, Validation loss 6.518413\n", "Epoch 2075, Training loss 2.434900, Validation loss 6.518524\n", "Epoch 2076, Training loss 2.434874, Validation loss 6.518631\n", "Epoch 2077, Training loss 2.434849, Validation loss 6.518742\n", "Epoch 2078, Training loss 2.434822, Validation loss 6.518828\n", "Epoch 2079, Training loss 2.434797, Validation loss 6.518940\n", "Epoch 2080, Training loss 2.434772, Validation loss 6.519052\n", "Epoch 2081, Training loss 2.434746, Validation loss 6.519153\n", "Epoch 2082, Training loss 2.434722, Validation loss 6.519259\n", "Epoch 2083, Training loss 2.434697, Validation loss 6.519356\n", "Epoch 2084, Training loss 2.434671, Validation loss 6.519462\n", "Epoch 2085, Training loss 2.434647, Validation loss 6.519568\n", "Epoch 2086, Training loss 2.434621, Validation loss 6.519665\n", "Epoch 2087, Training loss 2.434596, Validation loss 6.519766\n", 
"Epoch 2088, Training loss 2.434572, Validation loss 6.519883\n", "Epoch 2089, Training loss 2.434547, Validation loss 6.519979\n", "Epoch 2090, Training loss 2.434524, Validation loss 6.520081\n", "Epoch 2091, Training loss 2.434499, Validation loss 6.520172\n", "Epoch 2092, Training loss 2.434475, Validation loss 6.520279\n", "Epoch 2093, Training loss 2.434451, Validation loss 6.520375\n", "Epoch 2094, Training loss 2.434426, Validation loss 6.520472\n", "Epoch 2095, Training loss 2.434402, Validation loss 6.520573\n", "Epoch 2096, Training loss 2.434378, Validation loss 6.520679\n", "Epoch 2097, Training loss 2.434355, Validation loss 6.520771\n", "Epoch 2098, Training loss 2.434331, Validation loss 6.520872\n", "Epoch 2099, Training loss 2.434307, Validation loss 6.520993\n", "Epoch 2100, Training loss 2.434284, Validation loss 6.521085\n", "Epoch 2101, Training loss 2.434262, Validation loss 6.521191\n", "Epoch 2102, Training loss 2.434238, Validation loss 6.521288\n", "Epoch 2103, Training loss 2.434214, Validation loss 6.521394\n", "Epoch 2104, Training loss 2.434192, Validation loss 6.521476\n", "Epoch 2105, Training loss 2.434169, Validation loss 6.521577\n", "Epoch 2106, Training loss 2.434146, Validation loss 6.521673\n", "Epoch 2107, Training loss 2.434124, Validation loss 6.521770\n", "Epoch 2108, Training loss 2.434101, Validation loss 6.521861\n", "Epoch 2109, Training loss 2.434079, Validation loss 6.521973\n", "Epoch 2110, Training loss 2.434056, Validation loss 6.522059\n", "Epoch 2111, Training loss 2.434033, Validation loss 6.522156\n", "Epoch 2112, Training loss 2.434012, Validation loss 6.522262\n", "Epoch 2113, Training loss 2.433989, Validation loss 6.522348\n", "Epoch 2114, Training loss 2.433967, Validation loss 6.522450\n", "Epoch 2115, Training loss 2.433945, Validation loss 6.522556\n", "Epoch 2116, Training loss 2.433924, Validation loss 6.522642\n", "Epoch 2117, Training loss 2.433902, Validation loss 6.522744\n", "Epoch 2118, Training loss 2.433879, Validation loss 6.522845\n", "Epoch 2119, Training loss 2.433858, Validation loss 6.522937\n", "Epoch 2120, Training loss 2.433836, Validation loss 6.523033\n", "Epoch 2121, Training loss 2.433815, Validation loss 6.523129\n", "Epoch 2122, Training loss 2.433793, Validation loss 6.523221\n", "Epoch 2123, Training loss 2.433772, Validation loss 6.523317\n", "Epoch 2124, Training loss 2.433750, Validation loss 6.523409\n", "Epoch 2125, Training loss 2.433730, Validation loss 6.523510\n", "Epoch 2126, Training loss 2.433707, Validation loss 6.523601\n", "Epoch 2127, Training loss 2.433687, Validation loss 6.523693\n", "Epoch 2128, Training loss 2.433667, Validation loss 6.523785\n", "Epoch 2129, Training loss 2.433645, Validation loss 6.523890\n", "Epoch 2130, Training loss 2.433625, Validation loss 6.523977\n", "Epoch 2131, Training loss 2.433603, Validation loss 6.524074\n", "Epoch 2132, Training loss 2.433583, Validation loss 6.524169\n", "Epoch 2133, Training loss 2.433564, Validation loss 6.524261\n", "Epoch 2134, Training loss 2.433543, Validation loss 6.524357\n", "Epoch 2135, Training loss 2.433523, Validation loss 6.524434\n", "Epoch 2136, Training loss 2.433504, Validation loss 6.524540\n", "Epoch 2137, Training loss 2.433482, Validation loss 6.524621\n", "Epoch 2138, Training loss 2.433463, Validation loss 6.524728\n", "Epoch 2139, Training loss 2.433442, Validation loss 6.524814\n", "Epoch 2140, Training loss 2.433423, Validation loss 6.524896\n", "Epoch 2141, Training loss 2.433403, Validation loss 
6.524992\n", "Epoch 2142, Training loss 2.433383, Validation loss 6.525083\n", "Epoch 2143, Training loss 2.433364, Validation loss 6.525169\n", "Epoch 2144, Training loss 2.433344, Validation loss 6.525266\n", "Epoch 2145, Training loss 2.433324, Validation loss 6.525357\n", "Epoch 2146, Training loss 2.433306, Validation loss 6.525454\n", "Epoch 2147, Training loss 2.433287, Validation loss 6.525545\n", "Epoch 2148, Training loss 2.433267, Validation loss 6.525636\n", "Epoch 2149, Training loss 2.433248, Validation loss 6.525723\n", "Epoch 2150, Training loss 2.433228, Validation loss 6.525809\n", "Epoch 2151, Training loss 2.433210, Validation loss 6.525895\n", "Epoch 2152, Training loss 2.433191, Validation loss 6.525991\n", "Epoch 2153, Training loss 2.433172, Validation loss 6.526093\n", "Epoch 2154, Training loss 2.433153, Validation loss 6.526169\n", "Epoch 2155, Training loss 2.433134, Validation loss 6.526260\n", "Epoch 2156, Training loss 2.433116, Validation loss 6.526352\n", "Epoch 2157, Training loss 2.433097, Validation loss 6.526433\n", "Epoch 2158, Training loss 2.433080, Validation loss 6.526520\n", "Epoch 2159, Training loss 2.433061, Validation loss 6.526621\n", "Epoch 2160, Training loss 2.433042, Validation loss 6.526697\n", "Epoch 2161, Training loss 2.433024, Validation loss 6.526793\n", "Epoch 2162, Training loss 2.433007, Validation loss 6.526879\n", "Epoch 2163, Training loss 2.432989, Validation loss 6.526971\n", "Epoch 2164, Training loss 2.432970, Validation loss 6.527058\n", "Epoch 2165, Training loss 2.432952, Validation loss 6.527148\n", "Epoch 2166, Training loss 2.432933, Validation loss 6.527230\n", "Epoch 2167, Training loss 2.432917, Validation loss 6.527311\n", "Epoch 2168, Training loss 2.432899, Validation loss 6.527403\n", "Epoch 2169, Training loss 2.432881, Validation loss 6.527479\n", "Epoch 2170, Training loss 2.432863, Validation loss 6.527565\n", "Epoch 2171, Training loss 2.432847, Validation loss 6.527647\n", "Epoch 2172, Training loss 2.432830, Validation loss 6.527753\n", "Epoch 2173, Training loss 2.432812, Validation loss 6.527834\n", "Epoch 2174, Training loss 2.432794, Validation loss 6.527920\n", "Epoch 2175, Training loss 2.432777, Validation loss 6.528007\n", "Epoch 2176, Training loss 2.432760, Validation loss 6.528088\n", "Epoch 2177, Training loss 2.432742, Validation loss 6.528179\n", "Epoch 2178, Training loss 2.432726, Validation loss 6.528255\n", "Epoch 2179, Training loss 2.432710, Validation loss 6.528357\n", "Epoch 2180, Training loss 2.432693, Validation loss 6.528438\n", "Epoch 2181, Training loss 2.432676, Validation loss 6.528514\n", "Epoch 2182, Training loss 2.432659, Validation loss 6.528591\n", "Epoch 2183, Training loss 2.432643, Validation loss 6.528672\n", "Epoch 2184, Training loss 2.432627, Validation loss 6.528768\n", "Epoch 2185, Training loss 2.432610, Validation loss 6.528835\n", "Epoch 2186, Training loss 2.432592, Validation loss 6.528931\n", "Epoch 2187, Training loss 2.432577, Validation loss 6.529022\n", "Epoch 2188, Training loss 2.432561, Validation loss 6.529108\n", "Epoch 2189, Training loss 2.432544, Validation loss 6.529180\n", "Epoch 2190, Training loss 2.432528, Validation loss 6.529261\n", "Epoch 2191, Training loss 2.432512, Validation loss 6.529352\n", "Epoch 2192, Training loss 2.432496, Validation loss 6.529428\n", "Epoch 2193, Training loss 2.432480, Validation loss 6.529515\n", "Epoch 2194, Training loss 2.432464, Validation loss 6.529601\n", "Epoch 2195, Training loss 2.432448, 
Validation loss 6.529682\n", "Epoch 2196, Training loss 2.432432, Validation loss 6.529758\n", "Epoch 2197, Training loss 2.432417, Validation loss 6.529840\n", "Epoch 2198, Training loss 2.432400, Validation loss 6.529921\n", "Epoch 2199, Training loss 2.432384, Validation loss 6.529992\n", "Epoch 2200, Training loss 2.432370, Validation loss 6.530084\n", "Epoch 2201, Training loss 2.432354, Validation loss 6.530165\n", "Epoch 2202, Training loss 2.432340, Validation loss 6.530251\n", "Epoch 2203, Training loss 2.432324, Validation loss 6.530332\n", "Epoch 2204, Training loss 2.432309, Validation loss 6.530403\n", "Epoch 2205, Training loss 2.432293, Validation loss 6.530484\n", "Epoch 2206, Training loss 2.432278, Validation loss 6.530565\n", "Epoch 2207, Training loss 2.432263, Validation loss 6.530652\n", "Epoch 2208, Training loss 2.432248, Validation loss 6.530733\n", "Epoch 2209, Training loss 2.432233, Validation loss 6.530805\n", "Epoch 2210, Training loss 2.432219, Validation loss 6.530881\n", "Epoch 2211, Training loss 2.432204, Validation loss 6.530962\n", "Epoch 2212, Training loss 2.432189, Validation loss 6.531048\n", "Epoch 2213, Training loss 2.432174, Validation loss 6.531119\n", "Epoch 2214, Training loss 2.432159, Validation loss 6.531216\n", "Epoch 2215, Training loss 2.432145, Validation loss 6.531281\n", "Epoch 2216, Training loss 2.432130, Validation loss 6.531358\n", "Epoch 2217, Training loss 2.432116, Validation loss 6.531445\n", "Epoch 2218, Training loss 2.432102, Validation loss 6.531520\n", "Epoch 2219, Training loss 2.432088, Validation loss 6.531601\n", "Epoch 2220, Training loss 2.432072, Validation loss 6.531678\n", "Epoch 2221, Training loss 2.432058, Validation loss 6.531739\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 2222, Training loss 2.432044, Validation loss 6.531830\n", "Epoch 2223, Training loss 2.432030, Validation loss 6.531912\n", "Epoch 2224, Training loss 2.432017, Validation loss 6.531998\n", "Epoch 2225, Training loss 2.432002, Validation loss 6.532059\n", "Epoch 2226, Training loss 2.431988, Validation loss 6.532135\n", "Epoch 2227, Training loss 2.431975, Validation loss 6.532211\n", "Epoch 2228, Training loss 2.431960, Validation loss 6.532288\n", "Epoch 2229, Training loss 2.431948, Validation loss 6.532364\n", "Epoch 2230, Training loss 2.431933, Validation loss 6.532445\n", "Epoch 2231, Training loss 2.431920, Validation loss 6.532521\n", "Epoch 2232, Training loss 2.431907, Validation loss 6.532598\n", "Epoch 2233, Training loss 2.431893, Validation loss 6.532673\n", "Epoch 2234, Training loss 2.431880, Validation loss 6.532754\n", "Epoch 2235, Training loss 2.431867, Validation loss 6.532836\n", "Epoch 2236, Training loss 2.431853, Validation loss 6.532897\n", "Epoch 2237, Training loss 2.431840, Validation loss 6.532973\n", "Epoch 2238, Training loss 2.431826, Validation loss 6.533045\n", "Epoch 2239, Training loss 2.431813, Validation loss 6.533131\n", "Epoch 2240, Training loss 2.431800, Validation loss 6.533197\n", "Epoch 2241, Training loss 2.431786, Validation loss 6.533263\n", "Epoch 2242, Training loss 2.431773, Validation loss 6.533354\n", "Epoch 2243, Training loss 2.431760, Validation loss 6.533421\n", "Epoch 2244, Training loss 2.431748, Validation loss 6.533482\n", "Epoch 2245, Training loss 2.431736, Validation loss 6.533563\n", "Epoch 2246, Training loss 2.431722, Validation loss 6.533644\n", "Epoch 2247, Training loss 2.431710, Validation loss 6.533720\n", "Epoch 2248, Training loss 
2.431697, Validation loss 6.533796\n", "Epoch 2249, Training loss 2.431685, Validation loss 6.533867\n", "Epoch 2250, Training loss 2.431672, Validation loss 6.533934\n", "Epoch 2251, Training loss 2.431659, Validation loss 6.534000\n", "Epoch 2252, Training loss 2.431647, Validation loss 6.534066\n", "Epoch 2253, Training loss 2.431634, Validation loss 6.534153\n", "Epoch 2254, Training loss 2.431621, Validation loss 6.534228\n", "Epoch 2255, Training loss 2.431609, Validation loss 6.534299\n", "Epoch 2256, Training loss 2.431597, Validation loss 6.534376\n", "Epoch 2257, Training loss 2.431585, Validation loss 6.534447\n", "Epoch 2258, Training loss 2.431573, Validation loss 6.534523\n", "Epoch 2259, Training loss 2.431561, Validation loss 6.534598\n", "Epoch 2260, Training loss 2.431548, Validation loss 6.534670\n", "Epoch 2261, Training loss 2.431536, Validation loss 6.534741\n", "Epoch 2262, Training loss 2.431525, Validation loss 6.534807\n", "Epoch 2263, Training loss 2.431512, Validation loss 6.534879\n", "Epoch 2264, Training loss 2.431499, Validation loss 6.534955\n", "Epoch 2265, Training loss 2.431488, Validation loss 6.535011\n", "Epoch 2266, Training loss 2.431476, Validation loss 6.535082\n", "Epoch 2267, Training loss 2.431466, Validation loss 6.535148\n", "Epoch 2268, Training loss 2.431453, Validation loss 6.535219\n", "Epoch 2269, Training loss 2.431441, Validation loss 6.535305\n", "Epoch 2270, Training loss 2.431429, Validation loss 6.535367\n", "Epoch 2271, Training loss 2.431419, Validation loss 6.535428\n", "Epoch 2272, Training loss 2.431407, Validation loss 6.535509\n", "Epoch 2273, Training loss 2.431396, Validation loss 6.535580\n", "Epoch 2274, Training loss 2.431386, Validation loss 6.535661\n", "Epoch 2275, Training loss 2.431373, Validation loss 6.535732\n", "Epoch 2276, Training loss 2.431362, Validation loss 6.535789\n", "Epoch 2277, Training loss 2.431351, Validation loss 6.535874\n", "Epoch 2278, Training loss 2.431339, Validation loss 6.535931\n", "Epoch 2279, Training loss 2.431327, Validation loss 6.535996\n", "Epoch 2280, Training loss 2.431316, Validation loss 6.536073\n", "Epoch 2281, Training loss 2.431306, Validation loss 6.536134\n", "Epoch 2282, Training loss 2.431295, Validation loss 6.536195\n", "Epoch 2283, Training loss 2.431284, Validation loss 6.536261\n", "Epoch 2284, Training loss 2.431273, Validation loss 6.536342\n", "Epoch 2285, Training loss 2.431262, Validation loss 6.536413\n", "Epoch 2286, Training loss 2.431251, Validation loss 6.536474\n", "Epoch 2287, Training loss 2.431240, Validation loss 6.536535\n", "Epoch 2288, Training loss 2.431229, Validation loss 6.536621\n", "Epoch 2289, Training loss 2.431220, Validation loss 6.536677\n", "Epoch 2290, Training loss 2.431208, Validation loss 6.536739\n", "Epoch 2291, Training loss 2.431197, Validation loss 6.536815\n", "Epoch 2292, Training loss 2.431187, Validation loss 6.536880\n", "Epoch 2293, Training loss 2.431175, Validation loss 6.536957\n", "Epoch 2294, Training loss 2.431166, Validation loss 6.537028\n", "Epoch 2295, Training loss 2.431154, Validation loss 6.537084\n", "Epoch 2296, Training loss 2.431145, Validation loss 6.537155\n", "Epoch 2297, Training loss 2.431134, Validation loss 6.537216\n", "Epoch 2298, Training loss 2.431124, Validation loss 6.537277\n", "Epoch 2299, Training loss 2.431114, Validation loss 6.537349\n", "Epoch 2300, Training loss 2.431104, Validation loss 6.537419\n", "Epoch 2301, Training loss 2.431093, Validation loss 6.537476\n", "Epoch 2302, 
Training loss 2.431084, Validation loss 6.537547\n", "Epoch 2303, Training loss 2.431073, Validation loss 6.537608\n", "Epoch 2304, Training loss 2.431062, Validation loss 6.537683\n", "Epoch 2305, Training loss 2.431052, Validation loss 6.537730\n", "Epoch 2306, Training loss 2.431042, Validation loss 6.537816\n", "Epoch 2307, Training loss 2.431033, Validation loss 6.537872\n", "Epoch 2308, Training loss 2.431024, Validation loss 6.537943\n", "Epoch 2309, Training loss 2.431013, Validation loss 6.538009\n", "Epoch 2310, Training loss 2.431002, Validation loss 6.538070\n", "Epoch 2311, Training loss 2.430993, Validation loss 6.538131\n", "Epoch 2312, Training loss 2.430985, Validation loss 6.538198\n", "Epoch 2313, Training loss 2.430974, Validation loss 6.538268\n", "Epoch 2314, Training loss 2.430964, Validation loss 6.538324\n", "Epoch 2315, Training loss 2.430955, Validation loss 6.538385\n", "Epoch 2316, Training loss 2.430945, Validation loss 6.538452\n", "Epoch 2317, Training loss 2.430935, Validation loss 6.538513\n", "Epoch 2318, Training loss 2.430926, Validation loss 6.538589\n", "Epoch 2319, Training loss 2.430916, Validation loss 6.538645\n", "Epoch 2320, Training loss 2.430907, Validation loss 6.538711\n", "Epoch 2321, Training loss 2.430899, Validation loss 6.538776\n", "Epoch 2322, Training loss 2.430888, Validation loss 6.538834\n", "Epoch 2323, Training loss 2.430880, Validation loss 6.538884\n", "Epoch 2324, Training loss 2.430870, Validation loss 6.538960\n", "Epoch 2325, Training loss 2.430860, Validation loss 6.539031\n", "Epoch 2326, Training loss 2.430851, Validation loss 6.539083\n", "Epoch 2327, Training loss 2.430844, Validation loss 6.539153\n", "Epoch 2328, Training loss 2.430833, Validation loss 6.539214\n", "Epoch 2329, Training loss 2.430824, Validation loss 6.539275\n", "Epoch 2330, Training loss 2.430816, Validation loss 6.539346\n", "Epoch 2331, Training loss 2.430807, Validation loss 6.539402\n", "Epoch 2332, Training loss 2.430797, Validation loss 6.539458\n", "Epoch 2333, Training loss 2.430788, Validation loss 6.539524\n", "Epoch 2334, Training loss 2.430779, Validation loss 6.539590\n", "Epoch 2335, Training loss 2.430772, Validation loss 6.539637\n", "Epoch 2336, Training loss 2.430762, Validation loss 6.539712\n", "Epoch 2337, Training loss 2.430752, Validation loss 6.539778\n", "Epoch 2338, Training loss 2.430746, Validation loss 6.539820\n", "Epoch 2339, Training loss 2.430735, Validation loss 6.539890\n", "Epoch 2340, Training loss 2.430726, Validation loss 6.539951\n", "Epoch 2341, Training loss 2.430718, Validation loss 6.540012\n", "Epoch 2342, Training loss 2.430710, Validation loss 6.540064\n", "Epoch 2343, Training loss 2.430700, Validation loss 6.540134\n", "Epoch 2344, Training loss 2.430692, Validation loss 6.540195\n", "Epoch 2345, Training loss 2.430684, Validation loss 6.540246\n", "Epoch 2346, Training loss 2.430675, Validation loss 6.540313\n", "Epoch 2347, Training loss 2.430667, Validation loss 6.540363\n", "Epoch 2348, Training loss 2.430659, Validation loss 6.540429\n", "Epoch 2349, Training loss 2.430650, Validation loss 6.540485\n", "Epoch 2350, Training loss 2.430641, Validation loss 6.540542\n", "Epoch 2351, Training loss 2.430634, Validation loss 6.540603\n", "Epoch 2352, Training loss 2.430625, Validation loss 6.540668\n", "Epoch 2353, Training loss 2.430617, Validation loss 6.540734\n", "Epoch 2354, Training loss 2.430609, Validation loss 6.540795\n", "Epoch 2355, Training loss 2.430600, Validation loss 6.540846\n", 
"Epoch 2356, Training loss 2.430592, Validation loss 6.540903\n", "Epoch 2357, Training loss 2.430585, Validation loss 6.540974\n", "Epoch 2358, Training loss 2.430576, Validation loss 6.541019\n", "Epoch 2359, Training loss 2.430568, Validation loss 6.541080\n", "Epoch 2360, Training loss 2.430560, Validation loss 6.541131\n", "Epoch 2361, Training loss 2.430552, Validation loss 6.541198\n", "Epoch 2362, Training loss 2.430544, Validation loss 6.541258\n", "Epoch 2363, Training loss 2.430537, Validation loss 6.541314\n", "Epoch 2364, Training loss 2.430529, Validation loss 6.541370\n", "Epoch 2365, Training loss 2.430520, Validation loss 6.541416\n", "Epoch 2366, Training loss 2.430513, Validation loss 6.541482\n", "Epoch 2367, Training loss 2.430505, Validation loss 6.541548\n", "Epoch 2368, Training loss 2.430498, Validation loss 6.541599\n", "Epoch 2369, Training loss 2.430490, Validation loss 6.541670\n", "Epoch 2370, Training loss 2.430481, Validation loss 6.541731\n", "Epoch 2371, Training loss 2.430475, Validation loss 6.541772\n", "Epoch 2372, Training loss 2.430467, Validation loss 6.541842\n", "Epoch 2373, Training loss 2.430461, Validation loss 6.541894\n", "Epoch 2374, Training loss 2.430451, Validation loss 6.541955\n", "Epoch 2375, Training loss 2.430444, Validation loss 6.542006\n", "Epoch 2376, Training loss 2.430437, Validation loss 6.542067\n", "Epoch 2377, Training loss 2.430429, Validation loss 6.542123\n", "Epoch 2378, Training loss 2.430423, Validation loss 6.542169\n", "Epoch 2379, Training loss 2.430415, Validation loss 6.542234\n", "Epoch 2380, Training loss 2.430408, Validation loss 6.542286\n", "Epoch 2381, Training loss 2.430399, Validation loss 6.542336\n", "Epoch 2382, Training loss 2.430392, Validation loss 6.542388\n", "Epoch 2383, Training loss 2.430385, Validation loss 6.542449\n", "Epoch 2384, Training loss 2.430377, Validation loss 6.542504\n", "Epoch 2385, Training loss 2.430372, Validation loss 6.542565\n", "Epoch 2386, Training loss 2.430363, Validation loss 6.542621\n", "Epoch 2387, Training loss 2.430356, Validation loss 6.542672\n", "Epoch 2388, Training loss 2.430349, Validation loss 6.542723\n", "Epoch 2389, Training loss 2.430341, Validation loss 6.542774\n", "Epoch 2390, Training loss 2.430335, Validation loss 6.542835\n", "Epoch 2391, Training loss 2.430327, Validation loss 6.542896\n", "Epoch 2392, Training loss 2.430322, Validation loss 6.542937\n", "Epoch 2393, Training loss 2.430314, Validation loss 6.543003\n", "Epoch 2394, Training loss 2.430306, Validation loss 6.543054\n", "Epoch 2395, Training loss 2.430301, Validation loss 6.543109\n", "Epoch 2396, Training loss 2.430293, Validation loss 6.543160\n", "Epoch 2397, Training loss 2.430286, Validation loss 6.543217\n", "Epoch 2398, Training loss 2.430280, Validation loss 6.543262\n", "Epoch 2399, Training loss 2.430273, Validation loss 6.543324\n", "Epoch 2400, Training loss 2.430267, Validation loss 6.543379\n", "Epoch 2401, Training loss 2.430259, Validation loss 6.543445\n", "Epoch 2402, Training loss 2.430253, Validation loss 6.543481\n", "Epoch 2403, Training loss 2.430245, Validation loss 6.543532\n", "Epoch 2404, Training loss 2.430239, Validation loss 6.543593\n", "Epoch 2405, Training loss 2.430233, Validation loss 6.543653\n", "Epoch 2406, Training loss 2.430227, Validation loss 6.543700\n", "Epoch 2407, Training loss 2.430220, Validation loss 6.543746\n", "Epoch 2408, Training loss 2.430212, Validation loss 6.543797\n", "Epoch 2409, Training loss 2.430207, Validation loss 
6.543848\n", "Epoch 2410, Training loss 2.430200, Validation loss 6.543903\n", "Epoch 2411, Training loss 2.430193, Validation loss 6.543964\n", "Epoch 2412, Training loss 2.430186, Validation loss 6.544025\n", "Epoch 2413, Training loss 2.430181, Validation loss 6.544071\n", "Epoch 2414, Training loss 2.430173, Validation loss 6.544127\n", "Epoch 2415, Training loss 2.430168, Validation loss 6.544183\n", "Epoch 2416, Training loss 2.430161, Validation loss 6.544219\n", "Epoch 2417, Training loss 2.430156, Validation loss 6.544285\n", "Epoch 2418, Training loss 2.430148, Validation loss 6.544326\n", "Epoch 2419, Training loss 2.430142, Validation loss 6.544372\n", "Epoch 2420, Training loss 2.430137, Validation loss 6.544433\n", "Epoch 2421, Training loss 2.430131, Validation loss 6.544478\n", "Epoch 2422, Training loss 2.430124, Validation loss 6.544549\n", "Epoch 2423, Training loss 2.430117, Validation loss 6.544600\n", "Epoch 2424, Training loss 2.430111, Validation loss 6.544641\n", "Epoch 2425, Training loss 2.430105, Validation loss 6.544692\n", "Epoch 2426, Training loss 2.430099, Validation loss 6.544753\n", "Epoch 2427, Training loss 2.430094, Validation loss 6.544799\n", "Epoch 2428, Training loss 2.430086, Validation loss 6.544840\n", "Epoch 2429, Training loss 2.430081, Validation loss 6.544890\n", "Epoch 2430, Training loss 2.430075, Validation loss 6.544946\n", "Epoch 2431, Training loss 2.430068, Validation loss 6.545002\n", "Epoch 2432, Training loss 2.430063, Validation loss 6.545038\n", "Epoch 2433, Training loss 2.430058, Validation loss 6.545094\n", "Epoch 2434, Training loss 2.430051, Validation loss 6.545150\n", "Epoch 2435, Training loss 2.430046, Validation loss 6.545196\n", "Epoch 2436, Training loss 2.430039, Validation loss 6.545246\n", "Epoch 2437, Training loss 2.430034, Validation loss 6.545292\n", "Epoch 2438, Training loss 2.430027, Validation loss 6.545348\n", "Epoch 2439, Training loss 2.430023, Validation loss 6.545399\n", "Epoch 2440, Training loss 2.430016, Validation loss 6.545440\n", "Epoch 2441, Training loss 2.430012, Validation loss 6.545501\n", "Epoch 2442, Training loss 2.430005, Validation loss 6.545551\n", "Epoch 2443, Training loss 2.430000, Validation loss 6.545592\n", "Epoch 2444, Training loss 2.429994, Validation loss 6.545643\n", "Epoch 2445, Training loss 2.429988, Validation loss 6.545694\n", "Epoch 2446, Training loss 2.429983, Validation loss 6.545750\n", "Epoch 2447, Training loss 2.429977, Validation loss 6.545791\n", "Epoch 2448, Training loss 2.429972, Validation loss 6.545846\n", "Epoch 2449, Training loss 2.429966, Validation loss 6.545893\n", "Epoch 2450, Training loss 2.429960, Validation loss 6.545938\n", "Epoch 2451, Training loss 2.429954, Validation loss 6.545984\n", "Epoch 2452, Training loss 2.429949, Validation loss 6.546030\n", "Epoch 2453, Training loss 2.429943, Validation loss 6.546086\n", "Epoch 2454, Training loss 2.429940, Validation loss 6.546131\n", "Epoch 2455, Training loss 2.429932, Validation loss 6.546183\n", "Epoch 2456, Training loss 2.429927, Validation loss 6.546219\n", "Epoch 2457, Training loss 2.429922, Validation loss 6.546269\n", "Epoch 2458, Training loss 2.429917, Validation loss 6.546315\n", "Epoch 2459, Training loss 2.429912, Validation loss 6.546366\n", "Epoch 2460, Training loss 2.429906, Validation loss 6.546407\n", "Epoch 2461, Training loss 2.429902, Validation loss 6.546468\n", "Epoch 2462, Training loss 2.429897, Validation loss 6.546508\n", "Epoch 2463, Training loss 2.429892, 
Validation loss 6.546559\n", "Epoch 2464, Training loss 2.429885, Validation loss 6.546605\n", "Epoch 2465, Training loss 2.429881, Validation loss 6.546637\n", "[... per-epoch output for epochs 2466-3532 omitted: training loss creeps down from 2.429875 to 2.428646 while validation loss creeps up from 6.546692 to 6.566897 ...]\n", "Epoch 3533, Training loss 2.428646, Validation loss 6.566902\n", "Epoch 3534, Training loss 2.428645, Validation loss 6.566897\n", "Epoch 3535, Training loss 2.428646, 
Validation loss 6.566917\n", "Epoch 3536, Training loss 2.428646, Validation loss 6.566922\n", "Epoch 3537, Training loss 2.428645, Validation loss 6.566923\n", "Epoch 3538, Training loss 2.428645, Validation loss 6.566917\n", "Epoch 3539, Training loss 2.428646, Validation loss 6.566937\n", "Epoch 3540, Training loss 2.428645, Validation loss 6.566943\n", "Epoch 3541, Training loss 2.428645, Validation loss 6.566943\n", "Epoch 3542, Training loss 2.428646, Validation loss 6.566938\n", "Epoch 3543, Training loss 2.428645, Validation loss 6.566958\n", "Epoch 3544, Training loss 2.428646, Validation loss 6.566963\n", "Epoch 3545, Training loss 2.428645, Validation loss 6.566954\n", "Epoch 3546, Training loss 2.428645, Validation loss 6.566958\n", "Epoch 3547, Training loss 2.428645, Validation loss 6.566964\n", "Epoch 3548, Training loss 2.428646, Validation loss 6.566994\n", "Epoch 3549, Training loss 2.428645, Validation loss 6.566984\n", "Epoch 3550, Training loss 2.428645, Validation loss 6.566998\n", "Epoch 3551, Training loss 2.428645, Validation loss 6.566994\n", "Epoch 3552, Training loss 2.428644, Validation loss 6.567009\n", "Epoch 3553, Training loss 2.428644, Validation loss 6.567024\n", "Epoch 3554, Training loss 2.428645, Validation loss 6.567019\n", "Epoch 3555, Training loss 2.428645, Validation loss 6.567034\n", "Epoch 3556, Training loss 2.428644, Validation loss 6.567029\n", "Epoch 3557, Training loss 2.428644, Validation loss 6.567044\n", "Epoch 3558, Training loss 2.428644, Validation loss 6.567039\n", "Epoch 3559, Training loss 2.428646, Validation loss 6.567044\n", "Epoch 3560, Training loss 2.428646, Validation loss 6.567055\n", "Epoch 3561, Training loss 2.428644, Validation loss 6.567050\n", "Epoch 3562, Training loss 2.428644, Validation loss 6.567050\n", "Epoch 3563, Training loss 2.428645, Validation loss 6.567060\n", "Epoch 3564, Training loss 2.428645, Validation loss 6.567075\n", "Epoch 3565, Training loss 2.428643, Validation loss 6.567070\n", "Epoch 3566, Training loss 2.428643, Validation loss 6.567070\n", "Epoch 3567, Training loss 2.428645, Validation loss 6.567080\n", "Epoch 3568, Training loss 2.428645, Validation loss 6.567085\n", "Epoch 3569, Training loss 2.428644, Validation loss 6.567091\n", "Epoch 3570, Training loss 2.428642, Validation loss 6.567096\n", "Epoch 3571, Training loss 2.428643, Validation loss 6.567091\n", "Epoch 3572, Training loss 2.428644, Validation loss 6.567106\n", "Epoch 3573, Training loss 2.428644, Validation loss 6.567101\n", "Epoch 3574, Training loss 2.428645, Validation loss 6.567116\n", "Epoch 3575, Training loss 2.428643, Validation loss 6.567111\n", "Epoch 3576, Training loss 2.428643, Validation loss 6.567126\n", "Epoch 3577, Training loss 2.428643, Validation loss 6.567122\n", "Epoch 3578, Training loss 2.428644, Validation loss 6.567136\n", "Epoch 3579, Training loss 2.428643, Validation loss 6.567132\n", "Epoch 3580, Training loss 2.428643, Validation loss 6.567132\n", "Epoch 3581, Training loss 2.428643, Validation loss 6.567142\n", "Epoch 3582, Training loss 2.428643, Validation loss 6.567142\n", "Epoch 3583, Training loss 2.428644, Validation loss 6.567142\n", "Epoch 3584, Training loss 2.428643, Validation loss 6.567152\n", "Epoch 3585, Training loss 2.428643, Validation loss 6.567167\n", "Epoch 3586, Training loss 2.428643, Validation loss 6.567163\n", "Epoch 3587, Training loss 2.428643, Validation loss 6.567158\n", "Epoch 3588, Training loss 2.428643, Validation loss 6.567173\n", "Epoch 3589, Training loss 
2.428643, Validation loss 6.567178\n", "Epoch 3590, Training loss 2.428643, Validation loss 6.567187\n", "Epoch 3591, Training loss 2.428643, Validation loss 6.567193\n", "Epoch 3592, Training loss 2.428643, Validation loss 6.567198\n", "Epoch 3593, Training loss 2.428643, Validation loss 6.567203\n", "Epoch 3594, Training loss 2.428643, Validation loss 6.567213\n", "Epoch 3595, Training loss 2.428642, Validation loss 6.567218\n", "Epoch 3596, Training loss 2.428643, Validation loss 6.567223\n", "Epoch 3597, Training loss 2.428643, Validation loss 6.567233\n", "Epoch 3598, Training loss 2.428642, Validation loss 6.567218\n", "Epoch 3599, Training loss 2.428643, Validation loss 6.567238\n", "Epoch 3600, Training loss 2.428641, Validation loss 6.567244\n", "Epoch 3601, Training loss 2.428642, Validation loss 6.567248\n", "Epoch 3602, Training loss 2.428643, Validation loss 6.567254\n", "Epoch 3603, Training loss 2.428643, Validation loss 6.567264\n", "Epoch 3604, Training loss 2.428643, Validation loss 6.567254\n", "Epoch 3605, Training loss 2.428643, Validation loss 6.567259\n", "Epoch 3606, Training loss 2.428641, Validation loss 6.567264\n", "Epoch 3607, Training loss 2.428642, Validation loss 6.567269\n", "Epoch 3608, Training loss 2.428642, Validation loss 6.567259\n", "Epoch 3609, Training loss 2.428641, Validation loss 6.567265\n", "Epoch 3610, Training loss 2.428642, Validation loss 6.567274\n", "Epoch 3611, Training loss 2.428643, Validation loss 6.567279\n", "Epoch 3612, Training loss 2.428643, Validation loss 6.567295\n", "Epoch 3613, Training loss 2.428642, Validation loss 6.567300\n", "Epoch 3614, Training loss 2.428641, Validation loss 6.567290\n", "Epoch 3615, Training loss 2.428641, Validation loss 6.567300\n", "Epoch 3616, Training loss 2.428641, Validation loss 6.567305\n", "Epoch 3617, Training loss 2.428641, Validation loss 6.567310\n", "Epoch 3618, Training loss 2.428642, Validation loss 6.567315\n", "Epoch 3619, Training loss 2.428643, Validation loss 6.567306\n", "Epoch 3620, Training loss 2.428641, Validation loss 6.567310\n", "Epoch 3621, Training loss 2.428641, Validation loss 6.567320\n", "Epoch 3622, Training loss 2.428642, Validation loss 6.567326\n", "Epoch 3623, Training loss 2.428641, Validation loss 6.567330\n", "Epoch 3624, Training loss 2.428642, Validation loss 6.567331\n", "Epoch 3625, Training loss 2.428641, Validation loss 6.567336\n", "Epoch 3626, Training loss 2.428641, Validation loss 6.567346\n", "Epoch 3627, Training loss 2.428641, Validation loss 6.567351\n", "Epoch 3628, Training loss 2.428640, Validation loss 6.567356\n", "Epoch 3629, Training loss 2.428640, Validation loss 6.567361\n", "Epoch 3630, Training loss 2.428642, Validation loss 6.567351\n", "Epoch 3631, Training loss 2.428640, Validation loss 6.567357\n", "Epoch 3632, Training loss 2.428641, Validation loss 6.567367\n", "Epoch 3633, Training loss 2.428642, Validation loss 6.567381\n", "Epoch 3634, Training loss 2.428641, Validation loss 6.567382\n", "Epoch 3635, Training loss 2.428641, Validation loss 6.567397\n", "Epoch 3636, Training loss 2.428641, Validation loss 6.567397\n", "Epoch 3637, Training loss 2.428641, Validation loss 6.567397\n", "Epoch 3638, Training loss 2.428640, Validation loss 6.567407\n", "Epoch 3639, Training loss 2.428641, Validation loss 6.567407\n", "Epoch 3640, Training loss 2.428640, Validation loss 6.567402\n", "Epoch 3641, Training loss 2.428641, Validation loss 6.567402\n", "Epoch 3642, Training loss 2.428640, Validation loss 6.567412\n", "Epoch 3643, 
Training loss 2.428641, Validation loss 6.567427\n", "Epoch 3644, Training loss 2.428641, Validation loss 6.567427\n", "Epoch 3645, Training loss 2.428641, Validation loss 6.567427\n", "Epoch 3646, Training loss 2.428640, Validation loss 6.567437\n", "Epoch 3647, Training loss 2.428641, Validation loss 6.567437\n", "Epoch 3648, Training loss 2.428640, Validation loss 6.567437\n", "Epoch 3649, Training loss 2.428640, Validation loss 6.567433\n", "Epoch 3650, Training loss 2.428639, Validation loss 6.567443\n", "Epoch 3651, Training loss 2.428640, Validation loss 6.567443\n", "Epoch 3652, Training loss 2.428640, Validation loss 6.567443\n", "Epoch 3653, Training loss 2.428639, Validation loss 6.567453\n", "Epoch 3654, Training loss 2.428639, Validation loss 6.567453\n", "Epoch 3655, Training loss 2.428641, Validation loss 6.567468\n", "Epoch 3656, Training loss 2.428639, Validation loss 6.567468\n", "Epoch 3657, Training loss 2.428640, Validation loss 6.567468\n", "Epoch 3658, Training loss 2.428640, Validation loss 6.567478\n", "Epoch 3659, Training loss 2.428640, Validation loss 6.567474\n", "Epoch 3660, Training loss 2.428640, Validation loss 6.567474\n", "Epoch 3661, Training loss 2.428639, Validation loss 6.567484\n", "Epoch 3662, Training loss 2.428639, Validation loss 6.567499\n", "Epoch 3663, Training loss 2.428640, Validation loss 6.567499\n", "Epoch 3664, Training loss 2.428639, Validation loss 6.567499\n", "Epoch 3665, Training loss 2.428641, Validation loss 6.567499\n", "Epoch 3666, Training loss 2.428639, Validation loss 6.567509\n", "Epoch 3667, Training loss 2.428639, Validation loss 6.567509\n", "Epoch 3668, Training loss 2.428639, Validation loss 6.567505\n", "Epoch 3669, Training loss 2.428639, Validation loss 6.567515\n", "Epoch 3670, Training loss 2.428641, Validation loss 6.567515\n", "Epoch 3671, Training loss 2.428639, Validation loss 6.567515\n", "Epoch 3672, Training loss 2.428640, Validation loss 6.567515\n", "Epoch 3673, Training loss 2.428639, Validation loss 6.567540\n", "Epoch 3674, Training loss 2.428639, Validation loss 6.567540\n", "Epoch 3675, Training loss 2.428638, Validation loss 6.567540\n", "Epoch 3676, Training loss 2.428639, Validation loss 6.567540\n", "Epoch 3677, Training loss 2.428638, Validation loss 6.567550\n", "Epoch 3678, Training loss 2.428639, Validation loss 6.567546\n", "Epoch 3679, Training loss 2.428639, Validation loss 6.567546\n", "Epoch 3680, Training loss 2.428639, Validation loss 6.567556\n", "Epoch 3681, Training loss 2.428638, Validation loss 6.567565\n", "Epoch 3682, Training loss 2.428639, Validation loss 6.567575\n", "Epoch 3683, Training loss 2.428639, Validation loss 6.567585\n", "Epoch 3684, Training loss 2.428638, Validation loss 6.567586\n", "Epoch 3685, Training loss 2.428639, Validation loss 6.567595\n", "Epoch 3686, Training loss 2.428639, Validation loss 6.567596\n", "Epoch 3687, Training loss 2.428638, Validation loss 6.567591\n", "Epoch 3688, Training loss 2.428639, Validation loss 6.567611\n", "Epoch 3689, Training loss 2.428639, Validation loss 6.567601\n", "Epoch 3690, Training loss 2.428640, Validation loss 6.567611\n", "Epoch 3691, Training loss 2.428639, Validation loss 6.567611\n", "Epoch 3692, Training loss 2.428639, Validation loss 6.567606\n", "Epoch 3693, Training loss 2.428639, Validation loss 6.567626\n", "Epoch 3694, Training loss 2.428638, Validation loss 6.567616\n", "Epoch 3695, Training loss 2.428638, Validation loss 6.567621\n", "Epoch 3696, Training loss 2.428638, Validation loss 6.567626\n", 
"Epoch 3697, Training loss 2.428638, Validation loss 6.567632\n", "Epoch 3698, Training loss 2.428638, Validation loss 6.567641\n", "Epoch 3699, Training loss 2.428639, Validation loss 6.567632\n", "Epoch 3700, Training loss 2.428638, Validation loss 6.567636\n", "Epoch 3701, Training loss 2.428638, Validation loss 6.567647\n", "Epoch 3702, Training loss 2.428639, Validation loss 6.567647\n", "Epoch 3703, Training loss 2.428638, Validation loss 6.567657\n", "Epoch 3704, Training loss 2.428638, Validation loss 6.567657\n", "Epoch 3705, Training loss 2.428639, Validation loss 6.567652\n", "Epoch 3706, Training loss 2.428638, Validation loss 6.567662\n", "Epoch 3707, Training loss 2.428637, Validation loss 6.567662\n", "Epoch 3708, Training loss 2.428638, Validation loss 6.567653\n", "Epoch 3709, Training loss 2.428638, Validation loss 6.567673\n", "Epoch 3710, Training loss 2.428637, Validation loss 6.567667\n", "Epoch 3711, Training loss 2.428639, Validation loss 6.567688\n", "Epoch 3712, Training loss 2.428638, Validation loss 6.567677\n", "Epoch 3713, Training loss 2.428638, Validation loss 6.567683\n", "Epoch 3714, Training loss 2.428638, Validation loss 6.567688\n", "Epoch 3715, Training loss 2.428638, Validation loss 6.567683\n", "Epoch 3716, Training loss 2.428638, Validation loss 6.567703\n", "Epoch 3717, Training loss 2.428639, Validation loss 6.567693\n", "Epoch 3718, Training loss 2.428638, Validation loss 6.567703\n", "Epoch 3719, Training loss 2.428638, Validation loss 6.567703\n", "Epoch 3720, Training loss 2.428638, Validation loss 6.567708\n", "Epoch 3721, Training loss 2.428638, Validation loss 6.567698\n", "Epoch 3722, Training loss 2.428638, Validation loss 6.567708\n", "Epoch 3723, Training loss 2.428638, Validation loss 6.567714\n", "Epoch 3724, Training loss 2.428638, Validation loss 6.567719\n", "Epoch 3725, Training loss 2.428639, Validation loss 6.567724\n", "Epoch 3726, Training loss 2.428637, Validation loss 6.567734\n", "Epoch 3727, Training loss 2.428638, Validation loss 6.567734\n", "Epoch 3728, Training loss 2.428638, Validation loss 6.567729\n", "Epoch 3729, Training loss 2.428638, Validation loss 6.567734\n", "Epoch 3730, Training loss 2.428638, Validation loss 6.567739\n", "Epoch 3731, Training loss 2.428638, Validation loss 6.567749\n", "Epoch 3732, Training loss 2.428638, Validation loss 6.567750\n", "Epoch 3733, Training loss 2.428636, Validation loss 6.567744\n", "Epoch 3734, Training loss 2.428636, Validation loss 6.567745\n", "Epoch 3735, Training loss 2.428637, Validation loss 6.567764\n", "Epoch 3736, Training loss 2.428637, Validation loss 6.567775\n", "Epoch 3737, Training loss 2.428637, Validation loss 6.567780\n", "Epoch 3738, Training loss 2.428638, Validation loss 6.567790\n", "Epoch 3739, Training loss 2.428638, Validation loss 6.567794\n", "Epoch 3740, Training loss 2.428637, Validation loss 6.567790\n", "Epoch 3741, Training loss 2.428637, Validation loss 6.567790\n", "Epoch 3742, Training loss 2.428637, Validation loss 6.567785\n", "Epoch 3743, Training loss 2.428637, Validation loss 6.567800\n", "Epoch 3744, Training loss 2.428638, Validation loss 6.567800\n", "Epoch 3745, Training loss 2.428638, Validation loss 6.567815\n", "Epoch 3746, Training loss 2.428636, Validation loss 6.567810\n", "Epoch 3747, Training loss 2.428636, Validation loss 6.567810\n", "Epoch 3748, Training loss 2.428636, Validation loss 6.567805\n", "Epoch 3749, Training loss 2.428638, Validation loss 6.567810\n", "Epoch 3750, Training loss 2.428638, Validation loss 
6.567821\n", "Epoch 3751, Training loss 2.428637, Validation loss 6.567821\n", "Epoch 3752, Training loss 2.428638, Validation loss 6.567835\n", "Epoch 3753, Training loss 2.428638, Validation loss 6.567831\n", "Epoch 3754, Training loss 2.428636, Validation loss 6.567831\n", "Epoch 3755, Training loss 2.428638, Validation loss 6.567826\n", "Epoch 3756, Training loss 2.428637, Validation loss 6.567841\n", "Epoch 3757, Training loss 2.428637, Validation loss 6.567841\n", "Epoch 3758, Training loss 2.428636, Validation loss 6.567836\n", "Epoch 3759, Training loss 2.428636, Validation loss 6.567841\n", "Epoch 3760, Training loss 2.428637, Validation loss 6.567851\n", "Epoch 3761, Training loss 2.428637, Validation loss 6.567846\n", "Epoch 3762, Training loss 2.428637, Validation loss 6.567851\n", "Epoch 3763, Training loss 2.428637, Validation loss 6.567861\n", "Epoch 3764, Training loss 2.428638, Validation loss 6.567856\n", "Epoch 3765, Training loss 2.428638, Validation loss 6.567861\n", "Epoch 3766, Training loss 2.428636, Validation loss 6.567871\n", "Epoch 3767, Training loss 2.428636, Validation loss 6.567876\n", "Epoch 3768, Training loss 2.428637, Validation loss 6.567871\n", "Epoch 3769, Training loss 2.428636, Validation loss 6.567882\n", "Epoch 3770, Training loss 2.428637, Validation loss 6.567866\n", "Epoch 3771, Training loss 2.428636, Validation loss 6.567877\n", "Epoch 3772, Training loss 2.428636, Validation loss 6.567882\n", "Epoch 3773, Training loss 2.428638, Validation loss 6.567892\n", "Epoch 3774, Training loss 2.428636, Validation loss 6.567887\n", "Epoch 3775, Training loss 2.428637, Validation loss 6.567892\n", "Epoch 3776, Training loss 2.428636, Validation loss 6.567902\n", "Epoch 3777, Training loss 2.428636, Validation loss 6.567887\n", "Epoch 3778, Training loss 2.428636, Validation loss 6.567902\n", "Epoch 3779, Training loss 2.428637, Validation loss 6.567912\n", "Epoch 3780, Training loss 2.428636, Validation loss 6.567897\n", "Epoch 3781, Training loss 2.428636, Validation loss 6.567912\n", "Epoch 3782, Training loss 2.428636, Validation loss 6.567912\n", "Epoch 3783, Training loss 2.428636, Validation loss 6.567907\n", "Epoch 3784, Training loss 2.428637, Validation loss 6.567923\n", "Epoch 3785, Training loss 2.428637, Validation loss 6.567923\n", "Epoch 3786, Training loss 2.428636, Validation loss 6.567918\n", "Epoch 3787, Training loss 2.428636, Validation loss 6.567933\n", "Epoch 3788, Training loss 2.428636, Validation loss 6.567933\n", "Epoch 3789, Training loss 2.428635, Validation loss 6.567928\n", "Epoch 3790, Training loss 2.428638, Validation loss 6.567933\n", "Epoch 3791, Training loss 2.428636, Validation loss 6.567943\n", "Epoch 3792, Training loss 2.428636, Validation loss 6.567938\n", "Epoch 3793, Training loss 2.428636, Validation loss 6.567938\n", "Epoch 3794, Training loss 2.428638, Validation loss 6.567953\n", "Epoch 3795, Training loss 2.428636, Validation loss 6.567953\n", "Epoch 3796, Training loss 2.428636, Validation loss 6.567948\n", "Epoch 3797, Training loss 2.428635, Validation loss 6.567973\n", "Epoch 3798, Training loss 2.428636, Validation loss 6.567968\n", "Epoch 3799, Training loss 2.428636, Validation loss 6.567973\n", "Epoch 3800, Training loss 2.428636, Validation loss 6.567978\n", "Epoch 3801, Training loss 2.428635, Validation loss 6.567984\n", "Epoch 3802, Training loss 2.428636, Validation loss 6.567978\n", "Epoch 3803, Training loss 2.428635, Validation loss 6.567984\n", "Epoch 3804, Training loss 2.428636, 
Validation loss 6.567988\n", "Epoch 3805, Training loss 2.428634, Validation loss 6.567994\n", "Epoch 3806, Training loss 2.428636, Validation loss 6.567999\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 3807, Training loss 2.428637, Validation loss 6.567994\n", "Epoch 3808, Training loss 2.428636, Validation loss 6.567999\n", "Epoch 3809, Training loss 2.428636, Validation loss 6.568004\n", "Epoch 3810, Training loss 2.428636, Validation loss 6.568009\n", "Epoch 3811, Training loss 2.428637, Validation loss 6.568004\n", "Epoch 3812, Training loss 2.428636, Validation loss 6.568009\n", "Epoch 3813, Training loss 2.428636, Validation loss 6.568014\n", "Epoch 3814, Training loss 2.428635, Validation loss 6.568019\n", "Epoch 3815, Training loss 2.428636, Validation loss 6.568025\n", "Epoch 3816, Training loss 2.428636, Validation loss 6.568024\n", "Epoch 3817, Training loss 2.428636, Validation loss 6.568029\n", "Epoch 3818, Training loss 2.428636, Validation loss 6.568034\n", "Epoch 3819, Training loss 2.428634, Validation loss 6.568039\n", "Epoch 3820, Training loss 2.428635, Validation loss 6.568034\n", "Epoch 3821, Training loss 2.428635, Validation loss 6.568039\n", "Epoch 3822, Training loss 2.428635, Validation loss 6.568045\n", "Epoch 3823, Training loss 2.428634, Validation loss 6.568049\n", "Epoch 3824, Training loss 2.428634, Validation loss 6.568045\n", "Epoch 3825, Training loss 2.428635, Validation loss 6.568049\n", "Epoch 3826, Training loss 2.428636, Validation loss 6.568055\n", "Epoch 3827, Training loss 2.428635, Validation loss 6.568045\n", "Epoch 3828, Training loss 2.428636, Validation loss 6.568050\n", "Epoch 3829, Training loss 2.428636, Validation loss 6.568055\n", "Epoch 3830, Training loss 2.428636, Validation loss 6.568050\n", "Epoch 3831, Training loss 2.428636, Validation loss 6.568055\n", "Epoch 3832, Training loss 2.428636, Validation loss 6.568060\n", "Epoch 3833, Training loss 2.428635, Validation loss 6.568066\n", "Epoch 3834, Training loss 2.428634, Validation loss 6.568060\n", "Epoch 3835, Training loss 2.428635, Validation loss 6.568066\n", "Epoch 3836, Training loss 2.428636, Validation loss 6.568070\n", "Epoch 3837, Training loss 2.428636, Validation loss 6.568076\n", "Epoch 3838, Training loss 2.428635, Validation loss 6.568081\n", "Epoch 3839, Training loss 2.428636, Validation loss 6.568076\n", "Epoch 3840, Training loss 2.428635, Validation loss 6.568081\n", "Epoch 3841, Training loss 2.428635, Validation loss 6.568086\n", "Epoch 3842, Training loss 2.428634, Validation loss 6.568091\n", "Epoch 3843, Training loss 2.428635, Validation loss 6.568086\n", "Epoch 3844, Training loss 2.428634, Validation loss 6.568091\n", "Epoch 3845, Training loss 2.428636, Validation loss 6.568096\n", "Epoch 3846, Training loss 2.428635, Validation loss 6.568101\n", "Epoch 3847, Training loss 2.428634, Validation loss 6.568107\n", "Epoch 3848, Training loss 2.428634, Validation loss 6.568101\n", "Epoch 3849, Training loss 2.428634, Validation loss 6.568107\n", "Epoch 3850, Training loss 2.428636, Validation loss 6.568111\n", "Epoch 3851, Training loss 2.428634, Validation loss 6.568117\n", "Epoch 3852, Training loss 2.428635, Validation loss 6.568111\n", "Epoch 3853, Training loss 2.428636, Validation loss 6.568121\n", "Epoch 3854, Training loss 2.428636, Validation loss 6.568127\n", "Epoch 3855, Training loss 2.428634, Validation loss 6.568131\n", "Epoch 3856, Training loss 2.428636, Validation loss 6.568137\n", "Epoch 3857, Training loss 
2.428635, Validation loss 6.568131\n", "Epoch 3858, Training loss 2.428636, Validation loss 6.568137\n", "Epoch 3859, Training loss 2.428634, Validation loss 6.568142\n", "Epoch 3860, Training loss 2.428634, Validation loss 6.568147\n", "Epoch 3861, Training loss 2.428635, Validation loss 6.568142\n", "Epoch 3862, Training loss 2.428635, Validation loss 6.568147\n", "Epoch 3863, Training loss 2.428635, Validation loss 6.568132\n", "Epoch 3864, Training loss 2.428635, Validation loss 6.568152\n", "Epoch 3865, Training loss 2.428635, Validation loss 6.568157\n", "Epoch 3866, Training loss 2.428635, Validation loss 6.568172\n", "Epoch 3867, Training loss 2.428635, Validation loss 6.568168\n", "Epoch 3868, Training loss 2.428634, Validation loss 6.568168\n", "Epoch 3869, Training loss 2.428635, Validation loss 6.568163\n", "Epoch 3870, Training loss 2.428636, Validation loss 6.568172\n", "Epoch 3871, Training loss 2.428634, Validation loss 6.568168\n", "Epoch 3872, Training loss 2.428634, Validation loss 6.568168\n", "Epoch 3873, Training loss 2.428634, Validation loss 6.568182\n", "Epoch 3874, Training loss 2.428634, Validation loss 6.568178\n", "Epoch 3875, Training loss 2.428634, Validation loss 6.568178\n", "Epoch 3876, Training loss 2.428634, Validation loss 6.568178\n", "Epoch 3877, Training loss 2.428635, Validation loss 6.568173\n", "Epoch 3878, Training loss 2.428635, Validation loss 6.568188\n", "Epoch 3879, Training loss 2.428635, Validation loss 6.568188\n", "Epoch 3880, Training loss 2.428634, Validation loss 6.568188\n", "Epoch 3881, Training loss 2.428634, Validation loss 6.568203\n", "Epoch 3882, Training loss 2.428634, Validation loss 6.568198\n", "Epoch 3883, Training loss 2.428636, Validation loss 6.568198\n", "Epoch 3884, Training loss 2.428634, Validation loss 6.568213\n", "Epoch 3885, Training loss 2.428635, Validation loss 6.568213\n", "Epoch 3886, Training loss 2.428635, Validation loss 6.568209\n", "Epoch 3887, Training loss 2.428634, Validation loss 6.568209\n", "Epoch 3888, Training loss 2.428634, Validation loss 6.568204\n", "Epoch 3889, Training loss 2.428635, Validation loss 6.568223\n", "Epoch 3890, Training loss 2.428634, Validation loss 6.568219\n", "Epoch 3891, Training loss 2.428634, Validation loss 6.568219\n", "Epoch 3892, Training loss 2.428632, Validation loss 6.568223\n", "Epoch 3893, Training loss 2.428634, Validation loss 6.568219\n", "Epoch 3894, Training loss 2.428634, Validation loss 6.568219\n", "Epoch 3895, Training loss 2.428634, Validation loss 6.568219\n", "Epoch 3896, Training loss 2.428634, Validation loss 6.568233\n", "Epoch 3897, Training loss 2.428634, Validation loss 6.568229\n", "Epoch 3898, Training loss 2.428635, Validation loss 6.568229\n", "Epoch 3899, Training loss 2.428634, Validation loss 6.568229\n", "Epoch 3900, Training loss 2.428634, Validation loss 6.568224\n", "Epoch 3901, Training loss 2.428633, Validation loss 6.568239\n", "Epoch 3902, Training loss 2.428634, Validation loss 6.568239\n", "Epoch 3903, Training loss 2.428634, Validation loss 6.568239\n", "Epoch 3904, Training loss 2.428634, Validation loss 6.568254\n", "Epoch 3905, Training loss 2.428634, Validation loss 6.568250\n", "Epoch 3906, Training loss 2.428634, Validation loss 6.568250\n", "Epoch 3907, Training loss 2.428634, Validation loss 6.568245\n", "Epoch 3908, Training loss 2.428633, Validation loss 6.568245\n", "Epoch 3909, Training loss 2.428634, Validation loss 6.568260\n", "Epoch 3910, Training loss 2.428633, Validation loss 6.568260\n", "Epoch 3911, 
Training loss 2.428634, Validation loss 6.568255\n", "Epoch 3912, Training loss 2.428633, Validation loss 6.568260\n", "Epoch 3913, Training loss 2.428634, Validation loss 6.568270\n", "Epoch 3914, Training loss 2.428634, Validation loss 6.568270\n", "Epoch 3915, Training loss 2.428634, Validation loss 6.568274\n", "Epoch 3916, Training loss 2.428634, Validation loss 6.568270\n", "Epoch 3917, Training loss 2.428635, Validation loss 6.568270\n", "Epoch 3918, Training loss 2.428634, Validation loss 6.568270\n", "Epoch 3919, Training loss 2.428633, Validation loss 6.568265\n", "Epoch 3920, Training loss 2.428634, Validation loss 6.568280\n", "Epoch 3921, Training loss 2.428633, Validation loss 6.568280\n", "Epoch 3922, Training loss 2.428634, Validation loss 6.568280\n", "Epoch 3923, Training loss 2.428634, Validation loss 6.568295\n", "Epoch 3924, Training loss 2.428634, Validation loss 6.568291\n", "Epoch 3925, Training loss 2.428635, Validation loss 6.568291\n", "Epoch 3926, Training loss 2.428634, Validation loss 6.568305\n", "Epoch 3927, Training loss 2.428634, Validation loss 6.568305\n", "Epoch 3928, Training loss 2.428634, Validation loss 6.568300\n", "Epoch 3929, Training loss 2.428633, Validation loss 6.568300\n", "Epoch 3930, Training loss 2.428633, Validation loss 6.568295\n", "Epoch 3931, Training loss 2.428633, Validation loss 6.568311\n", "Epoch 3932, Training loss 2.428633, Validation loss 6.568311\n", "Epoch 3933, Training loss 2.428634, Validation loss 6.568311\n", "Epoch 3934, Training loss 2.428634, Validation loss 6.568316\n", "Epoch 3935, Training loss 2.428634, Validation loss 6.568311\n", "Epoch 3936, Training loss 2.428633, Validation loss 6.568321\n", "Epoch 3937, Training loss 2.428635, Validation loss 6.568321\n", "Epoch 3938, Training loss 2.428633, Validation loss 6.568316\n", "Epoch 3939, Training loss 2.428633, Validation loss 6.568321\n", "Epoch 3940, Training loss 2.428633, Validation loss 6.568321\n", "Epoch 3941, Training loss 2.428633, Validation loss 6.568321\n", "Epoch 3942, Training loss 2.428634, Validation loss 6.568316\n", "Epoch 3943, Training loss 2.428634, Validation loss 6.568331\n", "Epoch 3944, Training loss 2.428634, Validation loss 6.568331\n", "Epoch 3945, Training loss 2.428634, Validation loss 6.568341\n", "Epoch 3946, Training loss 2.428633, Validation loss 6.568346\n", "Epoch 3947, Training loss 2.428632, Validation loss 6.568356\n", "Epoch 3948, Training loss 2.428633, Validation loss 6.568361\n", "Epoch 3949, Training loss 2.428634, Validation loss 6.568366\n", "Epoch 3950, Training loss 2.428634, Validation loss 6.568371\n", "Epoch 3951, Training loss 2.428633, Validation loss 6.568361\n", "Epoch 3952, Training loss 2.428634, Validation loss 6.568371\n", "Epoch 3953, Training loss 2.428633, Validation loss 6.568381\n", "Epoch 3954, Training loss 2.428635, Validation loss 6.568371\n", "Epoch 3955, Training loss 2.428632, Validation loss 6.568381\n", "Epoch 3956, Training loss 2.428633, Validation loss 6.568371\n", "Epoch 3957, Training loss 2.428633, Validation loss 6.568377\n", "Epoch 3958, Training loss 2.428634, Validation loss 6.568381\n", "Epoch 3959, Training loss 2.428633, Validation loss 6.568387\n", "Epoch 3960, Training loss 2.428633, Validation loss 6.568381\n", "Epoch 3961, Training loss 2.428634, Validation loss 6.568387\n", "Epoch 3962, Training loss 2.428633, Validation loss 6.568377\n", "Epoch 3963, Training loss 2.428633, Validation loss 6.568387\n", "Epoch 3964, Training loss 2.428634, Validation loss 6.568387\n", 
"Epoch 3965, Training loss 2.428633, Validation loss 6.568397\n", "Epoch 3966, Training loss 2.428633, Validation loss 6.568387\n", "Epoch 3967, Training loss 2.428633, Validation loss 6.568397\n", "Epoch 3968, Training loss 2.428633, Validation loss 6.568402\n", "Epoch 3969, Training loss 2.428634, Validation loss 6.568397\n", "Epoch 3970, Training loss 2.428635, Validation loss 6.568402\n", "Epoch 3971, Training loss 2.428632, Validation loss 6.568392\n", "Epoch 3972, Training loss 2.428633, Validation loss 6.568402\n", "Epoch 3973, Training loss 2.428633, Validation loss 6.568402\n", "Epoch 3974, Training loss 2.428635, Validation loss 6.568412\n", "Epoch 3975, Training loss 2.428634, Validation loss 6.568422\n", "Epoch 3976, Training loss 2.428633, Validation loss 6.568412\n", "Epoch 3977, Training loss 2.428634, Validation loss 6.568418\n", "Epoch 3978, Training loss 2.428633, Validation loss 6.568412\n", "Epoch 3979, Training loss 2.428633, Validation loss 6.568418\n", "Epoch 3980, Training loss 2.428634, Validation loss 6.568418\n", "Epoch 3981, Training loss 2.428633, Validation loss 6.568428\n", "Epoch 3982, Training loss 2.428633, Validation loss 6.568418\n", "Epoch 3983, Training loss 2.428634, Validation loss 6.568428\n", "Epoch 3984, Training loss 2.428633, Validation loss 6.568418\n", "Epoch 3985, Training loss 2.428634, Validation loss 6.568428\n", "Epoch 3986, Training loss 2.428633, Validation loss 6.568433\n", "Epoch 3987, Training loss 2.428633, Validation loss 6.568428\n", "Epoch 3988, Training loss 2.428633, Validation loss 6.568433\n", "Epoch 3989, Training loss 2.428634, Validation loss 6.568433\n", "Epoch 3990, Training loss 2.428632, Validation loss 6.568443\n", "Epoch 3991, Training loss 2.428634, Validation loss 6.568433\n", "Epoch 3992, Training loss 2.428632, Validation loss 6.568443\n", "Epoch 3993, Training loss 2.428634, Validation loss 6.568433\n", "Epoch 3994, Training loss 2.428631, Validation loss 6.568443\n", "Epoch 3995, Training loss 2.428633, Validation loss 6.568443\n", "Epoch 3996, Training loss 2.428634, Validation loss 6.568453\n", "Epoch 3997, Training loss 2.428633, Validation loss 6.568459\n", "Epoch 3998, Training loss 2.428633, Validation loss 6.568453\n", "Epoch 3999, Training loss 2.428632, Validation loss 6.568459\n", "Epoch 4000, Training loss 2.428633, Validation loss 6.568449\n", "Epoch 4001, Training loss 2.428634, Validation loss 6.568459\n", "Epoch 4002, Training loss 2.428632, Validation loss 6.568449\n", "Epoch 4003, Training loss 2.428633, Validation loss 6.568459\n", "Epoch 4004, Training loss 2.428633, Validation loss 6.568474\n", "Epoch 4005, Training loss 2.428632, Validation loss 6.568469\n", "Epoch 4006, Training loss 2.428632, Validation loss 6.568459\n", "Epoch 4007, Training loss 2.428633, Validation loss 6.568469\n", "Epoch 4008, Training loss 2.428634, Validation loss 6.568474\n", "Epoch 4009, Training loss 2.428634, Validation loss 6.568464\n", "Epoch 4010, Training loss 2.428632, Validation loss 6.568474\n", "Epoch 4011, Training loss 2.428634, Validation loss 6.568474\n", "Epoch 4012, Training loss 2.428633, Validation loss 6.568484\n", "Epoch 4013, Training loss 2.428632, Validation loss 6.568474\n", "Epoch 4014, Training loss 2.428632, Validation loss 6.568484\n", "Epoch 4015, Training loss 2.428632, Validation loss 6.568489\n", "Epoch 4016, Training loss 2.428633, Validation loss 6.568484\n", "Epoch 4017, Training loss 2.428632, Validation loss 6.568474\n", "Epoch 4018, Training loss 2.428632, Validation loss 
6.568480\n", "Epoch 4019, Training loss 2.428632, Validation loss 6.568500\n", "Epoch 4020, Training loss 2.428634, Validation loss 6.568490\n", "Epoch 4021, Training loss 2.428633, Validation loss 6.568500\n", "Epoch 4022, Training loss 2.428633, Validation loss 6.568490\n", "Epoch 4023, Training loss 2.428632, Validation loss 6.568500\n", "Epoch 4024, Training loss 2.428634, Validation loss 6.568490\n", "Epoch 4025, Training loss 2.428633, Validation loss 6.568500\n", "Epoch 4026, Training loss 2.428633, Validation loss 6.568500\n", "Epoch 4027, Training loss 2.428632, Validation loss 6.568505\n", "Epoch 4028, Training loss 2.428632, Validation loss 6.568515\n", "Epoch 4029, Training loss 2.428632, Validation loss 6.568505\n", "Epoch 4030, Training loss 2.428633, Validation loss 6.568515\n", "Epoch 4031, Training loss 2.428632, Validation loss 6.568505\n", "Epoch 4032, Training loss 2.428633, Validation loss 6.568515\n", "Epoch 4033, Training loss 2.428633, Validation loss 6.568515\n", "Epoch 4034, Training loss 2.428632, Validation loss 6.568525\n", "Epoch 4035, Training loss 2.428632, Validation loss 6.568530\n", "Epoch 4036, Training loss 2.428633, Validation loss 6.568525\n", "Epoch 4037, Training loss 2.428633, Validation loss 6.568530\n", "Epoch 4038, Training loss 2.428632, Validation loss 6.568521\n", "Epoch 4039, Training loss 2.428632, Validation loss 6.568530\n", "Epoch 4040, Training loss 2.428633, Validation loss 6.568521\n", "Epoch 4041, Training loss 2.428632, Validation loss 6.568530\n", "Epoch 4042, Training loss 2.428632, Validation loss 6.568541\n", "Epoch 4043, Training loss 2.428633, Validation loss 6.568541\n", "Epoch 4044, Training loss 2.428632, Validation loss 6.568545\n", "Epoch 4045, Training loss 2.428632, Validation loss 6.568545\n", "Epoch 4046, Training loss 2.428633, Validation loss 6.568555\n", "Epoch 4047, Training loss 2.428633, Validation loss 6.568555\n", "Epoch 4048, Training loss 2.428633, Validation loss 6.568555\n", "Epoch 4049, Training loss 2.428634, Validation loss 6.568570\n", "Epoch 4050, Training loss 2.428632, Validation loss 6.568570\n", "Epoch 4051, Training loss 2.428633, Validation loss 6.568570\n", "Epoch 4052, Training loss 2.428632, Validation loss 6.568570\n", "Epoch 4053, Training loss 2.428633, Validation loss 6.568565\n", "Epoch 4054, Training loss 2.428632, Validation loss 6.568565\n", "Epoch 4055, Training loss 2.428632, Validation loss 6.568570\n", "Epoch 4056, Training loss 2.428633, Validation loss 6.568581\n", "Epoch 4057, Training loss 2.428631, Validation loss 6.568581\n", "Epoch 4058, Training loss 2.428633, Validation loss 6.568565\n", "Epoch 4059, Training loss 2.428632, Validation loss 6.568576\n", "Epoch 4060, Training loss 2.428632, Validation loss 6.568576\n", "Epoch 4061, Training loss 2.428633, Validation loss 6.568581\n", "Epoch 4062, Training loss 2.428632, Validation loss 6.568581\n", "Epoch 4063, Training loss 2.428633, Validation loss 6.568591\n", "Epoch 4064, Training loss 2.428633, Validation loss 6.568591\n", "Epoch 4065, Training loss 2.428633, Validation loss 6.568586\n", "Epoch 4066, Training loss 2.428634, Validation loss 6.568586\n", "Epoch 4067, Training loss 2.428631, Validation loss 6.568586\n", "Epoch 4068, Training loss 2.428633, Validation loss 6.568591\n", "Epoch 4069, Training loss 2.428632, Validation loss 6.568601\n", "Epoch 4070, Training loss 2.428633, Validation loss 6.568586\n", "Epoch 4071, Training loss 2.428632, Validation loss 6.568586\n", "Epoch 4072, Training loss 2.428632, 
Validation loss 6.568596\n", "Epoch 4073, Training loss 2.428633, Validation loss 6.568596\n", "Epoch 4074, Training loss 2.428631, Validation loss 6.568601\n", "Epoch 4075, Training loss 2.428632, Validation loss 6.568601\n", "Epoch 4076, Training loss 2.428632, Validation loss 6.568611\n", "Epoch 4077, Training loss 2.428632, Validation loss 6.568596\n", "Epoch 4078, Training loss 2.428633, Validation loss 6.568596\n", "Epoch 4079, Training loss 2.428632, Validation loss 6.568606\n", "Epoch 4080, Training loss 2.428633, Validation loss 6.568611\n", "Epoch 4081, Training loss 2.428632, Validation loss 6.568611\n", "Epoch 4082, Training loss 2.428633, Validation loss 6.568611\n", "Epoch 4083, Training loss 2.428632, Validation loss 6.568622\n", "Epoch 4084, Training loss 2.428631, Validation loss 6.568606\n", "Epoch 4085, Training loss 2.428633, Validation loss 6.568617\n", "Epoch 4086, Training loss 2.428633, Validation loss 6.568617\n", "Epoch 4087, Training loss 2.428632, Validation loss 6.568622\n", "Epoch 4088, Training loss 2.428633, Validation loss 6.568622\n", "Epoch 4089, Training loss 2.428632, Validation loss 6.568617\n", "Epoch 4090, Training loss 2.428632, Validation loss 6.568617\n", "Epoch 4091, Training loss 2.428632, Validation loss 6.568617\n", "Epoch 4092, Training loss 2.428632, Validation loss 6.568627\n", "Epoch 4093, Training loss 2.428633, Validation loss 6.568632\n", "Epoch 4094, Training loss 2.428632, Validation loss 6.568632\n", "Epoch 4095, Training loss 2.428632, Validation loss 6.568632\n", "Epoch 4096, Training loss 2.428632, Validation loss 6.568642\n", "Epoch 4097, Training loss 2.428632, Validation loss 6.568627\n", "Epoch 4098, Training loss 2.428632, Validation loss 6.568627\n", "Epoch 4099, Training loss 2.428632, Validation loss 6.568642\n", "Epoch 4100, Training loss 2.428632, Validation loss 6.568642\n", "Epoch 4101, Training loss 2.428632, Validation loss 6.568642\n", "Epoch 4102, Training loss 2.428632, Validation loss 6.568637\n", "Epoch 4103, Training loss 2.428632, Validation loss 6.568637\n", "Epoch 4104, Training loss 2.428632, Validation loss 6.568637\n", "Epoch 4105, Training loss 2.428632, Validation loss 6.568647\n", "Epoch 4106, Training loss 2.428632, Validation loss 6.568652\n", "Epoch 4107, Training loss 2.428632, Validation loss 6.568652\n", "Epoch 4108, Training loss 2.428632, Validation loss 6.568652\n", "Epoch 4109, Training loss 2.428633, Validation loss 6.568647\n", "Epoch 4110, Training loss 2.428632, Validation loss 6.568647\n", "Epoch 4111, Training loss 2.428632, Validation loss 6.568647\n", "Epoch 4112, Training loss 2.428633, Validation loss 6.568662\n", "Epoch 4113, Training loss 2.428631, Validation loss 6.568662\n", "Epoch 4114, Training loss 2.428632, Validation loss 6.568647\n", "Epoch 4115, Training loss 2.428632, Validation loss 6.568657\n", "Epoch 4116, Training loss 2.428632, Validation loss 6.568657\n", "Epoch 4117, Training loss 2.428632, Validation loss 6.568657\n", "Epoch 4118, Training loss 2.428633, Validation loss 6.568672\n", "Epoch 4119, Training loss 2.428631, Validation loss 6.568672\n", "Epoch 4120, Training loss 2.428632, Validation loss 6.568672\n", "Epoch 4121, Training loss 2.428632, Validation loss 6.568667\n", "Epoch 4122, Training loss 2.428632, Validation loss 6.568667\n", "Epoch 4123, Training loss 2.428632, Validation loss 6.568667\n", "Epoch 4124, Training loss 2.428632, Validation loss 6.568667\n", "Epoch 4125, Training loss 2.428633, Validation loss 6.568683\n", "Epoch 4126, Training loss 
2.428633, Validation loss 6.568683\n", "Epoch 4127, Training loss 2.428631, Validation loss 6.568683\n", "Epoch 4128, Training loss 2.428632, Validation loss 6.568678\n", "Epoch 4129, Training loss 2.428631, Validation loss 6.568678\n", "Epoch 4130, Training loss 2.428632, Validation loss 6.568678\n", "Epoch 4131, Training loss 2.428632, Validation loss 6.568683\n", "Epoch 4132, Training loss 2.428632, Validation loss 6.568693\n", "Epoch 4133, Training loss 2.428632, Validation loss 6.568678\n", "Epoch 4134, Training loss 2.428632, Validation loss 6.568678\n", "Epoch 4135, Training loss 2.428632, Validation loss 6.568688\n", "Epoch 4136, Training loss 2.428632, Validation loss 6.568688\n", "Epoch 4137, Training loss 2.428632, Validation loss 6.568693\n", "Epoch 4138, Training loss 2.428633, Validation loss 6.568703\n", "Epoch 4139, Training loss 2.428632, Validation loss 6.568703\n", "Epoch 4140, Training loss 2.428632, Validation loss 6.568688\n", "Epoch 4141, Training loss 2.428632, Validation loss 6.568698\n", "Epoch 4142, Training loss 2.428632, Validation loss 6.568698\n", "Epoch 4143, Training loss 2.428632, Validation loss 6.568698\n", "Epoch 4144, Training loss 2.428632, Validation loss 6.568703\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 4145, Training loss 2.428632, Validation loss 6.568694\n", "Epoch 4146, Training loss 2.428632, Validation loss 6.568698\n", "Epoch 4147, Training loss 2.428632, Validation loss 6.568698\n", "Epoch 4148, Training loss 2.428632, Validation loss 6.568708\n", "Epoch 4149, Training loss 2.428632, Validation loss 6.568708\n", "Epoch 4150, Training loss 2.428632, Validation loss 6.568713\n", "Epoch 4151, Training loss 2.428632, Validation loss 6.568713\n", "Epoch 4152, Training loss 2.428632, Validation loss 6.568708\n", "Epoch 4153, Training loss 2.428632, Validation loss 6.568708\n", "Epoch 4154, Training loss 2.428632, Validation loss 6.568719\n", "Epoch 4155, Training loss 2.428634, Validation loss 6.568719\n", "Epoch 4156, Training loss 2.428632, Validation loss 6.568724\n", "Epoch 4157, Training loss 2.428632, Validation loss 6.568724\n", "Epoch 4158, Training loss 2.428633, Validation loss 6.568734\n", "Epoch 4159, Training loss 2.428631, Validation loss 6.568719\n", "Epoch 4160, Training loss 2.428633, Validation loss 6.568719\n", "Epoch 4161, Training loss 2.428632, Validation loss 6.568729\n", "Epoch 4162, Training loss 2.428632, Validation loss 6.568729\n", "Epoch 4163, Training loss 2.428632, Validation loss 6.568734\n", "Epoch 4164, Training loss 2.428632, Validation loss 6.568734\n", "Epoch 4165, Training loss 2.428633, Validation loss 6.568739\n", "Epoch 4166, Training loss 2.428631, Validation loss 6.568748\n", "Epoch 4167, Training loss 2.428632, Validation loss 6.568744\n", "Epoch 4168, Training loss 2.428632, Validation loss 6.568739\n", "Epoch 4169, Training loss 2.428631, Validation loss 6.568748\n", "Epoch 4170, Training loss 2.428632, Validation loss 6.568754\n", "Epoch 4171, Training loss 2.428632, Validation loss 6.568744\n", "Epoch 4172, Training loss 2.428632, Validation loss 6.568749\n", "Epoch 4173, Training loss 2.428632, Validation loss 6.568754\n", "Epoch 4174, Training loss 2.428633, Validation loss 6.568749\n", "Epoch 4175, Training loss 2.428631, Validation loss 6.568759\n", "Epoch 4176, Training loss 2.428632, Validation loss 6.568764\n", "Epoch 4177, Training loss 2.428632, Validation loss 6.568759\n", "Epoch 4178, Training loss 2.428632, Validation loss 6.568764\n", "Epoch 4179, Training 
loss 2.428632, Validation loss 6.568754\n", "Epoch 4180, Training loss 2.428632, Validation loss 6.568760\n", "Epoch 4181, Training loss 2.428632, Validation loss 6.568754\n", "Epoch 4182, Training loss 2.428631, Validation loss 6.568760\n", "Epoch 4183, Training loss 2.428632, Validation loss 6.568754\n", "Epoch 4184, Training loss 2.428632, Validation loss 6.568755\n", "Epoch 4185, Training loss 2.428632, Validation loss 6.568769\n", "Epoch 4186, Training loss 2.428631, Validation loss 6.568770\n", "Epoch 4187, Training loss 2.428632, Validation loss 6.568765\n", "Epoch 4188, Training loss 2.428632, Validation loss 6.568770\n", "Epoch 4189, Training loss 2.428633, Validation loss 6.568765\n", "Epoch 4190, Training loss 2.428632, Validation loss 6.568770\n", "Epoch 4191, Training loss 2.428632, Validation loss 6.568779\n", "Epoch 4192, Training loss 2.428631, Validation loss 6.568774\n", "Epoch 4193, Training loss 2.428632, Validation loss 6.568779\n", "Epoch 4194, Training loss 2.428632, Validation loss 6.568770\n", "Epoch 4195, Training loss 2.428632, Validation loss 6.568775\n", "Epoch 4196, Training loss 2.428632, Validation loss 6.568780\n", "Epoch 4197, Training loss 2.428631, Validation loss 6.568775\n", "Epoch 4198, Training loss 2.428633, Validation loss 6.568780\n", "Epoch 4199, Training loss 2.428632, Validation loss 6.568770\n", "Epoch 4200, Training loss 2.428631, Validation loss 6.568775\n", "Epoch 4201, Training loss 2.428632, Validation loss 6.568785\n", "Epoch 4202, Training loss 2.428631, Validation loss 6.568780\n", "Epoch 4203, Training loss 2.428632, Validation loss 6.568785\n", "Epoch 4204, Training loss 2.428632, Validation loss 6.568780\n", "Epoch 4205, Training loss 2.428632, Validation loss 6.568785\n", "Epoch 4206, Training loss 2.428631, Validation loss 6.568780\n", "Epoch 4207, Training loss 2.428633, Validation loss 6.568780\n", "Epoch 4208, Training loss 2.428632, Validation loss 6.568795\n", "Epoch 4209, Training loss 2.428632, Validation loss 6.568795\n", "Epoch 4210, Training loss 2.428632, Validation loss 6.568790\n", "Epoch 4211, Training loss 2.428631, Validation loss 6.568795\n", "Epoch 4212, Training loss 2.428632, Validation loss 6.568790\n", "Epoch 4213, Training loss 2.428632, Validation loss 6.568795\n", "Epoch 4214, Training loss 2.428632, Validation loss 6.568805\n", "Epoch 4215, Training loss 2.428631, Validation loss 6.568800\n", "Epoch 4216, Training loss 2.428633, Validation loss 6.568805\n", "Epoch 4217, Training loss 2.428631, Validation loss 6.568795\n", "Epoch 4218, Training loss 2.428632, Validation loss 6.568800\n", "Epoch 4219, Training loss 2.428632, Validation loss 6.568806\n", "Epoch 4220, Training loss 2.428632, Validation loss 6.568800\n", "Epoch 4221, Training loss 2.428632, Validation loss 6.568806\n", "Epoch 4222, Training loss 2.428630, Validation loss 6.568796\n", "Epoch 4223, Training loss 2.428632, Validation loss 6.568801\n", "Epoch 4224, Training loss 2.428631, Validation loss 6.568810\n", "Epoch 4225, Training loss 2.428632, Validation loss 6.568806\n", "Epoch 4226, Training loss 2.428632, Validation loss 6.568810\n", "Epoch 4227, Training loss 2.428632, Validation loss 6.568806\n", "Epoch 4228, Training loss 2.428631, Validation loss 6.568810\n", "Epoch 4229, Training loss 2.428632, Validation loss 6.568806\n", "Epoch 4230, Training loss 2.428631, Validation loss 6.568826\n", "Epoch 4231, Training loss 2.428631, Validation loss 6.568820\n", "Epoch 4232, Training loss 2.428632, Validation loss 6.568821\n", "Epoch 4233, 
Training loss 2.428632, Validation loss 6.568816\n", "Epoch 4234, Training loss 2.428632, Validation loss 6.568821\n", "Epoch 4235, Training loss 2.428631, Validation loss 6.568816\n", "...\n",
"Epoch 4997, Training loss 2.428630, Validation loss 6.569295\n", "Epoch 4998, Training loss 2.428630, Validation loss 6.569285\n", "Epoch 4999, Training loss 2.428632, Validation loss 6.569291\n" ] }, { "data": { "text/plain": [ "tensor([ 5.4878, -17.4613], requires_grad=True)" ] }, "execution_count": 106, "metadata": {}, "output_type": "execute_result" } ], "source": [ "import torch.optim as optim  # SGD and the other optimizers live in torch.optim\n", "\n", "def model(t_u, w, b):\n", "    return w * t_u + b\n", "\n", "def loss_fn(t_p, t_c):\n", "    sq_diffs = (t_p - t_c)**2\n", "    return sq_diffs.mean()\n", "\n", "params = torch.tensor([1.0, 0.0], requires_grad=True)\n", "\n", "nepochs = 5000\n", "learning_rate = 1e-2\n", "\n", "optimizer = optim.SGD([params], lr=learning_rate)\n", "\n", "# rough normalization of the input\n", "t_un_train = 0.1 * t_u_train\n", "t_un_val = 0.1 * t_u_val\n", "\n", "for epoch in range(nepochs):\n", "\n", "    # forward pass on the training set\n", "    t_p_train = model(t_un_train, *params)\n", "    loss_train = loss_fn(t_p_train, t_c_train)\n", "\n", "    # validation loss is only monitored; it is never backpropagated\n", "    t_p_val = model(t_un_val, *params)\n", "    loss_val = loss_fn(t_p_val, t_c_val)\n", "\n", "    print('Epoch %d, Training loss %f, Validation loss %f' % (epoch, float(loss_train), float(loss_val)))\n", "\n", "    # backward pass and parameter update, driven by the training loss alone\n", "    optimizer.zero_grad()\n", "    loss_train.backward()\n", "    optimizer.step()\n", "\n", "t_p = model(t_un, *params)\n", "\n", "params" ] },
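{ "cell_type": "markdown", "metadata": {}, "source": [ "In the loop above, the validation forward pass builds an autograd graph even though `loss_val` is never backpropagated. Below is a minimal sketch of the same loop with the validation pass wrapped in `torch.no_grad()`; it assumes the `model`, `loss_fn`, `params`, `optimizer` and the train/validation tensors defined in the cells above." ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Sketch only: the same update as the cell above, but the validation loss\n", "# is computed under torch.no_grad(), so no autograd graph is built for it.\n", "for epoch in range(3):\n", "    t_p_train = model(t_un_train, *params)\n", "    loss_train = loss_fn(t_p_train, t_c_train)\n", "\n", "    with torch.no_grad():  # monitoring only, no gradients needed\n", "        t_p_val = model(t_un_val, *params)\n", "        loss_val = loss_fn(t_p_val, t_c_val)\n", "        assert loss_val.requires_grad == False\n", "\n", "    optimizer.zero_grad()\n", "    loss_train.backward()\n", "    optimizer.step()\n", "\n", "    print('Epoch %d, Training loss %f, Validation loss %f' % (epoch, float(loss_train), float(loss_val)))" ] },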
{ "cell_type": "code", "execution_count": 107, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Epoch 0, Training loss 2.428631, Validation loss 6.569300\n", "Epoch 1, Training loss 2.428631, Validation loss 6.569291\n", "Epoch 2, Training loss 2.428630, Validation loss 6.569296\n", "...\n", "Epoch 349, 
Training loss 2.428630, Validation loss 6.569311\n", "Epoch 350, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 351, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 352, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 353, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 354, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 355, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 356, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 357, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 358, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 359, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 360, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 361, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 362, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 363, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 364, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 365, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 366, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 367, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 368, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 369, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 370, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 371, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 372, Training loss 2.428630, Validation loss 6.569311\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 373, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 374, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 375, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 376, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 377, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 378, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 379, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 380, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 381, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 382, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 383, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 384, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 385, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 386, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 387, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 388, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 389, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 390, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 391, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 392, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 393, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 394, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 395, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 396, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 397, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 398, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 399, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 400, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 401, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 402, Training loss 2.428630, Validation loss 
6.569311\n", "Epoch 403, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 404, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 405, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 406, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 407, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 408, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 409, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 410, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 411, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 412, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 413, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 414, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 415, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 416, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 417, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 418, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 419, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 420, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 421, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 422, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 423, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 424, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 425, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 426, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 427, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 428, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 429, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 430, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 431, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 432, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 433, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 434, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 435, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 436, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 437, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 438, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 439, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 440, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 441, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 442, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 443, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 444, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 445, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 446, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 447, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 448, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 449, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 450, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 451, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 452, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 453, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 454, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 455, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 456, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 457, Training loss 
2.428630, Validation loss 6.569311\n", "Epoch 458, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 459, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 460, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 461, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 462, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 463, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 464, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 465, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 466, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 467, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 468, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 469, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 470, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 471, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 472, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 473, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 474, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 475, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 476, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 477, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 478, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 479, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 480, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 481, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 482, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 483, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 484, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 485, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 486, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 487, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 488, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 489, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 490, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 491, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 492, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 493, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 494, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 495, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 496, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 497, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 498, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 499, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 500, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 501, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 502, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 503, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 504, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 505, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 506, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 507, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 508, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 509, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 510, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 511, Training loss 2.428630, Validation loss 6.569311\n", 
"Epoch 512, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 513, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 514, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 515, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 516, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 517, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 518, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 519, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 520, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 521, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 522, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 523, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 524, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 525, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 526, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 527, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 528, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 529, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 530, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 531, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 532, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 533, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 534, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 535, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 536, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 537, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 538, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 539, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 540, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 541, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 542, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 543, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 544, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 545, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 546, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 547, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 548, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 549, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 550, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 551, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 552, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 553, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 554, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 555, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 556, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 557, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 558, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 559, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 560, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 561, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 562, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 563, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 564, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 565, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 566, Training loss 2.428630, 
Validation loss 6.569311\n", "Epoch 567, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 568, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 569, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 570, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 571, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 572, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 573, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 574, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 575, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 576, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 577, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 578, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 579, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 580, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 581, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 582, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 583, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 584, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 585, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 586, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 587, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 588, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 589, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 590, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 591, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 592, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 593, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 594, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 595, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 596, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 597, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 598, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 599, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 600, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 601, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 602, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 603, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 604, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 605, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 606, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 607, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 608, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 609, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 610, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 611, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 612, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 613, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 614, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 615, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 616, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 617, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 618, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 619, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 620, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 621, 
Training loss 2.428630, Validation loss 6.569311\n", "Epoch 622, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 623, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 624, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 625, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 626, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 627, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 628, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 629, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 630, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 631, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 632, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 633, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 634, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 635, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 636, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 637, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 638, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 639, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 640, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 641, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 642, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 643, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 644, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 645, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 646, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 647, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 648, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 649, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 650, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 651, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 652, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 653, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 654, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 655, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 656, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 657, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 658, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 659, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 660, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 661, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 662, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 663, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 664, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 665, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 666, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 667, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 668, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 669, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 670, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 671, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 672, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 673, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 674, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 675, Training loss 2.428630, Validation loss 
6.569311\n", "Epoch 676, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 677, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 678, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 679, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 680, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 681, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 682, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 683, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 684, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 685, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 686, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 687, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 688, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 689, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 690, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 691, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 692, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 693, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 694, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 695, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 696, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 697, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 698, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 699, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 700, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 701, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 702, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 703, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 704, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 705, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 706, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 707, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 708, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 709, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 710, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 711, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 712, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 713, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 714, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 715, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 716, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 717, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 718, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 719, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 720, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 721, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 722, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 723, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 724, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 725, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 726, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 727, Training loss 2.428630, Validation loss 6.569311\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 728, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 729, Training loss 2.428630, 
Validation loss 6.569311\n", "Epoch 730, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 731, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 732, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 733, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 734, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 735, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 736, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 737, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 738, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 739, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 740, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 741, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 742, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 743, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 744, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 745, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 746, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 747, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 748, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 749, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 750, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 751, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 752, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 753, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 754, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 755, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 756, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 757, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 758, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 759, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 760, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 761, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 762, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 763, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 764, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 765, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 766, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 767, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 768, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 769, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 770, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 771, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 772, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 773, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 774, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 775, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 776, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 777, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 778, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 779, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 780, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 781, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 782, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 783, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 784, 
Training loss 2.428630, Validation loss 6.569311\n", "Epoch 785, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 786, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 787, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 788, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 789, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 790, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 791, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 792, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 793, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 794, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 795, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 796, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 797, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 798, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 799, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 800, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 801, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 802, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 803, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 804, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 805, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 806, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 807, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 808, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 809, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 810, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 811, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 812, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 813, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 814, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 815, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 816, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 817, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 818, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 819, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 820, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 821, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 822, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 823, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 824, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 825, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 826, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 827, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 828, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 829, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 830, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 831, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 832, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 833, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 834, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 835, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 836, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 837, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 838, Training loss 2.428630, Validation loss 
6.569311\n", "Epoch 839, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 840, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 841, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 842, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 843, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 844, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 845, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 846, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 847, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 848, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 849, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 850, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 851, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 852, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 853, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 854, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 855, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 856, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 857, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 858, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 859, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 860, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 861, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 862, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 863, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 864, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 865, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 866, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 867, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 868, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 869, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 870, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 871, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 872, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 873, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 874, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 875, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 876, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 877, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 878, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 879, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 880, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 881, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 882, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 883, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 884, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 885, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 886, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 887, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 888, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 889, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 890, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 891, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 892, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 893, Training loss 
2.428630, Validation loss 6.569311\n", "Epoch 894, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 895, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 896, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 897, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 898, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 899, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 900, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 901, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 902, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 903, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 904, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 905, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 906, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 907, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 908, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 909, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 910, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 911, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 912, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 913, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 914, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 915, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 916, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 917, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 918, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 919, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 920, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 921, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 922, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 923, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 924, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 925, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 926, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 927, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 928, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 929, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 930, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 931, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 932, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 933, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 934, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 935, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 936, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 937, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 938, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 939, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 940, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 941, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 942, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 943, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 944, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 945, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 946, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 947, Training loss 2.428630, Validation loss 6.569311\n", 
"Epoch 948, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 949, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 950, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 951, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 952, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 953, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 954, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 955, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 956, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 957, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 958, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 959, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 960, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 961, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 962, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 963, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 964, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 965, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 966, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 967, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 968, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 969, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 970, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 971, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 972, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 973, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 974, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 975, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 976, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 977, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 978, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 979, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 980, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 981, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 982, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 983, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 984, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 985, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 986, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 987, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 988, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 989, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 990, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 991, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 992, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 993, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 994, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 995, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 996, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 997, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 998, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 999, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1000, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1001, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1002, Training loss 2.428630, 
Validation loss 6.569311\n", "Epoch 1003, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1004, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1005, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1006, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1007, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1008, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1009, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1010, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1011, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1012, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1013, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1014, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1015, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1016, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1017, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1018, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1019, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1020, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1021, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1022, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1023, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1024, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1025, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1026, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1027, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1028, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1029, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1030, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1031, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1032, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1033, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1034, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1035, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1036, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1037, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1038, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1039, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1040, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1041, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1042, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1043, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1044, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1045, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1046, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1047, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1048, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1049, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1050, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1051, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1052, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1053, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1054, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1055, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1056, Training loss 
2.428630, Validation loss 6.569311\n", "Epoch 1057, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1058, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1059, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1060, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1061, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1062, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1063, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1064, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1065, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1066, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1067, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1068, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1069, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1070, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1071, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1072, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1073, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1074, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1075, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1076, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1077, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1078, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1079, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1080, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1081, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1082, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1083, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1084, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1085, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1086, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1087, Training loss 2.428630, Validation loss 6.569311\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1088, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1089, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1090, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1091, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1092, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1093, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1094, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1095, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1096, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1097, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1098, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1099, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1100, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1101, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1102, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1103, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1104, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1105, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1106, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1107, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1108, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1109, Training 
loss 2.428630, Validation loss 6.569311\n", "Epoch 1110, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 1111, Training loss 2.428630, Validation loss 6.569311\n", "...\n", "Epoch 2181, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2182, Training loss 2.428630, Validation 
loss 6.569311\n", "Epoch 2183, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2184, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2185, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2186, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2187, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2188, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2189, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2190, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2191, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2192, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2193, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2194, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2195, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2196, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2197, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2198, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2199, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2200, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2201, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2202, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2203, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2204, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2205, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2206, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2207, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2208, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2209, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2210, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2211, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2212, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2213, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2214, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2215, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2216, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2217, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2218, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2219, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2220, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2221, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2222, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2223, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2224, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2225, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2226, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2227, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2228, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2229, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2230, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2231, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2232, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2233, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2234, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2235, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2236, Training loss 2.428630, 
Validation loss 6.569311\n", "Epoch 2237, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2238, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2239, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2240, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2241, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2242, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2243, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2244, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2245, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2246, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2247, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2248, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2249, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2250, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2251, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2252, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2253, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2254, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2255, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2256, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2257, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2258, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2259, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2260, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2261, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2262, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2263, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2264, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2265, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2266, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2267, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2268, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2269, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2270, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2271, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2272, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2273, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2274, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2275, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2276, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2277, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2278, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2279, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2280, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2281, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2282, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2283, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2284, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2285, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2286, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2287, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2288, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2289, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2290, Training loss 
2.428630, Validation loss 6.569311\n", "Epoch 2291, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2292, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2293, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2294, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2295, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2296, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2297, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2298, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2299, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2300, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2301, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2302, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2303, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2304, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2305, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2306, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2307, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2308, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2309, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2310, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2311, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2312, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2313, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2314, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2315, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2316, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2317, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2318, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2319, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2320, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2321, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2322, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2323, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2324, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2325, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2326, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2327, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2328, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2329, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2330, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2331, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2332, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2333, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2334, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2335, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2336, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2337, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2338, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2339, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2340, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2341, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2342, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2343, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2344, 
Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2345, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2346, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2347, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2348, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2349, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2350, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2351, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2352, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2353, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2354, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2355, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2356, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2357, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2358, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2359, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2360, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2361, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2362, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2363, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2364, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2365, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2366, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2367, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2368, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2369, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2370, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2371, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2372, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2373, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2374, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2375, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2376, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2377, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2378, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2379, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2380, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2381, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2382, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2383, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2384, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2385, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2386, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2387, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2388, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2389, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2390, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2391, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2392, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2393, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2394, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2395, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2396, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2397, Training loss 2.428630, Validation loss 6.569311\n", 
"Epoch 2398, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2399, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2400, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2401, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2402, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2403, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2404, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2405, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2406, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2407, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2408, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2409, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2410, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2411, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2412, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2413, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2414, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2415, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2416, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2417, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2418, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2419, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2420, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2421, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2422, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2423, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2424, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2425, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2426, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2427, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2428, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2429, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2430, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2431, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2432, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2433, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2434, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2435, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2436, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2437, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2438, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2439, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2440, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2441, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2442, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2443, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2444, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2445, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2446, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2447, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2448, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2449, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2450, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2451, Training loss 2.428630, Validation loss 
6.569311\n", "Epoch 2452, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2453, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2454, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2455, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2456, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2457, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2458, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2459, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2460, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2461, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2462, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2463, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2464, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2465, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2466, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2467, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2468, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2469, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2470, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2471, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2472, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2473, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2474, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2475, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2476, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2477, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2478, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2479, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2480, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2481, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2482, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2483, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2484, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2485, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2486, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2487, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2488, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2489, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2490, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2491, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2492, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2493, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2494, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2495, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2496, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2497, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2498, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2499, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2500, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2501, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2502, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2503, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2504, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2505, Training loss 2.428630, 
Validation loss 6.569311\n", "Epoch 2506, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2507, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2508, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2509, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2510, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2511, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2512, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2513, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2514, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2515, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2516, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2517, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2518, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2519, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2520, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2521, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2522, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2523, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2524, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2525, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2526, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2527, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2528, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2529, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2530, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2531, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2532, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2533, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2534, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2535, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2536, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2537, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2538, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2539, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2540, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2541, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2542, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2543, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2544, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2545, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2546, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2547, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2548, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2549, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2550, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2551, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2552, Training loss 2.428630, Validation loss 6.569311\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 2553, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2554, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2555, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2556, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2557, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2558, Training loss 
2.428630, Validation loss 6.569311\n", "Epoch 2559, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2560, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2561, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2562, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2563, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2564, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2565, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2566, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2567, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2568, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2569, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2570, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2571, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2572, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2573, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2574, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2575, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2576, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2577, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2578, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2579, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2580, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2581, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2582, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2583, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2584, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2585, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2586, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2587, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2588, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2589, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2590, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2591, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2592, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2593, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2594, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2595, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2596, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2597, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2598, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2599, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2600, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2601, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2602, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2603, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2604, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2605, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2606, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2607, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2608, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2609, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2610, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2611, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2612, 
Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2613, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2614, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2615, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2616, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2617, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2618, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2619, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2620, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2621, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2622, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2623, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2624, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2625, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2626, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2627, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2628, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2629, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2630, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2631, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2632, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2633, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2634, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2635, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2636, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2637, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2638, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2639, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2640, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2641, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2642, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2643, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2644, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2645, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2646, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2647, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2648, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2649, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2650, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2651, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2652, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2653, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2654, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2655, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2656, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2657, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2658, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2659, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2660, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2661, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2662, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2663, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2664, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2665, Training loss 2.428630, Validation loss 6.569311\n", 
"Epoch 2666, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2667, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2668, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2669, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2670, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2671, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2672, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2673, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2674, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2675, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2676, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2677, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2678, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2679, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2680, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2681, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2682, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2683, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2684, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2685, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2686, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2687, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2688, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2689, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2690, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2691, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2692, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2693, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2694, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2695, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2696, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2697, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2698, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2699, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2700, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2701, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2702, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2703, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2704, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2705, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2706, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2707, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2708, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2709, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2710, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2711, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2712, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2713, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2714, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2715, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2716, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2717, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2718, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2719, Training loss 2.428630, Validation loss 
6.569311\n", "Epoch 2720, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2721, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2722, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2723, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2724, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2725, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2726, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2727, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2728, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2729, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2730, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2731, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2732, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2733, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2734, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2735, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2736, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2737, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2738, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2739, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2740, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2741, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2742, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2743, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2744, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2745, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2746, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2747, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2748, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2749, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2750, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2751, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2752, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2753, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2754, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2755, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2756, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2757, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2758, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2759, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2760, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2761, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2762, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2763, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2764, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2765, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2766, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2767, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2768, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2769, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2770, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2771, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2772, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2773, Training loss 2.428630, 
Validation loss 6.569311\n", "Epoch 2774, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2775, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2776, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2777, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2778, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2779, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2780, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2781, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2782, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2783, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2784, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2785, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2786, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2787, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2788, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2789, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2790, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2791, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2792, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2793, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2794, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2795, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2796, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2797, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2798, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2799, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2800, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2801, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2802, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2803, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2804, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2805, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2806, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2807, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2808, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2809, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2810, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2811, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2812, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2813, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2814, Training loss 2.428630, Validation loss 6.569311\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 2815, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2816, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2817, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2818, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2819, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2820, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2821, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2822, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2823, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2824, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2825, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2826, Training loss 
2.428630, Validation loss 6.569311\n", "Epoch 2827, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2828, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2829, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2830, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2831, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2832, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2833, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2834, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2835, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2836, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2837, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2838, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2839, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2840, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2841, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2842, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2843, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2844, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2845, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2846, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2847, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2848, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2849, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2850, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2851, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2852, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2853, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2854, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2855, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2856, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2857, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2858, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2859, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2860, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2861, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2862, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2863, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2864, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2865, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2866, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2867, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2868, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2869, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2870, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2871, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2872, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2873, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2874, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2875, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2876, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2877, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2878, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2879, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2880, 
Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2881, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 2882, Training loss 2.428630, Validation loss 6.569311\n", "... [output truncated: training loss 2.428630 and validation loss 6.569311 remain unchanged for every epoch through 4005] ...\n", "Epoch 4006, Training loss 2.428630, 
Validation loss 6.569311\n", "Epoch 4007, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4008, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4009, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4010, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4011, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4012, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4013, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4014, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4015, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4016, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4017, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4018, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4019, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4020, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4021, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4022, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4023, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4024, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4025, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4026, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4027, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4028, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4029, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4030, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4031, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4032, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4033, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4034, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4035, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4036, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4037, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4038, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4039, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4040, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4041, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4042, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4043, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4044, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4045, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4046, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4047, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4048, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4049, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4050, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4051, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4052, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4053, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4054, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4055, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4056, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4057, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4058, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4059, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4060, Training loss 
2.428630, Validation loss 6.569311\n", "Epoch 4061, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4062, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4063, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4064, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4065, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4066, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4067, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4068, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4069, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4070, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4071, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4072, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4073, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4074, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4075, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4076, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4077, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4078, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4079, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4080, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4081, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4082, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4083, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4084, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4085, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4086, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4087, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4088, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4089, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4090, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4091, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4092, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4093, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4094, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4095, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4096, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4097, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4098, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4099, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4100, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4101, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4102, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4103, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4104, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4105, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4106, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4107, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4108, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4109, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4110, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4111, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4112, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4113, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4114, 
Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4115, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4116, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4117, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4118, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4119, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4120, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4121, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4122, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4123, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4124, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4125, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4126, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4127, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4128, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4129, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4130, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4131, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4132, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4133, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4134, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4135, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4136, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4137, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4138, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4139, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4140, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4141, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4142, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4143, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4144, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4145, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4146, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4147, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4148, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4149, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4150, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4151, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4152, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4153, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4154, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4155, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4156, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4157, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4158, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4159, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4160, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4161, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4162, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4163, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4164, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4165, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4166, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4167, Training loss 2.428630, Validation loss 6.569311\n", 
"Epoch 4168, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4169, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4170, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4171, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4172, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4173, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4174, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4175, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4176, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4177, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4178, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4179, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4180, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4181, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4182, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4183, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4184, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4185, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4186, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4187, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4188, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4189, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4190, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4191, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4192, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4193, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4194, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4195, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4196, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4197, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4198, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4199, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4200, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4201, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4202, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4203, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4204, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4205, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4206, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4207, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4208, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4209, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4210, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4211, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4212, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4213, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4214, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4215, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4216, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4217, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4218, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4219, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4220, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4221, Training loss 2.428630, Validation loss 
6.569311\n", "Epoch 4222, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4223, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4224, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4225, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4226, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4227, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4228, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4229, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4230, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4231, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4232, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4233, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4234, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4235, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4236, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4237, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4238, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4239, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4240, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4241, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4242, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4243, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4244, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4245, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4246, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4247, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4248, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4249, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4250, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4251, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4252, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4253, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4254, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4255, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4256, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4257, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4258, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4259, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4260, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4261, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4262, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4263, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4264, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4265, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4266, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4267, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4268, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4269, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4270, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4271, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4272, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4273, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4274, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4275, Training loss 2.428630, 
Validation loss 6.569311\n", "Epoch 4276, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4277, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4278, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4279, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4280, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4281, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4282, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4283, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4284, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4285, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4286, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4287, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4288, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4289, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4290, Training loss 2.428630, Validation loss 6.569311\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 4291, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4292, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4293, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4294, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4295, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4296, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4297, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4298, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4299, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4300, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4301, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4302, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4303, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4304, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4305, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4306, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4307, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4308, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4309, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4310, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4311, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4312, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4313, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4314, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4315, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4316, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4317, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4318, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4319, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4320, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4321, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4322, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4323, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4324, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4325, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4326, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4327, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4328, Training loss 
2.428630, Validation loss 6.569311\n", "Epoch 4329, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4330, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4331, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4332, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4333, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4334, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4335, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4336, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4337, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4338, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4339, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4340, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4341, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4342, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4343, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4344, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4345, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4346, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4347, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4348, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4349, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4350, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4351, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4352, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4353, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4354, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4355, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4356, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4357, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4358, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4359, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4360, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4361, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4362, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4363, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4364, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4365, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4366, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4367, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4368, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4369, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4370, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4371, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4372, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4373, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4374, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4375, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4376, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4377, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4378, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4379, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4380, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4381, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4382, 
Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4383, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4384, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4385, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4386, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4387, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4388, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4389, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4390, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4391, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4392, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4393, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4394, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4395, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4396, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4397, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4398, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4399, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4400, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4401, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4402, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4403, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4404, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4405, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4406, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4407, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4408, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4409, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4410, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4411, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4412, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4413, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4414, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4415, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4416, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4417, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4418, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4419, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4420, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4421, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4422, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4423, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4424, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4425, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4426, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4427, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4428, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4429, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4430, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4431, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4432, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4433, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4434, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4435, Training loss 2.428630, Validation loss 6.569311\n", 
"Epoch 4436, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4437, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4438, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4439, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4440, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4441, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4442, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4443, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4444, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4445, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4446, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4447, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4448, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4449, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4450, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4451, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4452, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4453, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4454, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4455, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4456, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4457, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4458, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4459, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4460, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4461, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4462, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4463, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4464, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4465, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4466, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4467, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4468, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4469, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4470, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4471, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4472, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4473, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4474, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4475, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4476, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4477, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4478, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4479, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4480, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4481, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4482, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4483, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4484, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4485, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4486, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4487, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4488, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4489, Training loss 2.428630, Validation loss 
6.569311\n", "Epoch 4490, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4491, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4492, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4493, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4494, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4495, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4496, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4497, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4498, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4499, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4500, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4501, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4502, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4503, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4504, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4505, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4506, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4507, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4508, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4509, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4510, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4511, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4512, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4513, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4514, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4515, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4516, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4517, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4518, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4519, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4520, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4521, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4522, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4523, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4524, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4525, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4526, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4527, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4528, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4529, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4530, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4531, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4532, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4533, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4534, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4535, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4536, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4537, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4538, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4539, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4540, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4541, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4542, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4543, Training loss 2.428630, 
Validation loss 6.569311\n", "Epoch 4544, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4545, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4546, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4547, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4548, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4549, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4550, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4551, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4552, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4553, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4554, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4555, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4556, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4557, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4558, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4559, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4560, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4561, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4562, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4563, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4564, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4565, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4566, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4567, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4568, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4569, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4570, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4571, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4572, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4573, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4574, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4575, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4576, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4577, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4578, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4579, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4580, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4581, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4582, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4583, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4584, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4585, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4586, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4587, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4588, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4589, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4590, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4591, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4592, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4593, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4594, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4595, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4596, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4597, Training loss 
2.428630, Validation loss 6.569311\n", "Epoch 4598, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4599, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4600, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4601, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4602, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4603, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4604, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4605, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4606, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4607, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4608, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4609, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4610, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4611, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4612, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4613, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4614, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4615, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4616, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4617, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4618, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4619, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4620, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4621, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4622, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4623, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4624, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4625, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4626, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4627, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4628, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4629, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4630, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4631, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4632, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4633, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4634, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4635, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4636, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4637, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4638, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4639, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4640, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4641, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4642, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4643, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4644, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4645, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4646, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4647, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4648, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4649, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4650, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4651, 
Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4652, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4653, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4654, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4655, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4656, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4657, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4658, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4659, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4660, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4661, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4662, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4663, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4664, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4665, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4666, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4667, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4668, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4669, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4670, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4671, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4672, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4673, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4674, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4675, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4676, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4677, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4678, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4679, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4680, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4681, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4682, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4683, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4684, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4685, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4686, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4687, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4688, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4689, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4690, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4691, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4692, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4693, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4694, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4695, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4696, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4697, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4698, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4699, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4700, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4701, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4702, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4703, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4704, Training loss 2.428630, Validation loss 6.569311\n", 
"Epoch 4705, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4706, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4707, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4708, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4709, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4710, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4711, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4712, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4713, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4714, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4715, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4716, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4717, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4718, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4719, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4720, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4721, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4722, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4723, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4724, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4725, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4726, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4727, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4728, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4729, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4730, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4731, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4732, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4733, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4734, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4735, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4736, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4737, Training loss 2.428630, Validation loss 6.569311\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 4738, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4739, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4740, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4741, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4742, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4743, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4744, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4745, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4746, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4747, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4748, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4749, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4750, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4751, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4752, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4753, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4754, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4755, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4756, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4757, Training loss 2.428630, Validation loss 
6.569311\n", "Epoch 4758, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4759, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4760, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4761, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4762, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4763, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4764, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4765, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4766, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4767, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4768, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4769, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4770, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4771, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4772, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4773, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4774, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4775, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4776, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4777, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4778, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4779, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4780, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4781, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4782, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4783, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4784, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4785, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4786, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4787, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4788, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4789, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4790, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4791, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4792, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4793, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4794, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4795, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4796, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4797, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4798, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4799, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4800, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4801, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4802, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4803, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4804, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4805, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4806, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4807, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4808, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4809, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4810, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4811, Training loss 2.428630, 
Validation loss 6.569311\n", "Epoch 4812, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4813, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4814, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4815, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4816, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4817, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4818, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4819, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4820, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4821, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4822, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4823, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4824, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4825, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4826, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4827, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4828, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4829, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4830, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4831, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4832, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4833, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4834, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4835, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4836, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4837, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4838, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4839, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4840, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4841, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4842, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4843, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4844, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4845, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4846, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4847, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4848, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4849, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4850, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4851, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4852, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4853, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4854, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4855, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4856, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4857, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4858, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4859, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4860, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4861, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4862, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4863, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4864, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4865, Training loss 
2.428630, Validation loss 6.569311\n", "Epoch 4866, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4867, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4868, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4869, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4870, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4871, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4872, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4873, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4874, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4875, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4876, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4877, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4878, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4879, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4880, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4881, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4882, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4883, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4884, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4885, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4886, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4887, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4888, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4889, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4890, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4891, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4892, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4893, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4894, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4895, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4896, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4897, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4898, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4899, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4900, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4901, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4902, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4903, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4904, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4905, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4906, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4907, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4908, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4909, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4910, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4911, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4912, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4913, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4914, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4915, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4916, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4917, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4918, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4919, 
Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4920, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4921, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4922, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4923, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4924, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4925, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4926, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4927, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4928, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4929, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4930, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4931, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4932, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4933, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4934, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4935, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4936, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4937, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4938, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4939, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4940, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4941, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4942, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4943, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4944, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4945, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4946, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4947, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4948, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4949, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4950, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4951, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4952, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4953, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4954, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4955, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4956, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4957, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4958, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4959, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4960, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4961, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4962, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4963, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4964, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4965, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4966, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4967, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4968, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4969, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4970, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4971, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4972, Training loss 2.428630, Validation loss 6.569311\n", 
"Epoch 4973, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4974, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4975, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4976, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4977, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4978, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4979, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4980, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4981, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4982, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4983, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4984, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4985, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4986, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4987, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4988, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4989, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4990, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4991, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4992, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4993, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4994, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4995, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4996, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4997, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4998, Training loss 2.428630, Validation loss 6.569311\n", "Epoch 4999, Training loss 2.428630, Validation loss 6.569311\n" ] } ], "source": [ "for epoch in range(nepochs):\n", " \n", " # forward pass\n", " t_p_train = model(t_un_train, *params)\n", " loss_train = loss_fn(t_p_train, t_c_train)\n", "\n", " with torch.no_grad():\n", " t_p_val = model(t_un_val, *params)\n", " loss_val = loss_fn(t_p_val, t_c_val)\n", "\n", " print('Epoch %d, Training loss %f, Validation loss %f' % (epoch, float(loss_train), float(loss_val)))\n", " \n", " # backward pass\n", " optimizer.zero_grad()\n", " loss_train.backward() \n", " optimizer.step()" ] }, { "cell_type": "code", "execution_count": 108, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", 
"False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", 
"False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", 
"False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", 
"False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", 
"False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", 
"False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", 
"False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", 
"False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", 
"False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", 
"False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", 
"False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n" ] }, { 
"name": "stdout", "output_type": "stream", "text": [ "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", 
"False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", 
"False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", 
"False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", 
"False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n", "False\n" ] } ], "source": [ "for epoch in range(nepochs):\n", " # ...\n", " print(loss_val.requires_grad) # prints False\n", " # ..." ] }, { "cell_type": "code", "execution_count": 109, "metadata": {}, "outputs": [], "source": [ "def forward(t_u, t_c, is_train):\n", " with torch.set_grad_enabled(is_train):\n", " t_p = model(t_u, *params)\n", " loss = loss_fn(t_p, t_c)\n", " return loss" ] }, { "cell_type": "code", "execution_count": 110, "metadata": {}, "outputs": [], "source": [ "import torch\n", "import torch.nn as nn\n", "\n", "model = nn.Linear(1, 1) # We'll look into the arguments in a minute" ] }, { "cell_type": "code", "execution_count": 111, "metadata": {}, "outputs": [ { "ename": "NameError", "evalue": "name 'x' is not defined", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0my\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m# DON'T DO THIS\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2\u001b[0m \u001b[0my\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m# DO THIS\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;31mNameError\u001b[0m: name 'x' is not defined" ] } ], "source": [ "y = model.forward(x) # DON'T DO THIS\n", "y = model(x) # DO THIS" ] }, { "cell_type": "code", "execution_count": 113, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "Parameter containing:\n", "tensor([[0.8208]], requires_grad=True)" ] }, "execution_count": 113, "metadata": {}, "output_type": "execute_result" } ], "source": [ "model.weight" ] }, { "cell_type": "code", "execution_count": 114, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "tensor([0.7944], grad_fn=)" ] }, "execution_count": 114, "metadata": {}, "output_type": "execute_result" } ], "source": [ "x = torch.ones(1)\n", "\n", "model(x)" ] }, { "cell_type": "code", "execution_count": 115, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "tensor([[0.7944],\n", " [0.7944],\n", " [0.7944],\n", " [0.7944],\n", " [0.7944],\n", " [0.7944],\n", " [0.7944],\n", " [0.7944],\n", " [0.7944],\n", " [0.7944]], grad_fn=)" ] }, "execution_count": 115, "metadata": {}, "output_type": "execute_result" } ], "source": [ "x = torch.ones(10, 1)\n", "\n", "model(x)" ] }, { "cell_type": "code", "execution_count": 116, "metadata": {}, "outputs": [], "source": [ "t_u = torch.unsqueeze(t_u, 1)\n", "t_c = torch.unsqueeze(t_c, 1)" ] }, { "cell_type": "code", 
"execution_count": 117, "metadata": {}, "outputs": [], "source": [ "model = nn.Linear(1, 1)" ] }, { "cell_type": "code", "execution_count": 118, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "" ] }, "execution_count": 118, "metadata": {}, "output_type": "execute_result" } ], "source": [ "model.parameters()" ] }, { "cell_type": "code", "execution_count": 121, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "[Parameter containing:\n", " tensor([[-0.6506]], requires_grad=True), Parameter containing:\n", " tensor([0.7924], requires_grad=True)]" ] }, "execution_count": 121, "metadata": {}, "output_type": "execute_result" } ], "source": [ "list(model.parameters())" ] }, { "cell_type": "code", "execution_count": 122, "metadata": {}, "outputs": [], "source": [ "model = nn.Linear(1, 1)\n", "\n", "learning_rate = 1e-2\n", "\n", "optimizer = optim.SGD(model.parameters(), lr=learning_rate)" ] }, { "cell_type": "code", "execution_count": 123, "metadata": {}, "outputs": [], "source": [ "t_u_train = t_u[train_indices]\n", "t_c_train = t_c[train_indices]\n", "\n", "t_u_val = t_u[val_indices]\n", "t_c_val = t_c[val_indices]\n", "\n", "t_un_train = 0.1 * t_u_train\n", "t_un_val = 0.1 * t_u_val" ] }, { "cell_type": "code", "execution_count": 124, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Epoch 0, Training loss 377.888733, Validation loss 340.187164\n", "Epoch 1, Training loss 93.240906, Validation loss 43.281433\n", "Epoch 2, Training loss 44.177326, Validation loss 2.966456\n", "Epoch 3, Training loss 35.631279, Validation loss 0.495764\n", "Epoch 4, Training loss 34.054108, Validation loss 1.925004\n", "Epoch 5, Training loss 33.675919, Validation loss 2.942267\n", "Epoch 6, Training loss 33.504379, Validation loss 3.439312\n", "Epoch 7, Training loss 33.368816, Validation loss 3.660394\n", "Epoch 8, Training loss 33.239883, Validation loss 3.756605\n", "Epoch 9, Training loss 33.112530, Validation loss 3.799221\n", "Epoch 10, Training loss 32.985870, Validation loss 3.819282\n", "Epoch 11, Training loss 32.859776, Validation loss 3.829930\n", "Epoch 12, Training loss 32.734207, Validation loss 3.836667\n", "Epoch 13, Training loss 32.609146, Validation loss 3.841775\n", "Epoch 14, Training loss 32.484612, Validation loss 3.846200\n", "Epoch 15, Training loss 32.360588, Validation loss 3.850352\n", "Epoch 16, Training loss 32.237076, Validation loss 3.854382\n", "Epoch 17, Training loss 32.114067, Validation loss 3.858366\n", "Epoch 18, Training loss 31.991577, Validation loss 3.862327\n", "Epoch 19, Training loss 31.869585, Validation loss 3.866277\n", "Epoch 20, Training loss 31.748100, Validation loss 3.870224\n", "Epoch 21, Training loss 31.627115, Validation loss 3.874163\n", "Epoch 22, Training loss 31.506630, Validation loss 3.878105\n", "Epoch 23, Training loss 31.386641, Validation loss 3.882041\n", "Epoch 24, Training loss 31.267151, Validation loss 3.885976\n", "Epoch 25, Training loss 31.148149, Validation loss 3.889915\n", "Epoch 26, Training loss 31.029636, Validation loss 3.893846\n", "Epoch 27, Training loss 30.911612, Validation loss 3.897779\n", "Epoch 28, Training loss 30.794085, Validation loss 3.901713\n", "Epoch 29, Training loss 30.677040, Validation loss 3.905640\n", "Epoch 30, Training loss 30.560465, Validation loss 3.909571\n", "Epoch 31, Training loss 30.444387, Validation loss 3.913491\n", "Epoch 32, Training loss 30.328775, Validation loss 3.917418\n", "Epoch 33, Training loss 30.213654, Validation loss 
3.921340\n", "Epoch 34, Training loss 30.098999, Validation loss 3.925263\n", "Epoch 35, Training loss 29.984819, Validation loss 3.929183\n", "Epoch 36, Training loss 29.871111, Validation loss 3.933101\n", "Epoch 37, Training loss 29.757866, Validation loss 3.937020\n", "Epoch 38, Training loss 29.645102, Validation loss 3.940933\n", "Epoch 39, Training loss 29.532793, Validation loss 3.944841\n", "Epoch 40, Training loss 29.420946, Validation loss 3.948755\n", "Epoch 41, Training loss 29.309563, Validation loss 3.952663\n", "Epoch 42, Training loss 29.198643, Validation loss 3.956569\n", "Epoch 43, Training loss 29.088175, Validation loss 3.960474\n", "Epoch 44, Training loss 28.978170, Validation loss 3.964376\n", "Epoch 45, Training loss 28.868612, Validation loss 3.968281\n", "Epoch 46, Training loss 28.759508, Validation loss 3.972181\n", "Epoch 47, Training loss 28.650858, Validation loss 3.976079\n", "Epoch 48, Training loss 28.542654, Validation loss 3.979972\n", "Epoch 49, Training loss 28.434891, Validation loss 3.983866\n", "Epoch 50, Training loss 28.327579, Validation loss 3.987761\n", "Epoch 51, Training loss 28.220713, Validation loss 3.991641\n", "Epoch 52, Training loss 28.114285, Validation loss 3.995536\n", "Epoch 53, Training loss 28.008293, Validation loss 3.999422\n", "Epoch 54, Training loss 27.902737, Validation loss 4.003298\n", "Epoch 55, Training loss 27.797615, Validation loss 4.007183\n", "Epoch 56, Training loss 27.692936, Validation loss 4.011058\n", "Epoch 57, Training loss 27.588684, Validation loss 4.014933\n", "Epoch 58, Training loss 27.484865, Validation loss 4.018810\n", "Epoch 59, Training loss 27.381472, Validation loss 4.022677\n", "Epoch 60, Training loss 27.278507, Validation loss 4.026552\n", "Epoch 61, Training loss 27.175961, Validation loss 4.030418\n", "Epoch 62, Training loss 27.073841, Validation loss 4.034285\n", "Epoch 63, Training loss 26.972143, Validation loss 4.038143\n", "Epoch 64, Training loss 26.870869, Validation loss 4.042006\n", "Epoch 65, Training loss 26.770012, Validation loss 4.045869\n", "Epoch 66, Training loss 26.669563, Validation loss 4.049720\n", "Epoch 67, Training loss 26.569536, Validation loss 4.053576\n", "Epoch 68, Training loss 26.469923, Validation loss 4.057426\n", "Epoch 69, Training loss 26.370716, Validation loss 4.061273\n", "Epoch 70, Training loss 26.271925, Validation loss 4.065120\n", "Epoch 71, Training loss 26.173531, Validation loss 4.068964\n", "Epoch 72, Training loss 26.075554, Validation loss 4.072807\n", "Epoch 73, Training loss 25.977976, Validation loss 4.076643\n", "Epoch 74, Training loss 25.880798, Validation loss 4.080482\n", "Epoch 75, Training loss 25.784031, Validation loss 4.084318\n", "Epoch 76, Training loss 25.687651, Validation loss 4.088150\n", "Epoch 77, Training loss 25.591675, Validation loss 4.091977\n", "Epoch 78, Training loss 25.496098, Validation loss 4.095804\n", "Epoch 79, Training loss 25.400904, Validation loss 4.099631\n", "Epoch 80, Training loss 25.306110, Validation loss 4.103450\n", "Epoch 81, Training loss 25.211708, Validation loss 4.107265\n", "Epoch 82, Training loss 25.117695, Validation loss 4.111086\n", "Epoch 83, Training loss 25.024075, Validation loss 4.114896\n", "Epoch 84, Training loss 24.930830, Validation loss 4.118706\n", "Epoch 85, Training loss 24.837976, Validation loss 4.122515\n", "Epoch 86, Training loss 24.745506, Validation loss 4.126323\n", "Epoch 87, Training loss 24.653419, Validation loss 4.130118\n", "Epoch 88, Training loss 
24.561707, Validation loss 4.133926\n", "Epoch 89, Training loss 24.470377, Validation loss 4.137722\n", "Epoch 90, Training loss 24.379427, Validation loss 4.141511\n", "Epoch 91, Training loss 24.288853, Validation loss 4.145308\n", "Epoch 92, Training loss 24.198645, Validation loss 4.149097\n", "Epoch 93, Training loss 24.108812, Validation loss 4.152882\n", "Epoch 94, Training loss 24.019346, Validation loss 4.156662\n", "Epoch 95, Training loss 23.930258, Validation loss 4.160441\n", "Epoch 96, Training loss 23.841530, Validation loss 4.164222\n", "Epoch 97, Training loss 23.753174, Validation loss 4.167995\n", "Epoch 98, Training loss 23.665176, Validation loss 4.171763\n", "Epoch 99, Training loss 23.577549, Validation loss 4.175535\n", "Epoch 100, Training loss 23.490278, Validation loss 4.179304\n", "Epoch 101, Training loss 23.403364, Validation loss 4.183070\n", "Epoch 102, Training loss 23.316814, Validation loss 4.186822\n", "Epoch 103, Training loss 23.230623, Validation loss 4.190585\n", "Epoch 104, Training loss 23.144787, Validation loss 4.194331\n", "Epoch 105, Training loss 23.059299, Validation loss 4.198083\n", "Epoch 106, Training loss 22.974167, Validation loss 4.201835\n", "Epoch 107, Training loss 22.889391, Validation loss 4.205578\n", "Epoch 108, Training loss 22.804958, Validation loss 4.209323\n", "Epoch 109, Training loss 22.720871, Validation loss 4.213062\n", "Epoch 110, Training loss 22.637138, Validation loss 4.216797\n", "Epoch 111, Training loss 22.553753, Validation loss 4.220531\n", "Epoch 112, Training loss 22.470709, Validation loss 4.224258\n", "Epoch 113, Training loss 22.388004, Validation loss 4.227993\n", "Epoch 114, Training loss 22.305645, Validation loss 4.231714\n", "Epoch 115, Training loss 22.223619, Validation loss 4.235434\n", "Epoch 116, Training loss 22.141939, Validation loss 4.239151\n", "Epoch 117, Training loss 22.060591, Validation loss 4.242865\n", "Epoch 118, Training loss 21.979580, Validation loss 4.246579\n", "Epoch 119, Training loss 21.898905, Validation loss 4.250288\n", "Epoch 120, Training loss 21.818565, Validation loss 4.253993\n", "Epoch 121, Training loss 21.738552, Validation loss 4.257694\n", "Epoch 122, Training loss 21.658873, Validation loss 4.261395\n", "Epoch 123, Training loss 21.579517, Validation loss 4.265085\n", "Epoch 124, Training loss 21.500496, Validation loss 4.268775\n", "Epoch 125, Training loss 21.421797, Validation loss 4.272470\n", "Epoch 126, Training loss 21.343422, Validation loss 4.276151\n", "Epoch 127, Training loss 21.265371, Validation loss 4.279839\n", "Epoch 128, Training loss 21.187645, Validation loss 4.283519\n", "Epoch 129, Training loss 21.110237, Validation loss 4.287191\n", "Epoch 130, Training loss 21.033142, Validation loss 4.290872\n", "Epoch 131, Training loss 20.956375, Validation loss 4.294537\n", "Epoch 132, Training loss 20.879925, Validation loss 4.298195\n", "Epoch 133, Training loss 20.803785, Validation loss 4.301858\n", "Epoch 134, Training loss 20.727957, Validation loss 4.305527\n", "Epoch 135, Training loss 20.652443, Validation loss 4.309179\n", "Epoch 136, Training loss 20.577250, Validation loss 4.312829\n", "Epoch 137, Training loss 20.502363, Validation loss 4.316480\n", "Epoch 138, Training loss 20.427778, Validation loss 4.320132\n", "Epoch 139, Training loss 20.353506, Validation loss 4.323771\n", "Epoch 140, Training loss 20.279539, Validation loss 4.327412\n", "Epoch 141, Training loss 20.205879, Validation loss 4.331046\n", "Epoch 142, Training loss 
20.132523, Validation loss 4.334679\n", "Epoch 143, Training loss 20.059467, Validation loss 4.338305\n", "Epoch 144, Training loss 19.986717, Validation loss 4.341932\n", "Epoch 145, Training loss 19.914267, Validation loss 4.345553\n", "Epoch 146, Training loss 19.842112, Validation loss 4.349174\n", "Epoch 147, Training loss 19.770256, Validation loss 4.352791\n", "Epoch 148, Training loss 19.698696, Validation loss 4.356404\n", "Epoch 149, Training loss 19.627430, Validation loss 4.360015\n", "Epoch 150, Training loss 19.556465, Validation loss 4.363620\n", "Epoch 151, Training loss 19.485786, Validation loss 4.367219\n", "Epoch 152, Training loss 19.415401, Validation loss 4.370814\n", "Epoch 153, Training loss 19.345304, Validation loss 4.374411\n", "Epoch 154, Training loss 19.275503, Validation loss 4.377997\n", "Epoch 155, Training loss 19.205982, Validation loss 4.381592\n", "Epoch 156, Training loss 19.136749, Validation loss 4.385174\n", "Epoch 157, Training loss 19.067804, Validation loss 4.388752\n", "Epoch 158, Training loss 18.999144, Validation loss 4.392331\n", "Epoch 159, Training loss 18.930769, Validation loss 4.395905\n", "Epoch 160, Training loss 18.862675, Validation loss 4.399470\n", "Epoch 161, Training loss 18.794859, Validation loss 4.403041\n", "Epoch 162, Training loss 18.727325, Validation loss 4.406597\n", "Epoch 163, Training loss 18.660072, Validation loss 4.410159\n", "Epoch 164, Training loss 18.593090, Validation loss 4.413717\n", "Epoch 165, Training loss 18.526390, Validation loss 4.417264\n", "Epoch 166, Training loss 18.459963, Validation loss 4.420817\n", "Epoch 167, Training loss 18.393810, Validation loss 4.424358\n", "Epoch 168, Training loss 18.327930, Validation loss 4.427900\n", "Epoch 169, Training loss 18.262323, Validation loss 4.431436\n", "Epoch 170, Training loss 18.196985, Validation loss 4.434969\n", "Epoch 171, Training loss 18.131920, Validation loss 4.438495\n", "Epoch 172, Training loss 18.067118, Validation loss 4.442024\n", "Epoch 173, Training loss 18.002586, Validation loss 4.445548\n", "Epoch 174, Training loss 17.938324, Validation loss 4.449068\n", "Epoch 175, Training loss 17.874323, Validation loss 4.452584\n", "Epoch 176, Training loss 17.810587, Validation loss 4.456095\n", "Epoch 177, Training loss 17.747118, Validation loss 4.459600\n", "Epoch 178, Training loss 17.683905, Validation loss 4.463108\n", "Epoch 179, Training loss 17.620958, Validation loss 4.466607\n", "Epoch 180, Training loss 17.558264, Validation loss 4.470100\n", "Epoch 181, Training loss 17.495829, Validation loss 4.473598\n", "Epoch 182, Training loss 17.433661, Validation loss 4.477083\n", "Epoch 183, Training loss 17.371740, Validation loss 4.480570\n", "Epoch 184, Training loss 17.310080, Validation loss 4.484053\n", "Epoch 185, Training loss 17.248674, Validation loss 4.487536\n", "Epoch 186, Training loss 17.187515, Validation loss 4.491009\n", "Epoch 187, Training loss 17.126614, Validation loss 4.494473\n", "Epoch 188, Training loss 17.065966, Validation loss 4.497943\n", "Epoch 189, Training loss 17.005564, Validation loss 4.501409\n", "Epoch 190, Training loss 16.945414, Validation loss 4.504863\n", "Epoch 191, Training loss 16.885511, Validation loss 4.508317\n", "Epoch 192, Training loss 16.825857, Validation loss 4.511776\n", "Epoch 193, Training loss 16.766445, Validation loss 4.515216\n", "Epoch 194, Training loss 16.707283, Validation loss 4.518663\n", "Epoch 195, Training loss 16.648361, Validation loss 4.522104\n", "Epoch 196, 
Training loss 16.589684, Validation loss 4.525541\n", "Epoch 197, Training loss 16.531254, Validation loss 4.528974\n", "Epoch 198, Training loss 16.473057, Validation loss 4.532403\n", "Epoch 199, Training loss 16.415102, Validation loss 4.535828\n", "Epoch 200, Training loss 16.357389, Validation loss 4.539241\n", "Epoch 201, Training loss 16.299913, Validation loss 4.542663\n", "Epoch 202, Training loss 16.242676, Validation loss 4.546077\n", "Epoch 203, Training loss 16.185669, Validation loss 4.549491\n", "Epoch 204, Training loss 16.128904, Validation loss 4.552889\n", "Epoch 205, Training loss 16.072371, Validation loss 4.556292\n", "Epoch 206, Training loss 16.016069, Validation loss 4.559686\n", "Epoch 207, Training loss 15.960003, Validation loss 4.563085\n", "Epoch 208, Training loss 15.904170, Validation loss 4.566475\n", "Epoch 209, Training loss 15.848560, Validation loss 4.569862\n", "Epoch 210, Training loss 15.793184, Validation loss 4.573240\n", "Epoch 211, Training loss 15.738035, Validation loss 4.576620\n", "Epoch 212, Training loss 15.683114, Validation loss 4.579998\n", "Epoch 213, Training loss 15.628423, Validation loss 4.583364\n", "Epoch 214, Training loss 15.573953, Validation loss 4.586736\n", "Epoch 215, Training loss 15.519708, Validation loss 4.590094\n", "Epoch 216, Training loss 15.465690, Validation loss 4.593452\n", "Epoch 217, Training loss 15.411894, Validation loss 4.596807\n", "Epoch 218, Training loss 15.358317, Validation loss 4.600153\n", "Epoch 219, Training loss 15.304969, Validation loss 4.603502\n", "Epoch 220, Training loss 15.251835, Validation loss 4.606848\n", "Epoch 221, Training loss 15.198918, Validation loss 4.610189\n", "Epoch 222, Training loss 15.146218, Validation loss 4.613523\n", "Epoch 223, Training loss 15.093741, Validation loss 4.616856\n", "Epoch 224, Training loss 15.041482, Validation loss 4.620181\n", "Epoch 225, Training loss 14.989434, Validation loss 4.623506\n", "Epoch 226, Training loss 14.937605, Validation loss 4.626822\n", "Epoch 227, Training loss 14.885984, Validation loss 4.630137\n", "Epoch 228, Training loss 14.834584, Validation loss 4.633453\n", "Epoch 229, Training loss 14.783388, Validation loss 4.636760\n", "Epoch 230, Training loss 14.732409, Validation loss 4.640059\n", "Epoch 231, Training loss 14.681636, Validation loss 4.643361\n", "Epoch 232, Training loss 14.631077, Validation loss 4.646655\n", "Epoch 233, Training loss 14.580723, Validation loss 4.649944\n", "Epoch 234, Training loss 14.530575, Validation loss 4.653233\n", "Epoch 235, Training loss 14.480640, Validation loss 4.656522\n", "Epoch 236, Training loss 14.430906, Validation loss 4.659798\n", "Epoch 237, Training loss 14.381378, Validation loss 4.663070\n", "Epoch 238, Training loss 14.332062, Validation loss 4.666340\n", "Epoch 239, Training loss 14.282940, Validation loss 4.669611\n", "Epoch 240, Training loss 14.234026, Validation loss 4.672873\n", "Epoch 241, Training loss 14.185309, Validation loss 4.676129\n", "Epoch 242, Training loss 14.136793, Validation loss 4.679395\n", "Epoch 243, Training loss 14.088481, Validation loss 4.682643\n", "Epoch 244, Training loss 14.040371, Validation loss 4.685894\n", "Epoch 245, Training loss 13.992454, Validation loss 4.689137\n", "Epoch 246, Training loss 13.944736, Validation loss 4.692370\n", "Epoch 247, Training loss 13.897215, Validation loss 4.695608\n", "Epoch 248, Training loss 13.849891, Validation loss 4.698841\n", "Epoch 249, Training loss 13.802764, Validation loss 4.702069\n", 
"Epoch 250, Training loss 13.755830, Validation loss 4.705284\n", "Epoch 251, Training loss 13.709087, Validation loss 4.708507\n", "Epoch 252, Training loss 13.662540, Validation loss 4.711720\n", "Epoch 253, Training loss 13.616182, Validation loss 4.714928\n", "Epoch 254, Training loss 13.570018, Validation loss 4.718136\n", "Epoch 255, Training loss 13.524043, Validation loss 4.721344\n", "Epoch 256, Training loss 13.478262, Validation loss 4.724542\n", "Epoch 257, Training loss 13.432661, Validation loss 4.727731\n", "Epoch 258, Training loss 13.387257, Validation loss 4.730923\n", "Epoch 259, Training loss 13.342034, Validation loss 4.734110\n", "Epoch 260, Training loss 13.296999, Validation loss 4.737289\n", "Epoch 261, Training loss 13.252155, Validation loss 4.740471\n", "Epoch 262, Training loss 13.207493, Validation loss 4.743648\n", "Epoch 263, Training loss 13.163013, Validation loss 4.746819\n", "Epoch 264, Training loss 13.118715, Validation loss 4.749987\n", "Epoch 265, Training loss 13.074604, Validation loss 4.753143\n", "Epoch 266, Training loss 13.030676, Validation loss 4.756305\n", "Epoch 267, Training loss 12.986928, Validation loss 4.759456\n", "Epoch 268, Training loss 12.943359, Validation loss 4.762611\n", "Epoch 269, Training loss 12.899972, Validation loss 4.765752\n", "Epoch 270, Training loss 12.856762, Validation loss 4.768897\n", "Epoch 271, Training loss 12.813731, Validation loss 4.772032\n", "Epoch 272, Training loss 12.770876, Validation loss 4.775167\n", "Epoch 273, Training loss 12.728199, Validation loss 4.778296\n", "Epoch 274, Training loss 12.685698, Validation loss 4.781423\n", "Epoch 275, Training loss 12.643375, Validation loss 4.784546\n", "Epoch 276, Training loss 12.601223, Validation loss 4.787663\n", "Epoch 277, Training loss 12.559246, Validation loss 4.790776\n", "Epoch 278, Training loss 12.517445, Validation loss 4.793883\n", "Epoch 279, Training loss 12.475814, Validation loss 4.796989\n", "Epoch 280, Training loss 12.434353, Validation loss 4.800094\n", "Epoch 281, Training loss 12.393068, Validation loss 4.803185\n", "Epoch 282, Training loss 12.351951, Validation loss 4.806275\n", "Epoch 283, Training loss 12.310999, Validation loss 4.809368\n", "Epoch 284, Training loss 12.270223, Validation loss 4.812448\n", "Epoch 285, Training loss 12.229609, Validation loss 4.815535\n", "Epoch 286, Training loss 12.189167, Validation loss 4.818603\n", "Epoch 287, Training loss 12.148890, Validation loss 4.821682\n", "Epoch 288, Training loss 12.108781, Validation loss 4.824749\n", "Epoch 289, Training loss 12.068835, Validation loss 4.827822\n", "Epoch 290, Training loss 12.029058, Validation loss 4.830877\n", "Epoch 291, Training loss 11.989440, Validation loss 4.833931\n", "Epoch 292, Training loss 11.949987, Validation loss 4.836975\n", "Epoch 293, Training loss 11.910695, Validation loss 4.840030\n", "Epoch 294, Training loss 11.871572, Validation loss 4.843068\n", "Epoch 295, Training loss 11.832604, Validation loss 4.846112\n", "Epoch 296, Training loss 11.793798, Validation loss 4.849151\n", "Epoch 297, Training loss 11.755157, Validation loss 4.852180\n", "Epoch 298, Training loss 11.716671, Validation loss 4.855208\n", "Epoch 299, Training loss 11.678346, Validation loss 4.858234\n", "Epoch 300, Training loss 11.640179, Validation loss 4.861247\n", "Epoch 301, Training loss 11.602169, Validation loss 4.864257\n", "Epoch 302, Training loss 11.564314, Validation loss 4.867275\n", "Epoch 303, Training loss 11.526615, Validation loss 
4.870279\n", "Epoch 304, Training loss 11.489075, Validation loss 4.873277\n", "Epoch 305, Training loss 11.451687, Validation loss 4.876278\n", "Epoch 306, Training loss 11.414451, Validation loss 4.879273\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 307, Training loss 11.377375, Validation loss 4.882258\n", "Epoch 308, Training loss 11.340446, Validation loss 4.885246\n", "Epoch 309, Training loss 11.303672, Validation loss 4.888228\n", "Epoch 310, Training loss 11.267049, Validation loss 4.891205\n", "Epoch 311, Training loss 11.230578, Validation loss 4.894179\n", "Epoch 312, Training loss 11.194256, Validation loss 4.897153\n", "Epoch 313, Training loss 11.158085, Validation loss 4.900112\n", "Epoch 314, Training loss 11.122065, Validation loss 4.903078\n", "Epoch 315, Training loss 11.086187, Validation loss 4.906034\n", "Epoch 316, Training loss 11.050466, Validation loss 4.908988\n", "Epoch 317, Training loss 11.014886, Validation loss 4.911941\n", "Epoch 318, Training loss 10.979456, Validation loss 4.914883\n", "Epoch 319, Training loss 10.944174, Validation loss 4.917824\n", "Epoch 320, Training loss 10.909034, Validation loss 4.920767\n", "Epoch 321, Training loss 10.874042, Validation loss 4.923692\n", "Epoch 322, Training loss 10.839191, Validation loss 4.926623\n", "Epoch 323, Training loss 10.804487, Validation loss 4.929545\n", "Epoch 324, Training loss 10.769923, Validation loss 4.932465\n", "Epoch 325, Training loss 10.735504, Validation loss 4.935382\n", "Epoch 326, Training loss 10.701224, Validation loss 4.938291\n", "Epoch 327, Training loss 10.667088, Validation loss 4.941205\n", "Epoch 328, Training loss 10.633094, Validation loss 4.944104\n", "Epoch 329, Training loss 10.599239, Validation loss 4.947011\n", "Epoch 330, Training loss 10.565526, Validation loss 4.949907\n", "Epoch 331, Training loss 10.531947, Validation loss 4.952793\n", "Epoch 332, Training loss 10.498510, Validation loss 4.955682\n", "Epoch 333, Training loss 10.465212, Validation loss 4.958560\n", "Epoch 334, Training loss 10.432049, Validation loss 4.961440\n", "Epoch 335, Training loss 10.399022, Validation loss 4.964319\n", "Epoch 336, Training loss 10.366132, Validation loss 4.967192\n", "Epoch 337, Training loss 10.333381, Validation loss 4.970054\n", "Epoch 338, Training loss 10.300760, Validation loss 4.972922\n", "Epoch 339, Training loss 10.268276, Validation loss 4.975777\n", "Epoch 340, Training loss 10.235928, Validation loss 4.978637\n", "Epoch 341, Training loss 10.203709, Validation loss 4.981483\n", "Epoch 342, Training loss 10.171625, Validation loss 4.984336\n", "Epoch 343, Training loss 10.139676, Validation loss 4.987178\n", "Epoch 344, Training loss 10.107858, Validation loss 4.990010\n", "Epoch 345, Training loss 10.076169, Validation loss 4.992847\n", "Epoch 346, Training loss 10.044612, Validation loss 4.995676\n", "Epoch 347, Training loss 10.013185, Validation loss 4.998506\n", "Epoch 348, Training loss 9.981892, Validation loss 5.001325\n", "Epoch 349, Training loss 9.950722, Validation loss 5.004142\n", "Epoch 350, Training loss 9.919682, Validation loss 5.006953\n", "Epoch 351, Training loss 9.888770, Validation loss 5.009767\n", "Epoch 352, Training loss 9.857985, Validation loss 5.012574\n", "Epoch 353, Training loss 9.827330, Validation loss 5.015370\n", "Epoch 354, Training loss 9.796800, Validation loss 5.018168\n", "Epoch 355, Training loss 9.766396, Validation loss 5.020966\n", "Epoch 356, Training loss 9.736116, Validation loss 
5.023747\n", "Epoch 357, Training loss 9.705960, Validation loss 5.026544\n", "Epoch 358, Training loss 9.675933, Validation loss 5.029321\n", "Epoch 359, Training loss 9.646028, Validation loss 5.032096\n", "Epoch 360, Training loss 9.616245, Validation loss 5.034869\n", "Epoch 361, Training loss 9.586585, Validation loss 5.037640\n", "Epoch 362, Training loss 9.557048, Validation loss 5.040396\n", "Epoch 363, Training loss 9.527632, Validation loss 5.043166\n", "Epoch 364, Training loss 9.498340, Validation loss 5.045914\n", "Epoch 365, Training loss 9.469171, Validation loss 5.048676\n", "Epoch 366, Training loss 9.440116, Validation loss 5.051428\n", "Epoch 367, Training loss 9.411180, Validation loss 5.054164\n", "Epoch 368, Training loss 9.382369, Validation loss 5.056907\n", "Epoch 369, Training loss 9.353676, Validation loss 5.059647\n", "Epoch 370, Training loss 9.325098, Validation loss 5.062381\n", "Epoch 371, Training loss 9.296637, Validation loss 5.065108\n", "Epoch 372, Training loss 9.268297, Validation loss 5.067829\n", "Epoch 373, Training loss 9.240076, Validation loss 5.070543\n", "Epoch 374, Training loss 9.211968, Validation loss 5.073264\n", "Epoch 375, Training loss 9.183977, Validation loss 5.075983\n", "Epoch 376, Training loss 9.156102, Validation loss 5.078686\n", "Epoch 377, Training loss 9.128344, Validation loss 5.081395\n", "Epoch 378, Training loss 9.100697, Validation loss 5.084097\n", "Epoch 379, Training loss 9.073165, Validation loss 5.086790\n", "Epoch 380, Training loss 9.045744, Validation loss 5.089475\n", "Epoch 381, Training loss 9.018442, Validation loss 5.092162\n", "Epoch 382, Training loss 8.991248, Validation loss 5.094854\n", "Epoch 383, Training loss 8.964168, Validation loss 5.097533\n", "Epoch 384, Training loss 8.937200, Validation loss 5.100200\n", "Epoch 385, Training loss 8.910341, Validation loss 5.102878\n", "Epoch 386, Training loss 8.883595, Validation loss 5.105548\n", "Epoch 387, Training loss 8.856958, Validation loss 5.108208\n", "Epoch 388, Training loss 8.830430, Validation loss 5.110874\n", "Epoch 389, Training loss 8.804013, Validation loss 5.113529\n", "Epoch 390, Training loss 8.777707, Validation loss 5.116178\n", "Epoch 391, Training loss 8.751509, Validation loss 5.118819\n", "Epoch 392, Training loss 8.725418, Validation loss 5.121475\n", "Epoch 393, Training loss 8.699435, Validation loss 5.124115\n", "Epoch 394, Training loss 8.673560, Validation loss 5.126750\n", "Epoch 395, Training loss 8.647788, Validation loss 5.129373\n", "Epoch 396, Training loss 8.622128, Validation loss 5.131997\n", "Epoch 397, Training loss 8.596569, Validation loss 5.134619\n", "Epoch 398, Training loss 8.571117, Validation loss 5.137239\n", "Epoch 399, Training loss 8.545772, Validation loss 5.139852\n", "Epoch 400, Training loss 8.520529, Validation loss 5.142470\n", "Epoch 401, Training loss 8.495392, Validation loss 5.145078\n", "Epoch 402, Training loss 8.470356, Validation loss 5.147683\n", "Epoch 403, Training loss 8.445425, Validation loss 5.150290\n", "Epoch 404, Training loss 8.420598, Validation loss 5.152877\n", "Epoch 405, Training loss 8.395872, Validation loss 5.155475\n", "Epoch 406, Training loss 8.371247, Validation loss 5.158065\n", "Epoch 407, Training loss 8.346725, Validation loss 5.160648\n", "Epoch 408, Training loss 8.322308, Validation loss 5.163229\n", "Epoch 409, Training loss 8.297989, Validation loss 5.165803\n", "Epoch 410, Training loss 8.273767, Validation loss 5.168375\n", "Epoch 411, Training loss 
8.249647, Validation loss 5.170935\n", "Epoch 412, Training loss 8.225626, Validation loss 5.173505\n", "Epoch 413, Training loss 8.201704, Validation loss 5.176064\n", "Epoch 414, Training loss 8.177880, Validation loss 5.178620\n", "Epoch 415, Training loss 8.154160, Validation loss 5.181170\n", "Epoch 416, Training loss 8.130527, Validation loss 5.183717\n", "Epoch 417, Training loss 8.107003, Validation loss 5.186265\n", "Epoch 418, Training loss 8.083573, Validation loss 5.188802\n", "Epoch 419, Training loss 8.060236, Validation loss 5.191345\n", "Epoch 420, Training loss 8.036998, Validation loss 5.193877\n", "Epoch 421, Training loss 8.013855, Validation loss 5.196397\n", "Epoch 422, Training loss 7.990808, Validation loss 5.198928\n", "Epoch 423, Training loss 7.967856, Validation loss 5.201442\n", "Epoch 424, Training loss 7.945001, Validation loss 5.203963\n", "Epoch 425, Training loss 7.922238, Validation loss 5.206485\n", "Epoch 426, Training loss 7.899567, Validation loss 5.208996\n", "Epoch 427, Training loss 7.876991, Validation loss 5.211495\n", "Epoch 428, Training loss 7.854509, Validation loss 5.213996\n", "Epoch 429, Training loss 7.832119, Validation loss 5.216481\n", "Epoch 430, Training loss 7.809824, Validation loss 5.218972\n", "Epoch 431, Training loss 7.787618, Validation loss 5.221478\n", "Epoch 432, Training loss 7.765505, Validation loss 5.223971\n", "Epoch 433, Training loss 7.743483, Validation loss 5.226445\n", "Epoch 434, Training loss 7.721551, Validation loss 5.228920\n", "Epoch 435, Training loss 7.699707, Validation loss 5.231401\n", "Epoch 436, Training loss 7.677957, Validation loss 5.233862\n", "Epoch 437, Training loss 7.656300, Validation loss 5.236325\n", "Epoch 438, Training loss 7.634727, Validation loss 5.238789\n", "Epoch 439, Training loss 7.613241, Validation loss 5.241241\n", "Epoch 440, Training loss 7.591851, Validation loss 5.243703\n", "Epoch 441, Training loss 7.570543, Validation loss 5.246154\n", "Epoch 442, Training loss 7.549326, Validation loss 5.248593\n", "Epoch 443, Training loss 7.528196, Validation loss 5.251043\n", "Epoch 444, Training loss 7.507151, Validation loss 5.253476\n", "Epoch 445, Training loss 7.486197, Validation loss 5.255902\n", "Epoch 446, Training loss 7.465326, Validation loss 5.258339\n", "Epoch 447, Training loss 7.444541, Validation loss 5.260777\n", "Epoch 448, Training loss 7.423847, Validation loss 5.263189\n", "Epoch 449, Training loss 7.403229, Validation loss 5.265617\n", "Epoch 450, Training loss 7.382705, Validation loss 5.268033\n", "Epoch 451, Training loss 7.362262, Validation loss 5.270450\n", "Epoch 452, Training loss 7.341904, Validation loss 5.272855\n", "Epoch 453, Training loss 7.321630, Validation loss 5.275258\n", "Epoch 454, Training loss 7.301441, Validation loss 5.277658\n", "Epoch 455, Training loss 7.281332, Validation loss 5.280050\n", "Epoch 456, Training loss 7.261306, Validation loss 5.282439\n", "Epoch 457, Training loss 7.241365, Validation loss 5.284829\n", "Epoch 458, Training loss 7.221507, Validation loss 5.287217\n", "Epoch 459, Training loss 7.201731, Validation loss 5.289601\n", "Epoch 460, Training loss 7.182032, Validation loss 5.291987\n", "Epoch 461, Training loss 7.162420, Validation loss 5.294361\n", "Epoch 462, Training loss 7.142885, Validation loss 5.296720\n", "Epoch 463, Training loss 7.123432, Validation loss 5.299083\n", "Epoch 464, Training loss 7.104058, Validation loss 5.301443\n", "Epoch 465, Training loss 7.084769, Validation loss 5.303810\n", 
"Epoch 466, Training loss 7.065551, Validation loss 5.306159\n", "Epoch 467, Training loss 7.046420, Validation loss 5.308516\n", "Epoch 468, Training loss 7.027362, Validation loss 5.310859\n", "Epoch 469, Training loss 7.008387, Validation loss 5.313209\n", "Epoch 470, Training loss 6.989488, Validation loss 5.315546\n", "Epoch 471, Training loss 6.970669, Validation loss 5.317872\n", "Epoch 472, Training loss 6.951925, Validation loss 5.320212\n", "Epoch 473, Training loss 6.933261, Validation loss 5.322545\n", "Epoch 474, Training loss 6.914673, Validation loss 5.324862\n", "Epoch 475, Training loss 6.896160, Validation loss 5.327179\n", "Epoch 476, Training loss 6.877728, Validation loss 5.329498\n", "Epoch 477, Training loss 6.859365, Validation loss 5.331814\n", "Epoch 478, Training loss 6.841084, Validation loss 5.334122\n", "Epoch 479, Training loss 6.822875, Validation loss 5.336427\n", "Epoch 480, Training loss 6.804743, Validation loss 5.338734\n", "Epoch 481, Training loss 6.786684, Validation loss 5.341028\n", "Epoch 482, Training loss 6.768701, Validation loss 5.343314\n", "Epoch 483, Training loss 6.750792, Validation loss 5.345607\n", "Epoch 484, Training loss 6.732955, Validation loss 5.347892\n", "Epoch 485, Training loss 6.715196, Validation loss 5.350182\n", "Epoch 486, Training loss 6.697509, Validation loss 5.352456\n", "Epoch 487, Training loss 6.679894, Validation loss 5.354740\n", "Epoch 488, Training loss 6.662351, Validation loss 5.357007\n", "Epoch 489, Training loss 6.644879, Validation loss 5.359267\n", "Epoch 490, Training loss 6.627481, Validation loss 5.361528\n", "Epoch 491, Training loss 6.610154, Validation loss 5.363804\n", "Epoch 492, Training loss 6.592900, Validation loss 5.366054\n", "Epoch 493, Training loss 6.575720, Validation loss 5.368310\n", "Epoch 494, Training loss 6.558608, Validation loss 5.370558\n", "Epoch 495, Training loss 6.541563, Validation loss 5.372811\n", "Epoch 496, Training loss 6.524591, Validation loss 5.375044\n", "Epoch 497, Training loss 6.507690, Validation loss 5.377287\n", "Epoch 498, Training loss 6.490858, Validation loss 5.379530\n", "Epoch 499, Training loss 6.474095, Validation loss 5.381763\n", "Epoch 500, Training loss 6.457404, Validation loss 5.383982\n", "Epoch 501, Training loss 6.440779, Validation loss 5.386202\n", "Epoch 502, Training loss 6.424222, Validation loss 5.388415\n", "Epoch 503, Training loss 6.407733, Validation loss 5.390643\n", "Epoch 504, Training loss 6.391315, Validation loss 5.392858\n", "Epoch 505, Training loss 6.374967, Validation loss 5.395061\n", "Epoch 506, Training loss 6.358679, Validation loss 5.397264\n", "Epoch 507, Training loss 6.342463, Validation loss 5.399474\n", "Epoch 508, Training loss 6.326312, Validation loss 5.401668\n", "Epoch 509, Training loss 6.310229, Validation loss 5.403862\n", "Epoch 510, Training loss 6.294209, Validation loss 5.406057\n", "Epoch 511, Training loss 6.278260, Validation loss 5.408239\n", "Epoch 512, Training loss 6.262375, Validation loss 5.410428\n", "Epoch 513, Training loss 6.246554, Validation loss 5.412613\n", "Epoch 514, Training loss 6.230801, Validation loss 5.414786\n", "Epoch 515, Training loss 6.215113, Validation loss 5.416960\n", "Epoch 516, Training loss 6.199486, Validation loss 5.419126\n", "Epoch 517, Training loss 6.183925, Validation loss 5.421297\n", "Epoch 518, Training loss 6.168430, Validation loss 5.423456\n", "Epoch 519, Training loss 6.152997, Validation loss 5.425617\n", "Epoch 520, Training loss 6.137631, 
Validation loss 5.427765\n", "Epoch 521, Training loss 6.122324, Validation loss 5.429914\n", "Epoch 522, Training loss 6.107084, Validation loss 5.432073\n", "Epoch 523, Training loss 6.091903, Validation loss 5.434215\n", "Epoch 524, Training loss 6.076788, Validation loss 5.436354\n", "Epoch 525, Training loss 6.061732, Validation loss 5.438489\n", "Epoch 526, Training loss 6.046741, Validation loss 5.440620\n", "Epoch 527, Training loss 6.031814, Validation loss 5.442753\n", "Epoch 528, Training loss 6.016943, Validation loss 5.444874\n", "Epoch 529, Training loss 6.002138, Validation loss 5.446991\n", "Epoch 530, Training loss 5.987395, Validation loss 5.449113\n", "Epoch 531, Training loss 5.972708, Validation loss 5.451232\n", "Epoch 532, Training loss 5.958084, Validation loss 5.453348\n", "Epoch 533, Training loss 5.943521, Validation loss 5.455446\n", "Epoch 534, Training loss 5.929014, Validation loss 5.457564\n", "Epoch 535, Training loss 5.914569, Validation loss 5.459660\n", "Epoch 536, Training loss 5.900187, Validation loss 5.461761\n", "Epoch 537, Training loss 5.885860, Validation loss 5.463855\n", "Epoch 538, Training loss 5.871596, Validation loss 5.465936\n", "Epoch 539, Training loss 5.857386, Validation loss 5.468027\n", "Epoch 540, Training loss 5.843239, Validation loss 5.470115\n", "Epoch 541, Training loss 5.829150, Validation loss 5.472203\n", "Epoch 542, Training loss 5.815114, Validation loss 5.474270\n", "Epoch 543, Training loss 5.801143, Validation loss 5.476347\n", "Epoch 544, Training loss 5.787226, Validation loss 5.478412\n", "Epoch 545, Training loss 5.773369, Validation loss 5.480486\n", "Epoch 546, Training loss 5.759564, Validation loss 5.482539\n", "Epoch 547, Training loss 5.745820, Validation loss 5.484601\n", "Epoch 548, Training loss 5.732134, Validation loss 5.486665\n", "Epoch 549, Training loss 5.718503, Validation loss 5.488716\n", "Epoch 550, Training loss 5.704927, Validation loss 5.490764\n", "Epoch 551, Training loss 5.691407, Validation loss 5.492812\n", "Epoch 552, Training loss 5.677943, Validation loss 5.494843\n", "Epoch 553, Training loss 5.664536, Validation loss 5.496885\n", "Epoch 554, Training loss 5.651185, Validation loss 5.498927\n", "Epoch 555, Training loss 5.637887, Validation loss 5.500953\n", "Epoch 556, Training loss 5.624640, Validation loss 5.502983\n", "Epoch 557, Training loss 5.611454, Validation loss 5.505001\n", "Epoch 558, Training loss 5.598321, Validation loss 5.507024\n", "Epoch 559, Training loss 5.585241, Validation loss 5.509039\n", "Epoch 560, Training loss 5.572216, Validation loss 5.511055\n", "Epoch 561, Training loss 5.559245, Validation loss 5.513063\n", "Epoch 562, Training loss 5.546323, Validation loss 5.515077\n", "Epoch 563, Training loss 5.533461, Validation loss 5.517073\n", "Epoch 564, Training loss 5.520647, Validation loss 5.519074\n", "Epoch 565, Training loss 5.507891, Validation loss 5.521072\n", "Epoch 566, Training loss 5.495181, Validation loss 5.523067\n", "Epoch 567, Training loss 5.482527, Validation loss 5.525057\n", "Epoch 568, Training loss 5.469926, Validation loss 5.527048\n", "Epoch 569, Training loss 5.457377, Validation loss 5.529032\n", "Epoch 570, Training loss 5.444877, Validation loss 5.531006\n", "Epoch 571, Training loss 5.432432, Validation loss 5.532987\n", "Epoch 572, Training loss 5.420036, Validation loss 5.534964\n", "Epoch 573, Training loss 5.407691, Validation loss 5.536937\n", "Epoch 574, Training loss 5.395400, Validation loss 5.538893\n", "Epoch 575, 
Training loss 5.383160, Validation loss 5.540854\n", "Epoch 576, Training loss 5.370966, Validation loss 5.542825\n", "Epoch 577, Training loss 5.358825, Validation loss 5.544774\n", "Epoch 578, Training loss 5.346734, Validation loss 5.546733\n", "Epoch 579, Training loss 5.334694, Validation loss 5.548671\n", "Epoch 580, Training loss 5.322701, Validation loss 5.550631\n", "Epoch 581, Training loss 5.310760, Validation loss 5.552575\n", "Epoch 582, Training loss 5.298864, Validation loss 5.554505\n", "Epoch 583, Training loss 5.287020, Validation loss 5.556437\n", "Epoch 584, Training loss 5.275226, Validation loss 5.558369\n", "Epoch 585, Training loss 5.263483, Validation loss 5.560302\n", "Epoch 586, Training loss 5.251784, Validation loss 5.562231\n", "Epoch 587, Training loss 5.240134, Validation loss 5.564152\n", "Epoch 588, Training loss 5.228533, Validation loss 5.566070\n", "Epoch 589, Training loss 5.216980, Validation loss 5.567987\n", "Epoch 590, Training loss 5.205472, Validation loss 5.569906\n", "Epoch 591, Training loss 5.194014, Validation loss 5.571817\n", "Epoch 592, Training loss 5.182601, Validation loss 5.573719\n", "Epoch 593, Training loss 5.171238, Validation loss 5.575613\n", "Epoch 594, Training loss 5.159922, Validation loss 5.577526\n", "Epoch 595, Training loss 5.148650, Validation loss 5.579417\n", "Epoch 596, Training loss 5.137429, Validation loss 5.581313\n", "Epoch 597, Training loss 5.126249, Validation loss 5.583200\n", "Epoch 598, Training loss 5.115118, Validation loss 5.585085\n", "Epoch 599, Training loss 5.104032, Validation loss 5.586970\n", "Epoch 600, Training loss 5.092992, Validation loss 5.588851\n", "Epoch 601, Training loss 5.082000, Validation loss 5.590724\n", "Epoch 602, Training loss 5.071049, Validation loss 5.592597\n", "Epoch 603, Training loss 5.060148, Validation loss 5.594467\n", "Epoch 604, Training loss 5.049288, Validation loss 5.596332\n", "Epoch 605, Training loss 5.038473, Validation loss 5.598199\n", "Epoch 606, Training loss 5.027702, Validation loss 5.600057\n", "Epoch 607, Training loss 5.016979, Validation loss 5.601920\n", "Epoch 608, Training loss 5.006296, Validation loss 5.603770\n", "Epoch 609, Training loss 4.995661, Validation loss 5.605626\n", "Epoch 610, Training loss 4.985068, Validation loss 5.607473\n", "Epoch 611, Training loss 4.974522, Validation loss 5.609317\n", "Epoch 612, Training loss 4.964016, Validation loss 5.611161\n", "Epoch 613, Training loss 4.953552, Validation loss 5.612988\n", "Epoch 614, Training loss 4.943133, Validation loss 5.614824\n", "Epoch 615, Training loss 4.932758, Validation loss 5.616657\n", "Epoch 616, Training loss 4.922426, Validation loss 5.618481\n", "Epoch 617, Training loss 4.912134, Validation loss 5.620310\n", "Epoch 618, Training loss 4.901886, Validation loss 5.622136\n", "Epoch 619, Training loss 4.891680, Validation loss 5.623953\n", "Epoch 620, Training loss 4.881516, Validation loss 5.625762\n", "Epoch 621, Training loss 4.871395, Validation loss 5.627580\n", "Epoch 622, Training loss 4.861313, Validation loss 5.629385\n", "Epoch 623, Training loss 4.851276, Validation loss 5.631201\n", "Epoch 624, Training loss 4.841280, Validation loss 5.633007\n", "Epoch 625, Training loss 4.831324, Validation loss 5.634801\n", "Epoch 626, Training loss 4.821410, Validation loss 5.636600\n", "Epoch 627, Training loss 4.811536, Validation loss 5.638395\n", "Epoch 628, Training loss 4.801704, Validation loss 5.640173\n", "Epoch 629, Training loss 4.791910, Validation loss 
5.641960\n", "Epoch 630, Training loss 4.782159, Validation loss 5.643752\n", "[... per-epoch output truncated; representative epochs shown below ...]\n",
"Epoch 700, Training loss 4.190660, Validation loss 5.761053\n", "Epoch 800, Training loss 3.593912, Validation loss 5.904808\n", "Epoch 900, Training loss 3.199271, Validation loss 6.024158\n", "Epoch 1000, Training loss 2.938281, Validation loss 6.122833\n",
"Epoch 1100, Training loss 2.765679, Validation loss 6.204149\n", "Epoch 1200, Training loss 2.651531, Validation loss 6.270993\n", "Epoch 1300, Training loss 2.576043, Validation loss 6.325824\n", "Epoch 1400, Training loss 2.526120, Validation loss 6.370709\n",
"Epoch 1500, Training loss 2.493103, Validation loss 6.407430\n", "Epoch 1600, Training loss 2.471268, Validation loss 6.437405\n", "Epoch 1700, Training loss 2.456828, Validation loss 6.461903\n", "Epoch 1760, Training loss 2.450633, Validation loss 6.474363\n",
"Epoch 1761, Training loss 
2.450542, Validation loss 6.474545\n", "Epoch 1762, Training loss 2.450452, Validation loss 6.474742\n", "Epoch 1763, Training loss 2.450361, Validation loss 6.474938\n", "Epoch 1764, Training loss 2.450272, Validation loss 6.475130\n", "Epoch 1765, Training loss 2.450183, Validation loss 6.475321\n", "Epoch 1766, Training loss 2.450094, Validation loss 6.475513\n", "Epoch 1767, Training loss 2.450006, Validation loss 6.475710\n", "Epoch 1768, Training loss 2.449917, Validation loss 6.475906\n", "Epoch 1769, Training loss 2.449830, Validation loss 6.476097\n", "Epoch 1770, Training loss 2.449741, Validation loss 6.476289\n", "Epoch 1771, Training loss 2.449654, Validation loss 6.476476\n", "Epoch 1772, Training loss 2.449567, Validation loss 6.476672\n", "Epoch 1773, Training loss 2.449482, Validation loss 6.476859\n", "Epoch 1774, Training loss 2.449396, Validation loss 6.477051\n", "Epoch 1775, Training loss 2.449310, Validation loss 6.477242\n", "Epoch 1776, Training loss 2.449226, Validation loss 6.477434\n", "Epoch 1777, Training loss 2.449140, Validation loss 6.477625\n", "Epoch 1778, Training loss 2.449056, Validation loss 6.477802\n", "Epoch 1779, Training loss 2.448970, Validation loss 6.477994\n", "Epoch 1780, Training loss 2.448886, Validation loss 6.478186\n", "Epoch 1781, Training loss 2.448804, Validation loss 6.478362\n", "Epoch 1782, Training loss 2.448721, Validation loss 6.478559\n", "Epoch 1783, Training loss 2.448637, Validation loss 6.478750\n", "Epoch 1784, Training loss 2.448554, Validation loss 6.478931\n", "Epoch 1785, Training loss 2.448472, Validation loss 6.479123\n", "Epoch 1786, Training loss 2.448390, Validation loss 6.479320\n", "Epoch 1787, Training loss 2.448310, Validation loss 6.479496\n", "Epoch 1788, Training loss 2.448227, Validation loss 6.479678\n", "Epoch 1789, Training loss 2.448148, Validation loss 6.479869\n", "Epoch 1790, Training loss 2.448066, Validation loss 6.480051\n", "Epoch 1791, Training loss 2.447986, Validation loss 6.480228\n", "Epoch 1792, Training loss 2.447906, Validation loss 6.480405\n", "Epoch 1793, Training loss 2.447826, Validation loss 6.480587\n", "Epoch 1794, Training loss 2.447747, Validation loss 6.480768\n", "Epoch 1795, Training loss 2.447669, Validation loss 6.480949\n", "Epoch 1796, Training loss 2.447591, Validation loss 6.481126\n", "Epoch 1797, Training loss 2.447513, Validation loss 6.481313\n", "Epoch 1798, Training loss 2.447434, Validation loss 6.481500\n", "Epoch 1799, Training loss 2.447357, Validation loss 6.481671\n", "Epoch 1800, Training loss 2.447279, Validation loss 6.481863\n", "Epoch 1801, Training loss 2.447202, Validation loss 6.482049\n", "Epoch 1802, Training loss 2.447126, Validation loss 6.482226\n", "Epoch 1803, Training loss 2.447049, Validation loss 6.482397\n", "Epoch 1804, Training loss 2.446973, Validation loss 6.482589\n", "Epoch 1805, Training loss 2.446898, Validation loss 6.482760\n", "Epoch 1806, Training loss 2.446823, Validation loss 6.482937\n", "Epoch 1807, Training loss 2.446747, Validation loss 6.483104\n", "Epoch 1808, Training loss 2.446672, Validation loss 6.483276\n", "Epoch 1809, Training loss 2.446598, Validation loss 6.483458\n", "Epoch 1810, Training loss 2.446523, Validation loss 6.483639\n", "Epoch 1811, Training loss 2.446450, Validation loss 6.483806\n", "Epoch 1812, Training loss 2.446376, Validation loss 6.483988\n", "Epoch 1813, Training loss 2.446302, Validation loss 6.484169\n", "Epoch 1814, Training loss 2.446230, Validation loss 6.484341\n", "Epoch 1815, 
Training loss 2.446158, Validation loss 6.484522\n", "Epoch 1816, Training loss 2.446084, Validation loss 6.484694\n", "Epoch 1817, Training loss 2.446013, Validation loss 6.484871\n", "Epoch 1818, Training loss 2.445941, Validation loss 6.485047\n", "Epoch 1819, Training loss 2.445870, Validation loss 6.485214\n", "Epoch 1820, Training loss 2.445799, Validation loss 6.485376\n", "Epoch 1821, Training loss 2.445728, Validation loss 6.485547\n", "Epoch 1822, Training loss 2.445657, Validation loss 6.485724\n", "Epoch 1823, Training loss 2.445588, Validation loss 6.485901\n", "Epoch 1824, Training loss 2.445518, Validation loss 6.486082\n", "Epoch 1825, Training loss 2.445447, Validation loss 6.486244\n", "Epoch 1826, Training loss 2.445378, Validation loss 6.486411\n", "Epoch 1827, Training loss 2.445310, Validation loss 6.486582\n", "Epoch 1828, Training loss 2.445240, Validation loss 6.486764\n", "Epoch 1829, Training loss 2.445172, Validation loss 6.486911\n", "Epoch 1830, Training loss 2.445104, Validation loss 6.487082\n", "Epoch 1831, Training loss 2.445036, Validation loss 6.487254\n", "Epoch 1832, Training loss 2.444969, Validation loss 6.487431\n", "Epoch 1833, Training loss 2.444900, Validation loss 6.487597\n", "Epoch 1834, Training loss 2.444834, Validation loss 6.487773\n", "Epoch 1835, Training loss 2.444767, Validation loss 6.487940\n", "Epoch 1836, Training loss 2.444699, Validation loss 6.488117\n", "Epoch 1837, Training loss 2.444634, Validation loss 6.488274\n", "Epoch 1838, Training loss 2.444569, Validation loss 6.488426\n", "Epoch 1839, Training loss 2.444501, Validation loss 6.488602\n", "Epoch 1840, Training loss 2.444437, Validation loss 6.488773\n", "Epoch 1841, Training loss 2.444371, Validation loss 6.488935\n", "Epoch 1842, Training loss 2.444306, Validation loss 6.489117\n", "Epoch 1843, Training loss 2.444242, Validation loss 6.489279\n", "Epoch 1844, Training loss 2.444177, Validation loss 6.489440\n", "Epoch 1845, Training loss 2.444113, Validation loss 6.489588\n", "Epoch 1846, Training loss 2.444049, Validation loss 6.489749\n", "Epoch 1847, Training loss 2.443985, Validation loss 6.489921\n", "Epoch 1848, Training loss 2.443923, Validation loss 6.490082\n", "Epoch 1849, Training loss 2.443858, Validation loss 6.490239\n", "Epoch 1850, Training loss 2.443795, Validation loss 6.490416\n", "Epoch 1851, Training loss 2.443734, Validation loss 6.490567\n", "Epoch 1852, Training loss 2.443671, Validation loss 6.490738\n", "Epoch 1853, Training loss 2.443609, Validation loss 6.490900\n", "Epoch 1854, Training loss 2.443547, Validation loss 6.491052\n", "Epoch 1855, Training loss 2.443486, Validation loss 6.491229\n", "Epoch 1856, Training loss 2.443424, Validation loss 6.491390\n", "Epoch 1857, Training loss 2.443362, Validation loss 6.491542\n", "Epoch 1858, Training loss 2.443304, Validation loss 6.491693\n", "Epoch 1859, Training loss 2.443242, Validation loss 6.491860\n", "Epoch 1860, Training loss 2.443182, Validation loss 6.492027\n", "Epoch 1861, Training loss 2.443121, Validation loss 6.492184\n", "Epoch 1862, Training loss 2.443061, Validation loss 6.492345\n", "Epoch 1863, Training loss 2.443002, Validation loss 6.492497\n", "Epoch 1864, Training loss 2.442943, Validation loss 6.492649\n", "Epoch 1865, Training loss 2.442884, Validation loss 6.492801\n", "Epoch 1866, Training loss 2.442825, Validation loss 6.492977\n", "Epoch 1867, Training loss 2.442767, Validation loss 6.493138\n", "Epoch 1868, Training loss 2.442708, Validation loss 6.493299\n", 
"Epoch 1869, Training loss 2.442649, Validation loss 6.493442\n", "Epoch 1870, Training loss 2.442591, Validation loss 6.493604\n", "Epoch 1871, Training loss 2.442534, Validation loss 6.493750\n", "Epoch 1872, Training loss 2.442476, Validation loss 6.493917\n", "Epoch 1873, Training loss 2.442421, Validation loss 6.494073\n", "Epoch 1874, Training loss 2.442363, Validation loss 6.494215\n", "Epoch 1875, Training loss 2.442307, Validation loss 6.494367\n", "Epoch 1876, Training loss 2.442249, Validation loss 6.494524\n", "Epoch 1877, Training loss 2.442193, Validation loss 6.494690\n", "Epoch 1878, Training loss 2.442138, Validation loss 6.494837\n", "Epoch 1879, Training loss 2.442083, Validation loss 6.494978\n", "Epoch 1880, Training loss 2.442026, Validation loss 6.495145\n", "Epoch 1881, Training loss 2.441971, Validation loss 6.495302\n", "Epoch 1882, Training loss 2.441916, Validation loss 6.495458\n", "Epoch 1883, Training loss 2.441861, Validation loss 6.495600\n", "Epoch 1884, Training loss 2.441807, Validation loss 6.495757\n", "Epoch 1885, Training loss 2.441751, Validation loss 6.495898\n", "Epoch 1886, Training loss 2.441697, Validation loss 6.496055\n", "Epoch 1887, Training loss 2.441645, Validation loss 6.496207\n", "Epoch 1888, Training loss 2.441592, Validation loss 6.496368\n", "Epoch 1889, Training loss 2.441537, Validation loss 6.496515\n", "Epoch 1890, Training loss 2.441483, Validation loss 6.496652\n", "Epoch 1891, Training loss 2.441430, Validation loss 6.496799\n", "Epoch 1892, Training loss 2.441378, Validation loss 6.496950\n", "Epoch 1893, Training loss 2.441326, Validation loss 6.497091\n", "Epoch 1894, Training loss 2.441274, Validation loss 6.497253\n", "Epoch 1895, Training loss 2.441221, Validation loss 6.497405\n", "Epoch 1896, Training loss 2.441169, Validation loss 6.497551\n", "Epoch 1897, Training loss 2.441117, Validation loss 6.497708\n", "Epoch 1898, Training loss 2.441066, Validation loss 6.497849\n", "Epoch 1899, Training loss 2.441014, Validation loss 6.497992\n", "Epoch 1900, Training loss 2.440964, Validation loss 6.498133\n", "Epoch 1901, Training loss 2.440912, Validation loss 6.498295\n", "Epoch 1902, Training loss 2.440861, Validation loss 6.498426\n", "Epoch 1903, Training loss 2.440812, Validation loss 6.498563\n", "Epoch 1904, Training loss 2.440762, Validation loss 6.498710\n", "Epoch 1905, Training loss 2.440711, Validation loss 6.498871\n", "Epoch 1906, Training loss 2.440662, Validation loss 6.499022\n", "Epoch 1907, Training loss 2.440610, Validation loss 6.499155\n", "Epoch 1908, Training loss 2.440562, Validation loss 6.499311\n", "Epoch 1909, Training loss 2.440512, Validation loss 6.499448\n", "Epoch 1910, Training loss 2.440464, Validation loss 6.499575\n", "Epoch 1911, Training loss 2.440415, Validation loss 6.499727\n", "Epoch 1912, Training loss 2.440366, Validation loss 6.499878\n", "Epoch 1913, Training loss 2.440318, Validation loss 6.500020\n", "Epoch 1914, Training loss 2.440269, Validation loss 6.500146\n", "Epoch 1915, Training loss 2.440222, Validation loss 6.500293\n", "Epoch 1916, Training loss 2.440173, Validation loss 6.500439\n", "Epoch 1917, Training loss 2.440126, Validation loss 6.500586\n", "Epoch 1918, Training loss 2.440079, Validation loss 6.500723\n", "Epoch 1919, Training loss 2.440031, Validation loss 6.500864\n", "Epoch 1920, Training loss 2.439985, Validation loss 6.500997\n", "Epoch 1921, Training loss 2.439938, Validation loss 6.501153\n", "Epoch 1922, Training loss 2.439891, Validation loss 
6.501280\n", "Epoch 1923, Training loss 2.439845, Validation loss 6.501431\n", "Epoch 1924, Training loss 2.439800, Validation loss 6.501577\n", "Epoch 1925, Training loss 2.439752, Validation loss 6.501719\n", "Epoch 1926, Training loss 2.439707, Validation loss 6.501851\n", "Epoch 1927, Training loss 2.439661, Validation loss 6.501997\n", "Epoch 1928, Training loss 2.439616, Validation loss 6.502134\n", "Epoch 1929, Training loss 2.439569, Validation loss 6.502261\n", "Epoch 1930, Training loss 2.439525, Validation loss 6.502393\n", "Epoch 1931, Training loss 2.439480, Validation loss 6.502543\n", "Epoch 1932, Training loss 2.439435, Validation loss 6.502666\n", "Epoch 1933, Training loss 2.439391, Validation loss 6.502808\n", "Epoch 1934, Training loss 2.439345, Validation loss 6.502944\n", "Epoch 1935, Training loss 2.439301, Validation loss 6.503090\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1936, Training loss 2.439257, Validation loss 6.503217\n", "Epoch 1937, Training loss 2.439214, Validation loss 6.503368\n", "Epoch 1938, Training loss 2.439170, Validation loss 6.503500\n", "Epoch 1939, Training loss 2.439126, Validation loss 6.503632\n", "Epoch 1940, Training loss 2.439083, Validation loss 6.503763\n", "Epoch 1941, Training loss 2.439040, Validation loss 6.503910\n", "Epoch 1942, Training loss 2.438998, Validation loss 6.504046\n", "Epoch 1943, Training loss 2.438954, Validation loss 6.504169\n", "Epoch 1944, Training loss 2.438911, Validation loss 6.504300\n", "Epoch 1945, Training loss 2.438870, Validation loss 6.504441\n", "Epoch 1946, Training loss 2.438827, Validation loss 6.504573\n", "Epoch 1947, Training loss 2.438784, Validation loss 6.504705\n", "Epoch 1948, Training loss 2.438742, Validation loss 6.504841\n", "Epoch 1949, Training loss 2.438702, Validation loss 6.504978\n", "Epoch 1950, Training loss 2.438659, Validation loss 6.505110\n", "Epoch 1951, Training loss 2.438619, Validation loss 6.505251\n", "Epoch 1952, Training loss 2.438576, Validation loss 6.505373\n", "Epoch 1953, Training loss 2.438536, Validation loss 6.505504\n", "Epoch 1954, Training loss 2.438495, Validation loss 6.505631\n", "Epoch 1955, Training loss 2.438455, Validation loss 6.505773\n", "Epoch 1956, Training loss 2.438413, Validation loss 6.505894\n", "Epoch 1957, Training loss 2.438373, Validation loss 6.506031\n", "Epoch 1958, Training loss 2.438334, Validation loss 6.506163\n", "Epoch 1959, Training loss 2.438294, Validation loss 6.506294\n", "Epoch 1960, Training loss 2.438252, Validation loss 6.506420\n", "Epoch 1961, Training loss 2.438214, Validation loss 6.506552\n", "Epoch 1962, Training loss 2.438174, Validation loss 6.506679\n", "Epoch 1963, Training loss 2.438135, Validation loss 6.506805\n", "Epoch 1964, Training loss 2.438095, Validation loss 6.506937\n", "Epoch 1965, Training loss 2.438057, Validation loss 6.507059\n", "Epoch 1966, Training loss 2.438018, Validation loss 6.507200\n", "Epoch 1967, Training loss 2.437979, Validation loss 6.507331\n", "Epoch 1968, Training loss 2.437940, Validation loss 6.507448\n", "Epoch 1969, Training loss 2.437902, Validation loss 6.507575\n", "Epoch 1970, Training loss 2.437865, Validation loss 6.507702\n", "Epoch 1971, Training loss 2.437825, Validation loss 6.507823\n", "Epoch 1972, Training loss 2.437788, Validation loss 6.507950\n", "Epoch 1973, Training loss 2.437749, Validation loss 6.508081\n", "Epoch 1974, Training loss 2.437713, Validation loss 6.508213\n", "Epoch 1975, Training loss 2.437675, Validation 
loss 6.508339\n", "Epoch 1976, Training loss 2.437637, Validation loss 6.508466\n", "Epoch 1977, Training loss 2.437601, Validation loss 6.508593\n", "Epoch 1978, Training loss 2.437563, Validation loss 6.508719\n", "Epoch 1979, Training loss 2.437527, Validation loss 6.508831\n", "Epoch 1980, Training loss 2.437490, Validation loss 6.508952\n", "Epoch 1981, Training loss 2.437454, Validation loss 6.509094\n", "Epoch 1982, Training loss 2.437418, Validation loss 6.509206\n", "Epoch 1983, Training loss 2.437381, Validation loss 6.509322\n", "Epoch 1984, Training loss 2.437345, Validation loss 6.509454\n", "Epoch 1985, Training loss 2.437309, Validation loss 6.509580\n", "Epoch 1986, Training loss 2.437273, Validation loss 6.509697\n", "Epoch 1987, Training loss 2.437238, Validation loss 6.509828\n", "Epoch 1988, Training loss 2.437202, Validation loss 6.509940\n", "Epoch 1989, Training loss 2.437166, Validation loss 6.510072\n", "Epoch 1990, Training loss 2.437132, Validation loss 6.510188\n", "Epoch 1991, Training loss 2.437097, Validation loss 6.510314\n", "Epoch 1992, Training loss 2.437060, Validation loss 6.510431\n", "Epoch 1993, Training loss 2.437027, Validation loss 6.510553\n", "Epoch 1994, Training loss 2.436991, Validation loss 6.510669\n", "Epoch 1995, Training loss 2.436957, Validation loss 6.510801\n", "Epoch 1996, Training loss 2.436923, Validation loss 6.510922\n", "Epoch 1997, Training loss 2.436889, Validation loss 6.511039\n", "Epoch 1998, Training loss 2.436855, Validation loss 6.511165\n", "Epoch 1999, Training loss 2.436820, Validation loss 6.511282\n", "Epoch 2000, Training loss 2.436787, Validation loss 6.511398\n", "Epoch 2001, Training loss 2.436754, Validation loss 6.511510\n", "Epoch 2002, Training loss 2.436720, Validation loss 6.511632\n", "Epoch 2003, Training loss 2.436688, Validation loss 6.511758\n", "Epoch 2004, Training loss 2.436653, Validation loss 6.511875\n", "Epoch 2005, Training loss 2.436620, Validation loss 6.511996\n", "Epoch 2006, Training loss 2.436587, Validation loss 6.512103\n", "Epoch 2007, Training loss 2.436554, Validation loss 6.512229\n", "Epoch 2008, Training loss 2.436521, Validation loss 6.512356\n", "Epoch 2009, Training loss 2.436490, Validation loss 6.512467\n", "Epoch 2010, Training loss 2.436456, Validation loss 6.512579\n", "Epoch 2011, Training loss 2.436424, Validation loss 6.512701\n", "Epoch 2012, Training loss 2.436391, Validation loss 6.512822\n", "Epoch 2013, Training loss 2.436360, Validation loss 6.512938\n", "Epoch 2014, Training loss 2.436329, Validation loss 6.513055\n", "Epoch 2015, Training loss 2.436296, Validation loss 6.513162\n", "Epoch 2016, Training loss 2.436265, Validation loss 6.513278\n", "Epoch 2017, Training loss 2.436234, Validation loss 6.513390\n", "Epoch 2018, Training loss 2.436202, Validation loss 6.513512\n", "Epoch 2019, Training loss 2.436170, Validation loss 6.513622\n", "Epoch 2020, Training loss 2.436139, Validation loss 6.513739\n", "Epoch 2021, Training loss 2.436109, Validation loss 6.513861\n", "Epoch 2022, Training loss 2.436077, Validation loss 6.513968\n", "Epoch 2023, Training loss 2.436048, Validation loss 6.514089\n", "Epoch 2024, Training loss 2.436017, Validation loss 6.514186\n", "Epoch 2025, Training loss 2.435985, Validation loss 6.514297\n", "Epoch 2026, Training loss 2.435956, Validation loss 6.514418\n", "Epoch 2027, Training loss 2.435925, Validation loss 6.514530\n", "Epoch 2028, Training loss 2.435896, Validation loss 6.514642\n", "Epoch 2029, Training loss 2.435864, 
Validation loss 6.514768\n", "Epoch 2030, Training loss 2.435834, Validation loss 6.514884\n", "Epoch 2031, Training loss 2.435805, Validation loss 6.514981\n", "Epoch 2032, Training loss 2.435776, Validation loss 6.515107\n", "Epoch 2033, Training loss 2.435747, Validation loss 6.515228\n", "Epoch 2034, Training loss 2.435716, Validation loss 6.515330\n", "Epoch 2035, Training loss 2.435687, Validation loss 6.515441\n", "Epoch 2036, Training loss 2.435659, Validation loss 6.515533\n", "Epoch 2037, Training loss 2.435630, Validation loss 6.515650\n", "Epoch 2038, Training loss 2.435601, Validation loss 6.515757\n", "Epoch 2039, Training loss 2.435573, Validation loss 6.515878\n", "Epoch 2040, Training loss 2.435544, Validation loss 6.515999\n", "Epoch 2041, Training loss 2.435515, Validation loss 6.516101\n", "Epoch 2042, Training loss 2.435486, Validation loss 6.516208\n", "Epoch 2043, Training loss 2.435458, Validation loss 6.516319\n", "Epoch 2044, Training loss 2.435430, Validation loss 6.516425\n", "Epoch 2045, Training loss 2.435402, Validation loss 6.516527\n", "Epoch 2046, Training loss 2.435374, Validation loss 6.516643\n", "Epoch 2047, Training loss 2.435347, Validation loss 6.516759\n", "Epoch 2048, Training loss 2.435319, Validation loss 6.516866\n", "Epoch 2049, Training loss 2.435291, Validation loss 6.516973\n", "Epoch 2050, Training loss 2.435263, Validation loss 6.517084\n", "Epoch 2051, Training loss 2.435236, Validation loss 6.517186\n", "Epoch 2052, Training loss 2.435210, Validation loss 6.517292\n", "Epoch 2053, Training loss 2.435182, Validation loss 6.517404\n", "Epoch 2054, Training loss 2.435155, Validation loss 6.517500\n", "Epoch 2055, Training loss 2.435128, Validation loss 6.517607\n", "Epoch 2056, Training loss 2.435101, Validation loss 6.517718\n", "Epoch 2057, Training loss 2.435074, Validation loss 6.517830\n", "Epoch 2058, Training loss 2.435049, Validation loss 6.517936\n", "Epoch 2059, Training loss 2.435022, Validation loss 6.518038\n", "Epoch 2060, Training loss 2.434994, Validation loss 6.518139\n", "Epoch 2061, Training loss 2.434969, Validation loss 6.518235\n", "Epoch 2062, Training loss 2.434942, Validation loss 6.518357\n", "Epoch 2063, Training loss 2.434916, Validation loss 6.518458\n", "Epoch 2064, Training loss 2.434890, Validation loss 6.518559\n", "Epoch 2065, Training loss 2.434864, Validation loss 6.518666\n", "Epoch 2066, Training loss 2.434839, Validation loss 6.518783\n", "Epoch 2067, Training loss 2.434815, Validation loss 6.518884\n", "Epoch 2068, Training loss 2.434788, Validation loss 6.518985\n", "Epoch 2069, Training loss 2.434762, Validation loss 6.519087\n", "Epoch 2070, Training loss 2.434738, Validation loss 6.519189\n", "Epoch 2071, Training loss 2.434711, Validation loss 6.519295\n", "Epoch 2072, Training loss 2.434687, Validation loss 6.519391\n", "Epoch 2073, Training loss 2.434662, Validation loss 6.519497\n", "Epoch 2074, Training loss 2.434637, Validation loss 6.519585\n", "Epoch 2075, Training loss 2.434613, Validation loss 6.519701\n", "Epoch 2076, Training loss 2.434587, Validation loss 6.519812\n", "Epoch 2077, Training loss 2.434563, Validation loss 6.519904\n", "Epoch 2078, Training loss 2.434539, Validation loss 6.520005\n", "Epoch 2079, Training loss 2.434513, Validation loss 6.520116\n", "Epoch 2080, Training loss 2.434491, Validation loss 6.520213\n", "Epoch 2081, Training loss 2.434466, Validation loss 6.520319\n", "Epoch 2082, Training loss 2.434442, Validation loss 6.520415\n", "Epoch 2083, Training loss 
2.434417, Validation loss 6.520517\n", "Epoch 2084, Training loss 2.434394, Validation loss 6.520623\n", "Epoch 2085, Training loss 2.434370, Validation loss 6.520715\n", "Epoch 2086, Training loss 2.434346, Validation loss 6.520816\n", "Epoch 2087, Training loss 2.434323, Validation loss 6.520918\n", "Epoch 2088, Training loss 2.434299, Validation loss 6.521019\n", "Epoch 2089, Training loss 2.434277, Validation loss 6.521116\n", "Epoch 2090, Training loss 2.434252, Validation loss 6.521217\n", "Epoch 2091, Training loss 2.434229, Validation loss 6.521318\n", "Epoch 2092, Training loss 2.434207, Validation loss 6.521424\n", "Epoch 2093, Training loss 2.434182, Validation loss 6.521512\n", "Epoch 2094, Training loss 2.434160, Validation loss 6.521603\n", "Epoch 2095, Training loss 2.434135, Validation loss 6.521704\n", "Epoch 2096, Training loss 2.434115, Validation loss 6.521795\n", "Epoch 2097, Training loss 2.434092, Validation loss 6.521897\n", "Epoch 2098, Training loss 2.434070, Validation loss 6.522003\n", "Epoch 2099, Training loss 2.434047, Validation loss 6.522105\n", "Epoch 2100, Training loss 2.434025, Validation loss 6.522201\n", "Epoch 2101, Training loss 2.434001, Validation loss 6.522307\n", "Epoch 2102, Training loss 2.433980, Validation loss 6.522394\n", "Epoch 2103, Training loss 2.433958, Validation loss 6.522495\n", "Epoch 2104, Training loss 2.433935, Validation loss 6.522587\n", "Epoch 2105, Training loss 2.433914, Validation loss 6.522678\n", "Epoch 2106, Training loss 2.433892, Validation loss 6.522770\n", "Epoch 2107, Training loss 2.433872, Validation loss 6.522871\n", "Epoch 2108, Training loss 2.433850, Validation loss 6.522967\n", "Epoch 2109, Training loss 2.433827, Validation loss 6.523064\n", "Epoch 2110, Training loss 2.433806, Validation loss 6.523175\n", "Epoch 2111, Training loss 2.433784, Validation loss 6.523266\n", "Epoch 2112, Training loss 2.433764, Validation loss 6.523362\n", "Epoch 2113, Training loss 2.433742, Validation loss 6.523454\n", "Epoch 2114, Training loss 2.433723, Validation loss 6.523545\n", "Epoch 2115, Training loss 2.433699, Validation loss 6.523642\n", "Epoch 2116, Training loss 2.433681, Validation loss 6.523738\n", "Epoch 2117, Training loss 2.433659, Validation loss 6.523829\n", "Epoch 2118, Training loss 2.433637, Validation loss 6.523936\n", "Epoch 2119, Training loss 2.433618, Validation loss 6.524022\n", "Epoch 2120, Training loss 2.433596, Validation loss 6.524118\n", "Epoch 2121, Training loss 2.433576, Validation loss 6.524200\n", "Epoch 2122, Training loss 2.433557, Validation loss 6.524286\n", "Epoch 2123, Training loss 2.433536, Validation loss 6.524388\n", "Epoch 2124, Training loss 2.433515, Validation loss 6.524484\n", "Epoch 2125, Training loss 2.433495, Validation loss 6.524580\n", "Epoch 2126, Training loss 2.433475, Validation loss 6.524672\n", "Epoch 2127, Training loss 2.433455, Validation loss 6.524758\n", "Epoch 2128, Training loss 2.433436, Validation loss 6.524854\n", "Epoch 2129, Training loss 2.433414, Validation loss 6.524946\n", "Epoch 2130, Training loss 2.433397, Validation loss 6.525037\n", "Epoch 2131, Training loss 2.433377, Validation loss 6.525124\n", "Epoch 2132, Training loss 2.433356, Validation loss 6.525215\n", "Epoch 2133, Training loss 2.433336, Validation loss 6.525311\n", "Epoch 2134, Training loss 2.433316, Validation loss 6.525393\n", "Epoch 2135, Training loss 2.433299, Validation loss 6.525494\n", "Epoch 2136, Training loss 2.433278, Validation loss 6.525570\n", "Epoch 2137, 
Training loss 2.433261, Validation loss 6.525662\n", "Epoch 2138, Training loss 2.433241, Validation loss 6.525748\n", "Epoch 2139, Training loss 2.433221, Validation loss 6.525854\n", "Epoch 2140, Training loss 2.433201, Validation loss 6.525941\n", "Epoch 2141, Training loss 2.433184, Validation loss 6.526027\n", "Epoch 2142, Training loss 2.433165, Validation loss 6.526108\n", "Epoch 2143, Training loss 2.433146, Validation loss 6.526199\n", "Epoch 2144, Training loss 2.433128, Validation loss 6.526286\n", "Epoch 2145, Training loss 2.433110, Validation loss 6.526397\n", "Epoch 2146, Training loss 2.433090, Validation loss 6.526484\n", "Epoch 2147, Training loss 2.433074, Validation loss 6.526565\n", "Epoch 2148, Training loss 2.433054, Validation loss 6.526651\n", "Epoch 2149, Training loss 2.433036, Validation loss 6.526728\n", "Epoch 2150, Training loss 2.433019, Validation loss 6.526824\n", "Epoch 2151, Training loss 2.432999, Validation loss 6.526910\n", "Epoch 2152, Training loss 2.432980, Validation loss 6.526997\n", "Epoch 2153, Training loss 2.432963, Validation loss 6.527088\n", "Epoch 2154, Training loss 2.432946, Validation loss 6.527189\n", "Epoch 2155, Training loss 2.432927, Validation loss 6.527270\n", "Epoch 2156, Training loss 2.432910, Validation loss 6.527362\n", "Epoch 2157, Training loss 2.432893, Validation loss 6.527438\n", "Epoch 2158, Training loss 2.432874, Validation loss 6.527510\n", "Epoch 2159, Training loss 2.432858, Validation loss 6.527596\n", "Epoch 2160, Training loss 2.432839, Validation loss 6.527692\n", "Epoch 2161, Training loss 2.432823, Validation loss 6.527788\n", "Epoch 2162, Training loss 2.432804, Validation loss 6.527874\n", "Epoch 2163, Training loss 2.432788, Validation loss 6.527946\n", "Epoch 2164, Training loss 2.432771, Validation loss 6.528028\n", "Epoch 2165, Training loss 2.432754, Validation loss 6.528119\n", "Epoch 2166, Training loss 2.432736, Validation loss 6.528210\n", "Epoch 2167, Training loss 2.432719, Validation loss 6.528291\n", "Epoch 2168, Training loss 2.432703, Validation loss 6.528372\n", "Epoch 2169, Training loss 2.432685, Validation loss 6.528459\n", "Epoch 2170, Training loss 2.432669, Validation loss 6.528545\n", "Epoch 2171, Training loss 2.432653, Validation loss 6.528621\n", "Epoch 2172, Training loss 2.432635, Validation loss 6.528708\n", "Epoch 2173, Training loss 2.432620, Validation loss 6.528793\n", "Epoch 2174, Training loss 2.432602, Validation loss 6.528875\n", "Epoch 2175, Training loss 2.432586, Validation loss 6.528956\n", "Epoch 2176, Training loss 2.432571, Validation loss 6.529043\n", "Epoch 2177, Training loss 2.432554, Validation loss 6.529134\n", "Epoch 2178, Training loss 2.432538, Validation loss 6.529215\n", "Epoch 2179, Training loss 2.432522, Validation loss 6.529311\n", "Epoch 2180, Training loss 2.432505, Validation loss 6.529387\n", "Epoch 2181, Training loss 2.432489, Validation loss 6.529464\n", "Epoch 2182, Training loss 2.432474, Validation loss 6.529545\n", "Epoch 2183, Training loss 2.432459, Validation loss 6.529632\n", "Epoch 2184, Training loss 2.432441, Validation loss 6.529713\n", "Epoch 2185, Training loss 2.432426, Validation loss 6.529789\n", "Epoch 2186, Training loss 2.432410, Validation loss 6.529880\n", "Epoch 2187, Training loss 2.432395, Validation loss 6.529962\n", "Epoch 2188, Training loss 2.432379, Validation loss 6.530033\n", "Epoch 2189, Training loss 2.432364, Validation loss 6.530124\n", "Epoch 2190, Training loss 2.432348, Validation loss 6.530200\n", 
"Epoch 2191, Training loss 2.432332, Validation loss 6.530272\n", "Epoch 2192, Training loss 2.432318, Validation loss 6.530357\n", "Epoch 2193, Training loss 2.432303, Validation loss 6.530434\n", "Epoch 2194, Training loss 2.432289, Validation loss 6.530515\n", "Epoch 2195, Training loss 2.432273, Validation loss 6.530591\n", "Epoch 2196, Training loss 2.432258, Validation loss 6.530678\n", "Epoch 2197, Training loss 2.432242, Validation loss 6.530749\n", "Epoch 2198, Training loss 2.432227, Validation loss 6.530845\n", "Epoch 2199, Training loss 2.432213, Validation loss 6.530911\n", "Epoch 2200, Training loss 2.432199, Validation loss 6.531003\n", "Epoch 2201, Training loss 2.432184, Validation loss 6.531074\n", "Epoch 2202, Training loss 2.432168, Validation loss 6.531160\n", "Epoch 2203, Training loss 2.432154, Validation loss 6.531241\n", "Epoch 2204, Training loss 2.432139, Validation loss 6.531318\n", "Epoch 2205, Training loss 2.432125, Validation loss 6.531384\n", "Epoch 2206, Training loss 2.432111, Validation loss 6.531469\n", "Epoch 2207, Training loss 2.432096, Validation loss 6.531541\n", "Epoch 2208, Training loss 2.432081, Validation loss 6.531622\n", "Epoch 2209, Training loss 2.432068, Validation loss 6.531699\n", "Epoch 2210, Training loss 2.432053, Validation loss 6.531790\n", "Epoch 2211, Training loss 2.432038, Validation loss 6.531861\n", "Epoch 2212, Training loss 2.432025, Validation loss 6.531932\n", "Epoch 2213, Training loss 2.432012, Validation loss 6.532028\n", "Epoch 2214, Training loss 2.431996, Validation loss 6.532089\n", "Epoch 2215, Training loss 2.431983, Validation loss 6.532166\n", "Epoch 2216, Training loss 2.431970, Validation loss 6.532251\n", "Epoch 2217, Training loss 2.431956, Validation loss 6.532309\n", "Epoch 2218, Training loss 2.431942, Validation loss 6.532404\n", "Epoch 2219, Training loss 2.431929, Validation loss 6.532485\n", "Epoch 2220, Training loss 2.431914, Validation loss 6.532547\n", "Epoch 2221, Training loss 2.431902, Validation loss 6.532623\n", "Epoch 2222, Training loss 2.431887, Validation loss 6.532694\n", "Epoch 2223, Training loss 2.431873, Validation loss 6.532775\n", "Epoch 2224, Training loss 2.431861, Validation loss 6.532856\n", "Epoch 2225, Training loss 2.431848, Validation loss 6.532923\n", "Epoch 2226, Training loss 2.431834, Validation loss 6.532994\n", "Epoch 2227, Training loss 2.431822, Validation loss 6.533070\n", "Epoch 2228, Training loss 2.431807, Validation loss 6.533166\n", "Epoch 2229, Training loss 2.431794, Validation loss 6.533232\n", "Epoch 2230, Training loss 2.431782, Validation loss 6.533298\n", "Epoch 2231, Training loss 2.431767, Validation loss 6.533384\n", "Epoch 2232, Training loss 2.431756, Validation loss 6.533456\n", "Epoch 2233, Training loss 2.431743, Validation loss 6.533526\n", "Epoch 2234, Training loss 2.431730, Validation loss 6.533608\n", "Epoch 2235, Training loss 2.431718, Validation loss 6.533669\n", "Epoch 2236, Training loss 2.431705, Validation loss 6.533741\n", "Epoch 2237, Training loss 2.431692, Validation loss 6.533817\n", "Epoch 2238, Training loss 2.431679, Validation loss 6.533888\n", "Epoch 2239, Training loss 2.431667, Validation loss 6.533959\n", "Epoch 2240, Training loss 2.431654, Validation loss 6.534036\n", "Epoch 2241, Training loss 2.431642, Validation loss 6.534112\n", "Epoch 2242, Training loss 2.431630, Validation loss 6.534187\n", "Epoch 2243, Training loss 2.431618, Validation loss 6.534248\n", "Epoch 2244, Training loss 2.431604, Validation loss 
6.534334\n", "Epoch 2245, Training loss 2.431591, Validation loss 6.534411\n", "Epoch 2246, Training loss 2.431580, Validation loss 6.534467\n", "Epoch 2247, Training loss 2.431567, Validation loss 6.534554\n", "Epoch 2248, Training loss 2.431555, Validation loss 6.534615\n", "Epoch 2249, Training loss 2.431543, Validation loss 6.534700\n", "Epoch 2250, Training loss 2.431532, Validation loss 6.534761\n", "Epoch 2251, Training loss 2.431518, Validation loss 6.534828\n", "Epoch 2252, Training loss 2.431508, Validation loss 6.534904\n", "Epoch 2253, Training loss 2.431496, Validation loss 6.534980\n", "Epoch 2254, Training loss 2.431484, Validation loss 6.535037\n", "Epoch 2255, Training loss 2.431472, Validation loss 6.535122\n", "Epoch 2256, Training loss 2.431460, Validation loss 6.535189\n", "Epoch 2257, Training loss 2.431449, Validation loss 6.535264\n", "Epoch 2258, Training loss 2.431437, Validation loss 6.535326\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 2259, Training loss 2.431426, Validation loss 6.535397\n", "Epoch 2260, Training loss 2.431413, Validation loss 6.535468\n", "Epoch 2261, Training loss 2.431403, Validation loss 6.535539\n", "Epoch 2262, Training loss 2.431392, Validation loss 6.535605\n", "Epoch 2263, Training loss 2.431380, Validation loss 6.535677\n", "Epoch 2264, Training loss 2.431369, Validation loss 6.535743\n", "Epoch 2265, Training loss 2.431358, Validation loss 6.535814\n", "Epoch 2266, Training loss 2.431346, Validation loss 6.535900\n", "Epoch 2267, Training loss 2.431335, Validation loss 6.535956\n", "Epoch 2268, Training loss 2.431324, Validation loss 6.536022\n", "Epoch 2269, Training loss 2.431312, Validation loss 6.536098\n", "Epoch 2270, Training loss 2.431302, Validation loss 6.536160\n", "Epoch 2271, Training loss 2.431292, Validation loss 6.536230\n", "Epoch 2272, Training loss 2.431278, Validation loss 6.536296\n", "Epoch 2273, Training loss 2.431268, Validation loss 6.536363\n", "Epoch 2274, Training loss 2.431257, Validation loss 6.536439\n", "Epoch 2275, Training loss 2.431247, Validation loss 6.536505\n", "Epoch 2276, Training loss 2.431237, Validation loss 6.536576\n", "Epoch 2277, Training loss 2.431226, Validation loss 6.536647\n", "Epoch 2278, Training loss 2.431214, Validation loss 6.536713\n", "Epoch 2279, Training loss 2.431204, Validation loss 6.536769\n", "Epoch 2280, Training loss 2.431193, Validation loss 6.536845\n", "Epoch 2281, Training loss 2.431182, Validation loss 6.536911\n", "Epoch 2282, Training loss 2.431172, Validation loss 6.536967\n", "Epoch 2283, Training loss 2.431161, Validation loss 6.537039\n", "Epoch 2284, Training loss 2.431151, Validation loss 6.537105\n", "Epoch 2285, Training loss 2.431140, Validation loss 6.537176\n", "Epoch 2286, Training loss 2.431130, Validation loss 6.537251\n", "Epoch 2287, Training loss 2.431119, Validation loss 6.537313\n", "Epoch 2288, Training loss 2.431109, Validation loss 6.537374\n", "Epoch 2289, Training loss 2.431100, Validation loss 6.537445\n", "Epoch 2290, Training loss 2.431088, Validation loss 6.537501\n", "Epoch 2291, Training loss 2.431079, Validation loss 6.537577\n", "Epoch 2292, Training loss 2.431069, Validation loss 6.537643\n", "Epoch 2293, Training loss 2.431058, Validation loss 6.537704\n", "Epoch 2294, Training loss 2.431048, Validation loss 6.537770\n", "Epoch 2295, Training loss 2.431039, Validation loss 6.537831\n", "Epoch 2296, Training loss 2.431029, Validation loss 6.537898\n", "Epoch 2297, Training loss 2.431019, Validation 
loss 6.537973\n", "Epoch 2298, Training loss 2.431010, Validation loss 6.538025\n", "Epoch 2299, Training loss 2.431000, Validation loss 6.538086\n", "Epoch 2300, Training loss 2.430991, Validation loss 6.538157\n", "Epoch 2301, Training loss 2.430979, Validation loss 6.538228\n", "Epoch 2302, Training loss 2.430971, Validation loss 6.538280\n", "Epoch 2303, Training loss 2.430959, Validation loss 6.538360\n", "Epoch 2304, Training loss 2.430951, Validation loss 6.538421\n", "Epoch 2305, Training loss 2.430942, Validation loss 6.538487\n", "Epoch 2306, Training loss 2.430933, Validation loss 6.538543\n", "Epoch 2307, Training loss 2.430923, Validation loss 6.538610\n", "Epoch 2308, Training loss 2.430913, Validation loss 6.538665\n", "Epoch 2309, Training loss 2.430903, Validation loss 6.538732\n", "Epoch 2310, Training loss 2.430895, Validation loss 6.538797\n", "Epoch 2311, Training loss 2.430884, Validation loss 6.538858\n", "Epoch 2312, Training loss 2.430875, Validation loss 6.538915\n", "Epoch 2313, Training loss 2.430865, Validation loss 6.538976\n", "Epoch 2314, Training loss 2.430857, Validation loss 6.539046\n", "Epoch 2315, Training loss 2.430848, Validation loss 6.539103\n", "Epoch 2316, Training loss 2.430839, Validation loss 6.539168\n", "Epoch 2317, Training loss 2.430830, Validation loss 6.539229\n", "Epoch 2318, Training loss 2.430820, Validation loss 6.539291\n", "Epoch 2319, Training loss 2.430811, Validation loss 6.539351\n", "Epoch 2320, Training loss 2.430803, Validation loss 6.539427\n", "Epoch 2321, Training loss 2.430793, Validation loss 6.539479\n", "Epoch 2322, Training loss 2.430785, Validation loss 6.539544\n", "Epoch 2323, Training loss 2.430776, Validation loss 6.539610\n", "Epoch 2324, Training loss 2.430767, Validation loss 6.539667\n", "Epoch 2325, Training loss 2.430759, Validation loss 6.539728\n", "Epoch 2326, Training loss 2.430749, Validation loss 6.539784\n", "Epoch 2327, Training loss 2.430741, Validation loss 6.539855\n", "Epoch 2328, Training loss 2.430731, Validation loss 6.539921\n", "Epoch 2329, Training loss 2.430723, Validation loss 6.539971\n", "Epoch 2330, Training loss 2.430714, Validation loss 6.540033\n", "Epoch 2331, Training loss 2.430706, Validation loss 6.540104\n", "Epoch 2332, Training loss 2.430697, Validation loss 6.540165\n", "Epoch 2333, Training loss 2.430689, Validation loss 6.540221\n", "Epoch 2334, Training loss 2.430681, Validation loss 6.540272\n", "Epoch 2335, Training loss 2.430672, Validation loss 6.540328\n", "Epoch 2336, Training loss 2.430664, Validation loss 6.540394\n", "Epoch 2337, Training loss 2.430655, Validation loss 6.540455\n", "Epoch 2338, Training loss 2.430648, Validation loss 6.540511\n", "Epoch 2339, Training loss 2.430638, Validation loss 6.540572\n", "Epoch 2340, Training loss 2.430630, Validation loss 6.540628\n", "Epoch 2341, Training loss 2.430623, Validation loss 6.540694\n", "Epoch 2342, Training loss 2.430614, Validation loss 6.540745\n", "Epoch 2343, Training loss 2.430605, Validation loss 6.540811\n", "Epoch 2344, Training loss 2.430598, Validation loss 6.540882\n", "Epoch 2345, Training loss 2.430589, Validation loss 6.540942\n", "Epoch 2346, Training loss 2.430582, Validation loss 6.540984\n", "Epoch 2347, Training loss 2.430574, Validation loss 6.541035\n", "Epoch 2348, Training loss 2.430566, Validation loss 6.541106\n", "Epoch 2349, Training loss 2.430558, Validation loss 6.541167\n", "Epoch 2350, Training loss 2.430547, Validation loss 6.541223\n", "Epoch 2351, Training loss 2.430540, 
Validation loss 6.541279\n", "Epoch 2352, Training loss 2.430533, Validation loss 6.541335\n", "Epoch 2353, Training loss 2.430525, Validation loss 6.541391\n", "Epoch 2354, Training loss 2.430518, Validation loss 6.541447\n", "Epoch 2355, Training loss 2.430511, Validation loss 6.541512\n", "Epoch 2356, Training loss 2.430502, Validation loss 6.541578\n", "Epoch 2357, Training loss 2.430495, Validation loss 6.541625\n", "Epoch 2358, Training loss 2.430486, Validation loss 6.541686\n", "Epoch 2359, Training loss 2.430478, Validation loss 6.541741\n", "Epoch 2360, Training loss 2.430471, Validation loss 6.541802\n", "Epoch 2361, Training loss 2.430463, Validation loss 6.541853\n", "Epoch 2362, Training loss 2.430456, Validation loss 6.541904\n", "Epoch 2363, Training loss 2.430449, Validation loss 6.541965\n", "Epoch 2364, Training loss 2.430443, Validation loss 6.542031\n", "Epoch 2365, Training loss 2.430434, Validation loss 6.542092\n", "Epoch 2366, Training loss 2.430426, Validation loss 6.542143\n", "Epoch 2367, Training loss 2.430418, Validation loss 6.542189\n", "Epoch 2368, Training loss 2.430412, Validation loss 6.542255\n", "Epoch 2369, Training loss 2.430404, Validation loss 6.542311\n", "Epoch 2370, Training loss 2.430398, Validation loss 6.542357\n", "Epoch 2371, Training loss 2.430389, Validation loss 6.542422\n", "Epoch 2372, Training loss 2.430382, Validation loss 6.542479\n", "Epoch 2373, Training loss 2.430375, Validation loss 6.542535\n", "Epoch 2374, Training loss 2.430368, Validation loss 6.542596\n", "Epoch 2375, Training loss 2.430361, Validation loss 6.542646\n", "Epoch 2376, Training loss 2.430354, Validation loss 6.542698\n", "Epoch 2377, Training loss 2.430346, Validation loss 6.542748\n", "Epoch 2378, Training loss 2.430339, Validation loss 6.542809\n", "Epoch 2379, Training loss 2.430332, Validation loss 6.542866\n", "Epoch 2380, Training loss 2.430324, Validation loss 6.542912\n", "Epoch 2381, Training loss 2.430319, Validation loss 6.542957\n", "Epoch 2382, Training loss 2.430311, Validation loss 6.543023\n", "Epoch 2383, Training loss 2.430305, Validation loss 6.543079\n", "Epoch 2384, Training loss 2.430298, Validation loss 6.543145\n", "Epoch 2385, Training loss 2.430291, Validation loss 6.543191\n", "Epoch 2386, Training loss 2.430284, Validation loss 6.543247\n", "Epoch 2387, Training loss 2.430277, Validation loss 6.543293\n", "Epoch 2388, Training loss 2.430270, Validation loss 6.543349\n", "Epoch 2389, Training loss 2.430264, Validation loss 6.543405\n", "Epoch 2390, Training loss 2.430257, Validation loss 6.543456\n", "Epoch 2391, Training loss 2.430251, Validation loss 6.543496\n", "Epoch 2392, Training loss 2.430243, Validation loss 6.543572\n", "Epoch 2393, Training loss 2.430238, Validation loss 6.543628\n", "Epoch 2394, Training loss 2.430230, Validation loss 6.543674\n", "Epoch 2395, Training loss 2.430223, Validation loss 6.543715\n", "Epoch 2396, Training loss 2.430217, Validation loss 6.543776\n", "Epoch 2397, Training loss 2.430210, Validation loss 6.543827\n", "Epoch 2398, Training loss 2.430204, Validation loss 6.543878\n", "Epoch 2399, Training loss 2.430198, Validation loss 6.543934\n", "Epoch 2400, Training loss 2.430191, Validation loss 6.543980\n", "Epoch 2401, Training loss 2.430184, Validation loss 6.544041\n", "Epoch 2402, Training loss 2.430177, Validation loss 6.544096\n", "Epoch 2403, Training loss 2.430172, Validation loss 6.544152\n", "Epoch 2404, Training loss 2.430165, Validation loss 6.544208\n", "Epoch 2405, Training loss 
2.430159, Validation loss 6.544254\n", "Epoch 2406, Training loss 2.430153, Validation loss 6.544295\n", "Epoch 2407, Training loss 2.430146, Validation loss 6.544356\n", "Epoch 2408, Training loss 2.430140, Validation loss 6.544416\n", "Epoch 2409, Training loss 2.430135, Validation loss 6.544448\n", "Epoch 2410, Training loss 2.430127, Validation loss 6.544504\n", "Epoch 2411, Training loss 2.430122, Validation loss 6.544564\n", "Epoch 2412, Training loss 2.430116, Validation loss 6.544615\n", "Epoch 2413, Training loss 2.430109, Validation loss 6.544666\n", "Epoch 2414, Training loss 2.430104, Validation loss 6.544707\n", "Epoch 2415, Training loss 2.430097, Validation loss 6.544768\n", "Epoch 2416, Training loss 2.430090, Validation loss 6.544814\n", "Epoch 2417, Training loss 2.430084, Validation loss 6.544860\n", "Epoch 2418, Training loss 2.430080, Validation loss 6.544911\n", "Epoch 2419, Training loss 2.430074, Validation loss 6.544967\n", "Epoch 2420, Training loss 2.430066, Validation loss 6.545022\n", "Epoch 2421, Training loss 2.430061, Validation loss 6.545058\n", "Epoch 2422, Training loss 2.430055, Validation loss 6.545124\n", "Epoch 2423, Training loss 2.430049, Validation loss 6.545170\n", "Epoch 2424, Training loss 2.430044, Validation loss 6.545216\n", "Epoch 2425, Training loss 2.430037, Validation loss 6.545272\n", "Epoch 2426, Training loss 2.430032, Validation loss 6.545332\n", "Epoch 2427, Training loss 2.430025, Validation loss 6.545373\n", "Epoch 2428, Training loss 2.430021, Validation loss 6.545420\n", "Epoch 2429, Training loss 2.430014, Validation loss 6.545460\n", "Epoch 2430, Training loss 2.430009, Validation loss 6.545521\n", "Epoch 2431, Training loss 2.430003, Validation loss 6.545572\n", "Epoch 2432, Training loss 2.429997, Validation loss 6.545613\n", "Epoch 2433, Training loss 2.429992, Validation loss 6.545649\n", "Epoch 2434, Training loss 2.429986, Validation loss 6.545719\n", "Epoch 2435, Training loss 2.429981, Validation loss 6.545760\n", "Epoch 2436, Training loss 2.429976, Validation loss 6.545796\n", "Epoch 2437, Training loss 2.429971, Validation loss 6.545852\n", "Epoch 2438, Training loss 2.429964, Validation loss 6.545913\n", "Epoch 2439, Training loss 2.429959, Validation loss 6.545959\n", "Epoch 2440, Training loss 2.429953, Validation loss 6.546005\n", "Epoch 2441, Training loss 2.429947, Validation loss 6.546051\n", "Epoch 2442, Training loss 2.429942, Validation loss 6.546096\n", "Epoch 2443, Training loss 2.429936, Validation loss 6.546147\n", "Epoch 2444, Training loss 2.429931, Validation loss 6.546203\n", "Epoch 2445, Training loss 2.429926, Validation loss 6.546244\n", "Epoch 2446, Training loss 2.429921, Validation loss 6.546289\n", "Epoch 2447, Training loss 2.429916, Validation loss 6.546340\n", "Epoch 2448, Training loss 2.429910, Validation loss 6.546391\n", "Epoch 2449, Training loss 2.429904, Validation loss 6.546432\n", "Epoch 2450, Training loss 2.429899, Validation loss 6.546488\n", "Epoch 2451, Training loss 2.429895, Validation loss 6.546534\n", "Epoch 2452, Training loss 2.429891, Validation loss 6.546584\n", "Epoch 2453, Training loss 2.429883, Validation loss 6.546625\n", "Epoch 2454, Training loss 2.429879, Validation loss 6.546671\n", "Epoch 2455, Training loss 2.429873, Validation loss 6.546722\n", "Epoch 2456, Training loss 2.429869, Validation loss 6.546768\n", "Epoch 2457, Training loss 2.429862, Validation loss 6.546814\n", "Epoch 2458, Training loss 2.429859, Validation loss 6.546859\n", "Epoch 2459, 
Training loss 2.429853, Validation loss 6.546910\n", "...\n", "Epoch 2500, Training loss 2.429663, Validation loss 6.548722\n", "Epoch 2600, Training loss 2.429314, Validation loss 6.552602\n", "Epoch 2700, Training loss 2.429083, Validation loss 6.555725\n", "Epoch 2800, Training loss 2.428930, Validation loss 6.558273\n", "Epoch 2900, Training loss 2.428828, Validation loss 6.560363\n", "Epoch 3000, Training loss 2.428761, Validation loss 6.562046\n", "Epoch 3100, Training loss 2.428718, Validation loss 6.563428\n", "Epoch 3200, Training loss 2.428689, Validation loss 6.564545\n", "Epoch 3300, Training loss 2.428668, Validation loss 6.565468\n", "Epoch 3400, Training loss 2.428656, Validation loss 6.566183\n", "Epoch 3500, Training loss 2.428647, Validation loss 6.566790\n", "...\n", "Epoch 3531, Training loss 
2.428645, Validation loss 6.566953\n", "Epoch 3532, Training loss 2.428645, Validation loss 6.566954\n", "Epoch 3533, Training loss 2.428645, Validation loss 6.566948\n", "Epoch 3534, Training loss 2.428646, Validation loss 6.566968\n", "Epoch 3535, Training loss 2.428645, Validation loss 6.566974\n", "Epoch 3536, Training loss 2.428645, Validation loss 6.566978\n", "Epoch 3537, Training loss 2.428646, Validation loss 6.566969\n", "Epoch 3538, Training loss 2.428645, Validation loss 6.566984\n", "Epoch 3539, Training loss 2.428646, Validation loss 6.566998\n", "Epoch 3540, Training loss 2.428646, Validation loss 6.567009\n", "Epoch 3541, Training loss 2.428644, Validation loss 6.567004\n", "Epoch 3542, Training loss 2.428645, Validation loss 6.567019\n", "Epoch 3543, Training loss 2.428645, Validation loss 6.567024\n", "Epoch 3544, Training loss 2.428645, Validation loss 6.567019\n", "Epoch 3545, Training loss 2.428644, Validation loss 6.567034\n", "Epoch 3546, Training loss 2.428643, Validation loss 6.567029\n", "Epoch 3547, Training loss 2.428644, Validation loss 6.567044\n", "Epoch 3548, Training loss 2.428644, Validation loss 6.567039\n", "Epoch 3549, Training loss 2.428645, Validation loss 6.567055\n", "Epoch 3550, Training loss 2.428644, Validation loss 6.567050\n", "Epoch 3551, Training loss 2.428644, Validation loss 6.567065\n", "Epoch 3552, Training loss 2.428644, Validation loss 6.567060\n", "Epoch 3553, Training loss 2.428644, Validation loss 6.567070\n", "Epoch 3554, Training loss 2.428644, Validation loss 6.567075\n", "Epoch 3555, Training loss 2.428643, Validation loss 6.567070\n", "Epoch 3556, Training loss 2.428643, Validation loss 6.567085\n", "Epoch 3557, Training loss 2.428644, Validation loss 6.567080\n", "Epoch 3558, Training loss 2.428643, Validation loss 6.567096\n", "Epoch 3559, Training loss 2.428644, Validation loss 6.567091\n", "Epoch 3560, Training loss 2.428643, Validation loss 6.567086\n", "Epoch 3561, Training loss 2.428643, Validation loss 6.567101\n", "Epoch 3562, Training loss 2.428644, Validation loss 6.567116\n", "Epoch 3563, Training loss 2.428643, Validation loss 6.567111\n", "Epoch 3564, Training loss 2.428643, Validation loss 6.567106\n", "Epoch 3565, Training loss 2.428643, Validation loss 6.567122\n", "Epoch 3566, Training loss 2.428643, Validation loss 6.567126\n", "Epoch 3567, Training loss 2.428643, Validation loss 6.567122\n", "Epoch 3568, Training loss 2.428643, Validation loss 6.567136\n", "Epoch 3569, Training loss 2.428643, Validation loss 6.567132\n", "Epoch 3570, Training loss 2.428643, Validation loss 6.567146\n", "Epoch 3571, Training loss 2.428643, Validation loss 6.567142\n", "Epoch 3572, Training loss 2.428643, Validation loss 6.567152\n", "Epoch 3573, Training loss 2.428644, Validation loss 6.567152\n", "Epoch 3574, Training loss 2.428644, Validation loss 6.567163\n", "Epoch 3575, Training loss 2.428643, Validation loss 6.567163\n", "Epoch 3576, Training loss 2.428643, Validation loss 6.567173\n", "Epoch 3577, Training loss 2.428644, Validation loss 6.567173\n", "Epoch 3578, Training loss 2.428643, Validation loss 6.567183\n", "Epoch 3579, Training loss 2.428642, Validation loss 6.567187\n", "Epoch 3580, Training loss 2.428642, Validation loss 6.567198\n", "Epoch 3581, Training loss 2.428642, Validation loss 6.567213\n", "Epoch 3582, Training loss 2.428642, Validation loss 6.567218\n", "Epoch 3583, Training loss 2.428643, Validation loss 6.567227\n", "Epoch 3584, Training loss 2.428643, Validation loss 6.567233\n", "Epoch 3585, 
Training loss 2.428643, Validation loss 6.567223\n", "Epoch 3586, Training loss 2.428643, Validation loss 6.567228\n", "Epoch 3587, Training loss 2.428643, Validation loss 6.567233\n", "Epoch 3588, Training loss 2.428641, Validation loss 6.567238\n", "Epoch 3589, Training loss 2.428643, Validation loss 6.567248\n", "Epoch 3590, Training loss 2.428642, Validation loss 6.567238\n", "Epoch 3591, Training loss 2.428642, Validation loss 6.567244\n", "Epoch 3592, Training loss 2.428643, Validation loss 6.567248\n", "Epoch 3593, Training loss 2.428641, Validation loss 6.567254\n", "Epoch 3594, Training loss 2.428642, Validation loss 6.567274\n", "Epoch 3595, Training loss 2.428643, Validation loss 6.567259\n", "Epoch 3596, Training loss 2.428642, Validation loss 6.567269\n", "Epoch 3597, Training loss 2.428642, Validation loss 6.567274\n", "Epoch 3598, Training loss 2.428643, Validation loss 6.567279\n", "Epoch 3599, Training loss 2.428642, Validation loss 6.567285\n", "Epoch 3600, Training loss 2.428641, Validation loss 6.567294\n", "Epoch 3601, Training loss 2.428642, Validation loss 6.567285\n", "Epoch 3602, Training loss 2.428642, Validation loss 6.567289\n", "Epoch 3603, Training loss 2.428642, Validation loss 6.567295\n", "Epoch 3604, Training loss 2.428641, Validation loss 6.567300\n", "Epoch 3605, Training loss 2.428641, Validation loss 6.567309\n", "Epoch 3606, Training loss 2.428642, Validation loss 6.567306\n", "Epoch 3607, Training loss 2.428641, Validation loss 6.567315\n", "Epoch 3608, Training loss 2.428641, Validation loss 6.567320\n", "Epoch 3609, Training loss 2.428642, Validation loss 6.567326\n", "Epoch 3610, Training loss 2.428640, Validation loss 6.567316\n", "Epoch 3611, Training loss 2.428641, Validation loss 6.567321\n", "Epoch 3612, Training loss 2.428642, Validation loss 6.567330\n", "Epoch 3613, Training loss 2.428641, Validation loss 6.567336\n", "Epoch 3614, Training loss 2.428641, Validation loss 6.567341\n", "Epoch 3615, Training loss 2.428642, Validation loss 6.567346\n", "Epoch 3616, Training loss 2.428642, Validation loss 6.567347\n", "Epoch 3617, Training loss 2.428641, Validation loss 6.567356\n", "Epoch 3618, Training loss 2.428640, Validation loss 6.567361\n", "Epoch 3619, Training loss 2.428640, Validation loss 6.567367\n", "Epoch 3620, Training loss 2.428642, Validation loss 6.567371\n", "Epoch 3621, Training loss 2.428641, Validation loss 6.567362\n", "Epoch 3622, Training loss 2.428641, Validation loss 6.567377\n", "Epoch 3623, Training loss 2.428641, Validation loss 6.567377\n", "Epoch 3624, Training loss 2.428640, Validation loss 6.567387\n", "Epoch 3625, Training loss 2.428642, Validation loss 6.567402\n", "Epoch 3626, Training loss 2.428641, Validation loss 6.567402\n", "Epoch 3627, Training loss 2.428639, Validation loss 6.567402\n", "Epoch 3628, Training loss 2.428640, Validation loss 6.567412\n", "Epoch 3629, Training loss 2.428639, Validation loss 6.567412\n", "Epoch 3630, Training loss 2.428641, Validation loss 6.567412\n", "Epoch 3631, Training loss 2.428640, Validation loss 6.567408\n", "Epoch 3632, Training loss 2.428641, Validation loss 6.567418\n", "Epoch 3633, Training loss 2.428641, Validation loss 6.567418\n", "Epoch 3634, Training loss 2.428641, Validation loss 6.567418\n", "Epoch 3635, Training loss 2.428640, Validation loss 6.567428\n", "Epoch 3636, Training loss 2.428641, Validation loss 6.567443\n", "Epoch 3637, Training loss 2.428639, Validation loss 6.567443\n", "Epoch 3638, Training loss 2.428641, Validation loss 6.567443\n", 
"Epoch 3639, Training loss 2.428639, Validation loss 6.567443\n", "Epoch 3640, Training loss 2.428640, Validation loss 6.567453\n", "Epoch 3641, Training loss 2.428638, Validation loss 6.567449\n", "Epoch 3642, Training loss 2.428640, Validation loss 6.567449\n", "Epoch 3643, Training loss 2.428640, Validation loss 6.567459\n", "Epoch 3644, Training loss 2.428641, Validation loss 6.567459\n", "Epoch 3645, Training loss 2.428640, Validation loss 6.567473\n", "Epoch 3646, Training loss 2.428641, Validation loss 6.567473\n", "Epoch 3647, Training loss 2.428640, Validation loss 6.567473\n", "Epoch 3648, Training loss 2.428640, Validation loss 6.567484\n", "Epoch 3649, Training loss 2.428640, Validation loss 6.567484\n", "Epoch 3650, Training loss 2.428641, Validation loss 6.567484\n", "Epoch 3651, Training loss 2.428639, Validation loss 6.567490\n", "Epoch 3652, Training loss 2.428640, Validation loss 6.567490\n", "Epoch 3653, Training loss 2.428639, Validation loss 6.567490\n", "Epoch 3654, Training loss 2.428641, Validation loss 6.567490\n", "Epoch 3655, Training loss 2.428640, Validation loss 6.567500\n", "Epoch 3656, Training loss 2.428640, Validation loss 6.567514\n", "Epoch 3657, Training loss 2.428639, Validation loss 6.567514\n", "Epoch 3658, Training loss 2.428639, Validation loss 6.567514\n", "Epoch 3659, Training loss 2.428640, Validation loss 6.567524\n", "Epoch 3660, Training loss 2.428640, Validation loss 6.567520\n", "Epoch 3661, Training loss 2.428639, Validation loss 6.567520\n", "Epoch 3662, Training loss 2.428639, Validation loss 6.567530\n", "Epoch 3663, Training loss 2.428639, Validation loss 6.567530\n", "Epoch 3664, Training loss 2.428638, Validation loss 6.567530\n", "Epoch 3665, Training loss 2.428639, Validation loss 6.567530\n", "Epoch 3666, Training loss 2.428640, Validation loss 6.567545\n", "Epoch 3667, Training loss 2.428639, Validation loss 6.567555\n", "Epoch 3668, Training loss 2.428640, Validation loss 6.567555\n", "Epoch 3669, Training loss 2.428639, Validation loss 6.567555\n", "Epoch 3670, Training loss 2.428638, Validation loss 6.567571\n", "Epoch 3671, Training loss 2.428639, Validation loss 6.567580\n", "Epoch 3672, Training loss 2.428639, Validation loss 6.567575\n", "Epoch 3673, Training loss 2.428639, Validation loss 6.567595\n", "Epoch 3674, Training loss 2.428639, Validation loss 6.567586\n", "Epoch 3675, Training loss 2.428638, Validation loss 6.567595\n", "Epoch 3676, Training loss 2.428639, Validation loss 6.567596\n", "Epoch 3677, Training loss 2.428639, Validation loss 6.567601\n", "Epoch 3678, Training loss 2.428639, Validation loss 6.567611\n", "Epoch 3679, Training loss 2.428640, Validation loss 6.567601\n", "Epoch 3680, Training loss 2.428639, Validation loss 6.567621\n", "Epoch 3681, Training loss 2.428639, Validation loss 6.567611\n", "Epoch 3682, Training loss 2.428639, Validation loss 6.567621\n", "Epoch 3683, Training loss 2.428638, Validation loss 6.567626\n", "Epoch 3684, Training loss 2.428638, Validation loss 6.567626\n", "Epoch 3685, Training loss 2.428638, Validation loss 6.567616\n", "Epoch 3686, Training loss 2.428639, Validation loss 6.567626\n", "Epoch 3687, Training loss 2.428638, Validation loss 6.567632\n", "Epoch 3688, Training loss 2.428639, Validation loss 6.567641\n", "Epoch 3689, Training loss 2.428638, Validation loss 6.567642\n", "Epoch 3690, Training loss 2.428637, Validation loss 6.567636\n", "Epoch 3691, Training loss 2.428638, Validation loss 6.567642\n", "Epoch 3692, Training loss 2.428638, Validation loss 
6.567647\n", "Epoch 3693, Training loss 2.428639, Validation loss 6.567657\n", "Epoch 3694, Training loss 2.428639, Validation loss 6.567657\n", "Epoch 3695, Training loss 2.428638, Validation loss 6.567667\n", "Epoch 3696, Training loss 2.428638, Validation loss 6.567667\n", "Epoch 3697, Training loss 2.428638, Validation loss 6.567662\n", "Epoch 3698, Training loss 2.428637, Validation loss 6.567672\n", "Epoch 3699, Training loss 2.428638, Validation loss 6.567673\n", "Epoch 3700, Training loss 2.428639, Validation loss 6.567677\n", "Epoch 3701, Training loss 2.428637, Validation loss 6.567683\n", "Epoch 3702, Training loss 2.428638, Validation loss 6.567677\n", "Epoch 3703, Training loss 2.428639, Validation loss 6.567678\n", "Epoch 3704, Training loss 2.428638, Validation loss 6.567688\n", "Epoch 3705, Training loss 2.428638, Validation loss 6.567693\n", "Epoch 3706, Training loss 2.428638, Validation loss 6.567703\n", "Epoch 3707, Training loss 2.428638, Validation loss 6.567693\n", "Epoch 3708, Training loss 2.428638, Validation loss 6.567713\n", "Epoch 3709, Training loss 2.428638, Validation loss 6.567703\n", "Epoch 3710, Training loss 2.428638, Validation loss 6.567708\n", "Epoch 3711, Training loss 2.428638, Validation loss 6.567719\n", "Epoch 3712, Training loss 2.428638, Validation loss 6.567719\n", "Epoch 3713, Training loss 2.428638, Validation loss 6.567709\n", "Epoch 3714, Training loss 2.428638, Validation loss 6.567719\n", "Epoch 3715, Training loss 2.428637, Validation loss 6.567724\n", "Epoch 3716, Training loss 2.428638, Validation loss 6.567734\n", "Epoch 3717, Training loss 2.428638, Validation loss 6.567734\n", "Epoch 3718, Training loss 2.428638, Validation loss 6.567724\n", "Epoch 3719, Training loss 2.428638, Validation loss 6.567744\n", "Epoch 3720, Training loss 2.428638, Validation loss 6.567739\n", "Epoch 3721, Training loss 2.428637, Validation loss 6.567749\n", "Epoch 3722, Training loss 2.428637, Validation loss 6.567750\n", "Epoch 3723, Training loss 2.428638, Validation loss 6.567755\n", "Epoch 3724, Training loss 2.428637, Validation loss 6.567769\n", "Epoch 3725, Training loss 2.428635, Validation loss 6.567774\n", "Epoch 3726, Training loss 2.428638, Validation loss 6.567784\n", "Epoch 3727, Training loss 2.428638, Validation loss 6.567780\n", "Epoch 3728, Training loss 2.428638, Validation loss 6.567784\n", "Epoch 3729, Training loss 2.428637, Validation loss 6.567795\n", "Epoch 3730, Training loss 2.428638, Validation loss 6.567799\n", "Epoch 3731, Training loss 2.428638, Validation loss 6.567795\n", "Epoch 3732, Training loss 2.428638, Validation loss 6.567805\n", "Epoch 3733, Training loss 2.428637, Validation loss 6.567809\n", "Epoch 3734, Training loss 2.428637, Validation loss 6.567805\n", "Epoch 3735, Training loss 2.428636, Validation loss 6.567805\n", "Epoch 3736, Training loss 2.428637, Validation loss 6.567815\n", "Epoch 3737, Training loss 2.428637, Validation loss 6.567815\n", "Epoch 3738, Training loss 2.428638, Validation loss 6.567815\n", "Epoch 3739, Training loss 2.428637, Validation loss 6.567825\n", "Epoch 3740, Training loss 2.428637, Validation loss 6.567810\n", "Epoch 3741, Training loss 2.428638, Validation loss 6.567825\n", "Epoch 3742, Training loss 2.428637, Validation loss 6.567836\n", "Epoch 3743, Training loss 2.428637, Validation loss 6.567821\n", "Epoch 3744, Training loss 2.428637, Validation loss 6.567836\n", "Epoch 3745, Training loss 2.428636, Validation loss 6.567836\n", "Epoch 3746, Training loss 2.428637, 
Validation loss 6.567831\n", "Epoch 3747, Training loss 2.428637, Validation loss 6.567846\n", "Epoch 3748, Training loss 2.428636, Validation loss 6.567846\n", "Epoch 3749, Training loss 2.428637, Validation loss 6.567841\n", "Epoch 3750, Training loss 2.428636, Validation loss 6.567856\n", "Epoch 3751, Training loss 2.428636, Validation loss 6.567856\n", "Epoch 3752, Training loss 2.428637, Validation loss 6.567871\n", "Epoch 3753, Training loss 2.428637, Validation loss 6.567856\n", "Epoch 3754, Training loss 2.428637, Validation loss 6.567866\n", "Epoch 3755, Training loss 2.428636, Validation loss 6.567866\n", "Epoch 3756, Training loss 2.428636, Validation loss 6.567866\n", "Epoch 3757, Training loss 2.428637, Validation loss 6.567877\n", "Epoch 3758, Training loss 2.428636, Validation loss 6.567877\n", "Epoch 3759, Training loss 2.428636, Validation loss 6.567872\n", "Epoch 3760, Training loss 2.428636, Validation loss 6.567887\n", "Epoch 3761, Training loss 2.428636, Validation loss 6.567887\n", "Epoch 3762, Training loss 2.428638, Validation loss 6.567882\n", "Epoch 3763, Training loss 2.428638, Validation loss 6.567887\n", "Epoch 3764, Training loss 2.428636, Validation loss 6.567897\n", "Epoch 3765, Training loss 2.428636, Validation loss 6.567892\n", "Epoch 3766, Training loss 2.428635, Validation loss 6.567897\n", "Epoch 3767, Training loss 2.428637, Validation loss 6.567907\n", "Epoch 3768, Training loss 2.428637, Validation loss 6.567903\n", "Epoch 3769, Training loss 2.428637, Validation loss 6.567907\n", "Epoch 3770, Training loss 2.428636, Validation loss 6.567918\n", "Epoch 3771, Training loss 2.428636, Validation loss 6.567903\n", "Epoch 3772, Training loss 2.428635, Validation loss 6.567918\n", "Epoch 3773, Training loss 2.428636, Validation loss 6.567918\n", "Epoch 3774, Training loss 2.428637, Validation loss 6.567913\n", "Epoch 3775, Training loss 2.428636, Validation loss 6.567928\n", "Epoch 3776, Training loss 2.428636, Validation loss 6.567928\n", "Epoch 3777, Training loss 2.428636, Validation loss 6.567923\n", "Epoch 3778, Training loss 2.428636, Validation loss 6.567933\n", "Epoch 3779, Training loss 2.428636, Validation loss 6.567938\n", "Epoch 3780, Training loss 2.428636, Validation loss 6.567948\n", "Epoch 3781, Training loss 2.428635, Validation loss 6.567933\n", "Epoch 3782, Training loss 2.428636, Validation loss 6.567948\n", "Epoch 3783, Training loss 2.428638, Validation loss 6.567944\n", "Epoch 3784, Training loss 2.428636, Validation loss 6.567953\n", "Epoch 3785, Training loss 2.428636, Validation loss 6.567948\n", "Epoch 3786, Training loss 2.428635, Validation loss 6.567973\n", "Epoch 3787, Training loss 2.428636, Validation loss 6.567968\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 3788, Training loss 2.428636, Validation loss 6.567973\n", "Epoch 3789, Training loss 2.428636, Validation loss 6.567978\n", "Epoch 3790, Training loss 2.428635, Validation loss 6.567984\n", "Epoch 3791, Training loss 2.428636, Validation loss 6.567978\n", "Epoch 3792, Training loss 2.428635, Validation loss 6.567984\n", "Epoch 3793, Training loss 2.428636, Validation loss 6.567988\n", "Epoch 3794, Training loss 2.428634, Validation loss 6.567994\n", "Epoch 3795, Training loss 2.428636, Validation loss 6.567999\n", "Epoch 3796, Training loss 2.428637, Validation loss 6.567994\n", "Epoch 3797, Training loss 2.428636, Validation loss 6.567999\n", "Epoch 3798, Training loss 2.428636, Validation loss 6.568004\n", "Epoch 3799, Training loss 
2.428636, Validation loss 6.568009\n", "Epoch 3800, Training loss 2.428637, Validation loss 6.568004\n", "Epoch 3801, Training loss 2.428636, Validation loss 6.568009\n", "Epoch 3802, Training loss 2.428636, Validation loss 6.568014\n", "Epoch 3803, Training loss 2.428635, Validation loss 6.568019\n", "Epoch 3804, Training loss 2.428636, Validation loss 6.568025\n", "Epoch 3805, Training loss 2.428636, Validation loss 6.568024\n", "Epoch 3806, Training loss 2.428636, Validation loss 6.568029\n", "Epoch 3807, Training loss 2.428636, Validation loss 6.568034\n", "Epoch 3808, Training loss 2.428634, Validation loss 6.568039\n", "Epoch 3809, Training loss 2.428635, Validation loss 6.568034\n", "Epoch 3810, Training loss 2.428635, Validation loss 6.568039\n", "Epoch 3811, Training loss 2.428635, Validation loss 6.568045\n", "Epoch 3812, Training loss 2.428634, Validation loss 6.568049\n", "Epoch 3813, Training loss 2.428634, Validation loss 6.568045\n", "Epoch 3814, Training loss 2.428635, Validation loss 6.568049\n", "Epoch 3815, Training loss 2.428636, Validation loss 6.568055\n", "Epoch 3816, Training loss 2.428635, Validation loss 6.568045\n", "Epoch 3817, Training loss 2.428636, Validation loss 6.568050\n", "Epoch 3818, Training loss 2.428636, Validation loss 6.568055\n", "Epoch 3819, Training loss 2.428636, Validation loss 6.568050\n", "Epoch 3820, Training loss 2.428636, Validation loss 6.568055\n", "Epoch 3821, Training loss 2.428636, Validation loss 6.568060\n", "Epoch 3822, Training loss 2.428635, Validation loss 6.568066\n", "Epoch 3823, Training loss 2.428634, Validation loss 6.568060\n", "Epoch 3824, Training loss 2.428635, Validation loss 6.568066\n", "Epoch 3825, Training loss 2.428636, Validation loss 6.568070\n", "Epoch 3826, Training loss 2.428636, Validation loss 6.568076\n", "Epoch 3827, Training loss 2.428635, Validation loss 6.568081\n", "Epoch 3828, Training loss 2.428636, Validation loss 6.568076\n", "Epoch 3829, Training loss 2.428635, Validation loss 6.568081\n", "Epoch 3830, Training loss 2.428635, Validation loss 6.568086\n", "Epoch 3831, Training loss 2.428634, Validation loss 6.568091\n", "Epoch 3832, Training loss 2.428635, Validation loss 6.568086\n", "Epoch 3833, Training loss 2.428634, Validation loss 6.568091\n", "Epoch 3834, Training loss 2.428636, Validation loss 6.568096\n", "Epoch 3835, Training loss 2.428635, Validation loss 6.568101\n", "Epoch 3836, Training loss 2.428634, Validation loss 6.568107\n", "Epoch 3837, Training loss 2.428634, Validation loss 6.568101\n", "Epoch 3838, Training loss 2.428634, Validation loss 6.568107\n", "Epoch 3839, Training loss 2.428636, Validation loss 6.568111\n", "Epoch 3840, Training loss 2.428634, Validation loss 6.568117\n", "Epoch 3841, Training loss 2.428635, Validation loss 6.568111\n", "Epoch 3842, Training loss 2.428636, Validation loss 6.568121\n", "Epoch 3843, Training loss 2.428636, Validation loss 6.568127\n", "Epoch 3844, Training loss 2.428634, Validation loss 6.568131\n", "Epoch 3845, Training loss 2.428636, Validation loss 6.568137\n", "Epoch 3846, Training loss 2.428635, Validation loss 6.568131\n", "Epoch 3847, Training loss 2.428636, Validation loss 6.568137\n", "Epoch 3848, Training loss 2.428634, Validation loss 6.568142\n", "Epoch 3849, Training loss 2.428634, Validation loss 6.568147\n", "Epoch 3850, Training loss 2.428635, Validation loss 6.568142\n", "Epoch 3851, Training loss 2.428635, Validation loss 6.568147\n", "Epoch 3852, Training loss 2.428635, Validation loss 6.568132\n", "Epoch 3853, 
Training loss 2.428635, Validation loss 6.568152\n", "Epoch 3854, Training loss 2.428635, Validation loss 6.568157\n", "Epoch 3855, Training loss 2.428635, Validation loss 6.568172\n", "Epoch 3856, Training loss 2.428635, Validation loss 6.568168\n", "Epoch 3857, Training loss 2.428634, Validation loss 6.568168\n", "Epoch 3858, Training loss 2.428635, Validation loss 6.568163\n", "Epoch 3859, Training loss 2.428636, Validation loss 6.568172\n", "Epoch 3860, Training loss 2.428634, Validation loss 6.568168\n", "Epoch 3861, Training loss 2.428634, Validation loss 6.568168\n", "Epoch 3862, Training loss 2.428634, Validation loss 6.568182\n", "Epoch 3863, Training loss 2.428634, Validation loss 6.568178\n", "Epoch 3864, Training loss 2.428634, Validation loss 6.568178\n", "Epoch 3865, Training loss 2.428634, Validation loss 6.568178\n", "Epoch 3866, Training loss 2.428635, Validation loss 6.568173\n", "Epoch 3867, Training loss 2.428635, Validation loss 6.568188\n", "Epoch 3868, Training loss 2.428635, Validation loss 6.568188\n", "Epoch 3869, Training loss 2.428634, Validation loss 6.568188\n", "Epoch 3870, Training loss 2.428634, Validation loss 6.568203\n", "Epoch 3871, Training loss 2.428634, Validation loss 6.568198\n", "Epoch 3872, Training loss 2.428636, Validation loss 6.568198\n", "Epoch 3873, Training loss 2.428634, Validation loss 6.568213\n", "Epoch 3874, Training loss 2.428635, Validation loss 6.568213\n", "Epoch 3875, Training loss 2.428635, Validation loss 6.568209\n", "Epoch 3876, Training loss 2.428634, Validation loss 6.568209\n", "Epoch 3877, Training loss 2.428634, Validation loss 6.568204\n", "Epoch 3878, Training loss 2.428635, Validation loss 6.568223\n", "Epoch 3879, Training loss 2.428634, Validation loss 6.568219\n", "Epoch 3880, Training loss 2.428634, Validation loss 6.568219\n", "Epoch 3881, Training loss 2.428632, Validation loss 6.568223\n", "Epoch 3882, Training loss 2.428634, Validation loss 6.568219\n", "Epoch 3883, Training loss 2.428634, Validation loss 6.568219\n", "Epoch 3884, Training loss 2.428634, Validation loss 6.568219\n", "Epoch 3885, Training loss 2.428634, Validation loss 6.568233\n", "Epoch 3886, Training loss 2.428634, Validation loss 6.568229\n", "Epoch 3887, Training loss 2.428635, Validation loss 6.568229\n", "Epoch 3888, Training loss 2.428634, Validation loss 6.568229\n", "Epoch 3889, Training loss 2.428634, Validation loss 6.568224\n", "Epoch 3890, Training loss 2.428633, Validation loss 6.568239\n", "Epoch 3891, Training loss 2.428634, Validation loss 6.568239\n", "Epoch 3892, Training loss 2.428634, Validation loss 6.568239\n", "Epoch 3893, Training loss 2.428634, Validation loss 6.568254\n", "Epoch 3894, Training loss 2.428634, Validation loss 6.568250\n", "Epoch 3895, Training loss 2.428634, Validation loss 6.568250\n", "Epoch 3896, Training loss 2.428634, Validation loss 6.568245\n", "Epoch 3897, Training loss 2.428633, Validation loss 6.568245\n", "Epoch 3898, Training loss 2.428634, Validation loss 6.568260\n", "Epoch 3899, Training loss 2.428633, Validation loss 6.568260\n", "Epoch 3900, Training loss 2.428634, Validation loss 6.568255\n", "Epoch 3901, Training loss 2.428633, Validation loss 6.568260\n", "Epoch 3902, Training loss 2.428634, Validation loss 6.568270\n", "Epoch 3903, Training loss 2.428634, Validation loss 6.568270\n", "Epoch 3904, Training loss 2.428634, Validation loss 6.568274\n", "Epoch 3905, Training loss 2.428634, Validation loss 6.568270\n", "Epoch 3906, Training loss 2.428635, Validation loss 6.568270\n", 
"Epoch 3907, Training loss 2.428634, Validation loss 6.568270\n", "Epoch 3908, Training loss 2.428633, Validation loss 6.568265\n", "Epoch 3909, Training loss 2.428634, Validation loss 6.568280\n", "Epoch 3910, Training loss 2.428633, Validation loss 6.568280\n", "Epoch 3911, Training loss 2.428634, Validation loss 6.568280\n", "Epoch 3912, Training loss 2.428634, Validation loss 6.568295\n", "Epoch 3913, Training loss 2.428634, Validation loss 6.568291\n", "Epoch 3914, Training loss 2.428635, Validation loss 6.568291\n", "Epoch 3915, Training loss 2.428634, Validation loss 6.568305\n", "Epoch 3916, Training loss 2.428634, Validation loss 6.568305\n", "Epoch 3917, Training loss 2.428634, Validation loss 6.568300\n", "Epoch 3918, Training loss 2.428633, Validation loss 6.568300\n", "Epoch 3919, Training loss 2.428633, Validation loss 6.568295\n", "Epoch 3920, Training loss 2.428633, Validation loss 6.568311\n", "Epoch 3921, Training loss 2.428633, Validation loss 6.568311\n", "Epoch 3922, Training loss 2.428634, Validation loss 6.568311\n", "Epoch 3923, Training loss 2.428634, Validation loss 6.568316\n", "Epoch 3924, Training loss 2.428634, Validation loss 6.568311\n", "Epoch 3925, Training loss 2.428633, Validation loss 6.568321\n", "Epoch 3926, Training loss 2.428635, Validation loss 6.568321\n", "Epoch 3927, Training loss 2.428633, Validation loss 6.568316\n", "Epoch 3928, Training loss 2.428633, Validation loss 6.568321\n", "Epoch 3929, Training loss 2.428633, Validation loss 6.568321\n", "Epoch 3930, Training loss 2.428633, Validation loss 6.568321\n", "Epoch 3931, Training loss 2.428634, Validation loss 6.568316\n", "Epoch 3932, Training loss 2.428634, Validation loss 6.568331\n", "Epoch 3933, Training loss 2.428634, Validation loss 6.568331\n", "Epoch 3934, Training loss 2.428634, Validation loss 6.568341\n", "Epoch 3935, Training loss 2.428633, Validation loss 6.568346\n", "Epoch 3936, Training loss 2.428632, Validation loss 6.568356\n", "Epoch 3937, Training loss 2.428633, Validation loss 6.568361\n", "Epoch 3938, Training loss 2.428634, Validation loss 6.568366\n", "Epoch 3939, Training loss 2.428634, Validation loss 6.568371\n", "Epoch 3940, Training loss 2.428633, Validation loss 6.568361\n", "Epoch 3941, Training loss 2.428634, Validation loss 6.568371\n", "Epoch 3942, Training loss 2.428633, Validation loss 6.568381\n", "Epoch 3943, Training loss 2.428635, Validation loss 6.568371\n", "Epoch 3944, Training loss 2.428632, Validation loss 6.568381\n", "Epoch 3945, Training loss 2.428633, Validation loss 6.568371\n", "Epoch 3946, Training loss 2.428633, Validation loss 6.568377\n", "Epoch 3947, Training loss 2.428634, Validation loss 6.568381\n", "Epoch 3948, Training loss 2.428633, Validation loss 6.568387\n", "Epoch 3949, Training loss 2.428633, Validation loss 6.568381\n", "Epoch 3950, Training loss 2.428634, Validation loss 6.568387\n", "Epoch 3951, Training loss 2.428633, Validation loss 6.568377\n", "Epoch 3952, Training loss 2.428633, Validation loss 6.568387\n", "Epoch 3953, Training loss 2.428634, Validation loss 6.568387\n", "Epoch 3954, Training loss 2.428633, Validation loss 6.568397\n", "Epoch 3955, Training loss 2.428633, Validation loss 6.568387\n", "Epoch 3956, Training loss 2.428633, Validation loss 6.568397\n", "Epoch 3957, Training loss 2.428633, Validation loss 6.568402\n", "Epoch 3958, Training loss 2.428634, Validation loss 6.568397\n", "Epoch 3959, Training loss 2.428635, Validation loss 6.568402\n", "Epoch 3960, Training loss 2.428632, Validation loss 
6.568392\n", "Epoch 3961, Training loss 2.428633, Validation loss 6.568402\n", "Epoch 3962, Training loss 2.428633, Validation loss 6.568402\n", "Epoch 3963, Training loss 2.428635, Validation loss 6.568412\n", "Epoch 3964, Training loss 2.428634, Validation loss 6.568422\n", "Epoch 3965, Training loss 2.428633, Validation loss 6.568412\n", "Epoch 3966, Training loss 2.428634, Validation loss 6.568418\n", "Epoch 3967, Training loss 2.428633, Validation loss 6.568412\n", "Epoch 3968, Training loss 2.428633, Validation loss 6.568418\n", "Epoch 3969, Training loss 2.428634, Validation loss 6.568418\n", "Epoch 3970, Training loss 2.428633, Validation loss 6.568428\n", "Epoch 3971, Training loss 2.428633, Validation loss 6.568418\n", "Epoch 3972, Training loss 2.428634, Validation loss 6.568428\n", "Epoch 3973, Training loss 2.428633, Validation loss 6.568418\n", "Epoch 3974, Training loss 2.428634, Validation loss 6.568428\n", "Epoch 3975, Training loss 2.428633, Validation loss 6.568433\n", "Epoch 3976, Training loss 2.428633, Validation loss 6.568428\n", "Epoch 3977, Training loss 2.428633, Validation loss 6.568433\n", "Epoch 3978, Training loss 2.428634, Validation loss 6.568433\n", "Epoch 3979, Training loss 2.428632, Validation loss 6.568443\n", "Epoch 3980, Training loss 2.428634, Validation loss 6.568433\n", "Epoch 3981, Training loss 2.428632, Validation loss 6.568443\n", "Epoch 3982, Training loss 2.428634, Validation loss 6.568433\n", "Epoch 3983, Training loss 2.428631, Validation loss 6.568443\n", "Epoch 3984, Training loss 2.428633, Validation loss 6.568443\n", "Epoch 3985, Training loss 2.428634, Validation loss 6.568453\n", "Epoch 3986, Training loss 2.428633, Validation loss 6.568459\n", "Epoch 3987, Training loss 2.428633, Validation loss 6.568453\n", "Epoch 3988, Training loss 2.428632, Validation loss 6.568459\n", "Epoch 3989, Training loss 2.428633, Validation loss 6.568449\n", "Epoch 3990, Training loss 2.428634, Validation loss 6.568459\n", "Epoch 3991, Training loss 2.428632, Validation loss 6.568449\n", "Epoch 3992, Training loss 2.428633, Validation loss 6.568459\n", "Epoch 3993, Training loss 2.428633, Validation loss 6.568474\n", "Epoch 3994, Training loss 2.428632, Validation loss 6.568469\n", "Epoch 3995, Training loss 2.428632, Validation loss 6.568459\n", "Epoch 3996, Training loss 2.428633, Validation loss 6.568469\n", "Epoch 3997, Training loss 2.428634, Validation loss 6.568474\n", "Epoch 3998, Training loss 2.428634, Validation loss 6.568464\n", "Epoch 3999, Training loss 2.428632, Validation loss 6.568474\n", "Epoch 4000, Training loss 2.428634, Validation loss 6.568474\n", "Epoch 4001, Training loss 2.428633, Validation loss 6.568484\n", "Epoch 4002, Training loss 2.428632, Validation loss 6.568474\n", "Epoch 4003, Training loss 2.428632, Validation loss 6.568484\n", "Epoch 4004, Training loss 2.428632, Validation loss 6.568489\n", "Epoch 4005, Training loss 2.428633, Validation loss 6.568484\n", "Epoch 4006, Training loss 2.428632, Validation loss 6.568474\n", "Epoch 4007, Training loss 2.428632, Validation loss 6.568480\n", "Epoch 4008, Training loss 2.428632, Validation loss 6.568500\n", "Epoch 4009, Training loss 2.428634, Validation loss 6.568490\n", "Epoch 4010, Training loss 2.428633, Validation loss 6.568500\n", "Epoch 4011, Training loss 2.428633, Validation loss 6.568490\n", "Epoch 4012, Training loss 2.428632, Validation loss 6.568500\n", "Epoch 4013, Training loss 2.428634, Validation loss 6.568490\n", "Epoch 4014, Training loss 2.428633, 
Validation loss 6.568500\n", "Epoch 4015, Training loss 2.428633, Validation loss 6.568500\n", "Epoch 4016, Training loss 2.428632, Validation loss 6.568505\n", "Epoch 4017, Training loss 2.428632, Validation loss 6.568515\n", "Epoch 4018, Training loss 2.428632, Validation loss 6.568505\n", "Epoch 4019, Training loss 2.428633, Validation loss 6.568515\n", "Epoch 4020, Training loss 2.428632, Validation loss 6.568505\n", "Epoch 4021, Training loss 2.428633, Validation loss 6.568515\n", "Epoch 4022, Training loss 2.428633, Validation loss 6.568515\n", "Epoch 4023, Training loss 2.428632, Validation loss 6.568525\n", "Epoch 4024, Training loss 2.428632, Validation loss 6.568530\n", "Epoch 4025, Training loss 2.428633, Validation loss 6.568525\n", "Epoch 4026, Training loss 2.428633, Validation loss 6.568530\n", "Epoch 4027, Training loss 2.428632, Validation loss 6.568521\n", "Epoch 4028, Training loss 2.428632, Validation loss 6.568530\n", "Epoch 4029, Training loss 2.428633, Validation loss 6.568521\n", "Epoch 4030, Training loss 2.428632, Validation loss 6.568530\n", "Epoch 4031, Training loss 2.428632, Validation loss 6.568541\n", "Epoch 4032, Training loss 2.428633, Validation loss 6.568541\n", "Epoch 4033, Training loss 2.428632, Validation loss 6.568545\n", "Epoch 4034, Training loss 2.428632, Validation loss 6.568545\n", "Epoch 4035, Training loss 2.428633, Validation loss 6.568555\n", "Epoch 4036, Training loss 2.428633, Validation loss 6.568555\n", "Epoch 4037, Training loss 2.428633, Validation loss 6.568555\n", "Epoch 4038, Training loss 2.428634, Validation loss 6.568570\n", "Epoch 4039, Training loss 2.428632, Validation loss 6.568570\n", "Epoch 4040, Training loss 2.428633, Validation loss 6.568570\n", "Epoch 4041, Training loss 2.428632, Validation loss 6.568570\n", "Epoch 4042, Training loss 2.428633, Validation loss 6.568565\n", "Epoch 4043, Training loss 2.428632, Validation loss 6.568565\n", "Epoch 4044, Training loss 2.428632, Validation loss 6.568570\n", "Epoch 4045, Training loss 2.428633, Validation loss 6.568581\n", "Epoch 4046, Training loss 2.428631, Validation loss 6.568581\n", "Epoch 4047, Training loss 2.428633, Validation loss 6.568565\n", "Epoch 4048, Training loss 2.428632, Validation loss 6.568576\n", "Epoch 4049, Training loss 2.428632, Validation loss 6.568576\n", "Epoch 4050, Training loss 2.428633, Validation loss 6.568581\n", "Epoch 4051, Training loss 2.428632, Validation loss 6.568581\n", "Epoch 4052, Training loss 2.428633, Validation loss 6.568591\n", "Epoch 4053, Training loss 2.428633, Validation loss 6.568591\n", "Epoch 4054, Training loss 2.428633, Validation loss 6.568586\n", "Epoch 4055, Training loss 2.428634, Validation loss 6.568586\n", "Epoch 4056, Training loss 2.428631, Validation loss 6.568586\n", "Epoch 4057, Training loss 2.428633, Validation loss 6.568591\n", "Epoch 4058, Training loss 2.428632, Validation loss 6.568601\n", "Epoch 4059, Training loss 2.428633, Validation loss 6.568586\n", "Epoch 4060, Training loss 2.428632, Validation loss 6.568586\n", "Epoch 4061, Training loss 2.428632, Validation loss 6.568596\n", "Epoch 4062, Training loss 2.428633, Validation loss 6.568596\n", "Epoch 4063, Training loss 2.428631, Validation loss 6.568601\n", "Epoch 4064, Training loss 2.428632, Validation loss 6.568601\n", "Epoch 4065, Training loss 2.428632, Validation loss 6.568611\n", "Epoch 4066, Training loss 2.428632, Validation loss 6.568596\n", "Epoch 4067, Training loss 2.428633, Validation loss 6.568596\n", "Epoch 4068, Training loss 
2.428632, Validation loss 6.568606\n", "Epoch 4069, Training loss 2.428633, Validation loss 6.568611\n", "Epoch 4070, Training loss 2.428632, Validation loss 6.568611\n", "Epoch 4071, Training loss 2.428633, Validation loss 6.568611\n", "Epoch 4072, Training loss 2.428632, Validation loss 6.568622\n", "Epoch 4073, Training loss 2.428631, Validation loss 6.568606\n", "Epoch 4074, Training loss 2.428633, Validation loss 6.568617\n", "Epoch 4075, Training loss 2.428633, Validation loss 6.568617\n", "Epoch 4076, Training loss 2.428632, Validation loss 6.568622\n", "Epoch 4077, Training loss 2.428633, Validation loss 6.568622\n", "Epoch 4078, Training loss 2.428632, Validation loss 6.568617\n", "Epoch 4079, Training loss 2.428632, Validation loss 6.568617\n", "Epoch 4080, Training loss 2.428632, Validation loss 6.568617\n", "Epoch 4081, Training loss 2.428632, Validation loss 6.568627\n", "Epoch 4082, Training loss 2.428633, Validation loss 6.568632\n", "Epoch 4083, Training loss 2.428632, Validation loss 6.568632\n", "Epoch 4084, Training loss 2.428632, Validation loss 6.568632\n", "Epoch 4085, Training loss 2.428632, Validation loss 6.568642\n", "Epoch 4086, Training loss 2.428632, Validation loss 6.568627\n", "Epoch 4087, Training loss 2.428632, Validation loss 6.568627\n", "Epoch 4088, Training loss 2.428632, Validation loss 6.568642\n", "Epoch 4089, Training loss 2.428632, Validation loss 6.568642\n", "Epoch 4090, Training loss 2.428632, Validation loss 6.568642\n", "Epoch 4091, Training loss 2.428632, Validation loss 6.568637\n", "Epoch 4092, Training loss 2.428632, Validation loss 6.568637\n", "Epoch 4093, Training loss 2.428632, Validation loss 6.568637\n", "Epoch 4094, Training loss 2.428632, Validation loss 6.568647\n", "Epoch 4095, Training loss 2.428632, Validation loss 6.568652\n", "Epoch 4096, Training loss 2.428632, Validation loss 6.568652\n", "Epoch 4097, Training loss 2.428632, Validation loss 6.568652\n", "Epoch 4098, Training loss 2.428633, Validation loss 6.568647\n", "Epoch 4099, Training loss 2.428632, Validation loss 6.568647\n", "Epoch 4100, Training loss 2.428632, Validation loss 6.568647\n", "Epoch 4101, Training loss 2.428633, Validation loss 6.568662\n", "Epoch 4102, Training loss 2.428631, Validation loss 6.568662\n", "Epoch 4103, Training loss 2.428632, Validation loss 6.568647\n", "Epoch 4104, Training loss 2.428632, Validation loss 6.568657\n", "Epoch 4105, Training loss 2.428632, Validation loss 6.568657\n", "Epoch 4106, Training loss 2.428632, Validation loss 6.568657\n", "Epoch 4107, Training loss 2.428633, Validation loss 6.568672\n", "Epoch 4108, Training loss 2.428631, Validation loss 6.568672\n", "Epoch 4109, Training loss 2.428632, Validation loss 6.568672\n", "Epoch 4110, Training loss 2.428632, Validation loss 6.568667\n", "Epoch 4111, Training loss 2.428632, Validation loss 6.568667\n", "Epoch 4112, Training loss 2.428632, Validation loss 6.568667\n", "Epoch 4113, Training loss 2.428632, Validation loss 6.568667\n", "Epoch 4114, Training loss 2.428633, Validation loss 6.568683\n", "Epoch 4115, Training loss 2.428633, Validation loss 6.568683\n", "Epoch 4116, Training loss 2.428631, Validation loss 6.568683\n", "Epoch 4117, Training loss 2.428632, Validation loss 6.568678\n", "Epoch 4118, Training loss 2.428631, Validation loss 6.568678\n", "Epoch 4119, Training loss 2.428632, Validation loss 6.568678\n", "Epoch 4120, Training loss 2.428632, Validation loss 6.568683\n", "Epoch 4121, Training loss 2.428632, Validation loss 6.568693\n", "Epoch 4122, 
Training loss 2.428632, Validation loss 6.568678\n", "Epoch 4123, Training loss 2.428632, Validation loss 6.568678\n", "Epoch 4124, Training loss 2.428632, Validation loss 6.568688\n", "Epoch 4125, Training loss 2.428632, Validation loss 6.568688\n", "Epoch 4126, Training loss 2.428632, Validation loss 6.568693\n", "Epoch 4127, Training loss 2.428633, Validation loss 6.568703\n", "Epoch 4128, Training loss 2.428632, Validation loss 6.568703\n", "Epoch 4129, Training loss 2.428633, Validation loss 6.568703\n", "Epoch 4130, Training loss 2.428632, Validation loss 6.568698\n", "Epoch 4131, Training loss 2.428632, Validation loss 6.568698\n", "Epoch 4132, Training loss 2.428632, Validation loss 6.568698\n", "Epoch 4133, Training loss 2.428632, Validation loss 6.568703\n", "Epoch 4134, Training loss 2.428632, Validation loss 6.568694\n", "Epoch 4135, Training loss 2.428632, Validation loss 6.568698\n", "Epoch 4136, Training loss 2.428632, Validation loss 6.568698\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 4137, Training loss 2.428632, Validation loss 6.568708\n", "Epoch 4138, Training loss 2.428632, Validation loss 6.568708\n", "Epoch 4139, Training loss 2.428632, Validation loss 6.568713\n", "Epoch 4140, Training loss 2.428632, Validation loss 6.568713\n", "Epoch 4141, Training loss 2.428632, Validation loss 6.568708\n", "Epoch 4142, Training loss 2.428632, Validation loss 6.568708\n", "Epoch 4143, Training loss 2.428632, Validation loss 6.568719\n", "Epoch 4144, Training loss 2.428634, Validation loss 6.568719\n", "Epoch 4145, Training loss 2.428632, Validation loss 6.568724\n", "Epoch 4146, Training loss 2.428632, Validation loss 6.568724\n", "Epoch 4147, Training loss 2.428633, Validation loss 6.568734\n", "Epoch 4148, Training loss 2.428631, Validation loss 6.568719\n", "Epoch 4149, Training loss 2.428633, Validation loss 6.568719\n", "Epoch 4150, Training loss 2.428632, Validation loss 6.568729\n", "Epoch 4151, Training loss 2.428632, Validation loss 6.568729\n", "Epoch 4152, Training loss 2.428632, Validation loss 6.568734\n", "Epoch 4153, Training loss 2.428632, Validation loss 6.568734\n", "Epoch 4154, Training loss 2.428633, Validation loss 6.568739\n", "Epoch 4155, Training loss 2.428631, Validation loss 6.568748\n", "Epoch 4156, Training loss 2.428632, Validation loss 6.568744\n", "Epoch 4157, Training loss 2.428632, Validation loss 6.568739\n", "Epoch 4158, Training loss 2.428631, Validation loss 6.568748\n", "Epoch 4159, Training loss 2.428632, Validation loss 6.568754\n", "Epoch 4160, Training loss 2.428632, Validation loss 6.568744\n", "Epoch 4161, Training loss 2.428632, Validation loss 6.568749\n", "Epoch 4162, Training loss 2.428632, Validation loss 6.568754\n", "Epoch 4163, Training loss 2.428633, Validation loss 6.568749\n", "Epoch 4164, Training loss 2.428631, Validation loss 6.568759\n", "Epoch 4165, Training loss 2.428632, Validation loss 6.568764\n", "Epoch 4166, Training loss 2.428632, Validation loss 6.568759\n", "Epoch 4167, Training loss 2.428632, Validation loss 6.568764\n", "Epoch 4168, Training loss 2.428632, Validation loss 6.568754\n", "Epoch 4169, Training loss 2.428632, Validation loss 6.568760\n", "Epoch 4170, Training loss 2.428632, Validation loss 6.568754\n", "Epoch 4171, Training loss 2.428631, Validation loss 6.568760\n", "Epoch 4172, Training loss 2.428632, Validation loss 6.568754\n", "Epoch 4173, Training loss 2.428632, Validation loss 6.568755\n", "Epoch 4174, Training loss 2.428632, Validation loss 6.568769\n", "Epoch 
4175, Training loss 2.428631, Validation loss 6.568770\n", "Epoch 4176, Training loss 2.428632, Validation loss 6.568765\n", "Epoch 4177, Training loss 2.428632, Validation loss 6.568770\n", "Epoch 4178, Training loss 2.428633, Validation loss 6.568765\n", "Epoch 4179, Training loss 2.428632, Validation loss 6.568770\n", "Epoch 4180, Training loss 2.428632, Validation loss 6.568779\n", "Epoch 4181, Training loss 2.428631, Validation loss 6.568774\n", "Epoch 4182, Training loss 2.428632, Validation loss 6.568779\n", "Epoch 4183, Training loss 2.428632, Validation loss 6.568770\n", "Epoch 4184, Training loss 2.428632, Validation loss 6.568775\n", "Epoch 4185, Training loss 2.428632, Validation loss 6.568780\n", "Epoch 4186, Training loss 2.428631, Validation loss 6.568775\n", "Epoch 4187, Training loss 2.428633, Validation loss 6.568780\n", "Epoch 4188, Training loss 2.428632, Validation loss 6.568770\n", "Epoch 4189, Training loss 2.428631, Validation loss 6.568775\n", "Epoch 4190, Training loss 2.428632, Validation loss 6.568785\n", "Epoch 4191, Training loss 2.428631, Validation loss 6.568780\n", "Epoch 4192, Training loss 2.428632, Validation loss 6.568785\n", "Epoch 4193, Training loss 2.428632, Validation loss 6.568780\n", "Epoch 4194, Training loss 2.428632, Validation loss 6.568785\n", "Epoch 4195, Training loss 2.428631, Validation loss 6.568780\n", "Epoch 4196, Training loss 2.428633, Validation loss 6.568780\n", "Epoch 4197, Training loss 2.428632, Validation loss 6.568795\n", "Epoch 4198, Training loss 2.428632, Validation loss 6.568795\n", "Epoch 4199, Training loss 2.428632, Validation loss 6.568790\n", "Epoch 4200, Training loss 2.428631, Validation loss 6.568795\n", "Epoch 4201, Training loss 2.428632, Validation loss 6.568790\n", "Epoch 4202, Training loss 2.428632, Validation loss 6.568795\n", "Epoch 4203, Training loss 2.428632, Validation loss 6.568805\n", "Epoch 4204, Training loss 2.428631, Validation loss 6.568800\n", "Epoch 4205, Training loss 2.428633, Validation loss 6.568805\n", "Epoch 4206, Training loss 2.428631, Validation loss 6.568795\n", "Epoch 4207, Training loss 2.428632, Validation loss 6.568800\n", "Epoch 4208, Training loss 2.428632, Validation loss 6.568806\n", "Epoch 4209, Training loss 2.428632, Validation loss 6.568800\n", "Epoch 4210, Training loss 2.428632, Validation loss 6.568806\n", "Epoch 4211, Training loss 2.428630, Validation loss 6.568796\n", "Epoch 4212, Training loss 2.428632, Validation loss 6.568801\n", "Epoch 4213, Training loss 2.428631, Validation loss 6.568810\n", "Epoch 4214, Training loss 2.428632, Validation loss 6.568806\n", "Epoch 4215, Training loss 2.428632, Validation loss 6.568810\n", "Epoch 4216, Training loss 2.428632, Validation loss 6.568806\n", "Epoch 4217, Training loss 2.428631, Validation loss 6.568810\n", "Epoch 4218, Training loss 2.428632, Validation loss 6.568806\n", "Epoch 4219, Training loss 2.428631, Validation loss 6.568826\n", "Epoch 4220, Training loss 2.428631, Validation loss 6.568820\n", "Epoch 4221, Training loss 2.428632, Validation loss 6.568821\n", "Epoch 4222, Training loss 2.428632, Validation loss 6.568816\n", "Epoch 4223, Training loss 2.428632, Validation loss 6.568821\n", "Epoch 4224, Training loss 2.428631, Validation loss 6.568816\n", "Epoch 4225, Training loss 2.428632, Validation loss 6.568821\n", "Epoch 4226, Training loss 2.428632, Validation loss 6.568811\n", "Epoch 4227, Training loss 2.428632, Validation loss 6.568826\n", "Epoch 4228, Training loss 2.428632, Validation loss 
6.568830\n", "Epoch 4229, Training loss 2.428631, Validation loss 6.568821\n", "Epoch 4230, Training loss 2.428632, Validation loss 6.568826\n", "Epoch 4231, Training loss 2.428632, Validation loss 6.568821\n", "Epoch 4232, Training loss 2.428632, Validation loss 6.568826\n", "Epoch 4233, Training loss 2.428632, Validation loss 6.568831\n", "Epoch 4234, Training loss 2.428632, Validation loss 6.568821\n", "Epoch 4235, Training loss 2.428632, Validation loss 6.568836\n", "Epoch 4236, Training loss 2.428632, Validation loss 6.568836\n", "Epoch 4237, Training loss 2.428632, Validation loss 6.568831\n", "Epoch 4238, Training loss 2.428631, Validation loss 6.568836\n", "Epoch 4239, Training loss 2.428632, Validation loss 6.568831\n", "Epoch 4240, Training loss 2.428632, Validation loss 6.568836\n", "Epoch 4241, Training loss 2.428633, Validation loss 6.568831\n", "Epoch 4242, Training loss 2.428631, Validation loss 6.568851\n", "Epoch 4243, Training loss 2.428632, Validation loss 6.568846\n", "Epoch 4244, Training loss 2.428632, Validation loss 6.568847\n", "Epoch 4245, Training loss 2.428633, Validation loss 6.568841\n", "Epoch 4246, Training loss 2.428631, Validation loss 6.568847\n", "Epoch 4247, Training loss 2.428631, Validation loss 6.568841\n", "Epoch 4248, Training loss 2.428631, Validation loss 6.568847\n", "Epoch 4249, Training loss 2.428631, Validation loss 6.568837\n", "Epoch 4250, Training loss 2.428631, Validation loss 6.568842\n", "Epoch 4251, Training loss 2.428631, Validation loss 6.568837\n", "Epoch 4252, Training loss 2.428632, Validation loss 6.568847\n", "Epoch 4253, Training loss 2.428631, Validation loss 6.568851\n", "Epoch 4254, Training loss 2.428632, Validation loss 6.568847\n", "Epoch 4255, Training loss 2.428632, Validation loss 6.568851\n", "Epoch 4256, Training loss 2.428631, Validation loss 6.568857\n", "Epoch 4257, Training loss 2.428632, Validation loss 6.568847\n", "Epoch 4258, Training loss 2.428632, Validation loss 6.568861\n", "Epoch 4259, Training loss 2.428631, Validation loss 6.568862\n", "Epoch 4260, Training loss 2.428632, Validation loss 6.568857\n", "Epoch 4261, Training loss 2.428632, Validation loss 6.568862\n", "Epoch 4262, Training loss 2.428632, Validation loss 6.568857\n", "Epoch 4263, Training loss 2.428631, Validation loss 6.568862\n", "Epoch 4264, Training loss 2.428632, Validation loss 6.568852\n", "Epoch 4265, Training loss 2.428631, Validation loss 6.568857\n", "Epoch 4266, Training loss 2.428631, Validation loss 6.568871\n", "Epoch 4267, Training loss 2.428631, Validation loss 6.568872\n", "Epoch 4268, Training loss 2.428631, Validation loss 6.568867\n", "Epoch 4269, Training loss 2.428631, Validation loss 6.568872\n", "Epoch 4270, Training loss 2.428632, Validation loss 6.568867\n", "Epoch 4271, Training loss 2.428632, Validation loss 6.568872\n", "Epoch 4272, Training loss 2.428632, Validation loss 6.568862\n", "Epoch 4273, Training loss 2.428632, Validation loss 6.568868\n", "Epoch 4274, Training loss 2.428632, Validation loss 6.568877\n", "Epoch 4275, Training loss 2.428631, Validation loss 6.568872\n", "Epoch 4276, Training loss 2.428632, Validation loss 6.568877\n", "Epoch 4277, Training loss 2.428632, Validation loss 6.568872\n", "Epoch 4278, Training loss 2.428631, Validation loss 6.568877\n", "Epoch 4279, Training loss 2.428632, Validation loss 6.568882\n", "Epoch 4280, Training loss 2.428632, Validation loss 6.568872\n", "Epoch 4281, Training loss 2.428632, Validation loss 6.568887\n", "Epoch 4282, Training loss 2.428631, 
Validation loss 6.568888\n", "Epoch 4283, Training loss 2.428632, Validation loss 6.568882\n", "Epoch 4284, Training loss 2.428631, Validation loss 6.568888\n", "Epoch 4285, Training loss 2.428629, Validation loss 6.568882\n", "Epoch 4286, Training loss 2.428632, Validation loss 6.568888\n", "Epoch 4287, Training loss 2.428631, Validation loss 6.568878\n", "Epoch 4288, Training loss 2.428632, Validation loss 6.568883\n", "Epoch 4289, Training loss 2.428631, Validation loss 6.568878\n", "Epoch 4290, Training loss 2.428632, Validation loss 6.568898\n", "Epoch 4291, Training loss 2.428630, Validation loss 6.568892\n", "Epoch 4292, Training loss 2.428632, Validation loss 6.568898\n", "Epoch 4293, Training loss 2.428632, Validation loss 6.568892\n", "Epoch 4294, Training loss 2.428631, Validation loss 6.568898\n", "Epoch 4295, Training loss 2.428632, Validation loss 6.568888\n", "Epoch 4296, Training loss 2.428632, Validation loss 6.568893\n", "Epoch 4297, Training loss 2.428631, Validation loss 6.568903\n", "Epoch 4298, Training loss 2.428631, Validation loss 6.568898\n", "Epoch 4299, Training loss 2.428632, Validation loss 6.568903\n", "Epoch 4300, Training loss 2.428632, Validation loss 6.568898\n", "Epoch 4301, Training loss 2.428632, Validation loss 6.568903\n", "Epoch 4302, Training loss 2.428632, Validation loss 6.568903\n", "Epoch 4303, Training loss 2.428630, Validation loss 6.568898\n", "Epoch 4304, Training loss 2.428632, Validation loss 6.568903\n", "Epoch 4305, Training loss 2.428631, Validation loss 6.568913\n", "Epoch 4306, Training loss 2.428631, Validation loss 6.568908\n", "Epoch 4307, Training loss 2.428631, Validation loss 6.568913\n", "Epoch 4308, Training loss 2.428631, Validation loss 6.568908\n", "Epoch 4309, Training loss 2.428631, Validation loss 6.568913\n", "Epoch 4310, Training loss 2.428632, Validation loss 6.568913\n", "Epoch 4311, Training loss 2.428631, Validation loss 6.568918\n", "Epoch 4312, Training loss 2.428631, Validation loss 6.568923\n", "Epoch 4313, Training loss 2.428632, Validation loss 6.568928\n", "Epoch 4314, Training loss 2.428631, Validation loss 6.568923\n", "Epoch 4315, Training loss 2.428632, Validation loss 6.568933\n", "Epoch 4316, Training loss 2.428631, Validation loss 6.568928\n", "Epoch 4317, Training loss 2.428631, Validation loss 6.568943\n", "Epoch 4318, Training loss 2.428632, Validation loss 6.568943\n", "Epoch 4319, Training loss 2.428631, Validation loss 6.568943\n", "Epoch 4320, Training loss 2.428631, Validation loss 6.568943\n", "Epoch 4321, Training loss 2.428632, Validation loss 6.568948\n", "Epoch 4322, Training loss 2.428632, Validation loss 6.568943\n", "Epoch 4323, Training loss 2.428630, Validation loss 6.568958\n", "Epoch 4324, Training loss 2.428632, Validation loss 6.568943\n", "Epoch 4325, Training loss 2.428632, Validation loss 6.568943\n", "Epoch 4326, Training loss 2.428632, Validation loss 6.568938\n", "Epoch 4327, Training loss 2.428630, Validation loss 6.568943\n", "Epoch 4328, Training loss 2.428632, Validation loss 6.568938\n", "Epoch 4329, Training loss 2.428630, Validation loss 6.568953\n", "Epoch 4330, Training loss 2.428630, Validation loss 6.568938\n", "Epoch 4331, Training loss 2.428632, Validation loss 6.568953\n", "Epoch 4332, Training loss 2.428631, Validation loss 6.568953\n", "Epoch 4333, Training loss 2.428631, Validation loss 6.568949\n", "Epoch 4334, Training loss 2.428632, Validation loss 6.568953\n", "Epoch 4335, Training loss 2.428632, Validation loss 6.568949\n", "Epoch 4336, Training loss 
2.428631, Validation loss 6.568953\n", "Epoch 4337, Training loss 2.428631, Validation loss 6.568953\n", "Epoch 4338, Training loss 2.428631, Validation loss 6.568963\n", "Epoch 4339, Training loss 2.428632, Validation loss 6.568953\n", "Epoch 4340, Training loss 2.428632, Validation loss 6.568968\n", "Epoch 4341, Training loss 2.428632, Validation loss 6.568963\n", "Epoch 4342, Training loss 2.428631, Validation loss 6.568949\n", "Epoch 4343, Training loss 2.428631, Validation loss 6.568963\n", "Epoch 4344, Training loss 2.428632, Validation loss 6.568963\n", "Epoch 4345, Training loss 2.428632, Validation loss 6.568963\n", "Epoch 4346, Training loss 2.428631, Validation loss 6.568963\n", "Epoch 4347, Training loss 2.428632, Validation loss 6.568968\n", "Epoch 4348, Training loss 2.428631, Validation loss 6.568963\n", "Epoch 4349, Training loss 2.428632, Validation loss 6.568959\n", "Epoch 4350, Training loss 2.428631, Validation loss 6.568963\n", "Epoch 4351, Training loss 2.428632, Validation loss 6.568963\n", "Epoch 4352, Training loss 2.428631, Validation loss 6.568974\n", "Epoch 4353, Training loss 2.428631, Validation loss 6.568963\n", "Epoch 4354, Training loss 2.428632, Validation loss 6.568959\n", "Epoch 4355, Training loss 2.428631, Validation loss 6.568974\n", "Epoch 4356, Training loss 2.428631, Validation loss 6.568963\n", "Epoch 4357, Training loss 2.428630, Validation loss 6.568974\n", "Epoch 4358, Training loss 2.428632, Validation loss 6.568974\n", "Epoch 4359, Training loss 2.428632, Validation loss 6.568959\n", "Epoch 4360, Training loss 2.428632, Validation loss 6.568974\n", "Epoch 4361, Training loss 2.428632, Validation loss 6.568969\n", "Epoch 4362, Training loss 2.428630, Validation loss 6.568974\n", "Epoch 4363, Training loss 2.428631, Validation loss 6.568974\n", "Epoch 4364, Training loss 2.428631, Validation loss 6.568969\n", "Epoch 4365, Training loss 2.428631, Validation loss 6.568974\n", "Epoch 4366, Training loss 2.428631, Validation loss 6.568969\n", "Epoch 4367, Training loss 2.428631, Validation loss 6.568984\n", "Epoch 4368, Training loss 2.428631, Validation loss 6.568969\n", "Epoch 4369, Training loss 2.428632, Validation loss 6.568984\n", "Epoch 4370, Training loss 2.428631, Validation loss 6.568984\n", "Epoch 4371, Training loss 2.428631, Validation loss 6.568984\n", "Epoch 4372, Training loss 2.428632, Validation loss 6.568984\n", "Epoch 4373, Training loss 2.428631, Validation loss 6.568979\n", "Epoch 4374, Training loss 2.428631, Validation loss 6.568984\n", "Epoch 4375, Training loss 2.428632, Validation loss 6.568984\n", "Epoch 4376, Training loss 2.428632, Validation loss 6.568984\n", "Epoch 4377, Training loss 2.428631, Validation loss 6.568984\n", "Epoch 4378, Training loss 2.428632, Validation loss 6.568999\n", "Epoch 4379, Training loss 2.428632, Validation loss 6.568984\n", "Epoch 4380, Training loss 2.428631, Validation loss 6.568979\n", "Epoch 4381, Training loss 2.428632, Validation loss 6.568994\n", "Epoch 4382, Training loss 2.428632, Validation loss 6.568984\n", "Epoch 4383, Training loss 2.428631, Validation loss 6.568994\n", "Epoch 4384, Training loss 2.428630, Validation loss 6.568994\n", "Epoch 4385, Training loss 2.428632, Validation loss 6.568979\n", "Epoch 4386, Training loss 2.428631, Validation loss 6.568994\n", "Epoch 4387, Training loss 2.428630, Validation loss 6.568990\n", "Epoch 4388, Training loss 2.428631, Validation loss 6.568979\n", "Epoch 4389, Training loss 2.428631, Validation loss 6.568994\n", "Epoch 4390, 
Training loss 2.428631, Validation loss 6.568990\n", "Epoch 4391, Training loss 2.428632, Validation loss 6.568994\n", "Epoch 4392, Training loss 2.428632, Validation loss 6.568990\n", "Epoch 4393, Training loss 2.428631, Validation loss 6.568994\n", "Epoch 4394, Training loss 2.428630, Validation loss 6.568994\n", "Epoch 4395, Training loss 2.428632, Validation loss 6.569004\n", "Epoch 4396, Training loss 2.428631, Validation loss 6.568994\n", "Epoch 4397, Training loss 2.428630, Validation loss 6.568990\n", "Epoch 4398, Training loss 2.428632, Validation loss 6.569004\n", "Epoch 4399, Training loss 2.428632, Validation loss 6.568990\n", "Epoch 4400, Training loss 2.428632, Validation loss 6.569004\n", "Epoch 4401, Training loss 2.428630, Validation loss 6.569004\n", "Epoch 4402, Training loss 2.428631, Validation loss 6.569004\n", "Epoch 4403, Training loss 2.428630, Validation loss 6.569004\n", "Epoch 4404, Training loss 2.428631, Validation loss 6.569000\n", "Epoch 4405, Training loss 2.428632, Validation loss 6.569004\n", "Epoch 4406, Training loss 2.428631, Validation loss 6.569000\n", "Epoch 4407, Training loss 2.428631, Validation loss 6.569015\n", "Epoch 4408, Training loss 2.428632, Validation loss 6.569004\n", "Epoch 4409, Training loss 2.428631, Validation loss 6.569015\n", "Epoch 4410, Training loss 2.428631, Validation loss 6.569015\n", "Epoch 4411, Training loss 2.428631, Validation loss 6.569000\n", "Epoch 4412, Training loss 2.428632, Validation loss 6.569015\n", "Epoch 4413, Training loss 2.428632, Validation loss 6.569015\n", "Epoch 4414, Training loss 2.428630, Validation loss 6.569015\n", "Epoch 4415, Training loss 2.428631, Validation loss 6.569015\n", "Epoch 4416, Training loss 2.428631, Validation loss 6.569010\n", "Epoch 4417, Training loss 2.428630, Validation loss 6.569015\n", "Epoch 4418, Training loss 2.428632, Validation loss 6.569010\n", "Epoch 4419, Training loss 2.428632, Validation loss 6.569015\n", "Epoch 4420, Training loss 2.428631, Validation loss 6.569015\n", "Epoch 4421, Training loss 2.428630, Validation loss 6.569010\n", "Epoch 4422, Training loss 2.428632, Validation loss 6.569015\n", "Epoch 4423, Training loss 2.428632, Validation loss 6.569010\n", "Epoch 4424, Training loss 2.428630, Validation loss 6.569025\n", "Epoch 4425, Training loss 2.428631, Validation loss 6.569010\n", "Epoch 4426, Training loss 2.428632, Validation loss 6.569010\n", "Epoch 4427, Training loss 2.428631, Validation loss 6.569025\n", "Epoch 4428, Training loss 2.428632, Validation loss 6.569030\n", "Epoch 4429, Training loss 2.428632, Validation loss 6.569025\n", "Epoch 4430, Training loss 2.428631, Validation loss 6.569020\n", "Epoch 4431, Training loss 2.428631, Validation loss 6.569025\n", "Epoch 4432, Training loss 2.428633, Validation loss 6.569025\n", "Epoch 4433, Training loss 2.428631, Validation loss 6.569025\n", "Epoch 4434, Training loss 2.428630, Validation loss 6.569025\n", "Epoch 4435, Training loss 2.428632, Validation loss 6.569020\n", "Epoch 4436, Training loss 2.428632, Validation loss 6.569035\n", "Epoch 4437, Training loss 2.428631, Validation loss 6.569020\n", "Epoch 4438, Training loss 2.428632, Validation loss 6.569035\n", "Epoch 4439, Training loss 2.428632, Validation loss 6.569035\n", "Epoch 4440, Training loss 2.428632, Validation loss 6.569020\n", "Epoch 4441, Training loss 2.428632, Validation loss 6.569035\n", "Epoch 4442, Training loss 2.428632, Validation loss 6.569039\n", "Epoch 4443, Training loss 2.428631, Validation loss 6.569035\n", 
"Epoch 4444, Training loss 2.428630, Validation loss 6.569030\n", "Epoch 4445, Training loss 2.428630, Validation loss 6.569030\n", "Epoch 4446, Training loss 2.428632, Validation loss 6.569035\n", "Epoch 4447, Training loss 2.428631, Validation loss 6.569030\n", "Epoch 4448, Training loss 2.428632, Validation loss 6.569035\n", "Epoch 4449, Training loss 2.428631, Validation loss 6.569030\n", "Epoch 4450, Training loss 2.428631, Validation loss 6.569045\n", "Epoch 4451, Training loss 2.428632, Validation loss 6.569035\n", "Epoch 4452, Training loss 2.428632, Validation loss 6.569030\n", "Epoch 4453, Training loss 2.428630, Validation loss 6.569045\n", "Epoch 4454, Training loss 2.428630, Validation loss 6.569030\n", "Epoch 4455, Training loss 2.428632, Validation loss 6.569045\n", "Epoch 4456, Training loss 2.428632, Validation loss 6.569040\n", "Epoch 4457, Training loss 2.428631, Validation loss 6.569045\n", "Epoch 4458, Training loss 2.428630, Validation loss 6.569045\n", "Epoch 4459, Training loss 2.428631, Validation loss 6.569040\n", "Epoch 4460, Training loss 2.428631, Validation loss 6.569045\n", "Epoch 4461, Training loss 2.428631, Validation loss 6.569040\n", "Epoch 4462, Training loss 2.428632, Validation loss 6.569045\n", "Epoch 4463, Training loss 2.428631, Validation loss 6.569040\n", "Epoch 4464, Training loss 2.428631, Validation loss 6.569056\n", "Epoch 4465, Training loss 2.428632, Validation loss 6.569045\n", "Epoch 4466, Training loss 2.428632, Validation loss 6.569040\n", "Epoch 4467, Training loss 2.428631, Validation loss 6.569056\n", "Epoch 4468, Training loss 2.428630, Validation loss 6.569040\n", "Epoch 4469, Training loss 2.428631, Validation loss 6.569056\n", "Epoch 4470, Training loss 2.428632, Validation loss 6.569056\n", "Epoch 4471, Training loss 2.428632, Validation loss 6.569040\n", "Epoch 4472, Training loss 2.428632, Validation loss 6.569056\n", "Epoch 4473, Training loss 2.428631, Validation loss 6.569051\n", "Epoch 4474, Training loss 2.428630, Validation loss 6.569056\n", "Epoch 4475, Training loss 2.428631, Validation loss 6.569051\n", "Epoch 4476, Training loss 2.428631, Validation loss 6.569051\n", "Epoch 4477, Training loss 2.428631, Validation loss 6.569056\n", "Epoch 4478, Training loss 2.428630, Validation loss 6.569051\n", "Epoch 4479, Training loss 2.428631, Validation loss 6.569066\n", "Epoch 4480, Training loss 2.428632, Validation loss 6.569051\n", "Epoch 4481, Training loss 2.428630, Validation loss 6.569066\n", "Epoch 4482, Training loss 2.428631, Validation loss 6.569061\n", "Epoch 4483, Training loss 2.428632, Validation loss 6.569051\n", "Epoch 4484, Training loss 2.428631, Validation loss 6.569066\n", "Epoch 4485, Training loss 2.428631, Validation loss 6.569061\n", "Epoch 4486, Training loss 2.428631, Validation loss 6.569066\n", "Epoch 4487, Training loss 2.428630, Validation loss 6.569061\n", "Epoch 4488, Training loss 2.428632, Validation loss 6.569066\n", "Epoch 4489, Training loss 2.428632, Validation loss 6.569066\n", "Epoch 4490, Training loss 2.428631, Validation loss 6.569061\n", "Epoch 4491, Training loss 2.428631, Validation loss 6.569066\n", "Epoch 4492, Training loss 2.428633, Validation loss 6.569061\n", "Epoch 4493, Training loss 2.428631, Validation loss 6.569076\n", "Epoch 4494, Training loss 2.428631, Validation loss 6.569061\n", "Epoch 4495, Training loss 2.428632, Validation loss 6.569076\n", "Epoch 4496, Training loss 2.428632, Validation loss 6.569076\n", "Epoch 4497, Training loss 2.428632, Validation loss 
6.569061\n", "Epoch 4498, Training loss 2.428631, Validation loss 6.569076\n", "Epoch 4499, Training loss 2.428631, Validation loss 6.569071\n", "Epoch 4500, Training loss 2.428631, Validation loss 6.569076\n", "Epoch 4501, Training loss 2.428631, Validation loss 6.569071\n", "Epoch 4502, Training loss 2.428631, Validation loss 6.569076\n", "Epoch 4503, Training loss 2.428631, Validation loss 6.569076\n", "Epoch 4504, Training loss 2.428630, Validation loss 6.569071\n", "Epoch 4505, Training loss 2.428631, Validation loss 6.569071\n", "Epoch 4506, Training loss 2.428631, Validation loss 6.569071\n", "Epoch 4507, Training loss 2.428630, Validation loss 6.569086\n", "Epoch 4508, Training loss 2.428630, Validation loss 6.569086\n", "Epoch 4509, Training loss 2.428631, Validation loss 6.569071\n", "Epoch 4510, Training loss 2.428631, Validation loss 6.569086\n", "Epoch 4511, Training loss 2.428632, Validation loss 6.569081\n", "Epoch 4512, Training loss 2.428631, Validation loss 6.569086\n", "Epoch 4513, Training loss 2.428630, Validation loss 6.569081\n", "Epoch 4514, Training loss 2.428630, Validation loss 6.569081\n", "Epoch 4515, Training loss 2.428631, Validation loss 6.569086\n", "Epoch 4516, Training loss 2.428632, Validation loss 6.569081\n", "Epoch 4517, Training loss 2.428632, Validation loss 6.569086\n", "Epoch 4518, Training loss 2.428631, Validation loss 6.569081\n", "Epoch 4519, Training loss 2.428631, Validation loss 6.569097\n", "Epoch 4520, Training loss 2.428631, Validation loss 6.569081\n", "Epoch 4521, Training loss 2.428631, Validation loss 6.569097\n", "Epoch 4522, Training loss 2.428631, Validation loss 6.569097\n", "Epoch 4523, Training loss 2.428631, Validation loss 6.569081\n", "Epoch 4524, Training loss 2.428632, Validation loss 6.569097\n", "Epoch 4525, Training loss 2.428631, Validation loss 6.569092\n", "Epoch 4526, Training loss 2.428631, Validation loss 6.569097\n", "Epoch 4527, Training loss 2.428631, Validation loss 6.569097\n", "Epoch 4528, Training loss 2.428630, Validation loss 6.569092\n", "Epoch 4529, Training loss 2.428632, Validation loss 6.569097\n", "Epoch 4530, Training loss 2.428632, Validation loss 6.569092\n", "Epoch 4531, Training loss 2.428631, Validation loss 6.569097\n", "Epoch 4532, Training loss 2.428631, Validation loss 6.569092\n", "Epoch 4533, Training loss 2.428630, Validation loss 6.569092\n", "Epoch 4534, Training loss 2.428631, Validation loss 6.569097\n", "Epoch 4535, Training loss 2.428630, Validation loss 6.569092\n", "Epoch 4536, Training loss 2.428632, Validation loss 6.569107\n", "Epoch 4537, Training loss 2.428631, Validation loss 6.569092\n", "Epoch 4538, Training loss 2.428630, Validation loss 6.569092\n", "Epoch 4539, Training loss 2.428632, Validation loss 6.569102\n", "Epoch 4540, Training loss 2.428632, Validation loss 6.569092\n", "Epoch 4541, Training loss 2.428630, Validation loss 6.569107\n", "Epoch 4542, Training loss 2.428630, Validation loss 6.569102\n", "Epoch 4543, Training loss 2.428632, Validation loss 6.569107\n", "Epoch 4544, Training loss 2.428632, Validation loss 6.569102\n", "Epoch 4545, Training loss 2.428631, Validation loss 6.569117\n", "Epoch 4546, Training loss 2.428631, Validation loss 6.569107\n", "Epoch 4547, Training loss 2.428630, Validation loss 6.569102\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 4548, Training loss 2.428631, Validation loss 6.569117\n", "Epoch 4549, Training loss 2.428632, Validation loss 6.569102\n", "Epoch 4550, Training loss 2.428631, Validation 
loss 6.569117\n", "Epoch 4551, Training loss 2.428630, Validation loss 6.569112\n", "Epoch 4552, Training loss 2.428631, Validation loss 6.569127\n", "Epoch 4553, Training loss 2.428632, Validation loss 6.569127\n", "Epoch 4554, Training loss 2.428631, Validation loss 6.569122\n", "Epoch 4555, Training loss 2.428631, Validation loss 6.569117\n", "Epoch 4556, Training loss 2.428630, Validation loss 6.569117\n", "Epoch 4557, Training loss 2.428631, Validation loss 6.569122\n", "Epoch 4558, Training loss 2.428632, Validation loss 6.569132\n", "Epoch 4559, Training loss 2.428631, Validation loss 6.569127\n", "Epoch 4560, Training loss 2.428630, Validation loss 6.569122\n", "Epoch 4561, Training loss 2.428632, Validation loss 6.569132\n", "Epoch 4562, Training loss 2.428631, Validation loss 6.569137\n", "Epoch 4563, Training loss 2.428631, Validation loss 6.569127\n", "Epoch 4564, Training loss 2.428630, Validation loss 6.569132\n", "Epoch 4565, Training loss 2.428631, Validation loss 6.569141\n", "Epoch 4566, Training loss 2.428631, Validation loss 6.569147\n", "Epoch 4567, Training loss 2.428631, Validation loss 6.569127\n", "Epoch 4568, Training loss 2.428632, Validation loss 6.569122\n", "Epoch 4569, Training loss 2.428631, Validation loss 6.569132\n", "Epoch 4570, Training loss 2.428630, Validation loss 6.569137\n", "Epoch 4571, Training loss 2.428630, Validation loss 6.569147\n", "Epoch 4572, Training loss 2.428631, Validation loss 6.569141\n", "Epoch 4573, Training loss 2.428632, Validation loss 6.569132\n", "Epoch 4574, Training loss 2.428631, Validation loss 6.569137\n", "Epoch 4575, Training loss 2.428630, Validation loss 6.569147\n", "Epoch 4576, Training loss 2.428631, Validation loss 6.569137\n", "Epoch 4577, Training loss 2.428631, Validation loss 6.569142\n", "Epoch 4578, Training loss 2.428631, Validation loss 6.569141\n", "Epoch 4579, Training loss 2.428632, Validation loss 6.569147\n", "Epoch 4580, Training loss 2.428632, Validation loss 6.569137\n", "Epoch 4581, Training loss 2.428631, Validation loss 6.569142\n", "Epoch 4582, Training loss 2.428631, Validation loss 6.569152\n", "Epoch 4583, Training loss 2.428631, Validation loss 6.569142\n", "Epoch 4584, Training loss 2.428630, Validation loss 6.569137\n", "Epoch 4585, Training loss 2.428631, Validation loss 6.569147\n", "Epoch 4586, Training loss 2.428631, Validation loss 6.569137\n", "Epoch 4587, Training loss 2.428632, Validation loss 6.569142\n", "Epoch 4588, Training loss 2.428631, Validation loss 6.569147\n", "Epoch 4589, Training loss 2.428631, Validation loss 6.569147\n", "Epoch 4590, Training loss 2.428631, Validation loss 6.569137\n", "Epoch 4591, Training loss 2.428631, Validation loss 6.569142\n", "Epoch 4592, Training loss 2.428632, Validation loss 6.569152\n", "Epoch 4593, Training loss 2.428632, Validation loss 6.569157\n", "Epoch 4594, Training loss 2.428631, Validation loss 6.569147\n", "Epoch 4595, Training loss 2.428632, Validation loss 6.569152\n", "Epoch 4596, Training loss 2.428631, Validation loss 6.569152\n", "Epoch 4597, Training loss 2.428630, Validation loss 6.569157\n", "Epoch 4598, Training loss 2.428630, Validation loss 6.569147\n", "Epoch 4599, Training loss 2.428631, Validation loss 6.569157\n", "Epoch 4600, Training loss 2.428632, Validation loss 6.569162\n", "Epoch 4601, Training loss 2.428632, Validation loss 6.569152\n", "Epoch 4602, Training loss 2.428632, Validation loss 6.569152\n", "Epoch 4603, Training loss 2.428631, Validation loss 6.569157\n", "Epoch 4604, Training loss 2.428631, 
Validation loss 6.569147\n", "Epoch 4605, Training loss 2.428631, Validation loss 6.569152\n", "Epoch 4606, Training loss 2.428632, Validation loss 6.569162\n", "Epoch 4607, Training loss 2.428631, Validation loss 6.569157\n", "Epoch 4608, Training loss 2.428631, Validation loss 6.569147\n", "Epoch 4609, Training loss 2.428631, Validation loss 6.569157\n", "Epoch 4610, Training loss 2.428631, Validation loss 6.569162\n", "Epoch 4611, Training loss 2.428630, Validation loss 6.569167\n", "Epoch 4612, Training loss 2.428630, Validation loss 6.569158\n", "Epoch 4613, Training loss 2.428631, Validation loss 6.569157\n", "Epoch 4614, Training loss 2.428632, Validation loss 6.569162\n", "Epoch 4615, Training loss 2.428632, Validation loss 6.569152\n", "Epoch 4616, Training loss 2.428632, Validation loss 6.569162\n", "Epoch 4617, Training loss 2.428631, Validation loss 6.569167\n", "Epoch 4618, Training loss 2.428632, Validation loss 6.569162\n", "Epoch 4619, Training loss 2.428632, Validation loss 6.569152\n", "Epoch 4620, Training loss 2.428632, Validation loss 6.569162\n", "Epoch 4621, Training loss 2.428631, Validation loss 6.569167\n", "Epoch 4622, Training loss 2.428631, Validation loss 6.569158\n", "Epoch 4623, Training loss 2.428630, Validation loss 6.569167\n", "Epoch 4624, Training loss 2.428630, Validation loss 6.569158\n", "Epoch 4625, Training loss 2.428630, Validation loss 6.569152\n", "Epoch 4626, Training loss 2.428631, Validation loss 6.569158\n", "Epoch 4627, Training loss 2.428631, Validation loss 6.569167\n", "Epoch 4628, Training loss 2.428631, Validation loss 6.569158\n", "Epoch 4629, Training loss 2.428631, Validation loss 6.569162\n", "Epoch 4630, Training loss 2.428631, Validation loss 6.569162\n", "Epoch 4631, Training loss 2.428630, Validation loss 6.569152\n", "Epoch 4632, Training loss 2.428630, Validation loss 6.569158\n", "Epoch 4633, Training loss 2.428631, Validation loss 6.569162\n", "Epoch 4634, Training loss 2.428632, Validation loss 6.569172\n", "Epoch 4635, Training loss 2.428631, Validation loss 6.569162\n", "Epoch 4636, Training loss 2.428630, Validation loss 6.569158\n", "Epoch 4637, Training loss 2.428630, Validation loss 6.569167\n", "Epoch 4638, Training loss 2.428630, Validation loss 6.569172\n", "Epoch 4639, Training loss 2.428630, Validation loss 6.569162\n", "Epoch 4640, Training loss 2.428631, Validation loss 6.569172\n", "Epoch 4641, Training loss 2.428631, Validation loss 6.569162\n", "Epoch 4642, Training loss 2.428631, Validation loss 6.569158\n", "Epoch 4643, Training loss 2.428630, Validation loss 6.569162\n", "Epoch 4644, Training loss 2.428630, Validation loss 6.569172\n", "Epoch 4645, Training loss 2.428631, Validation loss 6.569178\n", "Epoch 4646, Training loss 2.428632, Validation loss 6.569168\n", "Epoch 4647, Training loss 2.428632, Validation loss 6.569167\n", "Epoch 4648, Training loss 2.428631, Validation loss 6.569172\n", "Epoch 4649, Training loss 2.428631, Validation loss 6.569162\n", "Epoch 4650, Training loss 2.428630, Validation loss 6.569168\n", "Epoch 4651, Training loss 2.428630, Validation loss 6.569178\n", "Epoch 4652, Training loss 2.428630, Validation loss 6.569168\n", "Epoch 4653, Training loss 2.428631, Validation loss 6.569162\n", "Epoch 4654, Training loss 2.428631, Validation loss 6.569172\n", "Epoch 4655, Training loss 2.428632, Validation loss 6.569178\n", "Epoch 4656, Training loss 2.428632, Validation loss 6.569168\n", "Epoch 4657, Training loss 2.428631, Validation loss 6.569173\n", "Epoch 4658, Training loss 
2.428630, Validation loss 6.569172\n", "Epoch 4659, Training loss 2.428630, Validation loss 6.569162\n", "Epoch 4660, Training loss 2.428632, Validation loss 6.569168\n", "Epoch 4661, Training loss 2.428630, Validation loss 6.569178\n", "Epoch 4662, Training loss 2.428630, Validation loss 6.569168\n", "Epoch 4663, Training loss 2.428630, Validation loss 6.569173\n", "Epoch 4664, Training loss 2.428630, Validation loss 6.569168\n", "Epoch 4665, Training loss 2.428631, Validation loss 6.569178\n", "Epoch 4666, Training loss 2.428632, Validation loss 6.569182\n", "Epoch 4667, Training loss 2.428632, Validation loss 6.569173\n", "Epoch 4668, Training loss 2.428631, Validation loss 6.569182\n", "Epoch 4669, Training loss 2.428631, Validation loss 6.569188\n", "Epoch 4670, Training loss 2.428631, Validation loss 6.569178\n", "Epoch 4671, Training loss 2.428630, Validation loss 6.569173\n", "Epoch 4672, Training loss 2.428632, Validation loss 6.569182\n", "Epoch 4673, Training loss 2.428632, Validation loss 6.569173\n", "Epoch 4674, Training loss 2.428632, Validation loss 6.569178\n", "Epoch 4675, Training loss 2.428631, Validation loss 6.569188\n", "Epoch 4676, Training loss 2.428631, Validation loss 6.569182\n", "Epoch 4677, Training loss 2.428630, Validation loss 6.569173\n", "Epoch 4678, Training loss 2.428631, Validation loss 6.569182\n", "Epoch 4679, Training loss 2.428632, Validation loss 6.569188\n", "Epoch 4680, Training loss 2.428632, Validation loss 6.569178\n", "Epoch 4681, Training loss 2.428631, Validation loss 6.569183\n", "Epoch 4682, Training loss 2.428631, Validation loss 6.569182\n", "Epoch 4683, Training loss 2.428631, Validation loss 6.569188\n", "Epoch 4684, Training loss 2.428631, Validation loss 6.569178\n", "Epoch 4685, Training loss 2.428631, Validation loss 6.569188\n", "Epoch 4686, Training loss 2.428632, Validation loss 6.569193\n", "Epoch 4687, Training loss 2.428632, Validation loss 6.569183\n", "Epoch 4688, Training loss 2.428630, Validation loss 6.569178\n", "Epoch 4689, Training loss 2.428630, Validation loss 6.569188\n", "Epoch 4690, Training loss 2.428630, Validation loss 6.569178\n", "Epoch 4691, Training loss 2.428630, Validation loss 6.569183\n", "Epoch 4692, Training loss 2.428631, Validation loss 6.569193\n", "Epoch 4693, Training loss 2.428632, Validation loss 6.569188\n", "Epoch 4694, Training loss 2.428632, Validation loss 6.569178\n", "Epoch 4695, Training loss 2.428632, Validation loss 6.569183\n", "Epoch 4696, Training loss 2.428631, Validation loss 6.569193\n", "Epoch 4697, Training loss 2.428631, Validation loss 6.569183\n", "Epoch 4698, Training loss 2.428631, Validation loss 6.569188\n", "Epoch 4699, Training loss 2.428631, Validation loss 6.569188\n", "Epoch 4700, Training loss 2.428632, Validation loss 6.569178\n", "Epoch 4701, Training loss 2.428630, Validation loss 6.569183\n", "Epoch 4702, Training loss 2.428631, Validation loss 6.569188\n", "Epoch 4703, Training loss 2.428632, Validation loss 6.569198\n", "Epoch 4704, Training loss 2.428631, Validation loss 6.569193\n", "Epoch 4705, Training loss 2.428631, Validation loss 6.569183\n", "Epoch 4706, Training loss 2.428632, Validation loss 6.569193\n", "Epoch 4707, Training loss 2.428631, Validation loss 6.569198\n", "Epoch 4708, Training loss 2.428631, Validation loss 6.569188\n", "Epoch 4709, Training loss 2.428630, Validation loss 6.569193\n", "Epoch 4710, Training loss 2.428631, Validation loss 6.569193\n", "Epoch 4711, Training loss 2.428631, Validation loss 6.569183\n", "Epoch 4712, 
Training loss 2.428631, Validation loss 6.569188\n", "Epoch 4713, Training loss 2.428632, Validation loss 6.569198\n", "Epoch 4714, Training loss 2.428630, Validation loss 6.569203\n", "Epoch 4715, Training loss 2.428631, Validation loss 6.569193\n", "Epoch 4716, Training loss 2.428631, Validation loss 6.569193\n", "Epoch 4717, Training loss 2.428630, Validation loss 6.569198\n", "Epoch 4718, Training loss 2.428630, Validation loss 6.569188\n", "Epoch 4719, Training loss 2.428630, Validation loss 6.569193\n", "Epoch 4720, Training loss 2.428632, Validation loss 6.569203\n", "Epoch 4721, Training loss 2.428631, Validation loss 6.569193\n", "Epoch 4722, Training loss 2.428632, Validation loss 6.569188\n", "Epoch 4723, Training loss 2.428632, Validation loss 6.569198\n", "Epoch 4724, Training loss 2.428632, Validation loss 6.569203\n", "Epoch 4725, Training loss 2.428631, Validation loss 6.569193\n", "Epoch 4726, Training loss 2.428632, Validation loss 6.569199\n", "Epoch 4727, Training loss 2.428630, Validation loss 6.569198\n", "Epoch 4728, Training loss 2.428631, Validation loss 6.569188\n", "Epoch 4729, Training loss 2.428630, Validation loss 6.569193\n", "Epoch 4730, Training loss 2.428631, Validation loss 6.569203\n", "Epoch 4731, Training loss 2.428631, Validation loss 6.569208\n", "Epoch 4732, Training loss 2.428632, Validation loss 6.569203\n", "Epoch 4733, Training loss 2.428632, Validation loss 6.569193\n", "Epoch 4734, Training loss 2.428631, Validation loss 6.569203\n", "Epoch 4735, Training loss 2.428631, Validation loss 6.569208\n", "Epoch 4736, Training loss 2.428631, Validation loss 6.569199\n", "Epoch 4737, Training loss 2.428631, Validation loss 6.569208\n", "Epoch 4738, Training loss 2.428631, Validation loss 6.569213\n", "Epoch 4739, Training loss 2.428631, Validation loss 6.569203\n", "Epoch 4740, Training loss 2.428631, Validation loss 6.569199\n", "Epoch 4741, Training loss 2.428632, Validation loss 6.569208\n", "Epoch 4742, Training loss 2.428631, Validation loss 6.569213\n", "Epoch 4743, Training loss 2.428631, Validation loss 6.569203\n", "Epoch 4744, Training loss 2.428630, Validation loss 6.569213\n", "Epoch 4745, Training loss 2.428630, Validation loss 6.569203\n", "Epoch 4746, Training loss 2.428630, Validation loss 6.569199\n", "Epoch 4747, Training loss 2.428630, Validation loss 6.569203\n", "Epoch 4748, Training loss 2.428630, Validation loss 6.569194\n", "Epoch 4749, Training loss 2.428631, Validation loss 6.569203\n", "Epoch 4750, Training loss 2.428630, Validation loss 6.569209\n", "Epoch 4751, Training loss 2.428631, Validation loss 6.569208\n", "Epoch 4752, Training loss 2.428631, Validation loss 6.569199\n", "Epoch 4753, Training loss 2.428630, Validation loss 6.569203\n", "Epoch 4754, Training loss 2.428631, Validation loss 6.569213\n", "Epoch 4755, Training loss 2.428631, Validation loss 6.569203\n", "Epoch 4756, Training loss 2.428631, Validation loss 6.569209\n", "Epoch 4757, Training loss 2.428631, Validation loss 6.569203\n", "Epoch 4758, Training loss 2.428630, Validation loss 6.569213\n", "Epoch 4759, Training loss 2.428630, Validation loss 6.569203\n", "Epoch 4760, Training loss 2.428631, Validation loss 6.569209\n", "Epoch 4761, Training loss 2.428632, Validation loss 6.569219\n", "Epoch 4762, Training loss 2.428631, Validation loss 6.569213\n", "Epoch 4763, Training loss 2.428631, Validation loss 6.569203\n", "Epoch 4764, Training loss 2.428630, Validation loss 6.569209\n", "Epoch 4765, Training loss 2.428630, Validation loss 6.569219\n", 
"Epoch 4766, Training loss 2.428630, Validation loss 6.569209\n", "Epoch 4767, Training loss 2.428632, Validation loss 6.569214\n", "Epoch 4768, Training loss 2.428631, Validation loss 6.569213\n", "Epoch 4769, Training loss 2.428631, Validation loss 6.569203\n", "Epoch 4770, Training loss 2.428630, Validation loss 6.569209\n", "Epoch 4771, Training loss 2.428631, Validation loss 6.569214\n", "Epoch 4772, Training loss 2.428630, Validation loss 6.569223\n", "Epoch 4773, Training loss 2.428632, Validation loss 6.569219\n", "Epoch 4774, Training loss 2.428632, Validation loss 6.569209\n", "Epoch 4775, Training loss 2.428632, Validation loss 6.569219\n", "Epoch 4776, Training loss 2.428631, Validation loss 6.569223\n", "Epoch 4777, Training loss 2.428631, Validation loss 6.569214\n", "Epoch 4778, Training loss 2.428630, Validation loss 6.569219\n", "Epoch 4779, Training loss 2.428630, Validation loss 6.569219\n", "Epoch 4780, Training loss 2.428631, Validation loss 6.569209\n", "Epoch 4781, Training loss 2.428632, Validation loss 6.569214\n", "Epoch 4782, Training loss 2.428631, Validation loss 6.569223\n", "Epoch 4783, Training loss 2.428631, Validation loss 6.569229\n", "Epoch 4784, Training loss 2.428630, Validation loss 6.569219\n", "Epoch 4785, Training loss 2.428630, Validation loss 6.569214\n", "Epoch 4786, Training loss 2.428632, Validation loss 6.569223\n", "Epoch 4787, Training loss 2.428632, Validation loss 6.569214\n", "Epoch 4788, Training loss 2.428632, Validation loss 6.569219\n", "Epoch 4789, Training loss 2.428632, Validation loss 6.569229\n", "Epoch 4790, Training loss 2.428631, Validation loss 6.569223\n", "Epoch 4791, Training loss 2.428631, Validation loss 6.569214\n", "Epoch 4792, Training loss 2.428630, Validation loss 6.569223\n", "Epoch 4793, Training loss 2.428631, Validation loss 6.569229\n", "Epoch 4794, Training loss 2.428631, Validation loss 6.569219\n", "Epoch 4795, Training loss 2.428632, Validation loss 6.569224\n", "Epoch 4796, Training loss 2.428631, Validation loss 6.569223\n", "Epoch 4797, Training loss 2.428630, Validation loss 6.569229\n", "Epoch 4798, Training loss 2.428630, Validation loss 6.569219\n", "Epoch 4799, Training loss 2.428631, Validation loss 6.569229\n", "Epoch 4800, Training loss 2.428632, Validation loss 6.569234\n", "Epoch 4801, Training loss 2.428632, Validation loss 6.569224\n", "Epoch 4802, Training loss 2.428631, Validation loss 6.569219\n", "Epoch 4803, Training loss 2.428631, Validation loss 6.569229\n", "Epoch 4804, Training loss 2.428631, Validation loss 6.569219\n", "Epoch 4805, Training loss 2.428631, Validation loss 6.569224\n", "Epoch 4806, Training loss 2.428630, Validation loss 6.569234\n", "Epoch 4807, Training loss 2.428630, Validation loss 6.569224\n", "Epoch 4808, Training loss 2.428630, Validation loss 6.569229\n", "Epoch 4809, Training loss 2.428631, Validation loss 6.569224\n", "Epoch 4810, Training loss 2.428630, Validation loss 6.569234\n", "Epoch 4811, Training loss 2.428630, Validation loss 6.569224\n", "Epoch 4812, Training loss 2.428630, Validation loss 6.569229\n", "Epoch 4813, Training loss 2.428632, Validation loss 6.569239\n", "Epoch 4814, Training loss 2.428632, Validation loss 6.569229\n", "Epoch 4815, Training loss 2.428631, Validation loss 6.569224\n", "Epoch 4816, Training loss 2.428631, Validation loss 6.569229\n", "Epoch 4817, Training loss 2.428631, Validation loss 6.569220\n", "Epoch 4818, Training loss 2.428631, Validation loss 6.569229\n", "Epoch 4819, Training loss 2.428630, Validation loss 
6.569234\n", "Epoch 4820, Training loss 2.428631, Validation loss 6.569234\n", "Epoch 4821, Training loss 2.428631, Validation loss 6.569224\n", "Epoch 4822, Training loss 2.428631, Validation loss 6.569229\n", "Epoch 4823, Training loss 2.428631, Validation loss 6.569234\n", "Epoch 4824, Training loss 2.428631, Validation loss 6.569244\n", "Epoch 4825, Training loss 2.428631, Validation loss 6.569234\n", "Epoch 4826, Training loss 2.428630, Validation loss 6.569229\n", "Epoch 4827, Training loss 2.428631, Validation loss 6.569239\n", "Epoch 4828, Training loss 2.428631, Validation loss 6.569229\n", "Epoch 4829, Training loss 2.428631, Validation loss 6.569234\n", "Epoch 4830, Training loss 2.428631, Validation loss 6.569244\n", "Epoch 4831, Training loss 2.428631, Validation loss 6.569239\n", "Epoch 4832, Training loss 2.428631, Validation loss 6.569229\n", "Epoch 4833, Training loss 2.428630, Validation loss 6.569234\n", "Epoch 4834, Training loss 2.428631, Validation loss 6.569244\n", "Epoch 4835, Training loss 2.428631, Validation loss 6.569234\n", "Epoch 4836, Training loss 2.428631, Validation loss 6.569240\n", "Epoch 4837, Training loss 2.428630, Validation loss 6.569239\n", "Epoch 4838, Training loss 2.428630, Validation loss 6.569229\n", "Epoch 4839, Training loss 2.428630, Validation loss 6.569234\n", "Epoch 4840, Training loss 2.428632, Validation loss 6.569240\n", "Epoch 4841, Training loss 2.428632, Validation loss 6.569249\n", "Epoch 4842, Training loss 2.428632, Validation loss 6.569240\n", "Epoch 4843, Training loss 2.428632, Validation loss 6.569234\n", "Epoch 4844, Training loss 2.428631, Validation loss 6.569244\n", "Epoch 4845, Training loss 2.428631, Validation loss 6.569234\n", "Epoch 4846, Training loss 2.428630, Validation loss 6.569240\n", "Epoch 4847, Training loss 2.428631, Validation loss 6.569244\n", "Epoch 4848, Training loss 2.428632, Validation loss 6.569244\n", "Epoch 4849, Training loss 2.428632, Validation loss 6.569234\n", "Epoch 4850, Training loss 2.428632, Validation loss 6.569240\n", "Epoch 4851, Training loss 2.428631, Validation loss 6.569249\n", "Epoch 4852, Training loss 2.428630, Validation loss 6.569254\n", "Epoch 4853, Training loss 2.428631, Validation loss 6.569244\n", "Epoch 4854, Training loss 2.428631, Validation loss 6.569240\n", "Epoch 4855, Training loss 2.428632, Validation loss 6.569249\n", "Epoch 4856, Training loss 2.428632, Validation loss 6.569240\n", "Epoch 4857, Training loss 2.428630, Validation loss 6.569244\n", "Epoch 4858, Training loss 2.428631, Validation loss 6.569254\n", "Epoch 4859, Training loss 2.428630, Validation loss 6.569249\n", "Epoch 4860, Training loss 2.428630, Validation loss 6.569240\n", "Epoch 4861, Training loss 2.428632, Validation loss 6.569249\n", "Epoch 4862, Training loss 2.428632, Validation loss 6.569240\n", "Epoch 4863, Training loss 2.428631, Validation loss 6.569244\n", "Epoch 4864, Training loss 2.428630, Validation loss 6.569250\n", "Epoch 4865, Training loss 2.428630, Validation loss 6.569249\n", "Epoch 4866, Training loss 2.428630, Validation loss 6.569240\n", "Epoch 4867, Training loss 2.428630, Validation loss 6.569244\n", "Epoch 4868, Training loss 2.428632, Validation loss 6.569254\n", "Epoch 4869, Training loss 2.428632, Validation loss 6.569244\n", "Epoch 4870, Training loss 2.428632, Validation loss 6.569250\n", "Epoch 4871, Training loss 2.428630, Validation loss 6.569244\n", "Epoch 4872, Training loss 2.428630, Validation loss 6.569254\n", "Epoch 4873, Training loss 2.428630, 
Validation loss 6.569244\n", "Epoch 4874, Training loss 2.428631, Validation loss 6.569250\n", "Epoch 4875, Training loss 2.428630, Validation loss 6.569260\n", "Epoch 4876, Training loss 2.428630, Validation loss 6.569250\n", "Epoch 4877, Training loss 2.428631, Validation loss 6.569255\n", "Epoch 4878, Training loss 2.428631, Validation loss 6.569250\n", "Epoch 4879, Training loss 2.428631, Validation loss 6.569260\n", "Epoch 4880, Training loss 2.428630, Validation loss 6.569250\n", "Epoch 4881, Training loss 2.428632, Validation loss 6.569255\n", "Epoch 4882, Training loss 2.428631, Validation loss 6.569264\n", "Epoch 4883, Training loss 2.428632, Validation loss 6.569255\n", "Epoch 4884, Training loss 2.428631, Validation loss 6.569250\n", "Epoch 4885, Training loss 2.428629, Validation loss 6.569255\n", "Epoch 4886, Training loss 2.428629, Validation loss 6.569245\n", "Epoch 4887, Training loss 2.428631, Validation loss 6.569255\n", "Epoch 4888, Training loss 2.428632, Validation loss 6.569260\n", "Epoch 4889, Training loss 2.428631, Validation loss 6.569260\n", "Epoch 4890, Training loss 2.428631, Validation loss 6.569250\n", "Epoch 4891, Training loss 2.428631, Validation loss 6.569255\n", "Epoch 4892, Training loss 2.428630, Validation loss 6.569260\n", "Epoch 4893, Training loss 2.428630, Validation loss 6.569270\n", "Epoch 4894, Training loss 2.428630, Validation loss 6.569260\n", "Epoch 4895, Training loss 2.428631, Validation loss 6.569255\n", "Epoch 4896, Training loss 2.428632, Validation loss 6.569264\n", "Epoch 4897, Training loss 2.428632, Validation loss 6.569255\n", "Epoch 4898, Training loss 2.428631, Validation loss 6.569260\n", "Epoch 4899, Training loss 2.428631, Validation loss 6.569270\n", "Epoch 4900, Training loss 2.428631, Validation loss 6.569264\n", "Epoch 4901, Training loss 2.428631, Validation loss 6.569255\n", "Epoch 4902, Training loss 2.428631, Validation loss 6.569260\n", "Epoch 4903, Training loss 2.428630, Validation loss 6.569270\n", "Epoch 4904, Training loss 2.428630, Validation loss 6.569260\n", "Epoch 4905, Training loss 2.428630, Validation loss 6.569265\n", "Epoch 4906, Training loss 2.428631, Validation loss 6.569264\n", "Epoch 4907, Training loss 2.428632, Validation loss 6.569270\n", "Epoch 4908, Training loss 2.428632, Validation loss 6.569260\n", "Epoch 4909, Training loss 2.428632, Validation loss 6.569265\n", "Epoch 4910, Training loss 2.428630, Validation loss 6.569275\n", "Epoch 4911, Training loss 2.428630, Validation loss 6.569265\n", "Epoch 4912, Training loss 2.428631, Validation loss 6.569260\n", "Epoch 4913, Training loss 2.428630, Validation loss 6.569270\n", "Epoch 4914, Training loss 2.428630, Validation loss 6.569260\n", "Epoch 4915, Training loss 2.428632, Validation loss 6.569265\n", "Epoch 4916, Training loss 2.428632, Validation loss 6.569270\n", "Epoch 4917, Training loss 2.428631, Validation loss 6.569270\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 4918, Training loss 2.428631, Validation loss 6.569275\n", "Epoch 4919, Training loss 2.428631, Validation loss 6.569265\n", "Epoch 4920, Training loss 2.428630, Validation loss 6.569275\n", "Epoch 4921, Training loss 2.428630, Validation loss 6.569265\n", "Epoch 4922, Training loss 2.428631, Validation loss 6.569270\n", "Epoch 4923, Training loss 2.428631, Validation loss 6.569265\n", "Epoch 4924, Training loss 2.428630, Validation loss 6.569275\n", "Epoch 4925, Training loss 2.428630, Validation loss 6.569265\n", "Epoch 4926, Training loss 
2.428631, Validation loss 6.569270\n", "Epoch 4927, Training loss 2.428631, Validation loss 6.569280\n", "Epoch 4928, Training loss 2.428631, Validation loss 6.569270\n", "Epoch 4929, Training loss 2.428631, Validation loss 6.569265\n", "Epoch 4930, Training loss 2.428631, Validation loss 6.569270\n", "Epoch 4931, Training loss 2.428630, Validation loss 6.569280\n", "Epoch 4932, Training loss 2.428630, Validation loss 6.569270\n", "Epoch 4933, Training loss 2.428631, Validation loss 6.569275\n", "Epoch 4934, Training loss 2.428631, Validation loss 6.569275\n", "Epoch 4935, Training loss 2.428631, Validation loss 6.569265\n", "Epoch 4936, Training loss 2.428632, Validation loss 6.569270\n", "Epoch 4937, Training loss 2.428631, Validation loss 6.569280\n", "Epoch 4938, Training loss 2.428630, Validation loss 6.569285\n", "Epoch 4939, Training loss 2.428630, Validation loss 6.569275\n", "Epoch 4940, Training loss 2.428630, Validation loss 6.569270\n", "Epoch 4941, Training loss 2.428631, Validation loss 6.569280\n", "Epoch 4942, Training loss 2.428630, Validation loss 6.569270\n", "Epoch 4943, Training loss 2.428631, Validation loss 6.569275\n", "Epoch 4944, Training loss 2.428631, Validation loss 6.569285\n", "Epoch 4945, Training loss 2.428631, Validation loss 6.569275\n", "Epoch 4946, Training loss 2.428631, Validation loss 6.569270\n", "Epoch 4947, Training loss 2.428630, Validation loss 6.569275\n", "Epoch 4948, Training loss 2.428631, Validation loss 6.569285\n", "Epoch 4949, Training loss 2.428631, Validation loss 6.569275\n", "Epoch 4950, Training loss 2.428631, Validation loss 6.569281\n", "Epoch 4951, Training loss 2.428631, Validation loss 6.569290\n", "Epoch 4952, Training loss 2.428630, Validation loss 6.569285\n", "Epoch 4953, Training loss 2.428630, Validation loss 6.569275\n", "Epoch 4954, Training loss 2.428631, Validation loss 6.569281\n", "Epoch 4955, Training loss 2.428631, Validation loss 6.569290\n", "Epoch 4956, Training loss 2.428631, Validation loss 6.569281\n", "Epoch 4957, Training loss 2.428631, Validation loss 6.569285\n", "Epoch 4958, Training loss 2.428631, Validation loss 6.569285\n", "Epoch 4959, Training loss 2.428631, Validation loss 6.569275\n", "Epoch 4960, Training loss 2.428630, Validation loss 6.569281\n", "Epoch 4961, Training loss 2.428632, Validation loss 6.569285\n", "Epoch 4962, Training loss 2.428631, Validation loss 6.569295\n", "Epoch 4963, Training loss 2.428631, Validation loss 6.569285\n", "Epoch 4964, Training loss 2.428631, Validation loss 6.569281\n", "Epoch 4965, Training loss 2.428631, Validation loss 6.569290\n", "Epoch 4966, Training loss 2.428630, Validation loss 6.569295\n", "Epoch 4967, Training loss 2.428630, Validation loss 6.569285\n", "Epoch 4968, Training loss 2.428631, Validation loss 6.569291\n", "Epoch 4969, Training loss 2.428632, Validation loss 6.569290\n", "Epoch 4970, Training loss 2.428632, Validation loss 6.569281\n", "Epoch 4971, Training loss 2.428631, Validation loss 6.569285\n", "Epoch 4972, Training loss 2.428631, Validation loss 6.569295\n", "Epoch 4973, Training loss 2.428631, Validation loss 6.569285\n", "Epoch 4974, Training loss 2.428631, Validation loss 6.569291\n", "Epoch 4975, Training loss 2.428632, Validation loss 6.569290\n", "Epoch 4976, Training loss 2.428631, Validation loss 6.569295\n", "Epoch 4977, Training loss 2.428631, Validation loss 6.569285\n", "Epoch 4978, Training loss 2.428630, Validation loss 6.569291\n", "Epoch 4979, Training loss 2.428630, Validation loss 6.569300\n", "Epoch 4980, 
"Epoch 4997, Training loss 2.428631, Validation loss 6.569296\n", "Epoch 4998, Training loss 2.428631, Validation loss 6.569291\n", "Epoch 4999, Training loss 2.428630, Validation loss 6.569296\n" ] } ], "source": [ "for epoch in range(nepochs):\n", "\n", "    # forward pass on the training set\n", "    t_p_train = model(t_un_train)\n", "    loss_train = loss_fn(t_p_train, t_c_train)\n", "\n", "    # evaluate on the validation set without building an autograd graph\n", "    with torch.no_grad():\n", "        t_p_val = model(t_un_val)\n", "        loss_val = loss_fn(t_p_val, t_c_val)\n", "\n", "    print('Epoch %d, Training loss %f, Validation loss %f' % (epoch, float(loss_train), float(loss_val)))\n", "\n", "    # backward pass and parameter update\n", "    optimizer.zero_grad()\n", "    loss_train.backward()\n", "    optimizer.step()" ] },
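{ "cell_type": "markdown", "metadata": {}, "source": [ "The loop above prints one line per epoch, so once training has converged it emits thousands of nearly identical lines. The cell below is a minimal sketch of the same loop that logs only every 500 epochs (plus the last one); it reuses the `model`, `optimizer`, `loss_fn`, `nepochs` and the `t_un_*`/`t_c_*` splits defined earlier in this notebook, so running it continues training from the current parameters rather than starting from scratch." ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Sketch: same training loop as above, but with sparse logging.\n", "# Assumes model, optimizer, loss_fn, nepochs and the t_un_*/t_c_* splits\n", "# defined earlier; running it continues from the already-trained parameters.\n", "for epoch in range(nepochs):\n", "    t_p_train = model(t_un_train)\n", "    loss_train = loss_fn(t_p_train, t_c_train)\n", "\n", "    with torch.no_grad():\n", "        t_p_val = model(t_un_val)\n", "        loss_val = loss_fn(t_p_val, t_c_val)\n", "\n", "    optimizer.zero_grad()\n", "    loss_train.backward()\n", "    optimizer.step()\n", "\n", "    if epoch % 500 == 0 or epoch == nepochs - 1:\n", "        print('Epoch %d, Training loss %f, Validation loss %f' % (epoch, float(loss_train), float(loss_val)))" ] },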
{ "cell_type": "code", "execution_count": 126, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "(Parameter containing:\n", " tensor([[5.4878]], requires_grad=True), Parameter containing:\n", " tensor([-17.4613], requires_grad=True))" ] }, "execution_count": 126, "metadata": {}, "output_type": "execute_result" } ], "source": [ "model.weight, model.bias" ] }, { "cell_type": "code", "execution_count": 127, "metadata": {}, "outputs": [], "source": [ "loss_fn = nn.MSELoss()" ] }, { "cell_type": "code", "execution_count": 128, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Epoch 0, Training loss 103.222046, Validation loss 50.142578\n", "Epoch 1, Training loss 48.570103, Validation loss 3.796587\n", "Epoch 2, Training loss 39.051891, Validation loss 0.447808\n", "Epoch 3, Training loss 37.296520, Validation loss 1.792198\n", "Epoch 4, Training loss 36.876732, Validation loss 2.822379\n", "Epoch 5, Training loss 36.687138, Validation loss 3.333092\n", "Epoch 6, Training loss 36.537609, Validation loss 3.561284\n", "Epoch 7, Training loss 36.395458, Validation loss 3.660715\n", "Epoch 8, Training loss 36.255051, Validation loss 3.704718\n", "Epoch 9, Training loss 36.115433, Validation loss 3.725378\n", "Epoch 10, Training loss 35.976421, Validation loss 3.736291\n", "Epoch 11, Training loss 35.837982, Validation loss 3.743140\n", "Epoch 12, Training loss 35.700115, Validation loss 3.748306\n", "Epoch 13, Training loss 35.562820, Validation loss 3.752771\n", "Epoch 14, Training loss 35.426094, Validation loss 3.756952\n", "Epoch 15, Training loss 35.289936, Validation loss 3.761003\n", "Epoch 16, Training loss 35.154335, Validation loss 3.765011\n", "Epoch 17, Training loss 35.019299, Validation loss 3.768989\n", "...\n", "Epoch 70, Training loss 28.605392, Validation loss 3.977712\n", "Epoch 71, Training loss 28.497375, Validation loss 3.981610\n",
"Epoch 72, Training loss 28.389799, Validation loss 3.985498\n", "Epoch 73, Training loss 28.282673, Validation loss 3.989385\n", "Epoch 74, Training loss 28.175991, Validation loss 3.993273\n", "Epoch 75, Training loss 28.069750, Validation loss 3.997164\n", "Epoch 76, Training loss 27.963940, Validation loss 4.001048\n", "Epoch 77, Training loss 27.858572, Validation loss 4.004923\n", "Epoch 78, Training loss 27.753635, Validation loss 4.008808\n", "Epoch 79, Training loss 27.649130, Validation loss 4.012687\n", "Epoch 80, Training loss 27.545063, Validation loss 4.016564\n", "Epoch 81, Training loss 27.441420, Validation loss 4.020441\n", "Epoch 82, Training loss 27.338203, Validation loss 4.024303\n", "Epoch 83, Training loss 27.235420, Validation loss 4.028172\n", "Epoch 84, Training loss 27.133051, Validation loss 4.032037\n", "Epoch 85, Training loss 27.031113, Validation loss 4.035903\n", "Epoch 86, Training loss 26.929598, Validation loss 4.039762\n", "Epoch 87, Training loss 26.828489, Validation loss 4.043624\n", "Epoch 88, Training loss 26.727808, Validation loss 4.047487\n", "Epoch 89, Training loss 26.627539, Validation loss 4.051342\n", "Epoch 90, Training loss 26.527681, Validation loss 4.055195\n", "Epoch 91, Training loss 26.428242, Validation loss 4.059042\n", "Epoch 92, Training loss 26.329205, Validation loss 4.062891\n", "Epoch 93, Training loss 26.230583, Validation loss 4.066734\n", "Epoch 94, Training loss 26.132364, Validation loss 4.070577\n", "Epoch 95, Training loss 26.034552, Validation loss 4.074419\n", "Epoch 96, Training loss 25.937145, Validation loss 4.078254\n", "Epoch 97, Training loss 25.840139, Validation loss 4.082090\n", "Epoch 98, Training loss 25.743532, Validation loss 4.085925\n", "Epoch 99, Training loss 25.647322, Validation loss 4.089756\n", "Epoch 100, Training loss 25.551512, Validation loss 4.093582\n", "Epoch 101, Training loss 25.456099, Validation loss 4.097404\n", "Epoch 102, Training loss 25.361076, Validation loss 4.101230\n", "Epoch 103, Training loss 25.266445, Validation loss 4.105048\n", "Epoch 104, Training loss 25.172207, Validation loss 4.108870\n", "Epoch 105, Training loss 25.078358, Validation loss 4.112683\n", "Epoch 106, Training loss 24.984894, Validation loss 4.116500\n", "Epoch 107, Training loss 24.891815, Validation loss 4.120301\n", "Epoch 108, Training loss 24.799122, Validation loss 4.124114\n", "Epoch 109, Training loss 24.706814, Validation loss 4.127913\n", "Epoch 110, Training loss 24.614887, Validation loss 4.131716\n", "Epoch 111, Training loss 24.523333, Validation loss 4.135515\n", "Epoch 112, Training loss 24.432161, Validation loss 4.139308\n", "Epoch 113, Training loss 24.341370, Validation loss 4.143102\n", "Epoch 114, Training loss 24.250946, Validation loss 4.146894\n", "Epoch 115, Training loss 24.160896, Validation loss 4.150682\n", "Epoch 116, Training loss 24.071218, Validation loss 4.154470\n", "Epoch 117, Training loss 23.981911, Validation loss 4.158252\n", "Epoch 118, Training loss 23.892973, Validation loss 4.162030\n", "Epoch 119, Training loss 23.804405, Validation loss 4.165802\n", "Epoch 120, Training loss 23.716200, Validation loss 4.169583\n", "Epoch 121, Training loss 23.628359, Validation loss 4.173349\n", "Epoch 122, Training loss 23.540876, Validation loss 4.177112\n", "Epoch 123, Training loss 23.453756, Validation loss 4.180877\n", "Epoch 124, Training loss 23.366999, Validation loss 4.184641\n", "Epoch 125, Training loss 23.280598, Validation loss 4.188404\n", "Epoch 126, 
Training loss 23.194553, Validation loss 4.192154\n", "Epoch 127, Training loss 23.108862, Validation loss 4.195915\n", "Epoch 128, Training loss 23.023527, Validation loss 4.199662\n", "Epoch 129, Training loss 22.938540, Validation loss 4.203405\n", "Epoch 130, Training loss 22.853912, Validation loss 4.207150\n", "Epoch 131, Training loss 22.769627, Validation loss 4.210894\n", "Epoch 132, Training loss 22.685692, Validation loss 4.214629\n", "Epoch 133, Training loss 22.602102, Validation loss 4.218366\n", "Epoch 134, Training loss 22.518856, Validation loss 4.222098\n", "Epoch 135, Training loss 22.435959, Validation loss 4.225824\n", "Epoch 136, Training loss 22.353399, Validation loss 4.229554\n", "Epoch 137, Training loss 22.271177, Validation loss 4.233274\n", "Epoch 138, Training loss 22.189299, Validation loss 4.236992\n", "Epoch 139, Training loss 22.107758, Validation loss 4.240709\n", "Epoch 140, Training loss 22.026556, Validation loss 4.244420\n", "Epoch 141, Training loss 21.945688, Validation loss 4.248134\n", "Epoch 142, Training loss 21.865149, Validation loss 4.251842\n", "Epoch 143, Training loss 21.784946, Validation loss 4.255548\n", "Epoch 144, Training loss 21.705070, Validation loss 4.259249\n", "Epoch 145, Training loss 21.625528, Validation loss 4.262948\n", "Epoch 146, Training loss 21.546314, Validation loss 4.266641\n", "Epoch 147, Training loss 21.467426, Validation loss 4.270333\n", "Epoch 148, Training loss 21.388863, Validation loss 4.274018\n", "Epoch 149, Training loss 21.310625, Validation loss 4.277699\n", "Epoch 150, Training loss 21.232712, Validation loss 4.281376\n", "Epoch 151, Training loss 21.155115, Validation loss 4.285061\n", "Epoch 152, Training loss 21.077843, Validation loss 4.288734\n", "Epoch 153, Training loss 21.000887, Validation loss 4.292403\n", "Epoch 154, Training loss 20.924252, Validation loss 4.296070\n", "Epoch 155, Training loss 20.847931, Validation loss 4.299734\n", "Epoch 156, Training loss 20.771923, Validation loss 4.303403\n", "Epoch 157, Training loss 20.696234, Validation loss 4.307056\n", "Epoch 158, Training loss 20.620852, Validation loss 4.310718\n", "Epoch 159, Training loss 20.545786, Validation loss 4.314362\n", "Epoch 160, Training loss 20.471025, Validation loss 4.318015\n", "Epoch 161, Training loss 20.396570, Validation loss 4.321659\n", "Epoch 162, Training loss 20.322426, Validation loss 4.325299\n", "Epoch 163, Training loss 20.248590, Validation loss 4.328939\n", "Epoch 164, Training loss 20.175056, Validation loss 4.332572\n", "Epoch 165, Training loss 20.101828, Validation loss 4.336202\n", "Epoch 166, Training loss 20.028898, Validation loss 4.339830\n", "Epoch 167, Training loss 19.956274, Validation loss 4.343451\n", "Epoch 168, Training loss 19.883951, Validation loss 4.347071\n", "Epoch 169, Training loss 19.811920, Validation loss 4.350688\n", "Epoch 170, Training loss 19.740187, Validation loss 4.354302\n", "Epoch 171, Training loss 19.668755, Validation loss 4.357918\n", "Epoch 172, Training loss 19.597614, Validation loss 4.361532\n", "Epoch 173, Training loss 19.526768, Validation loss 4.365130\n", "Epoch 174, Training loss 19.456213, Validation loss 4.368731\n", "Epoch 175, Training loss 19.385950, Validation loss 4.372325\n", "Epoch 176, Training loss 19.315977, Validation loss 4.375920\n", "Epoch 177, Training loss 19.246290, Validation loss 4.379508\n", "Epoch 178, Training loss 19.176893, Validation loss 4.383094\n", "Epoch 179, Training loss 19.107782, Validation loss 4.386677\n", 
"Epoch 180, Training loss 19.038958, Validation loss 4.390257\n", "Epoch 181, Training loss 18.970417, Validation loss 4.393831\n", "Epoch 182, Training loss 18.902159, Validation loss 4.397406\n", "Epoch 183, Training loss 18.834179, Validation loss 4.400970\n", "Epoch 184, Training loss 18.766485, Validation loss 4.404536\n", "Epoch 185, Training loss 18.699068, Validation loss 4.408093\n", "Epoch 186, Training loss 18.631929, Validation loss 4.411653\n", "Epoch 187, Training loss 18.565069, Validation loss 4.415202\n", "Epoch 188, Training loss 18.498482, Validation loss 4.418756\n", "Epoch 189, Training loss 18.432169, Validation loss 4.422306\n", "Epoch 190, Training loss 18.366133, Validation loss 4.425846\n", "Epoch 191, Training loss 18.300367, Validation loss 4.429391\n", "Epoch 192, Training loss 18.234869, Validation loss 4.432924\n", "Epoch 193, Training loss 18.169649, Validation loss 4.436454\n", "Epoch 194, Training loss 18.104698, Validation loss 4.439986\n", "Epoch 195, Training loss 18.040010, Validation loss 4.443506\n", "Epoch 196, Training loss 17.975590, Validation loss 4.447028\n", "Epoch 197, Training loss 17.911438, Validation loss 4.450545\n", "Epoch 198, Training loss 17.847549, Validation loss 4.454060\n", "Epoch 199, Training loss 17.783922, Validation loss 4.457568\n", "Epoch 200, Training loss 17.720558, Validation loss 4.461080\n", "Epoch 201, Training loss 17.657457, Validation loss 4.464577\n", "Epoch 202, Training loss 17.594616, Validation loss 4.468076\n", "Epoch 203, Training loss 17.532040, Validation loss 4.471566\n", "Epoch 204, Training loss 17.469711, Validation loss 4.475061\n", "Epoch 205, Training loss 17.407644, Validation loss 4.478545\n", "Epoch 206, Training loss 17.345837, Validation loss 4.482037\n", "Epoch 207, Training loss 17.284281, Validation loss 4.485518\n", "Epoch 208, Training loss 17.222980, Validation loss 4.488996\n", "Epoch 209, Training loss 17.161930, Validation loss 4.492466\n", "Epoch 210, Training loss 17.101135, Validation loss 4.495929\n", "Epoch 211, Training loss 17.040588, Validation loss 4.499396\n", "Epoch 212, Training loss 16.980293, Validation loss 4.502860\n", "Epoch 213, Training loss 16.920250, Validation loss 4.506312\n", "Epoch 214, Training loss 16.860451, Validation loss 4.509772\n", "Epoch 215, Training loss 16.800898, Validation loss 4.513225\n", "Epoch 216, Training loss 16.741590, Validation loss 4.516666\n", "Epoch 217, Training loss 16.682531, Validation loss 4.520108\n", "Epoch 218, Training loss 16.623714, Validation loss 4.523546\n", "Epoch 219, Training loss 16.565140, Validation loss 4.526980\n", "Epoch 220, Training loss 16.506805, Validation loss 4.530411\n", "Epoch 221, Training loss 16.448709, Validation loss 4.533837\n", "Epoch 222, Training loss 16.390860, Validation loss 4.537265\n", "Epoch 223, Training loss 16.333246, Validation loss 4.540680\n", "Epoch 224, Training loss 16.275869, Validation loss 4.544092\n", "Epoch 225, Training loss 16.218727, Validation loss 4.547508\n", "Epoch 226, Training loss 16.161823, Validation loss 4.550907\n", "Epoch 227, Training loss 16.105154, Validation loss 4.554319\n", "Epoch 228, Training loss 16.048719, Validation loss 4.557715\n", "Epoch 229, Training loss 15.992517, Validation loss 4.561116\n", "Epoch 230, Training loss 15.936544, Validation loss 4.564511\n", "Epoch 231, Training loss 15.880805, Validation loss 4.567900\n", "Epoch 232, Training loss 15.825297, Validation loss 4.571281\n", "Epoch 233, Training loss 15.770015, Validation loss 
4.574657\n", "Epoch 234, Training loss 15.714967, Validation loss 4.578037\n", "Epoch 235, Training loss 15.660136, Validation loss 4.581409\n", "Epoch 236, Training loss 15.605538, Validation loss 4.584782\n", "Epoch 237, Training loss 15.551166, Validation loss 4.588146\n", "Epoch 238, Training loss 15.497016, Validation loss 4.591502\n", "Epoch 239, Training loss 15.443091, Validation loss 4.594858\n", "Epoch 240, Training loss 15.389387, Validation loss 4.598214\n", "Epoch 241, Training loss 15.335907, Validation loss 4.601562\n", "Epoch 242, Training loss 15.282644, Validation loss 4.604909\n", "Epoch 243, Training loss 15.229605, Validation loss 4.608248\n", "Epoch 244, Training loss 15.176782, Validation loss 4.611588\n", "Epoch 245, Training loss 15.124177, Validation loss 4.614919\n", "Epoch 246, Training loss 15.071789, Validation loss 4.618253\n", "Epoch 247, Training loss 15.019617, Validation loss 4.621572\n", "Epoch 248, Training loss 14.967663, Validation loss 4.624901\n", "Epoch 249, Training loss 14.915920, Validation loss 4.628211\n", "Epoch 250, Training loss 14.864389, Validation loss 4.631533\n", "Epoch 251, Training loss 14.813080, Validation loss 4.634841\n", "Epoch 252, Training loss 14.761969, Validation loss 4.638146\n", "Epoch 253, Training loss 14.711080, Validation loss 4.641446\n", "Epoch 254, Training loss 14.660397, Validation loss 4.644746\n", "Epoch 255, Training loss 14.609923, Validation loss 4.648034\n", "Epoch 256, Training loss 14.559658, Validation loss 4.651325\n", "Epoch 257, Training loss 14.509600, Validation loss 4.654615\n", "Epoch 258, Training loss 14.459748, Validation loss 4.657893\n", "Epoch 259, Training loss 14.410101, Validation loss 4.661171\n", "Epoch 260, Training loss 14.360664, Validation loss 4.664452\n", "Epoch 261, Training loss 14.311425, Validation loss 4.667716\n", "Epoch 262, Training loss 14.262393, Validation loss 4.670988\n", "Epoch 263, Training loss 14.213560, Validation loss 4.674243\n", "Epoch 264, Training loss 14.164928, Validation loss 4.677498\n", "Epoch 265, Training loss 14.116501, Validation loss 4.680752\n", "Epoch 266, Training loss 14.068273, Validation loss 4.684005\n", "Epoch 267, Training loss 14.020242, Validation loss 4.687250\n", "Epoch 268, Training loss 13.972411, Validation loss 4.690486\n", "Epoch 269, Training loss 13.924776, Validation loss 4.693730\n", "Epoch 270, Training loss 13.877338, Validation loss 4.696960\n", "Epoch 271, Training loss 13.830095, Validation loss 4.700194\n", "Epoch 272, Training loss 13.783049, Validation loss 4.703424\n", "Epoch 273, Training loss 13.736194, Validation loss 4.706636\n", "Epoch 274, Training loss 13.689532, Validation loss 4.709856\n", "Epoch 275, Training loss 13.643069, Validation loss 4.713071\n", "Epoch 276, Training loss 13.596793, Validation loss 4.716273\n", "Epoch 277, Training loss 13.550707, Validation loss 4.719486\n", "Epoch 278, Training loss 13.504810, Validation loss 4.722683\n", "Epoch 279, Training loss 13.459108, Validation loss 4.725878\n", "Epoch 280, Training loss 13.413589, Validation loss 4.729076\n", "Epoch 281, Training loss 13.368262, Validation loss 4.732258\n", "Epoch 282, Training loss 13.323121, Validation loss 4.735443\n", "Epoch 283, Training loss 13.278165, Validation loss 4.738631\n", "Epoch 284, Training loss 13.233394, Validation loss 4.741810\n", "Epoch 285, Training loss 13.188809, Validation loss 4.744980\n", "Epoch 286, Training loss 13.144408, Validation loss 4.748145\n", "Epoch 287, Training loss 13.100189, 
Validation loss 4.751317\n", "Epoch 288, Training loss 13.056154, Validation loss 4.754472\n", "Epoch 289, Training loss 13.012301, Validation loss 4.757630\n", "Epoch 290, Training loss 12.968628, Validation loss 4.760779\n", "Epoch 291, Training loss 12.925137, Validation loss 4.763927\n", "Epoch 292, Training loss 12.881824, Validation loss 4.767073\n", "Epoch 293, Training loss 12.838688, Validation loss 4.770211\n", "Epoch 294, Training loss 12.795732, Validation loss 4.773343\n", "Epoch 295, Training loss 12.752950, Validation loss 4.776483\n", "Epoch 296, Training loss 12.710351, Validation loss 4.779605\n", "Epoch 297, Training loss 12.667925, Validation loss 4.782734\n", "Epoch 298, Training loss 12.625670, Validation loss 4.785854\n", "Epoch 299, Training loss 12.583593, Validation loss 4.788969\n", "Epoch 300, Training loss 12.541691, Validation loss 4.792083\n", "Epoch 301, Training loss 12.499959, Validation loss 4.795182\n", "Epoch 302, Training loss 12.458400, Validation loss 4.798286\n", "Epoch 303, Training loss 12.417012, Validation loss 4.801388\n", "Epoch 304, Training loss 12.375796, Validation loss 4.804484\n", "Epoch 305, Training loss 12.334751, Validation loss 4.807576\n", "Epoch 306, Training loss 12.293875, Validation loss 4.810662\n", "Epoch 307, Training loss 12.253164, Validation loss 4.813746\n", "Epoch 308, Training loss 12.212625, Validation loss 4.816822\n", "Epoch 309, Training loss 12.172250, Validation loss 4.819900\n", "Epoch 310, Training loss 12.132042, Validation loss 4.822964\n", "Epoch 311, Training loss 12.092005, Validation loss 4.826035\n", "Epoch 312, Training loss 12.052127, Validation loss 4.829097\n", "Epoch 313, Training loss 12.012418, Validation loss 4.832158\n", "Epoch 314, Training loss 11.972871, Validation loss 4.835209\n", "Epoch 315, Training loss 11.933488, Validation loss 4.838263\n", "Epoch 316, Training loss 11.894266, Validation loss 4.841311\n", "Epoch 317, Training loss 11.855207, Validation loss 4.844349\n", "Epoch 318, Training loss 11.816307, Validation loss 4.847386\n", "Epoch 319, Training loss 11.777569, Validation loss 4.850418\n", "Epoch 320, Training loss 11.738993, Validation loss 4.853448\n", "Epoch 321, Training loss 11.700570, Validation loss 4.856473\n", "Epoch 322, Training loss 11.662312, Validation loss 4.859493\n", "Epoch 323, Training loss 11.624208, Validation loss 4.862514\n", "Epoch 324, Training loss 11.586267, Validation loss 4.865526\n", "Epoch 325, Training loss 11.548478, Validation loss 4.868533\n", "Epoch 326, Training loss 11.510845, Validation loss 4.871542\n", "Epoch 327, Training loss 11.473366, Validation loss 4.874541\n", "Epoch 328, Training loss 11.436043, Validation loss 4.877539\n", "Epoch 329, Training loss 11.398874, Validation loss 4.880527\n", "Epoch 330, Training loss 11.361857, Validation loss 4.883517\n", "Epoch 331, Training loss 11.324996, Validation loss 4.886502\n", "Epoch 332, Training loss 11.288284, Validation loss 4.889485\n", "Epoch 333, Training loss 11.251725, Validation loss 4.892455\n", "Epoch 334, Training loss 11.215320, Validation loss 4.895431\n", "Epoch 335, Training loss 11.179061, Validation loss 4.898400\n", "Epoch 336, Training loss 11.142952, Validation loss 4.901361\n", "Epoch 337, Training loss 11.106990, Validation loss 4.904324\n", "Epoch 338, Training loss 11.071181, Validation loss 4.907276\n", "Epoch 339, Training loss 11.035520, Validation loss 4.910227\n", "Epoch 340, Training loss 11.000005, Validation loss 4.913177\n", "Epoch 341, Training loss 
10.964631, Validation loss 4.916121\n", "Epoch 342, Training loss 10.929406, Validation loss 4.919054\n", "Epoch 343, Training loss 10.894330, Validation loss 4.921985\n", "Epoch 344, Training loss 10.859396, Validation loss 4.924920\n", "Epoch 345, Training loss 10.824607, Validation loss 4.927848\n", "Epoch 346, Training loss 10.789958, Validation loss 4.930775\n", "Epoch 347, Training loss 10.755459, Validation loss 4.933695\n", "Epoch 348, Training loss 10.721099, Validation loss 4.936605\n", "Epoch 349, Training loss 10.686882, Validation loss 4.939523\n", "Epoch 350, Training loss 10.652801, Validation loss 4.942421\n", "Epoch 351, Training loss 10.618864, Validation loss 4.945318\n", "Epoch 352, Training loss 10.585071, Validation loss 4.948221\n", "Epoch 353, Training loss 10.551414, Validation loss 4.951110\n", "Epoch 354, Training loss 10.517891, Validation loss 4.954002\n", "Epoch 355, Training loss 10.484511, Validation loss 4.956887\n", "Epoch 356, Training loss 10.451269, Validation loss 4.959770\n", "Epoch 357, Training loss 10.418165, Validation loss 4.962656\n", "Epoch 358, Training loss 10.385192, Validation loss 4.965527\n", "Epoch 359, Training loss 10.352364, Validation loss 4.968397\n", "Epoch 360, Training loss 10.319665, Validation loss 4.971268\n", "Epoch 361, Training loss 10.287107, Validation loss 4.974121\n", "Epoch 362, Training loss 10.254677, Validation loss 4.976980\n", "Epoch 363, Training loss 10.222383, Validation loss 4.979834\n", "Epoch 364, Training loss 10.190223, Validation loss 4.982680\n", "Epoch 365, Training loss 10.158197, Validation loss 4.985526\n", "Epoch 366, Training loss 10.126304, Validation loss 4.988364\n", "Epoch 367, Training loss 10.094538, Validation loss 4.991201\n", "Epoch 368, Training loss 10.062905, Validation loss 4.994036\n", "Epoch 369, Training loss 10.031401, Validation loss 4.996861\n", "Epoch 370, Training loss 10.000033, Validation loss 4.999683\n", "Epoch 371, Training loss 9.968790, Validation loss 5.002508\n", "Epoch 372, Training loss 9.937674, Validation loss 5.005322\n", "Epoch 373, Training loss 9.906690, Validation loss 5.008138\n", "Epoch 374, Training loss 9.875832, Validation loss 5.010944\n", "Epoch 375, Training loss 9.845098, Validation loss 5.013747\n", "Epoch 376, Training loss 9.814497, Validation loss 5.016549\n", "Epoch 377, Training loss 9.784017, Validation loss 5.019344\n", "Epoch 378, Training loss 9.753668, Validation loss 5.022133\n", "Epoch 379, Training loss 9.723441, Validation loss 5.024915\n", "Epoch 380, Training loss 9.693341, Validation loss 5.027704\n", "Epoch 381, Training loss 9.663362, Validation loss 5.030487\n", "Epoch 382, Training loss 9.633506, Validation loss 5.033254\n", "Epoch 383, Training loss 9.603782, Validation loss 5.036028\n", "Epoch 384, Training loss 9.574169, Validation loss 5.038805\n", "Epoch 385, Training loss 9.544684, Validation loss 5.041565\n", "Epoch 386, Training loss 9.515324, Validation loss 5.044324\n", "Epoch 387, Training loss 9.486080, Validation loss 5.047081\n", "Epoch 388, Training loss 9.456957, Validation loss 5.049831\n", "Epoch 389, Training loss 9.427958, Validation loss 5.052575\n", "Epoch 390, Training loss 9.399075, Validation loss 5.055316\n", "Epoch 391, Training loss 9.370308, Validation loss 5.058052\n", "Epoch 392, Training loss 9.341667, Validation loss 5.060793\n", "Epoch 393, Training loss 9.313140, Validation loss 5.063515\n", "Epoch 394, Training loss 9.284731, Validation loss 5.066243\n", "Epoch 395, Training loss 9.256436, 
Validation loss 5.068969\n", "Epoch 396, Training loss 9.228263, Validation loss 5.071688\n", "Epoch 397, Training loss 9.200209, Validation loss 5.074397\n", "Epoch 398, Training loss 9.172264, Validation loss 5.077116\n", "Epoch 399, Training loss 9.144436, Validation loss 5.079820\n", "Epoch 400, Training loss 9.116722, Validation loss 5.082521\n", "Epoch 401, Training loss 9.089123, Validation loss 5.085225\n", "Epoch 402, Training loss 9.061640, Validation loss 5.087922\n", "Epoch 403, Training loss 9.034270, Validation loss 5.090603\n", "Epoch 404, Training loss 9.007010, Validation loss 5.093296\n", "Epoch 405, Training loss 8.979866, Validation loss 5.095977\n", "Epoch 406, Training loss 8.952835, Validation loss 5.098660\n", "Epoch 407, Training loss 8.925912, Validation loss 5.101333\n", "Epoch 408, Training loss 8.899101, Validation loss 5.103989\n", "Epoch 409, Training loss 8.872400, Validation loss 5.106656\n", "Epoch 410, Training loss 8.845813, Validation loss 5.109321\n", "Epoch 411, Training loss 8.819334, Validation loss 5.111984\n", "Epoch 412, Training loss 8.792963, Validation loss 5.114639\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 413, Training loss 8.766699, Validation loss 5.117292\n", "Epoch 414, Training loss 8.740545, Validation loss 5.119931\n", "Epoch 415, Training loss 8.714501, Validation loss 5.122575\n", "Epoch 416, Training loss 8.688559, Validation loss 5.125207\n", "Epoch 417, Training loss 8.662731, Validation loss 5.127847\n", "Epoch 418, Training loss 8.637010, Validation loss 5.130475\n", "Epoch 419, Training loss 8.611390, Validation loss 5.133104\n", "Epoch 420, Training loss 8.585872, Validation loss 5.135727\n", "Epoch 421, Training loss 8.560465, Validation loss 5.138343\n", "Epoch 422, Training loss 8.535163, Validation loss 5.140957\n", "Epoch 423, Training loss 8.509964, Validation loss 5.143568\n", "Epoch 424, Training loss 8.484869, Validation loss 5.146181\n", "Epoch 425, Training loss 8.459875, Validation loss 5.148782\n", "Epoch 426, Training loss 8.434993, Validation loss 5.151372\n", "Epoch 427, Training loss 8.410205, Validation loss 5.153973\n", "Epoch 428, Training loss 8.385523, Validation loss 5.156567\n", "Epoch 429, Training loss 8.360942, Validation loss 5.159145\n", "Epoch 430, Training loss 8.336462, Validation loss 5.161729\n", "Epoch 431, Training loss 8.312087, Validation loss 5.164307\n", "Epoch 432, Training loss 8.287806, Validation loss 5.166881\n", "Epoch 433, Training loss 8.263630, Validation loss 5.169449\n", "Epoch 434, Training loss 8.239552, Validation loss 5.172010\n", "Epoch 435, Training loss 8.215573, Validation loss 5.174577\n", "Epoch 436, Training loss 8.191694, Validation loss 5.177136\n", "Epoch 437, Training loss 8.167912, Validation loss 5.179685\n", "Epoch 438, Training loss 8.144226, Validation loss 5.182248\n", "Epoch 439, Training loss 8.120645, Validation loss 5.184783\n", "Epoch 440, Training loss 8.097156, Validation loss 5.187327\n", "Epoch 441, Training loss 8.073765, Validation loss 5.189874\n", "Epoch 442, Training loss 8.050469, Validation loss 5.192409\n", "Epoch 443, Training loss 8.027272, Validation loss 5.194941\n", "Epoch 444, Training loss 8.004170, Validation loss 5.197457\n", "Epoch 445, Training loss 7.981165, Validation loss 5.199984\n", "Epoch 446, Training loss 7.958248, Validation loss 5.202504\n", "Epoch 447, Training loss 7.935431, Validation loss 5.205020\n", "Epoch 448, Training loss 7.912710, Validation loss 5.207522\n", "Epoch 449, Training 
loss 7.890079, Validation loss 5.210033\n", "Epoch 450, Training loss 7.867545, Validation loss 5.212546\n", "Epoch 451, Training loss 7.845102, Validation loss 5.215043\n", "Epoch 452, Training loss 7.822749, Validation loss 5.217542\n", "Epoch 453, Training loss 7.800492, Validation loss 5.220025\n", "Epoch 454, Training loss 7.778326, Validation loss 5.222509\n", "Epoch 455, Training loss 7.756249, Validation loss 5.224999\n", "Epoch 456, Training loss 7.734267, Validation loss 5.227483\n", "Epoch 457, Training loss 7.712372, Validation loss 5.229954\n", "Epoch 458, Training loss 7.690569, Validation loss 5.232423\n", "Epoch 459, Training loss 7.668855, Validation loss 5.234898\n", "Epoch 460, Training loss 7.647232, Validation loss 5.237365\n", "Epoch 461, Training loss 7.625700, Validation loss 5.239820\n", "Epoch 462, Training loss 7.604254, Validation loss 5.242273\n", "Epoch 463, Training loss 7.582898, Validation loss 5.244728\n", "Epoch 464, Training loss 7.561628, Validation loss 5.247171\n", "Epoch 465, Training loss 7.540445, Validation loss 5.249619\n", "Epoch 466, Training loss 7.519354, Validation loss 5.252060\n", "Epoch 467, Training loss 7.498347, Validation loss 5.254499\n", "Epoch 468, Training loss 7.477426, Validation loss 5.256935\n", "Epoch 469, Training loss 7.456592, Validation loss 5.259367\n", "Epoch 470, Training loss 7.435845, Validation loss 5.261796\n", "Epoch 471, Training loss 7.415186, Validation loss 5.264215\n", "Epoch 472, Training loss 7.394606, Validation loss 5.266634\n", "Epoch 473, Training loss 7.374115, Validation loss 5.269046\n", "Epoch 474, Training loss 7.353711, Validation loss 5.271455\n", "Epoch 475, Training loss 7.333385, Validation loss 5.273870\n", "Epoch 476, Training loss 7.313148, Validation loss 5.276264\n", "Epoch 477, Training loss 7.292990, Validation loss 5.278664\n", "Epoch 478, Training loss 7.272920, Validation loss 5.281062\n", "Epoch 479, Training loss 7.252926, Validation loss 5.283452\n", "Epoch 480, Training loss 7.233022, Validation loss 5.285833\n", "Epoch 481, Training loss 7.213194, Validation loss 5.288217\n", "Epoch 482, Training loss 7.193453, Validation loss 5.290598\n", "Epoch 483, Training loss 7.173790, Validation loss 5.292967\n", "Epoch 484, Training loss 7.154210, Validation loss 5.295341\n", "Epoch 485, Training loss 7.134710, Validation loss 5.297709\n", "Epoch 486, Training loss 7.115292, Validation loss 5.300068\n", "Epoch 487, Training loss 7.095952, Validation loss 5.302434\n", "Epoch 488, Training loss 7.076693, Validation loss 5.304796\n", "Epoch 489, Training loss 7.057513, Validation loss 5.307151\n", "Epoch 490, Training loss 7.038410, Validation loss 5.309494\n", "Epoch 491, Training loss 7.019389, Validation loss 5.311852\n", "Epoch 492, Training loss 7.000444, Validation loss 5.314185\n", "Epoch 493, Training loss 6.981581, Validation loss 5.316532\n", "Epoch 494, Training loss 6.962795, Validation loss 5.318862\n", "Epoch 495, Training loss 6.944085, Validation loss 5.321194\n", "Epoch 496, Training loss 6.925449, Validation loss 5.323510\n", "Epoch 497, Training loss 6.906892, Validation loss 5.325827\n", "Epoch 498, Training loss 6.888413, Validation loss 5.328149\n", "Epoch 499, Training loss 6.870011, Validation loss 5.330469\n", "Epoch 500, Training loss 6.851685, Validation loss 5.332776\n", "Epoch 501, Training loss 6.833432, Validation loss 5.335081\n", "Epoch 502, Training loss 6.815256, Validation loss 5.337395\n", "Epoch 503, Training loss 6.797155, Validation loss 5.339688\n", 
"Epoch 504, Training loss 6.779128, Validation loss 5.341983\n", "Epoch 505, Training loss 6.761177, Validation loss 5.344279\n", "Epoch 506, Training loss 6.743299, Validation loss 5.346572\n", "Epoch 507, Training loss 6.725492, Validation loss 5.348853\n", "Epoch 508, Training loss 6.707763, Validation loss 5.351140\n", "Epoch 509, Training loss 6.690105, Validation loss 5.353414\n", "Epoch 510, Training loss 6.672522, Validation loss 5.355685\n", "Epoch 511, Training loss 6.655009, Validation loss 5.357954\n", "Epoch 512, Training loss 6.637571, Validation loss 5.360222\n", "Epoch 513, Training loss 6.620202, Validation loss 5.362480\n", "Epoch 514, Training loss 6.602905, Validation loss 5.364738\n", "Epoch 515, Training loss 6.585679, Validation loss 5.367006\n", "Epoch 516, Training loss 6.568526, Validation loss 5.369254\n", "Epoch 517, Training loss 6.551443, Validation loss 5.371498\n", "Epoch 518, Training loss 6.534430, Validation loss 5.373739\n", "Epoch 519, Training loss 6.517488, Validation loss 5.375989\n", "Epoch 520, Training loss 6.500615, Validation loss 5.378223\n", "Epoch 521, Training loss 6.483809, Validation loss 5.380459\n", "Epoch 522, Training loss 6.467076, Validation loss 5.382696\n", "Epoch 523, Training loss 6.450413, Validation loss 5.384920\n", "Epoch 524, Training loss 6.433819, Validation loss 5.387146\n", "Epoch 525, Training loss 6.417290, Validation loss 5.389359\n", "Epoch 526, Training loss 6.400831, Validation loss 5.391570\n", "Epoch 527, Training loss 6.384441, Validation loss 5.393789\n", "Epoch 528, Training loss 6.368118, Validation loss 5.395993\n", "Epoch 529, Training loss 6.351860, Validation loss 5.398197\n", "Epoch 530, Training loss 6.335673, Validation loss 5.400385\n", "Epoch 531, Training loss 6.319550, Validation loss 5.402583\n", "Epoch 532, Training loss 6.303492, Validation loss 5.404787\n", "Epoch 533, Training loss 6.287504, Validation loss 5.406973\n", "Epoch 534, Training loss 6.271584, Validation loss 5.409157\n", "Epoch 535, Training loss 6.255725, Validation loss 5.411345\n", "Epoch 536, Training loss 6.239933, Validation loss 5.413527\n", "Epoch 537, Training loss 6.224205, Validation loss 5.415695\n", "Epoch 538, Training loss 6.208544, Validation loss 5.417870\n", "Epoch 539, Training loss 6.192947, Validation loss 5.420036\n", "Epoch 540, Training loss 6.177413, Validation loss 5.422204\n", "Epoch 541, Training loss 6.161943, Validation loss 5.424359\n", "Epoch 542, Training loss 6.146539, Validation loss 5.426515\n", "Epoch 543, Training loss 6.131196, Validation loss 5.428663\n", "Epoch 544, Training loss 6.115916, Validation loss 5.430817\n", "Epoch 545, Training loss 6.100704, Validation loss 5.432963\n", "Epoch 546, Training loss 6.085550, Validation loss 5.435106\n", "Epoch 547, Training loss 6.070461, Validation loss 5.437245\n", "Epoch 548, Training loss 6.055429, Validation loss 5.439385\n", "Epoch 549, Training loss 6.040467, Validation loss 5.441517\n", "Epoch 550, Training loss 6.025562, Validation loss 5.443646\n", "Epoch 551, Training loss 6.010718, Validation loss 5.445780\n", "Epoch 552, Training loss 5.995939, Validation loss 5.447893\n", "Epoch 553, Training loss 5.981218, Validation loss 5.450012\n", "Epoch 554, Training loss 5.966560, Validation loss 5.452127\n", "Epoch 555, Training loss 5.951960, Validation loss 5.454238\n", "Epoch 556, Training loss 5.937421, Validation loss 5.456337\n", "Epoch 557, Training loss 5.922943, Validation loss 5.458433\n", "Epoch 558, Training loss 5.908523, 
Validation loss 5.460534\n", "Epoch 559, Training loss 5.894166, Validation loss 5.462640\n", "Epoch 560, Training loss 5.879864, Validation loss 5.464734\n", "Epoch 561, Training loss 5.865624, Validation loss 5.466820\n", "Epoch 562, Training loss 5.851442, Validation loss 5.468912\n", "Epoch 563, Training loss 5.837315, Validation loss 5.470995\n", "Epoch 564, Training loss 5.823248, Validation loss 5.473062\n", "Epoch 565, Training loss 5.809245, Validation loss 5.475138\n", "Epoch 566, Training loss 5.795294, Validation loss 5.477215\n", "Epoch 567, Training loss 5.781402, Validation loss 5.479289\n", "Epoch 568, Training loss 5.767564, Validation loss 5.481350\n", "Epoch 569, Training loss 5.753785, Validation loss 5.483412\n", "Epoch 570, Training loss 5.740066, Validation loss 5.485471\n", "Epoch 571, Training loss 5.726402, Validation loss 5.487517\n", "Epoch 572, Training loss 5.712795, Validation loss 5.489564\n", "Epoch 573, Training loss 5.699241, Validation loss 5.491621\n", "Epoch 574, Training loss 5.685745, Validation loss 5.493661\n", "Epoch 575, Training loss 5.672303, Validation loss 5.495702\n", "Epoch 576, Training loss 5.658920, Validation loss 5.497744\n", "Epoch 577, Training loss 5.645589, Validation loss 5.499777\n", "Epoch 578, Training loss 5.632315, Validation loss 5.501802\n", "Epoch 579, Training loss 5.619096, Validation loss 5.503833\n", "Epoch 580, Training loss 5.605929, Validation loss 5.505860\n", "Epoch 581, Training loss 5.592819, Validation loss 5.507875\n", "Epoch 582, Training loss 5.579763, Validation loss 5.509886\n", "Epoch 583, Training loss 5.566758, Validation loss 5.511903\n", "Epoch 584, Training loss 5.553809, Validation loss 5.513916\n", "Epoch 585, Training loss 5.540916, Validation loss 5.515915\n", "Epoch 586, Training loss 5.528072, Validation loss 5.517921\n", "Epoch 587, Training loss 5.515281, Validation loss 5.519914\n", "Epoch 588, Training loss 5.502544, Validation loss 5.521917\n", "Epoch 589, Training loss 5.489861, Validation loss 5.523907\n", "Epoch 590, Training loss 5.477228, Validation loss 5.525889\n", "Epoch 591, Training loss 5.464646, Validation loss 5.527872\n", "Epoch 592, Training loss 5.452117, Validation loss 5.529850\n", "Epoch 593, Training loss 5.439641, Validation loss 5.531831\n", "Epoch 594, Training loss 5.427217, Validation loss 5.533811\n", "Epoch 595, Training loss 5.414841, Validation loss 5.535784\n", "Epoch 596, Training loss 5.402519, Validation loss 5.537753\n", "Epoch 597, Training loss 5.390252, Validation loss 5.539718\n", "Epoch 598, Training loss 5.378028, Validation loss 5.541679\n", "Epoch 599, Training loss 5.365860, Validation loss 5.543637\n", "Epoch 600, Training loss 5.353737, Validation loss 5.545596\n", "Epoch 601, Training loss 5.341667, Validation loss 5.547555\n", "Epoch 602, Training loss 5.329649, Validation loss 5.549498\n", "Epoch 603, Training loss 5.317675, Validation loss 5.551445\n", "Epoch 604, Training loss 5.305756, Validation loss 5.553384\n", "Epoch 605, Training loss 5.293881, Validation loss 5.555320\n", "Epoch 606, Training loss 5.282058, Validation loss 5.557256\n", "Epoch 607, Training loss 5.270283, Validation loss 5.559189\n", "Epoch 608, Training loss 5.258559, Validation loss 5.561118\n", "Epoch 609, Training loss 5.246881, Validation loss 5.563043\n", "Epoch 610, Training loss 5.235252, Validation loss 5.564969\n", "Epoch 611, Training loss 5.223671, Validation loss 5.566877\n", "Epoch 612, Training loss 5.212136, Validation loss 5.568800\n", "Epoch 613, 
Training loss 5.200652, Validation loss 5.570705\n", "Epoch 614, Training loss 5.189213, Validation loss 5.572612\n", "Epoch 615, Training loss 5.177821, Validation loss 5.574514\n", "Epoch 616, Training loss 5.166477, Validation loss 5.576413\n", "Epoch 617, Training loss 5.155180, Validation loss 5.578317\n", "Epoch 618, Training loss 5.143928, Validation loss 5.580213\n", "Epoch 619, Training loss 5.132724, Validation loss 5.582109\n", "Epoch 620, Training loss 5.121565, Validation loss 5.583998\n", "Epoch 621, Training loss 5.110452, Validation loss 5.585877\n", "Epoch 622, Training loss 5.099387, Validation loss 5.587763\n", "Epoch 623, Training loss 5.088368, Validation loss 5.589635\n", "Epoch 624, Training loss 5.077391, Validation loss 5.591513\n", "Epoch 625, Training loss 5.066462, Validation loss 5.593390\n", "Epoch 626, Training loss 5.055577, Validation loss 5.595261\n", "Epoch 627, Training loss 5.044738, Validation loss 5.597126\n", "Epoch 628, Training loss 5.033944, Validation loss 5.598989\n", "Epoch 629, Training loss 5.023192, Validation loss 5.600852\n", "Epoch 630, Training loss 5.012486, Validation loss 5.602706\n", "Epoch 631, Training loss 5.001825, Validation loss 5.604553\n", "Epoch 632, Training loss 4.991204, Validation loss 5.606409\n", "Epoch 633, Training loss 4.980632, Validation loss 5.608247\n", "Epoch 634, Training loss 4.970101, Validation loss 5.610096\n", "Epoch 635, Training loss 4.959613, Validation loss 5.611935\n", "Epoch 636, Training loss 4.949170, Validation loss 5.613767\n", "Epoch 637, Training loss 4.938768, Validation loss 5.615594\n", "Epoch 638, Training loss 4.928410, Validation loss 5.617427\n", "Epoch 639, Training loss 4.918094, Validation loss 5.619256\n", "Epoch 640, Training loss 4.907824, Validation loss 5.621081\n", "Epoch 641, Training loss 4.897592, Validation loss 5.622902\n", "Epoch 642, Training loss 4.887405, Validation loss 5.624724\n", "Epoch 643, Training loss 4.877258, Validation loss 5.626534\n", "Epoch 644, Training loss 4.867155, Validation loss 5.628338\n", "Epoch 645, Training loss 4.857093, Validation loss 5.630144\n", "Epoch 646, Training loss 4.847071, Validation loss 5.631950\n", "Epoch 647, Training loss 4.837092, Validation loss 5.633758\n", "Epoch 648, Training loss 4.827154, Validation loss 5.635547\n", "Epoch 649, Training loss 4.817256, Validation loss 5.637351\n", "Epoch 650, Training loss 4.807400, Validation loss 5.639146\n", "Epoch 651, Training loss 4.797583, Validation loss 5.640928\n", "Epoch 652, Training loss 4.787809, Validation loss 5.642720\n", "Epoch 653, Training loss 4.778075, Validation loss 5.644495\n", "Epoch 654, Training loss 4.768380, Validation loss 5.646275\n", "Epoch 655, Training loss 4.758725, Validation loss 5.648059\n", "Epoch 656, Training loss 4.749108, Validation loss 5.649831\n", "Epoch 657, Training loss 4.739534, Validation loss 5.651594\n", "Epoch 658, Training loss 4.729997, Validation loss 5.653358\n", "Epoch 659, Training loss 4.720500, Validation loss 5.655132\n", "Epoch 660, Training loss 4.711045, Validation loss 5.656892\n", "Epoch 661, Training loss 4.701624, Validation loss 5.658649\n", "Epoch 662, Training loss 4.692247, Validation loss 5.660397\n", "Epoch 663, Training loss 4.682905, Validation loss 5.662150\n", "Epoch 664, Training loss 4.673604, Validation loss 5.663900\n", "Epoch 665, Training loss 4.664340, Validation loss 5.665640\n", "Epoch 666, Training loss 4.655114, Validation loss 5.667377\n", "Epoch 667, Training loss 4.645925, Validation loss 
5.669119\n", "Epoch 668, Training loss 4.636778, Validation loss 5.670862\n", "Epoch 669, Training loss 4.627664, Validation loss 5.672591\n", "Epoch 670, Training loss 4.618589, Validation loss 5.674321\n", "Epoch 671, Training loss 4.609554, Validation loss 5.676052\n", "Epoch 672, Training loss 4.600554, Validation loss 5.677779\n", "Epoch 673, Training loss 4.591593, Validation loss 5.679497\n", "Epoch 674, Training loss 4.582667, Validation loss 5.681211\n", "Epoch 675, Training loss 4.573779, Validation loss 5.682926\n", "Epoch 676, Training loss 4.564926, Validation loss 5.684636\n", "Epoch 677, Training loss 4.556112, Validation loss 5.686347\n", "Epoch 678, Training loss 4.547331, Validation loss 5.688064\n", "Epoch 679, Training loss 4.538589, Validation loss 5.689758\n", "Epoch 680, Training loss 4.529881, Validation loss 5.691453\n", "Epoch 681, Training loss 4.521211, Validation loss 5.693143\n", "Epoch 682, Training loss 4.512575, Validation loss 5.694834\n", "Epoch 683, Training loss 4.503976, Validation loss 5.696535\n", "Epoch 684, Training loss 4.495410, Validation loss 5.698227\n", "Epoch 685, Training loss 4.486886, Validation loss 5.699911\n", "Epoch 686, Training loss 4.478390, Validation loss 5.701591\n", "Epoch 687, Training loss 4.469933, Validation loss 5.703270\n", "Epoch 688, Training loss 4.461511, Validation loss 5.704947\n", "Epoch 689, Training loss 4.453123, Validation loss 5.706624\n", "Epoch 690, Training loss 4.444768, Validation loss 5.708292\n", "Epoch 691, Training loss 4.436448, Validation loss 5.709960\n", "Epoch 692, Training loss 4.428163, Validation loss 5.711620\n", "Epoch 693, Training loss 4.419913, Validation loss 5.713276\n", "Epoch 694, Training loss 4.411696, Validation loss 5.714928\n", "Epoch 695, Training loss 4.403512, Validation loss 5.716575\n", "Epoch 696, Training loss 4.395361, Validation loss 5.718237\n", "Epoch 697, Training loss 4.387247, Validation loss 5.719886\n", "Epoch 698, Training loss 4.379166, Validation loss 5.721535\n", "Epoch 699, Training loss 4.371116, Validation loss 5.723180\n", "Epoch 700, Training loss 4.363101, Validation loss 5.724817\n", "Epoch 701, Training loss 4.355119, Validation loss 5.726449\n", "Epoch 702, Training loss 4.347169, Validation loss 5.728073\n", "Epoch 703, Training loss 4.339253, Validation loss 5.729712\n", "Epoch 704, Training loss 4.331369, Validation loss 5.731336\n", "Epoch 705, Training loss 4.323516, Validation loss 5.732971\n", "Epoch 706, Training loss 4.315700, Validation loss 5.734582\n", "Epoch 707, Training loss 4.307911, Validation loss 5.736209\n", "Epoch 708, Training loss 4.300158, Validation loss 5.737822\n", "Epoch 709, Training loss 4.292433, Validation loss 5.739426\n", "Epoch 710, Training loss 4.284744, Validation loss 5.741040\n", "Epoch 711, Training loss 4.277084, Validation loss 5.742645\n", "Epoch 712, Training loss 4.269459, Validation loss 5.744256\n", "Epoch 713, Training loss 4.261860, Validation loss 5.745862\n", "Epoch 714, Training loss 4.254297, Validation loss 5.747459\n", "Epoch 715, Training loss 4.246763, Validation loss 5.749062\n", "Epoch 716, Training loss 4.239261, Validation loss 5.750651\n", "Epoch 717, Training loss 4.231790, Validation loss 5.752236\n", "Epoch 718, Training loss 4.224349, Validation loss 5.753831\n", "Epoch 719, Training loss 4.216938, Validation loss 5.755408\n", "Epoch 720, Training loss 4.209559, Validation loss 5.756989\n", "Epoch 721, Training loss 4.202210, Validation loss 5.758567\n", "Epoch 722, Training loss 
4.194891, Validation loss 5.760146\n", "Epoch 723, Training loss 4.187603, Validation loss 5.761715\n", "Epoch 724, Training loss 4.180344, Validation loss 5.763289\n", "Epoch 725, Training loss 4.173115, Validation loss 5.764851\n", "Epoch 726, Training loss 4.165918, Validation loss 5.766422\n", "Epoch 727, Training loss 4.158749, Validation loss 5.767979\n", "Epoch 728, Training loss 4.151611, Validation loss 5.769541\n", "Epoch 729, Training loss 4.144500, Validation loss 5.771100\n", "Epoch 730, Training loss 4.137421, Validation loss 5.772654\n", "Epoch 731, Training loss 4.130368, Validation loss 5.774209\n", "Epoch 732, Training loss 4.123347, Validation loss 5.775750\n", "Epoch 733, Training loss 4.116353, Validation loss 5.777301\n", "Epoch 734, Training loss 4.109390, Validation loss 5.778833\n", "Epoch 735, Training loss 4.102454, Validation loss 5.780372\n", "Epoch 736, Training loss 4.095549, Validation loss 5.781910\n", "Epoch 737, Training loss 4.088669, Validation loss 5.783449\n", "Epoch 738, Training loss 4.081820, Validation loss 5.784978\n", "Epoch 739, Training loss 4.074998, Validation loss 5.786504\n", "Epoch 740, Training loss 4.068203, Validation loss 5.788035\n", "Epoch 741, Training loss 4.061439, Validation loss 5.789548\n", "Epoch 742, Training loss 4.054701, Validation loss 5.791066\n", "Epoch 743, Training loss 4.047991, Validation loss 5.792580\n", "Epoch 744, Training loss 4.041308, Validation loss 5.794098\n", "Epoch 745, Training loss 4.034653, Validation loss 5.795612\n", "Epoch 746, Training loss 4.028027, Validation loss 5.797118\n", "Epoch 747, Training loss 4.021428, Validation loss 5.798620\n", "Epoch 748, Training loss 4.014851, Validation loss 5.800126\n", "Epoch 749, Training loss 4.008305, Validation loss 5.801628\n", "Epoch 750, Training loss 4.001789, Validation loss 5.803131\n", "Epoch 751, Training loss 3.995298, Validation loss 5.804620\n", "Epoch 752, Training loss 3.988833, Validation loss 5.806114\n", "Epoch 753, Training loss 3.982395, Validation loss 5.807618\n", "Epoch 754, Training loss 3.975984, Validation loss 5.809099\n", "Epoch 755, Training loss 3.969597, Validation loss 5.810585\n", "Epoch 756, Training loss 3.963239, Validation loss 5.812067\n", "Epoch 757, Training loss 3.956907, Validation loss 5.813544\n", "Epoch 758, Training loss 3.950602, Validation loss 5.815027\n", "Epoch 759, Training loss 3.944323, Validation loss 5.816487\n", "Epoch 760, Training loss 3.938069, Validation loss 5.817961\n", "Epoch 761, Training loss 3.931841, Validation loss 5.819427\n", "Epoch 762, Training loss 3.925635, Validation loss 5.820892\n", "Epoch 763, Training loss 3.919457, Validation loss 5.822353\n", "Epoch 764, Training loss 3.913306, Validation loss 5.823815\n", "Epoch 765, Training loss 3.907178, Validation loss 5.825277\n", "Epoch 766, Training loss 3.901077, Validation loss 5.826717\n", "Epoch 767, Training loss 3.895002, Validation loss 5.828166\n", "Epoch 768, Training loss 3.888950, Validation loss 5.829629\n", "Epoch 769, Training loss 3.882925, Validation loss 5.831069\n", "Epoch 770, Training loss 3.876925, Validation loss 5.832510\n", "Epoch 771, Training loss 3.870948, Validation loss 5.833961\n", "Epoch 772, Training loss 3.864998, Validation loss 5.835407\n", "Epoch 773, Training loss 3.859068, Validation loss 5.836831\n", "Epoch 774, Training loss 3.853167, Validation loss 5.838268\n", "Epoch 775, Training loss 3.847287, Validation loss 5.839697\n", "Epoch 776, Training loss 3.841435, Validation loss 5.841117\n", 
"Epoch 777, Training loss 3.835605, Validation loss 5.842551\n", "Epoch 778, Training loss 3.829800, Validation loss 5.843971\n", "Epoch 779, Training loss 3.824017, Validation loss 5.845392\n", "Epoch 780, Training loss 3.818259, Validation loss 5.846809\n", "Epoch 781, Training loss 3.812526, Validation loss 5.848226\n", "Epoch 782, Training loss 3.806815, Validation loss 5.849643\n", "Epoch 783, Training loss 3.801127, Validation loss 5.851042\n", "Epoch 784, Training loss 3.795463, Validation loss 5.852451\n", "Epoch 785, Training loss 3.789824, Validation loss 5.853851\n", "Epoch 786, Training loss 3.784206, Validation loss 5.855260\n", "Epoch 787, Training loss 3.778612, Validation loss 5.856656\n", "Epoch 788, Training loss 3.773042, Validation loss 5.858052\n", "Epoch 789, Training loss 3.767497, Validation loss 5.859448\n", "Epoch 790, Training loss 3.761971, Validation loss 5.860845\n", "Epoch 791, Training loss 3.756467, Validation loss 5.862233\n", "Epoch 792, Training loss 3.750989, Validation loss 5.863626\n", "Epoch 793, Training loss 3.745533, Validation loss 5.865009\n", "Epoch 794, Training loss 3.740098, Validation loss 5.866384\n", "Epoch 795, Training loss 3.734687, Validation loss 5.867764\n", "Epoch 796, Training loss 3.729299, Validation loss 5.869144\n", "Epoch 797, Training loss 3.723931, Validation loss 5.870524\n", "Epoch 798, Training loss 3.718587, Validation loss 5.871896\n", "Epoch 799, Training loss 3.713263, Validation loss 5.873254\n", "Epoch 800, Training loss 3.707960, Validation loss 5.874621\n", "Epoch 801, Training loss 3.702683, Validation loss 5.875980\n", "Epoch 802, Training loss 3.697426, Validation loss 5.877343\n", "Epoch 803, Training loss 3.692189, Validation loss 5.878707\n", "Epoch 804, Training loss 3.686974, Validation loss 5.880066\n", "Epoch 805, Training loss 3.681782, Validation loss 5.881413\n", "Epoch 806, Training loss 3.676612, Validation loss 5.882763\n", "Epoch 807, Training loss 3.671463, Validation loss 5.884109\n", "Epoch 808, Training loss 3.666331, Validation loss 5.885456\n", "Epoch 809, Training loss 3.661227, Validation loss 5.886803\n", "Epoch 810, Training loss 3.656140, Validation loss 5.888145\n", "Epoch 811, Training loss 3.651076, Validation loss 5.889489\n", "Epoch 812, Training loss 3.646030, Validation loss 5.890818\n", "Epoch 813, Training loss 3.641006, Validation loss 5.892157\n", "Epoch 814, Training loss 3.636003, Validation loss 5.893482\n", "Epoch 815, Training loss 3.631024, Validation loss 5.894807\n", "Epoch 816, Training loss 3.626059, Validation loss 5.896138\n", "Epoch 817, Training loss 3.621119, Validation loss 5.897459\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 818, Training loss 3.616199, Validation loss 5.898781\n", "Epoch 819, Training loss 3.611297, Validation loss 5.900107\n", "Epoch 820, Training loss 3.606420, Validation loss 5.901421\n", "Epoch 821, Training loss 3.601559, Validation loss 5.902729\n", "Epoch 822, Training loss 3.596720, Validation loss 5.904038\n", "Epoch 823, Training loss 3.591898, Validation loss 5.905356\n", "Epoch 824, Training loss 3.587098, Validation loss 5.906656\n", "Epoch 825, Training loss 3.582319, Validation loss 5.907966\n", "Epoch 826, Training loss 3.577557, Validation loss 5.909276\n", "Epoch 827, Training loss 3.572814, Validation loss 5.910568\n", "Epoch 828, Training loss 3.568095, Validation loss 5.911860\n", "Epoch 829, Training loss 3.563392, Validation loss 5.913156\n", "Epoch 830, Training loss 3.558712, Validation 
loss 5.914449\n", "Epoch 831, Training loss 3.554048, Validation loss 5.915741\n", "Epoch 832, Training loss 3.549402, Validation loss 5.917025\n", "Epoch 833, Training loss 3.544779, Validation loss 5.918304\n", "Epoch 834, Training loss 3.540175, Validation loss 5.919584\n", "Epoch 835, Training loss 3.535586, Validation loss 5.920863\n", "Epoch 836, Training loss 3.531016, Validation loss 5.922148\n", "Epoch 837, Training loss 3.526469, Validation loss 5.923428\n", "Epoch 838, Training loss 3.521939, Validation loss 5.924690\n", "Epoch 839, Training loss 3.517428, Validation loss 5.925975\n", "Epoch 840, Training loss 3.512934, Validation loss 5.927238\n", "Epoch 841, Training loss 3.508459, Validation loss 5.928501\n", "Epoch 842, Training loss 3.504004, Validation loss 5.929759\n", "Epoch 843, Training loss 3.499568, Validation loss 5.931022\n", "Epoch 844, Training loss 3.495148, Validation loss 5.932285\n", "Epoch 845, Training loss 3.490746, Validation loss 5.933539\n", "Epoch 846, Training loss 3.486366, Validation loss 5.934794\n", "Epoch 847, Training loss 3.481999, Validation loss 5.936049\n", "Epoch 848, Training loss 3.477653, Validation loss 5.937294\n", "Epoch 849, Training loss 3.473324, Validation loss 5.938550\n", "Epoch 850, Training loss 3.469011, Validation loss 5.939796\n", "Epoch 851, Training loss 3.464720, Validation loss 5.941047\n", "Epoch 852, Training loss 3.460446, Validation loss 5.942271\n", "Epoch 853, Training loss 3.456188, Validation loss 5.943508\n", "Epoch 854, Training loss 3.451946, Validation loss 5.944746\n", "Epoch 855, Training loss 3.447723, Validation loss 5.945975\n", "Epoch 856, Training loss 3.443519, Validation loss 5.947204\n", "Epoch 857, Training loss 3.439332, Validation loss 5.948447\n", "Epoch 858, Training loss 3.435161, Validation loss 5.949667\n", "Epoch 859, Training loss 3.431006, Validation loss 5.950891\n", "Epoch 860, Training loss 3.426871, Validation loss 5.952103\n", "Epoch 861, Training loss 3.422751, Validation loss 5.953333\n", "Epoch 862, Training loss 3.418649, Validation loss 5.954554\n", "Epoch 863, Training loss 3.414563, Validation loss 5.955775\n", "Epoch 864, Training loss 3.410495, Validation loss 5.956987\n", "Epoch 865, Training loss 3.406445, Validation loss 5.958200\n", "Epoch 866, Training loss 3.402409, Validation loss 5.959403\n", "Epoch 867, Training loss 3.398391, Validation loss 5.960601\n", "Epoch 868, Training loss 3.394389, Validation loss 5.961810\n", "Epoch 869, Training loss 3.390405, Validation loss 5.963004\n", "Epoch 870, Training loss 3.386434, Validation loss 5.964203\n", "Epoch 871, Training loss 3.382483, Validation loss 5.965394\n", "Epoch 872, Training loss 3.378548, Validation loss 5.966599\n", "Epoch 873, Training loss 3.374626, Validation loss 5.967784\n", "Epoch 874, Training loss 3.370725, Validation loss 5.968975\n", "Epoch 875, Training loss 3.366835, Validation loss 5.970157\n", "Epoch 876, Training loss 3.362962, Validation loss 5.971353\n", "Epoch 877, Training loss 3.359109, Validation loss 5.972526\n", "Epoch 878, Training loss 3.355269, Validation loss 5.973708\n", "Epoch 879, Training loss 3.351445, Validation loss 5.974895\n", "Epoch 880, Training loss 3.347638, Validation loss 5.976064\n", "Epoch 881, Training loss 3.343846, Validation loss 5.977242\n", "Epoch 882, Training loss 3.340069, Validation loss 5.978416\n", "Epoch 883, Training loss 3.336308, Validation loss 5.979590\n", "Epoch 884, Training loss 3.332560, Validation loss 5.980760\n", "Epoch 885, Training loss 
3.328832, Validation loss 5.981925\n", "Epoch 886, Training loss 3.325119, Validation loss 5.983081\n", "Epoch 887, Training loss 3.321420, Validation loss 5.984241\n", "Epoch 888, Training loss 3.317738, Validation loss 5.985398\n", "Epoch 889, Training loss 3.314069, Validation loss 5.986564\n", "Epoch 890, Training loss 3.310412, Validation loss 5.987716\n", "Epoch 891, Training loss 3.306774, Validation loss 5.988863\n", "Epoch 892, Training loss 3.303151, Validation loss 5.990016\n", "Epoch 893, Training loss 3.299542, Validation loss 5.991154\n", "Epoch 894, Training loss 3.295949, Validation loss 5.992302\n", "Epoch 895, Training loss 3.292367, Validation loss 5.993446\n", "Epoch 896, Training loss 3.288805, Validation loss 5.994590\n", "Epoch 897, Training loss 3.285255, Validation loss 5.995729\n", "Epoch 898, Training loss 3.281720, Validation loss 5.996869\n", "Epoch 899, Training loss 3.278200, Validation loss 5.998008\n", "Epoch 900, Training loss 3.274696, Validation loss 5.999148\n", "Epoch 901, Training loss 3.271204, Validation loss 6.000278\n", "Epoch 902, Training loss 3.267728, Validation loss 6.001400\n", "Epoch 903, Training loss 3.264264, Validation loss 6.002522\n", "Epoch 904, Training loss 3.260816, Validation loss 6.003648\n", "Epoch 905, Training loss 3.257382, Validation loss 6.004770\n", "Epoch 906, Training loss 3.253962, Validation loss 6.005892\n", "Epoch 907, Training loss 3.250557, Validation loss 6.007005\n", "Epoch 908, Training loss 3.247164, Validation loss 6.008123\n", "Epoch 909, Training loss 3.243788, Validation loss 6.009227\n", "Epoch 910, Training loss 3.240424, Validation loss 6.010345\n", "Epoch 911, Training loss 3.237073, Validation loss 6.011459\n", "Epoch 912, Training loss 3.233737, Validation loss 6.012564\n", "Epoch 913, Training loss 3.230417, Validation loss 6.013673\n", "Epoch 914, Training loss 3.227106, Validation loss 6.014782\n", "Epoch 915, Training loss 3.223811, Validation loss 6.015878\n", "Epoch 916, Training loss 3.220530, Validation loss 6.016969\n", "Epoch 917, Training loss 3.217261, Validation loss 6.018075\n", "Epoch 918, Training loss 3.214009, Validation loss 6.019171\n", "Epoch 919, Training loss 3.210769, Validation loss 6.020253\n", "Epoch 920, Training loss 3.207541, Validation loss 6.021354\n", "Epoch 921, Training loss 3.204326, Validation loss 6.022441\n", "Epoch 922, Training loss 3.201125, Validation loss 6.023529\n", "Epoch 923, Training loss 3.197936, Validation loss 6.024611\n", "Epoch 924, Training loss 3.194764, Validation loss 6.025694\n", "Epoch 925, Training loss 3.191602, Validation loss 6.026778\n", "Epoch 926, Training loss 3.188454, Validation loss 6.027852\n", "Epoch 927, Training loss 3.185318, Validation loss 6.028921\n", "Epoch 928, Training loss 3.182197, Validation loss 6.030000\n", "Epoch 929, Training loss 3.179086, Validation loss 6.031070\n", "Epoch 930, Training loss 3.175990, Validation loss 6.032135\n", "Epoch 931, Training loss 3.172906, Validation loss 6.033210\n", "Epoch 932, Training loss 3.169834, Validation loss 6.034275\n", "Epoch 933, Training loss 3.166776, Validation loss 6.035346\n", "Epoch 934, Training loss 3.163731, Validation loss 6.036407\n", "Epoch 935, Training loss 3.160695, Validation loss 6.037463\n", "Epoch 936, Training loss 3.157675, Validation loss 6.038515\n", "Epoch 937, Training loss 3.154667, Validation loss 6.039567\n", "Epoch 938, Training loss 3.151673, Validation loss 6.040624\n", "Epoch 939, Training loss 3.148689, Validation loss 6.041677\n", 
"Epoch 940, Training loss 3.145716, Validation loss 6.042729\n", "Epoch 941, Training loss 3.142758, Validation loss 6.043777\n", "Epoch 942, Training loss 3.139811, Validation loss 6.044830\n", "Epoch 943, Training loss 3.136876, Validation loss 6.045869\n", "Epoch 944, Training loss 3.133954, Validation loss 6.046913\n", "Epoch 945, Training loss 3.131045, Validation loss 6.047952\n", "Epoch 946, Training loss 3.128145, Validation loss 6.048987\n", "Epoch 947, Training loss 3.125259, Validation loss 6.050026\n", "Epoch 948, Training loss 3.122385, Validation loss 6.051061\n", "Epoch 949, Training loss 3.119520, Validation loss 6.052101\n", "Epoch 950, Training loss 3.116668, Validation loss 6.053122\n", "Epoch 951, Training loss 3.113831, Validation loss 6.054143\n", "Epoch 952, Training loss 3.111004, Validation loss 6.055174\n", "Epoch 953, Training loss 3.108187, Validation loss 6.056196\n", "Epoch 954, Training loss 3.105385, Validation loss 6.057227\n", "Epoch 955, Training loss 3.102590, Validation loss 6.058239\n", "Epoch 956, Training loss 3.099810, Validation loss 6.059257\n", "Epoch 957, Training loss 3.097042, Validation loss 6.060278\n", "Epoch 958, Training loss 3.094282, Validation loss 6.061286\n", "Epoch 959, Training loss 3.091536, Validation loss 6.062295\n", "Epoch 960, Training loss 3.088800, Validation loss 6.063313\n", "Epoch 961, Training loss 3.086076, Validation loss 6.064316\n", "Epoch 962, Training loss 3.083363, Validation loss 6.065330\n", "Epoch 963, Training loss 3.080661, Validation loss 6.066324\n", "Epoch 964, Training loss 3.077972, Validation loss 6.067338\n", "Epoch 965, Training loss 3.075292, Validation loss 6.068342\n", "Epoch 966, Training loss 3.072625, Validation loss 6.069337\n", "Epoch 967, Training loss 3.069967, Validation loss 6.070342\n", "Epoch 968, Training loss 3.067320, Validation loss 6.071337\n", "Epoch 969, Training loss 3.064685, Validation loss 6.072318\n", "Epoch 970, Training loss 3.062060, Validation loss 6.073309\n", "Epoch 971, Training loss 3.059447, Validation loss 6.074300\n", "Epoch 972, Training loss 3.056843, Validation loss 6.075282\n", "Epoch 973, Training loss 3.054250, Validation loss 6.076277\n", "Epoch 974, Training loss 3.051668, Validation loss 6.077250\n", "Epoch 975, Training loss 3.049098, Validation loss 6.078237\n", "Epoch 976, Training loss 3.046538, Validation loss 6.079218\n", "Epoch 977, Training loss 3.043988, Validation loss 6.080201\n", "Epoch 978, Training loss 3.041451, Validation loss 6.081173\n", "Epoch 979, Training loss 3.038919, Validation loss 6.082147\n", "Epoch 980, Training loss 3.036402, Validation loss 6.083110\n", "Epoch 981, Training loss 3.033895, Validation loss 6.084088\n", "Epoch 982, Training loss 3.031398, Validation loss 6.085057\n", "Epoch 983, Training loss 3.028908, Validation loss 6.086026\n", "Epoch 984, Training loss 3.026432, Validation loss 6.086985\n", "Epoch 985, Training loss 3.023967, Validation loss 6.087954\n", "Epoch 986, Training loss 3.021510, Validation loss 6.088923\n", "Epoch 987, Training loss 3.019063, Validation loss 6.089879\n", "Epoch 988, Training loss 3.016626, Validation loss 6.090834\n", "Epoch 989, Training loss 3.014201, Validation loss 6.091789\n", "Epoch 990, Training loss 3.011785, Validation loss 6.092744\n", "Epoch 991, Training loss 3.009378, Validation loss 6.093695\n", "Epoch 992, Training loss 3.006981, Validation loss 6.094641\n", "Epoch 993, Training loss 3.004594, Validation loss 6.095583\n", "Epoch 994, Training loss 3.002218, 
Validation loss 6.096539\n", "Epoch 995, Training loss 2.999850, Validation loss 6.097481\n", "Epoch 996, Training loss 2.997494, Validation loss 6.098413\n", "Epoch 997, Training loss 2.995147, Validation loss 6.099360\n", "Epoch 998, Training loss 2.992807, Validation loss 6.100297\n", "Epoch 999, Training loss 2.990479, Validation loss 6.101230\n", "Epoch 1000, Training loss 2.988161, Validation loss 6.102172\n", "Epoch 1001, Training loss 2.985850, Validation loss 6.103106\n", "Epoch 1002, Training loss 2.983553, Validation loss 6.104043\n", "Epoch 1003, Training loss 2.981262, Validation loss 6.104981\n", "Epoch 1004, Training loss 2.978984, Validation loss 6.105910\n", "Epoch 1005, Training loss 2.976712, Validation loss 6.106829\n", "Epoch 1006, Training loss 2.974450, Validation loss 6.107758\n", "Epoch 1007, Training loss 2.972198, Validation loss 6.108677\n", "Epoch 1008, Training loss 2.969956, Validation loss 6.109601\n", "Epoch 1009, Training loss 2.967721, Validation loss 6.110521\n", "Epoch 1010, Training loss 2.965496, Validation loss 6.111436\n", "Epoch 1011, Training loss 2.963281, Validation loss 6.112356\n", "Epoch 1012, Training loss 2.961074, Validation loss 6.113266\n", "Epoch 1013, Training loss 2.958879, Validation loss 6.114186\n", "Epoch 1014, Training loss 2.956690, Validation loss 6.115097\n", "Epoch 1015, Training loss 2.954510, Validation loss 6.115998\n", "Epoch 1016, Training loss 2.952342, Validation loss 6.116909\n", "Epoch 1017, Training loss 2.950181, Validation loss 6.117811\n", "Epoch 1018, Training loss 2.948029, Validation loss 6.118712\n", "Epoch 1019, Training loss 2.945885, Validation loss 6.119609\n", "Epoch 1020, Training loss 2.943751, Validation loss 6.120506\n", "Epoch 1021, Training loss 2.941625, Validation loss 6.121413\n", "Epoch 1022, Training loss 2.939507, Validation loss 6.122315\n", "Epoch 1023, Training loss 2.937401, Validation loss 6.123203\n", "Epoch 1024, Training loss 2.935301, Validation loss 6.124100\n", "Epoch 1025, Training loss 2.933210, Validation loss 6.124984\n", "Epoch 1026, Training loss 2.931129, Validation loss 6.125876\n", "Epoch 1027, Training loss 2.929055, Validation loss 6.126774\n", "Epoch 1028, Training loss 2.926989, Validation loss 6.127653\n", "Epoch 1029, Training loss 2.924931, Validation loss 6.128546\n", "Epoch 1030, Training loss 2.922886, Validation loss 6.129430\n", "Epoch 1031, Training loss 2.920846, Validation loss 6.130309\n", "Epoch 1032, Training loss 2.918815, Validation loss 6.131188\n", "Epoch 1033, Training loss 2.916791, Validation loss 6.132068\n", "Epoch 1034, Training loss 2.914778, Validation loss 6.132937\n", "Epoch 1035, Training loss 2.912772, Validation loss 6.133812\n", "Epoch 1036, Training loss 2.910774, Validation loss 6.134687\n", "Epoch 1037, Training loss 2.908784, Validation loss 6.135552\n", "Epoch 1038, Training loss 2.906803, Validation loss 6.136432\n", "Epoch 1039, Training loss 2.904830, Validation loss 6.137293\n", "Epoch 1040, Training loss 2.902863, Validation loss 6.138154\n", "Epoch 1041, Training loss 2.900908, Validation loss 6.139024\n", "Epoch 1042, Training loss 2.898958, Validation loss 6.139876\n", "Epoch 1043, Training loss 2.897017, Validation loss 6.140742\n", "Epoch 1044, Training loss 2.895084, Validation loss 6.141603\n", "Epoch 1045, Training loss 2.893159, Validation loss 6.142460\n", "Epoch 1046, Training loss 2.891246, Validation loss 6.143321\n", "Epoch 1047, Training loss 2.889334, Validation loss 6.144178\n", "Epoch 1048, Training loss 
2.887433, Validation loss 6.145025\n", "Epoch 1049, Training loss 2.885539, Validation loss 6.145882\n", "Epoch 1050, Training loss 2.883655, Validation loss 6.146720\n", "Epoch 1051, Training loss 2.881777, Validation loss 6.147577\n", "Epoch 1052, Training loss 2.879907, Validation loss 6.148420\n", "Epoch 1053, Training loss 2.878044, Validation loss 6.149268\n", "Epoch 1054, Training loss 2.876189, Validation loss 6.150106\n", "Epoch 1055, Training loss 2.874343, Validation loss 6.150940\n", "Epoch 1056, Training loss 2.872503, Validation loss 6.151783\n", "Epoch 1057, Training loss 2.870672, Validation loss 6.152627\n", "Epoch 1058, Training loss 2.868849, Validation loss 6.153456\n", "Epoch 1059, Training loss 2.867033, Validation loss 6.154304\n", "Epoch 1060, Training loss 2.865225, Validation loss 6.155138\n", "Epoch 1061, Training loss 2.863422, Validation loss 6.155968\n", "Epoch 1062, Training loss 2.861627, Validation loss 6.156792\n", "Epoch 1063, Training loss 2.859842, Validation loss 6.157617\n", "Epoch 1064, Training loss 2.858061, Validation loss 6.158437\n", "Epoch 1065, Training loss 2.856289, Validation loss 6.159267\n", "Epoch 1066, Training loss 2.854524, Validation loss 6.160097\n", "Epoch 1067, Training loss 2.852768, Validation loss 6.160907\n", "Epoch 1068, Training loss 2.851018, Validation loss 6.161742\n", "Epoch 1069, Training loss 2.849275, Validation loss 6.162553\n", "Epoch 1070, Training loss 2.847538, Validation loss 6.163373\n", "Epoch 1071, Training loss 2.845810, Validation loss 6.164194\n", "Epoch 1072, Training loss 2.844087, Validation loss 6.164996\n", "Epoch 1073, Training loss 2.842373, Validation loss 6.165802\n", "Epoch 1074, Training loss 2.840667, Validation loss 6.166623\n", "Epoch 1075, Training loss 2.838966, Validation loss 6.167430\n", "Epoch 1076, Training loss 2.837274, Validation loss 6.168237\n", "Epoch 1077, Training loss 2.835586, Validation loss 6.169043\n", "Epoch 1078, Training loss 2.833907, Validation loss 6.169846\n", "Epoch 1079, Training loss 2.832235, Validation loss 6.170657\n", "Epoch 1080, Training loss 2.830570, Validation loss 6.171445\n", "Epoch 1081, Training loss 2.828911, Validation loss 6.172257\n", "Epoch 1082, Training loss 2.827260, Validation loss 6.173045\n", "Epoch 1083, Training loss 2.825614, Validation loss 6.173838\n", "Epoch 1084, Training loss 2.823977, Validation loss 6.174635\n", "Epoch 1085, Training loss 2.822345, Validation loss 6.175428\n", "Epoch 1086, Training loss 2.820721, Validation loss 6.176217\n", "Epoch 1087, Training loss 2.819102, Validation loss 6.177005\n", "Epoch 1088, Training loss 2.817491, Validation loss 6.177794\n", "Epoch 1089, Training loss 2.815886, Validation loss 6.178587\n", "Epoch 1090, Training loss 2.814290, Validation loss 6.179371\n", "Epoch 1091, Training loss 2.812697, Validation loss 6.180160\n", "Epoch 1092, Training loss 2.811114, Validation loss 6.180943\n", "Epoch 1093, Training loss 2.809536, Validation loss 6.181727\n", "Epoch 1094, Training loss 2.807961, Validation loss 6.182507\n", "Epoch 1095, Training loss 2.806398, Validation loss 6.183281\n", "Epoch 1096, Training loss 2.804838, Validation loss 6.184061\n", "Epoch 1097, Training loss 2.803286, Validation loss 6.184835\n", "Epoch 1098, Training loss 2.801739, Validation loss 6.185615\n", "Epoch 1099, Training loss 2.800200, Validation loss 6.186376\n", "Epoch 1100, Training loss 2.798667, Validation loss 6.187150\n", "Epoch 1101, Training loss 2.797141, Validation loss 6.187921\n", "Epoch 1102, 
Training loss 2.795619, Validation loss 6.188691\n", "Epoch 1200, Training loss 2.673346, Validation loss 6.257025\n", "Epoch 1300, Training loss 2.590466, Validation loss 6.314382\n", "Epoch 1400, Training loss 2.535658, Validation loss 6.361341\n", "Epoch 1500, Training loss 2.499410, Validation loss 6.399766\n", "Epoch 1600, Training loss 2.475438, Validation loss 6.431163\n", "Epoch 1700, Training loss 2.459585, Validation loss 6.456787\n", "Epoch 1800, Training loss 2.449102, Validation loss 6.477692\n", "Epoch 1900, Training loss 2.442169, Validation loss 6.494741\n", "Epoch 2000, Training loss 2.437582, Validation loss 6.508634\n", "Epoch 2100, Training loss 2.434551, Validation loss 6.519954\n", "Epoch 2200, Training loss 2.432546, Validation loss 6.529169\n", "Epoch 2228, Training loss 2.432118, 
Validation loss 6.531419\n", "Epoch 2229, Training loss 2.432104, Validation loss 6.531510\n", "Epoch 2230, Training loss 2.432089, Validation loss 6.531577\n", "Epoch 2231, Training loss 2.432075, Validation loss 6.531658\n", "Epoch 2232, Training loss 2.432062, Validation loss 6.531734\n", "Epoch 2233, Training loss 2.432047, Validation loss 6.531825\n", "Epoch 2234, Training loss 2.432034, Validation loss 6.531897\n", "Epoch 2235, Training loss 2.432019, Validation loss 6.531972\n", "Epoch 2236, Training loss 2.432004, Validation loss 6.532044\n", "Epoch 2237, Training loss 2.431990, Validation loss 6.532130\n", "Epoch 2238, Training loss 2.431977, Validation loss 6.532201\n", "Epoch 2239, Training loss 2.431964, Validation loss 6.532288\n", "Epoch 2240, Training loss 2.431950, Validation loss 6.532349\n", "Epoch 2241, Training loss 2.431935, Validation loss 6.532425\n", "Epoch 2242, Training loss 2.431923, Validation loss 6.532506\n", "Epoch 2243, Training loss 2.431908, Validation loss 6.532582\n", "Epoch 2244, Training loss 2.431895, Validation loss 6.532653\n", "Epoch 2245, Training loss 2.431882, Validation loss 6.532725\n", "Epoch 2246, Training loss 2.431868, Validation loss 6.532806\n", "Epoch 2247, Training loss 2.431855, Validation loss 6.532887\n", "Epoch 2248, Training loss 2.431842, Validation loss 6.532963\n", "Epoch 2249, Training loss 2.431829, Validation loss 6.533034\n", "Epoch 2250, Training loss 2.431814, Validation loss 6.533121\n", "Epoch 2251, Training loss 2.431803, Validation loss 6.533187\n", "Epoch 2252, Training loss 2.431790, Validation loss 6.533253\n", "Epoch 2253, Training loss 2.431775, Validation loss 6.533339\n", "Epoch 2254, Training loss 2.431763, Validation loss 6.533401\n", "Epoch 2255, Training loss 2.431749, Validation loss 6.533472\n", "Epoch 2256, Training loss 2.431737, Validation loss 6.533553\n", "Epoch 2257, Training loss 2.431725, Validation loss 6.533624\n", "Epoch 2258, Training loss 2.431711, Validation loss 6.533710\n", "Epoch 2259, Training loss 2.431699, Validation loss 6.533786\n", "Epoch 2260, Training loss 2.431685, Validation loss 6.533857\n", "Epoch 2261, Training loss 2.431674, Validation loss 6.533938\n", "Epoch 2262, Training loss 2.431661, Validation loss 6.534005\n", "Epoch 2263, Training loss 2.431648, Validation loss 6.534071\n", "Epoch 2264, Training loss 2.431636, Validation loss 6.534147\n", "Epoch 2265, Training loss 2.431623, Validation loss 6.534218\n", "Epoch 2266, Training loss 2.431611, Validation loss 6.534289\n", "Epoch 2267, Training loss 2.431599, Validation loss 6.534355\n", "Epoch 2268, Training loss 2.431587, Validation loss 6.534436\n", "Epoch 2269, Training loss 2.431574, Validation loss 6.534513\n", "Epoch 2270, Training loss 2.431562, Validation loss 6.534584\n", "Epoch 2271, Training loss 2.431550, Validation loss 6.534660\n", "Epoch 2272, Training loss 2.431539, Validation loss 6.534731\n", "Epoch 2273, Training loss 2.431526, Validation loss 6.534792\n", "Epoch 2274, Training loss 2.431515, Validation loss 6.534859\n", "Epoch 2275, Training loss 2.431502, Validation loss 6.534935\n", "Epoch 2276, Training loss 2.431491, Validation loss 6.535011\n", "Epoch 2277, Training loss 2.431479, Validation loss 6.535077\n", "Epoch 2278, Training loss 2.431468, Validation loss 6.535143\n", "Epoch 2279, Training loss 2.431454, Validation loss 6.535214\n", "Epoch 2280, Training loss 2.431443, Validation loss 6.535290\n", "Epoch 2281, Training loss 2.431432, Validation loss 6.535361\n", "Epoch 2282, Training loss 
2.431421, Validation loss 6.535423\n", "Epoch 2283, Training loss 2.431409, Validation loss 6.535493\n", "Epoch 2284, Training loss 2.431399, Validation loss 6.535575\n", "Epoch 2285, Training loss 2.431387, Validation loss 6.535636\n", "Epoch 2286, Training loss 2.431375, Validation loss 6.535712\n", "Epoch 2287, Training loss 2.431363, Validation loss 6.535783\n", "Epoch 2288, Training loss 2.431352, Validation loss 6.535844\n", "Epoch 2289, Training loss 2.431341, Validation loss 6.535910\n", "Epoch 2290, Training loss 2.431329, Validation loss 6.535976\n", "Epoch 2291, Training loss 2.431318, Validation loss 6.536053\n", "Epoch 2292, Training loss 2.431308, Validation loss 6.536129\n", "Epoch 2293, Training loss 2.431296, Validation loss 6.536190\n", "Epoch 2294, Training loss 2.431285, Validation loss 6.536261\n", "Epoch 2295, Training loss 2.431275, Validation loss 6.536327\n", "Epoch 2296, Training loss 2.431264, Validation loss 6.536393\n", "Epoch 2297, Training loss 2.431252, Validation loss 6.536469\n", "Epoch 2298, Training loss 2.431242, Validation loss 6.536535\n", "Epoch 2299, Training loss 2.431230, Validation loss 6.536596\n", "Epoch 2300, Training loss 2.431220, Validation loss 6.536677\n", "Epoch 2301, Training loss 2.431210, Validation loss 6.536734\n", "Epoch 2302, Training loss 2.431199, Validation loss 6.536809\n", "Epoch 2303, Training loss 2.431188, Validation loss 6.536886\n", "Epoch 2304, Training loss 2.431177, Validation loss 6.536932\n", "Epoch 2305, Training loss 2.431167, Validation loss 6.537003\n", "Epoch 2306, Training loss 2.431156, Validation loss 6.537069\n", "Epoch 2307, Training loss 2.431147, Validation loss 6.537140\n", "Epoch 2308, Training loss 2.431136, Validation loss 6.537216\n", "Epoch 2309, Training loss 2.431125, Validation loss 6.537277\n", "Epoch 2310, Training loss 2.431116, Validation loss 6.537343\n", "Epoch 2311, Training loss 2.431106, Validation loss 6.537414\n", "Epoch 2312, Training loss 2.431095, Validation loss 6.537466\n", "Epoch 2313, Training loss 2.431086, Validation loss 6.537532\n", "Epoch 2314, Training loss 2.431075, Validation loss 6.537598\n", "Epoch 2315, Training loss 2.431064, Validation loss 6.537659\n", "Epoch 2316, Training loss 2.431056, Validation loss 6.537735\n", "Epoch 2317, Training loss 2.431045, Validation loss 6.537791\n", "Epoch 2318, Training loss 2.431035, Validation loss 6.537862\n", "Epoch 2319, Training loss 2.431024, Validation loss 6.537918\n", "Epoch 2320, Training loss 2.431015, Validation loss 6.537994\n", "Epoch 2321, Training loss 2.431006, Validation loss 6.538055\n", "Epoch 2322, Training loss 2.430994, Validation loss 6.538116\n", "Epoch 2323, Training loss 2.430986, Validation loss 6.538188\n", "Epoch 2324, Training loss 2.430977, Validation loss 6.538248\n", "Epoch 2325, Training loss 2.430966, Validation loss 6.538314\n", "Epoch 2326, Training loss 2.430957, Validation loss 6.538380\n", "Epoch 2327, Training loss 2.430947, Validation loss 6.538442\n", "Epoch 2328, Training loss 2.430937, Validation loss 6.538498\n", "Epoch 2329, Training loss 2.430926, Validation loss 6.538574\n", "Epoch 2330, Training loss 2.430917, Validation loss 6.538630\n", "Epoch 2331, Training loss 2.430910, Validation loss 6.538686\n", "Epoch 2332, Training loss 2.430900, Validation loss 6.538762\n", "Epoch 2333, Training loss 2.430890, Validation loss 6.538828\n", "Epoch 2334, Training loss 2.430882, Validation loss 6.538889\n", "Epoch 2335, Training loss 2.430871, Validation loss 6.538945\n", "Epoch 2336, 
Training loss 2.430863, Validation loss 6.539016\n", "Epoch 2337, Training loss 2.430852, Validation loss 6.539077\n", "Epoch 2338, Training loss 2.430844, Validation loss 6.539138\n", "Epoch 2339, Training loss 2.430835, Validation loss 6.539199\n", "Epoch 2340, Training loss 2.430825, Validation loss 6.539260\n", "Epoch 2341, Training loss 2.430817, Validation loss 6.539321\n", "Epoch 2342, Training loss 2.430808, Validation loss 6.539377\n", "Epoch 2343, Training loss 2.430799, Validation loss 6.539443\n", "Epoch 2344, Training loss 2.430789, Validation loss 6.539514\n", "Epoch 2345, Training loss 2.430780, Validation loss 6.539575\n", "Epoch 2346, Training loss 2.430772, Validation loss 6.539641\n", "Epoch 2347, Training loss 2.430763, Validation loss 6.539687\n", "Epoch 2348, Training loss 2.430754, Validation loss 6.539743\n", "Epoch 2349, Training loss 2.430745, Validation loss 6.539814\n", "Epoch 2350, Training loss 2.430737, Validation loss 6.539871\n", "Epoch 2351, Training loss 2.430729, Validation loss 6.539941\n", "Epoch 2352, Training loss 2.430720, Validation loss 6.540002\n", "Epoch 2353, Training loss 2.430711, Validation loss 6.540053\n", "Epoch 2354, Training loss 2.430702, Validation loss 6.540124\n", "Epoch 2355, Training loss 2.430694, Validation loss 6.540185\n", "Epoch 2356, Training loss 2.430685, Validation loss 6.540236\n", "Epoch 2357, Training loss 2.430678, Validation loss 6.540302\n", "Epoch 2358, Training loss 2.430668, Validation loss 6.540368\n", "Epoch 2359, Training loss 2.430660, Validation loss 6.540419\n", "Epoch 2360, Training loss 2.430651, Validation loss 6.540495\n", "Epoch 2361, Training loss 2.430644, Validation loss 6.540551\n", "Epoch 2362, Training loss 2.430634, Validation loss 6.540607\n", "Epoch 2363, Training loss 2.430626, Validation loss 6.540658\n", "Epoch 2364, Training loss 2.430618, Validation loss 6.540724\n", "Epoch 2365, Training loss 2.430611, Validation loss 6.540775\n", "Epoch 2366, Training loss 2.430603, Validation loss 6.540826\n", "Epoch 2367, Training loss 2.430594, Validation loss 6.540883\n", "Epoch 2368, Training loss 2.430585, Validation loss 6.540954\n", "Epoch 2369, Training loss 2.430578, Validation loss 6.541014\n", "Epoch 2370, Training loss 2.430570, Validation loss 6.541065\n", "Epoch 2371, Training loss 2.430562, Validation loss 6.541131\n", "Epoch 2372, Training loss 2.430554, Validation loss 6.541193\n", "Epoch 2373, Training loss 2.430546, Validation loss 6.541258\n", "Epoch 2374, Training loss 2.430538, Validation loss 6.541314\n", "Epoch 2375, Training loss 2.430530, Validation loss 6.541370\n", "Epoch 2376, Training loss 2.430522, Validation loss 6.541411\n", "Epoch 2377, Training loss 2.430514, Validation loss 6.541477\n", "Epoch 2378, Training loss 2.430506, Validation loss 6.541543\n", "Epoch 2379, Training loss 2.430499, Validation loss 6.541594\n", "Epoch 2380, Training loss 2.430490, Validation loss 6.541655\n", "Epoch 2381, Training loss 2.430482, Validation loss 6.541711\n", "Epoch 2382, Training loss 2.430475, Validation loss 6.541772\n", "Epoch 2383, Training loss 2.430468, Validation loss 6.541823\n", "Epoch 2384, Training loss 2.430460, Validation loss 6.541874\n", "Epoch 2385, Training loss 2.430454, Validation loss 6.541935\n", "Epoch 2386, Training loss 2.430446, Validation loss 6.541986\n", "Epoch 2387, Training loss 2.430438, Validation loss 6.542047\n", "Epoch 2388, Training loss 2.430430, Validation loss 6.542112\n", "Epoch 2389, Training loss 2.430422, Validation loss 6.542164\n", 
"Epoch 2390, Training loss 2.430416, Validation loss 6.542219\n", "Epoch 2391, Training loss 2.430408, Validation loss 6.542285\n", "Epoch 2392, Training loss 2.430402, Validation loss 6.542332\n", "Epoch 2393, Training loss 2.430394, Validation loss 6.542387\n", "Epoch 2394, Training loss 2.430387, Validation loss 6.542443\n", "Epoch 2395, Training loss 2.430380, Validation loss 6.542504\n", "Epoch 2396, Training loss 2.430372, Validation loss 6.542550\n", "Epoch 2397, Training loss 2.430364, Validation loss 6.542616\n", "Epoch 2398, Training loss 2.430358, Validation loss 6.542667\n", "Epoch 2399, Training loss 2.430351, Validation loss 6.542718\n", "Epoch 2400, Training loss 2.430343, Validation loss 6.542769\n", "Epoch 2401, Training loss 2.430336, Validation loss 6.542830\n", "Epoch 2402, Training loss 2.430329, Validation loss 6.542891\n", "Epoch 2403, Training loss 2.430322, Validation loss 6.542936\n", "Epoch 2404, Training loss 2.430315, Validation loss 6.542998\n", "Epoch 2405, Training loss 2.430309, Validation loss 6.543054\n", "Epoch 2406, Training loss 2.430300, Validation loss 6.543105\n", "Epoch 2407, Training loss 2.430295, Validation loss 6.543160\n", "Epoch 2408, Training loss 2.430288, Validation loss 6.543216\n", "Epoch 2409, Training loss 2.430280, Validation loss 6.543262\n", "Epoch 2410, Training loss 2.430274, Validation loss 6.543318\n", "Epoch 2411, Training loss 2.430267, Validation loss 6.543374\n", "Epoch 2412, Training loss 2.430261, Validation loss 6.543425\n", "Epoch 2413, Training loss 2.430254, Validation loss 6.543481\n", "Epoch 2414, Training loss 2.430248, Validation loss 6.543532\n", "Epoch 2415, Training loss 2.430242, Validation loss 6.543588\n", "Epoch 2416, Training loss 2.430234, Validation loss 6.543649\n", "Epoch 2417, Training loss 2.430225, Validation loss 6.543699\n", "Epoch 2418, Training loss 2.430220, Validation loss 6.543741\n", "Epoch 2419, Training loss 2.430214, Validation loss 6.543792\n", "Epoch 2420, Training loss 2.430207, Validation loss 6.543847\n", "Epoch 2421, Training loss 2.430200, Validation loss 6.543898\n", "Epoch 2422, Training loss 2.430194, Validation loss 6.543959\n", "Epoch 2423, Training loss 2.430189, Validation loss 6.544005\n", "Epoch 2424, Training loss 2.430182, Validation loss 6.544061\n", "Epoch 2425, Training loss 2.430176, Validation loss 6.544107\n", "Epoch 2426, Training loss 2.430168, Validation loss 6.544163\n", "Epoch 2427, Training loss 2.430163, Validation loss 6.544219\n", "Epoch 2428, Training loss 2.430156, Validation loss 6.544265\n", "Epoch 2429, Training loss 2.430151, Validation loss 6.544321\n", "Epoch 2430, Training loss 2.430144, Validation loss 6.544371\n", "Epoch 2431, Training loss 2.430136, Validation loss 6.544427\n", "Epoch 2432, Training loss 2.430131, Validation loss 6.544478\n", "Epoch 2433, Training loss 2.430125, Validation loss 6.544529\n", "Epoch 2434, Training loss 2.430119, Validation loss 6.544580\n", "Epoch 2435, Training loss 2.430113, Validation loss 6.544621\n", "Epoch 2436, Training loss 2.430106, Validation loss 6.544672\n", "Epoch 2437, Training loss 2.430100, Validation loss 6.544733\n", "Epoch 2438, Training loss 2.430095, Validation loss 6.544794\n", "Epoch 2439, Training loss 2.430088, Validation loss 6.544829\n", "Epoch 2440, Training loss 2.430082, Validation loss 6.544875\n", "Epoch 2441, Training loss 2.430077, Validation loss 6.544926\n", "Epoch 2442, Training loss 2.430071, Validation loss 6.544982\n", "Epoch 2443, Training loss 2.430065, Validation loss 
6.545038\n", "Epoch 2444, Training loss 2.430059, Validation loss 6.545089\n", "Epoch 2445, Training loss 2.430052, Validation loss 6.545135\n", "Epoch 2446, Training loss 2.430046, Validation loss 6.545195\n", "Epoch 2447, Training loss 2.430042, Validation loss 6.545241\n", "Epoch 2448, Training loss 2.430034, Validation loss 6.545287\n", "Epoch 2449, Training loss 2.430030, Validation loss 6.545348\n", "Epoch 2450, Training loss 2.430023, Validation loss 6.545394\n", "Epoch 2451, Training loss 2.430018, Validation loss 6.545435\n", "Epoch 2452, Training loss 2.430012, Validation loss 6.545486\n", "Epoch 2453, Training loss 2.430007, Validation loss 6.545537\n", "Epoch 2454, Training loss 2.430000, Validation loss 6.545587\n", "Epoch 2455, Training loss 2.429995, Validation loss 6.545628\n", "Epoch 2456, Training loss 2.429989, Validation loss 6.545674\n", "Epoch 2457, Training loss 2.429984, Validation loss 6.545734\n", "Epoch 2458, Training loss 2.429978, Validation loss 6.545785\n", "Epoch 2459, Training loss 2.429973, Validation loss 6.545841\n", "Epoch 2460, Training loss 2.429967, Validation loss 6.545877\n", "Epoch 2461, Training loss 2.429961, Validation loss 6.545928\n", "Epoch 2462, Training loss 2.429955, Validation loss 6.545974\n", "Epoch 2463, Training loss 2.429950, Validation loss 6.546020\n", "Epoch 2464, Training loss 2.429945, Validation loss 6.546066\n", "Epoch 2465, Training loss 2.429940, Validation loss 6.546122\n", "Epoch 2466, Training loss 2.429934, Validation loss 6.546163\n", "Epoch 2467, Training loss 2.429929, Validation loss 6.546218\n", "Epoch 2468, Training loss 2.429923, Validation loss 6.546259\n", "Epoch 2469, Training loss 2.429918, Validation loss 6.546305\n", "Epoch 2470, Training loss 2.429912, Validation loss 6.546366\n", "Epoch 2471, Training loss 2.429908, Validation loss 6.546412\n", "Epoch 2472, Training loss 2.429903, Validation loss 6.546447\n", "Epoch 2473, Training loss 2.429897, Validation loss 6.546503\n", "Epoch 2474, Training loss 2.429892, Validation loss 6.546559\n", "Epoch 2475, Training loss 2.429888, Validation loss 6.546600\n", "Epoch 2476, Training loss 2.429881, Validation loss 6.546651\n", "Epoch 2477, Training loss 2.429877, Validation loss 6.546701\n", "Epoch 2478, Training loss 2.429871, Validation loss 6.546747\n", "Epoch 2479, Training loss 2.429867, Validation loss 6.546783\n", "Epoch 2480, Training loss 2.429860, Validation loss 6.546829\n", "Epoch 2481, Training loss 2.429856, Validation loss 6.546885\n", "Epoch 2482, Training loss 2.429851, Validation loss 6.546926\n", "Epoch 2483, Training loss 2.429845, Validation loss 6.546967\n", "Epoch 2484, Training loss 2.429841, Validation loss 6.547022\n", "Epoch 2485, Training loss 2.429836, Validation loss 6.547068\n", "Epoch 2486, Training loss 2.429831, Validation loss 6.547114\n", "Epoch 2487, Training loss 2.429825, Validation loss 6.547164\n", "Epoch 2488, Training loss 2.429821, Validation loss 6.547211\n", "Epoch 2489, Training loss 2.429816, Validation loss 6.547251\n", "Epoch 2490, Training loss 2.429811, Validation loss 6.547292\n", "Epoch 2491, Training loss 2.429807, Validation loss 6.547333\n", "Epoch 2492, Training loss 2.429802, Validation loss 6.547374\n", "Epoch 2493, Training loss 2.429796, Validation loss 6.547425\n", "Epoch 2494, Training loss 2.429792, Validation loss 6.547475\n", "Epoch 2495, Training loss 2.429788, Validation loss 6.547521\n", "Epoch 2496, Training loss 2.429781, Validation loss 6.547552\n", "Epoch 2497, Training loss 2.429777, 
Validation loss 6.547603\n", "Epoch 2498, Training loss 2.429773, Validation loss 6.547658\n", "Epoch 2499, Training loss 2.429768, Validation loss 6.547704\n", "Epoch 2500, Training loss 2.429764, Validation loss 6.547740\n", "Epoch 2501, Training loss 2.429758, Validation loss 6.547781\n", "Epoch 2502, Training loss 2.429754, Validation loss 6.547832\n", "Epoch 2503, Training loss 2.429749, Validation loss 6.547878\n", "Epoch 2504, Training loss 2.429744, Validation loss 6.547914\n", "Epoch 2505, Training loss 2.429740, Validation loss 6.547959\n", "Epoch 2506, Training loss 2.429735, Validation loss 6.548005\n", "Epoch 2507, Training loss 2.429732, Validation loss 6.548065\n", "Epoch 2508, Training loss 2.429725, Validation loss 6.548101\n", "Epoch 2509, Training loss 2.429723, Validation loss 6.548142\n", "Epoch 2510, Training loss 2.429717, Validation loss 6.548183\n", "Epoch 2511, Training loss 2.429712, Validation loss 6.548234\n", "Epoch 2512, Training loss 2.429709, Validation loss 6.548285\n", "Epoch 2513, Training loss 2.429705, Validation loss 6.548316\n", "Epoch 2514, Training loss 2.429699, Validation loss 6.548366\n", "Epoch 2515, Training loss 2.429694, Validation loss 6.548407\n", "Epoch 2516, Training loss 2.429691, Validation loss 6.548448\n", "Epoch 2517, Training loss 2.429688, Validation loss 6.548493\n", "Epoch 2518, Training loss 2.429683, Validation loss 6.548534\n", "Epoch 2519, Training loss 2.429678, Validation loss 6.548580\n", "Epoch 2520, Training loss 2.429674, Validation loss 6.548625\n", "Epoch 2521, Training loss 2.429669, Validation loss 6.548661\n", "Epoch 2522, Training loss 2.429665, Validation loss 6.548697\n", "Epoch 2523, Training loss 2.429661, Validation loss 6.548738\n", "Epoch 2524, Training loss 2.429657, Validation loss 6.548784\n", "Epoch 2525, Training loss 2.429652, Validation loss 6.548840\n", "Epoch 2526, Training loss 2.429648, Validation loss 6.548870\n", "Epoch 2527, Training loss 2.429644, Validation loss 6.548926\n", "Epoch 2528, Training loss 2.429639, Validation loss 6.548956\n", "Epoch 2529, Training loss 2.429636, Validation loss 6.549003\n", "Epoch 2530, Training loss 2.429631, Validation loss 6.549043\n", "Epoch 2531, Training loss 2.429627, Validation loss 6.549089\n", "Epoch 2532, Training loss 2.429622, Validation loss 6.549120\n", "Epoch 2533, Training loss 2.429619, Validation loss 6.549181\n", "Epoch 2534, Training loss 2.429614, Validation loss 6.549211\n", "Epoch 2535, Training loss 2.429611, Validation loss 6.549252\n", "Epoch 2536, Training loss 2.429606, Validation loss 6.549293\n", "Epoch 2537, Training loss 2.429603, Validation loss 6.549338\n", "Epoch 2538, Training loss 2.429600, Validation loss 6.549389\n", "Epoch 2539, Training loss 2.429595, Validation loss 6.549420\n", "Epoch 2540, Training loss 2.429591, Validation loss 6.549456\n", "Epoch 2541, Training loss 2.429587, Validation loss 6.549491\n", "Epoch 2542, Training loss 2.429583, Validation loss 6.549527\n", "Epoch 2543, Training loss 2.429578, Validation loss 6.549568\n", "Epoch 2544, Training loss 2.429576, Validation loss 6.549624\n", "Epoch 2545, Training loss 2.429572, Validation loss 6.549669\n", "Epoch 2546, Training loss 2.429567, Validation loss 6.549715\n", "Epoch 2547, Training loss 2.429563, Validation loss 6.549741\n", "Epoch 2548, Training loss 2.429559, Validation loss 6.549787\n", "Epoch 2549, Training loss 2.429557, Validation loss 6.549827\n", "Epoch 2550, Training loss 2.429552, Validation loss 6.549863\n", "Epoch 2551, Training loss 
2.429549, Validation loss 6.549909\n", "Epoch 2552, Training loss 2.429544, Validation loss 6.549959\n", "Epoch 2553, Training loss 2.429541, Validation loss 6.550000\n", "Epoch 2554, Training loss 2.429537, Validation loss 6.550031\n", "Epoch 2555, Training loss 2.429533, Validation loss 6.550071\n", "Epoch 2556, Training loss 2.429529, Validation loss 6.550097\n", "Epoch 2557, Training loss 2.429526, Validation loss 6.550148\n", "Epoch 2558, Training loss 2.429522, Validation loss 6.550199\n", "Epoch 2559, Training loss 2.429518, Validation loss 6.550224\n", "Epoch 2560, Training loss 2.429514, Validation loss 6.550255\n", "Epoch 2561, Training loss 2.429512, Validation loss 6.550306\n", "Epoch 2562, Training loss 2.429507, Validation loss 6.550352\n", "Epoch 2563, Training loss 2.429503, Validation loss 6.550392\n", "Epoch 2564, Training loss 2.429501, Validation loss 6.550427\n", "Epoch 2565, Training loss 2.429496, Validation loss 6.550464\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 2566, Training loss 2.429492, Validation loss 6.550499\n", "Epoch 2567, Training loss 2.429491, Validation loss 6.550545\n", "Epoch 2568, Training loss 2.429485, Validation loss 6.550581\n", "Epoch 2569, Training loss 2.429484, Validation loss 6.550617\n", "Epoch 2570, Training loss 2.429479, Validation loss 6.550653\n", "Epoch 2571, Training loss 2.429474, Validation loss 6.550688\n", "Epoch 2572, Training loss 2.429472, Validation loss 6.550734\n", "Epoch 2573, Training loss 2.429468, Validation loss 6.550779\n", "Epoch 2574, Training loss 2.429465, Validation loss 6.550810\n", "Epoch 2575, Training loss 2.429461, Validation loss 6.550851\n", "Epoch 2576, Training loss 2.429457, Validation loss 6.550877\n", "Epoch 2577, Training loss 2.429455, Validation loss 6.550917\n", "Epoch 2578, Training loss 2.429452, Validation loss 6.550968\n", "Epoch 2579, Training loss 2.429448, Validation loss 6.551008\n", "Epoch 2580, Training loss 2.429444, Validation loss 6.551039\n", "Epoch 2581, Training loss 2.429441, Validation loss 6.551080\n", "Epoch 2582, Training loss 2.429438, Validation loss 6.551121\n", "Epoch 2583, Training loss 2.429435, Validation loss 6.551151\n", "Epoch 2584, Training loss 2.429431, Validation loss 6.551197\n", "Epoch 2585, Training loss 2.429429, Validation loss 6.551228\n", "Epoch 2586, Training loss 2.429424, Validation loss 6.551268\n", "Epoch 2587, Training loss 2.429422, Validation loss 6.551319\n", "Epoch 2588, Training loss 2.429419, Validation loss 6.551344\n", "Epoch 2589, Training loss 2.429415, Validation loss 6.551380\n", "Epoch 2590, Training loss 2.429411, Validation loss 6.551416\n", "Epoch 2591, Training loss 2.429407, Validation loss 6.551452\n", "Epoch 2592, Training loss 2.429405, Validation loss 6.551497\n", "Epoch 2593, Training loss 2.429401, Validation loss 6.551543\n", "Epoch 2594, Training loss 2.429399, Validation loss 6.551569\n", "Epoch 2595, Training loss 2.429396, Validation loss 6.551604\n", "Epoch 2596, Training loss 2.429392, Validation loss 6.551645\n", "Epoch 2597, Training loss 2.429389, Validation loss 6.551671\n", "Epoch 2598, Training loss 2.429387, Validation loss 6.551712\n", "Epoch 2599, Training loss 2.429382, Validation loss 6.551747\n", "Epoch 2600, Training loss 2.429380, Validation loss 6.551787\n", "Epoch 2601, Training loss 2.429376, Validation loss 6.551823\n", "Epoch 2602, Training loss 2.429373, Validation loss 6.551864\n", "Epoch 2603, Training loss 2.429371, Validation loss 6.551894\n", "Epoch 2604, Training 
loss 2.429367, Validation loss 6.551935\n", "Epoch 2605, Training loss 2.429365, Validation loss 6.551966\n", "Epoch 2606, Training loss 2.429363, Validation loss 6.551997\n", "Epoch 2607, Training loss 2.429358, Validation loss 6.552037\n", "Epoch 2608, Training loss 2.429355, Validation loss 6.552078\n", "Epoch 2609, Training loss 2.429352, Validation loss 6.552104\n", "Epoch 2610, Training loss 2.429350, Validation loss 6.552154\n", "Epoch 2611, Training loss 2.429348, Validation loss 6.552179\n", "Epoch 2612, Training loss 2.429344, Validation loss 6.552211\n", "Epoch 2613, Training loss 2.429341, Validation loss 6.552236\n", "Epoch 2614, Training loss 2.429338, Validation loss 6.552297\n", "Epoch 2615, Training loss 2.429335, Validation loss 6.552332\n", "Epoch 2616, Training loss 2.429332, Validation loss 6.552358\n", "Epoch 2617, Training loss 2.429329, Validation loss 6.552403\n", "Epoch 2618, Training loss 2.429325, Validation loss 6.552429\n", "Epoch 2619, Training loss 2.429323, Validation loss 6.552470\n", "Epoch 2620, Training loss 2.429320, Validation loss 6.552510\n", "Epoch 2621, Training loss 2.429318, Validation loss 6.552546\n", "Epoch 2622, Training loss 2.429314, Validation loss 6.552582\n", "Epoch 2623, Training loss 2.429312, Validation loss 6.552603\n", "Epoch 2624, Training loss 2.429308, Validation loss 6.552628\n", "Epoch 2625, Training loss 2.429306, Validation loss 6.552679\n", "Epoch 2626, Training loss 2.429304, Validation loss 6.552709\n", "Epoch 2627, Training loss 2.429301, Validation loss 6.552735\n", "Epoch 2628, Training loss 2.429298, Validation loss 6.552766\n", "Epoch 2629, Training loss 2.429294, Validation loss 6.552796\n", "Epoch 2630, Training loss 2.429293, Validation loss 6.552837\n", "Epoch 2631, Training loss 2.429289, Validation loss 6.552887\n", "Epoch 2632, Training loss 2.429288, Validation loss 6.552928\n", "Epoch 2633, Training loss 2.429284, Validation loss 6.552949\n", "Epoch 2634, Training loss 2.429282, Validation loss 6.552974\n", "Epoch 2635, Training loss 2.429279, Validation loss 6.553010\n", "Epoch 2636, Training loss 2.429276, Validation loss 6.553056\n", "Epoch 2637, Training loss 2.429273, Validation loss 6.553086\n", "Epoch 2638, Training loss 2.429272, Validation loss 6.553122\n", "Epoch 2639, Training loss 2.429268, Validation loss 6.553152\n", "Epoch 2640, Training loss 2.429266, Validation loss 6.553178\n", "Epoch 2641, Training loss 2.429263, Validation loss 6.553214\n", "Epoch 2642, Training loss 2.429260, Validation loss 6.553264\n", "Epoch 2643, Training loss 2.429258, Validation loss 6.553290\n", "Epoch 2644, Training loss 2.429255, Validation loss 6.553326\n", "Epoch 2645, Training loss 2.429253, Validation loss 6.553351\n", "Epoch 2646, Training loss 2.429250, Validation loss 6.553387\n", "Epoch 2647, Training loss 2.429247, Validation loss 6.553412\n", "Epoch 2648, Training loss 2.429245, Validation loss 6.553458\n", "Epoch 2649, Training loss 2.429242, Validation loss 6.553489\n", "Epoch 2650, Training loss 2.429240, Validation loss 6.553509\n", "Epoch 2651, Training loss 2.429238, Validation loss 6.553535\n", "Epoch 2652, Training loss 2.429235, Validation loss 6.553581\n", "Epoch 2653, Training loss 2.429232, Validation loss 6.553611\n", "Epoch 2654, Training loss 2.429231, Validation loss 6.553641\n", "Epoch 2655, Training loss 2.429228, Validation loss 6.553677\n", "Epoch 2656, Training loss 2.429225, Validation loss 6.553718\n", "Epoch 2657, Training loss 2.429223, Validation loss 6.553739\n", "Epoch 2658, 
Training loss 2.429220, Validation loss 6.553779\n", "Epoch 2659, Training loss 2.429218, Validation loss 6.553800\n", "Epoch 2660, Training loss 2.429215, Validation loss 6.553850\n", "Epoch 2661, Training loss 2.429213, Validation loss 6.553881\n", "Epoch 2662, Training loss 2.429210, Validation loss 6.553911\n", "Epoch 2663, Training loss 2.429208, Validation loss 6.553942\n", "Epoch 2664, Training loss 2.429206, Validation loss 6.553977\n", "Epoch 2665, Training loss 2.429204, Validation loss 6.553988\n", "Epoch 2666, Training loss 2.429202, Validation loss 6.554034\n", "Epoch 2667, Training loss 2.429199, Validation loss 6.554054\n", "Epoch 2668, Training loss 2.429196, Validation loss 6.554090\n", "Epoch 2669, Training loss 2.429193, Validation loss 6.554131\n", "Epoch 2670, Training loss 2.429191, Validation loss 6.554156\n", "Epoch 2671, Training loss 2.429190, Validation loss 6.554182\n", "Epoch 2672, Training loss 2.429188, Validation loss 6.554217\n", "Epoch 2673, Training loss 2.429184, Validation loss 6.554263\n", "Epoch 2674, Training loss 2.429183, Validation loss 6.554278\n", "Epoch 2675, Training loss 2.429181, Validation loss 6.554324\n", "Epoch 2676, Training loss 2.429177, Validation loss 6.554340\n", "Epoch 2677, Training loss 2.429176, Validation loss 6.554370\n", "Epoch 2678, Training loss 2.429173, Validation loss 6.554415\n", "Epoch 2679, Training loss 2.429171, Validation loss 6.554441\n", "Epoch 2680, Training loss 2.429170, Validation loss 6.554477\n", "Epoch 2681, Training loss 2.429167, Validation loss 6.554507\n", "Epoch 2682, Training loss 2.429164, Validation loss 6.554528\n", "Epoch 2683, Training loss 2.429162, Validation loss 6.554564\n", "Epoch 2684, Training loss 2.429161, Validation loss 6.554604\n", "Epoch 2685, Training loss 2.429159, Validation loss 6.554634\n", "Epoch 2686, Training loss 2.429156, Validation loss 6.554665\n", "Epoch 2687, Training loss 2.429154, Validation loss 6.554696\n", "Epoch 2688, Training loss 2.429151, Validation loss 6.554716\n", "Epoch 2689, Training loss 2.429149, Validation loss 6.554747\n", "Epoch 2690, Training loss 2.429148, Validation loss 6.554778\n", "Epoch 2691, Training loss 2.429146, Validation loss 6.554818\n", "Epoch 2692, Training loss 2.429142, Validation loss 6.554839\n", "Epoch 2693, Training loss 2.429140, Validation loss 6.554864\n", "Epoch 2694, Training loss 2.429140, Validation loss 6.554894\n", "Epoch 2695, Training loss 2.429137, Validation loss 6.554930\n", "Epoch 2696, Training loss 2.429135, Validation loss 6.554951\n", "Epoch 2697, Training loss 2.429132, Validation loss 6.554986\n", "Epoch 2698, Training loss 2.429130, Validation loss 6.555017\n", "Epoch 2699, Training loss 2.429128, Validation loss 6.555052\n", "Epoch 2700, Training loss 2.429126, Validation loss 6.555088\n", "Epoch 2701, Training loss 2.429124, Validation loss 6.555104\n", "Epoch 2702, Training loss 2.429122, Validation loss 6.555134\n", "Epoch 2703, Training loss 2.429120, Validation loss 6.555150\n", "Epoch 2704, Training loss 2.429117, Validation loss 6.555195\n", "Epoch 2705, Training loss 2.429116, Validation loss 6.555221\n", "Epoch 2706, Training loss 2.429113, Validation loss 6.555256\n", "Epoch 2707, Training loss 2.429112, Validation loss 6.555287\n", "Epoch 2708, Training loss 2.429110, Validation loss 6.555312\n", "Epoch 2709, Training loss 2.429108, Validation loss 6.555338\n", "Epoch 2710, Training loss 2.429107, Validation loss 6.555383\n", "Epoch 2711, Training loss 2.429105, Validation loss 6.555404\n", 
"Epoch 2712, Training loss 2.429101, Validation loss 6.555434\n", "Epoch 2713, Training loss 2.429101, Validation loss 6.555455\n", "Epoch 2714, Training loss 2.429099, Validation loss 6.555481\n", "Epoch 2715, Training loss 2.429096, Validation loss 6.555501\n", "Epoch 2716, Training loss 2.429094, Validation loss 6.555532\n", "Epoch 2717, Training loss 2.429092, Validation loss 6.555562\n", "Epoch 2718, Training loss 2.429091, Validation loss 6.555598\n", "Epoch 2719, Training loss 2.429089, Validation loss 6.555628\n", "Epoch 2720, Training loss 2.429086, Validation loss 6.555654\n", "Epoch 2721, Training loss 2.429086, Validation loss 6.555679\n", "Epoch 2722, Training loss 2.429083, Validation loss 6.555705\n", "Epoch 2723, Training loss 2.429081, Validation loss 6.555726\n", "Epoch 2724, Training loss 2.429080, Validation loss 6.555766\n", "Epoch 2725, Training loss 2.429078, Validation loss 6.555797\n", "Epoch 2726, Training loss 2.429075, Validation loss 6.555832\n", "Epoch 2727, Training loss 2.429074, Validation loss 6.555862\n", "Epoch 2728, Training loss 2.429071, Validation loss 6.555878\n", "Epoch 2729, Training loss 2.429071, Validation loss 6.555899\n", "Epoch 2730, Training loss 2.429069, Validation loss 6.555934\n", "Epoch 2731, Training loss 2.429067, Validation loss 6.555974\n", "Epoch 2732, Training loss 2.429065, Validation loss 6.556000\n", "Epoch 2733, Training loss 2.429062, Validation loss 6.556026\n", "Epoch 2734, Training loss 2.429061, Validation loss 6.556051\n", "Epoch 2735, Training loss 2.429060, Validation loss 6.556077\n", "Epoch 2736, Training loss 2.429058, Validation loss 6.556102\n", "Epoch 2737, Training loss 2.429056, Validation loss 6.556128\n", "Epoch 2738, Training loss 2.429054, Validation loss 6.556163\n", "Epoch 2739, Training loss 2.429053, Validation loss 6.556199\n", "Epoch 2740, Training loss 2.429050, Validation loss 6.556214\n", "Epoch 2741, Training loss 2.429049, Validation loss 6.556235\n", "Epoch 2742, Training loss 2.429047, Validation loss 6.556265\n", "Epoch 2743, Training loss 2.429044, Validation loss 6.556281\n", "Epoch 2744, Training loss 2.429043, Validation loss 6.556316\n", "Epoch 2745, Training loss 2.429043, Validation loss 6.556347\n", "Epoch 2746, Training loss 2.429041, Validation loss 6.556372\n", "Epoch 2747, Training loss 2.429039, Validation loss 6.556413\n", "Epoch 2748, Training loss 2.429038, Validation loss 6.556423\n", "Epoch 2749, Training loss 2.429037, Validation loss 6.556454\n", "Epoch 2750, Training loss 2.429034, Validation loss 6.556489\n", "Epoch 2751, Training loss 2.429033, Validation loss 6.556500\n", "Epoch 2752, Training loss 2.429030, Validation loss 6.556540\n", "Epoch 2753, Training loss 2.429029, Validation loss 6.556571\n", "Epoch 2754, Training loss 2.429026, Validation loss 6.556591\n", "Epoch 2755, Training loss 2.429025, Validation loss 6.556622\n", "Epoch 2756, Training loss 2.429024, Validation loss 6.556643\n", "Epoch 2757, Training loss 2.429022, Validation loss 6.556678\n", "Epoch 2758, Training loss 2.429020, Validation loss 6.556698\n", "Epoch 2759, Training loss 2.429020, Validation loss 6.556719\n", "Epoch 2760, Training loss 2.429017, Validation loss 6.556754\n", "Epoch 2761, Training loss 2.429015, Validation loss 6.556775\n", "Epoch 2762, Training loss 2.429014, Validation loss 6.556800\n", "Epoch 2763, Training loss 2.429012, Validation loss 6.556826\n", "Epoch 2764, Training loss 2.429010, Validation loss 6.556841\n", "Epoch 2765, Training loss 2.429009, Validation loss 
6.556862\n", "Epoch 2766, Training loss 2.429007, Validation loss 6.556888\n", "Epoch 2767, Training loss 2.429006, Validation loss 6.556928\n", "Epoch 2768, Training loss 2.429004, Validation loss 6.556963\n", "Epoch 2769, Training loss 2.429003, Validation loss 6.556979\n", "Epoch 2770, Training loss 2.429001, Validation loss 6.557014\n", "Epoch 2771, Training loss 2.429001, Validation loss 6.557030\n", "Epoch 2772, Training loss 2.428999, Validation loss 6.557055\n", "Epoch 2773, Training loss 2.428997, Validation loss 6.557071\n", "Epoch 2774, Training loss 2.428996, Validation loss 6.557101\n", "Epoch 2775, Training loss 2.428994, Validation loss 6.557127\n", "Epoch 2776, Training loss 2.428992, Validation loss 6.557152\n", "Epoch 2777, Training loss 2.428989, Validation loss 6.557182\n", "Epoch 2778, Training loss 2.428990, Validation loss 6.557208\n", "Epoch 2779, Training loss 2.428987, Validation loss 6.557233\n", "Epoch 2780, Training loss 2.428987, Validation loss 6.557244\n", "Epoch 2781, Training loss 2.428985, Validation loss 6.557274\n", "Epoch 2782, Training loss 2.428983, Validation loss 6.557310\n", "Epoch 2783, Training loss 2.428982, Validation loss 6.557341\n", "Epoch 2784, Training loss 2.428981, Validation loss 6.557361\n", "Epoch 2785, Training loss 2.428979, Validation loss 6.557381\n", "Epoch 2786, Training loss 2.428976, Validation loss 6.557402\n", "Epoch 2787, Training loss 2.428977, Validation loss 6.557433\n", "Epoch 2788, Training loss 2.428975, Validation loss 6.557453\n", "Epoch 2789, Training loss 2.428973, Validation loss 6.557473\n", "Epoch 2790, Training loss 2.428971, Validation loss 6.557504\n", "Epoch 2791, Training loss 2.428971, Validation loss 6.557529\n", "Epoch 2792, Training loss 2.428970, Validation loss 6.557555\n", "Epoch 2793, Training loss 2.428968, Validation loss 6.557585\n", "Epoch 2794, Training loss 2.428967, Validation loss 6.557606\n", "Epoch 2795, Training loss 2.428964, Validation loss 6.557621\n", "Epoch 2796, Training loss 2.428964, Validation loss 6.557632\n", "Epoch 2797, Training loss 2.428962, Validation loss 6.557673\n", "Epoch 2798, Training loss 2.428960, Validation loss 6.557697\n", "Epoch 2799, Training loss 2.428959, Validation loss 6.557728\n", "Epoch 2800, Training loss 2.428958, Validation loss 6.557753\n", "Epoch 2801, Training loss 2.428957, Validation loss 6.557788\n", "Epoch 2802, Training loss 2.428956, Validation loss 6.557794\n", "Epoch 2803, Training loss 2.428953, Validation loss 6.557825\n", "Epoch 2804, Training loss 2.428953, Validation loss 6.557830\n", "Epoch 2805, Training loss 2.428951, Validation loss 6.557866\n", "Epoch 2806, Training loss 2.428950, Validation loss 6.557896\n", "Epoch 2807, Training loss 2.428950, Validation loss 6.557921\n", "Epoch 2808, Training loss 2.428948, Validation loss 6.557951\n", "Epoch 2809, Training loss 2.428947, Validation loss 6.557977\n", "Epoch 2810, Training loss 2.428944, Validation loss 6.557992\n", "Epoch 2811, Training loss 2.428943, Validation loss 6.558018\n", "Epoch 2812, Training loss 2.428942, Validation loss 6.558043\n", "Epoch 2813, Training loss 2.428941, Validation loss 6.558059\n", "Epoch 2814, Training loss 2.428938, Validation loss 6.558080\n", "Epoch 2815, Training loss 2.428938, Validation loss 6.558115\n", "Epoch 2816, Training loss 2.428937, Validation loss 6.558135\n", "Epoch 2817, Training loss 2.428935, Validation loss 6.558151\n", "Epoch 2818, Training loss 2.428935, Validation loss 6.558172\n", "Epoch 2819, Training loss 2.428934, 
Validation loss 6.558192\n", "Epoch 2820, Training loss 2.428931, Validation loss 6.558208\n", "Epoch 2821, Training loss 2.428931, Validation loss 6.558248\n", "Epoch 2822, Training loss 2.428930, Validation loss 6.558263\n", "Epoch 2823, Training loss 2.428929, Validation loss 6.558293\n", "Epoch 2824, Training loss 2.428927, Validation loss 6.558323\n", "Epoch 2825, Training loss 2.428926, Validation loss 6.558344\n", "Epoch 2826, Training loss 2.428925, Validation loss 6.558375\n", "Epoch 2827, Training loss 2.428924, Validation loss 6.558385\n", "Epoch 2828, Training loss 2.428923, Validation loss 6.558415\n", "Epoch 2829, Training loss 2.428920, Validation loss 6.558426\n", "Epoch 2830, Training loss 2.428920, Validation loss 6.558437\n", "Epoch 2831, Training loss 2.428918, Validation loss 6.558477\n", "Epoch 2832, Training loss 2.428918, Validation loss 6.558497\n", "Epoch 2833, Training loss 2.428917, Validation loss 6.558523\n", "Epoch 2834, Training loss 2.428916, Validation loss 6.558553\n", "Epoch 2835, Training loss 2.428915, Validation loss 6.558559\n", "Epoch 2836, Training loss 2.428913, Validation loss 6.558579\n", "Epoch 2837, Training loss 2.428912, Validation loss 6.558599\n", "Epoch 2838, Training loss 2.428911, Validation loss 6.558635\n", "Epoch 2839, Training loss 2.428908, Validation loss 6.558655\n", "Epoch 2840, Training loss 2.428909, Validation loss 6.558671\n", "Epoch 2841, Training loss 2.428907, Validation loss 6.558696\n", "Epoch 2842, Training loss 2.428906, Validation loss 6.558712\n", "Epoch 2843, Training loss 2.428905, Validation loss 6.558737\n", "Epoch 2844, Training loss 2.428905, Validation loss 6.558767\n", "Epoch 2845, Training loss 2.428903, Validation loss 6.558783\n", "Epoch 2846, Training loss 2.428902, Validation loss 6.558808\n", "Epoch 2847, Training loss 2.428901, Validation loss 6.558824\n", "Epoch 2848, Training loss 2.428899, Validation loss 6.558855\n", "Epoch 2849, Training loss 2.428898, Validation loss 6.558879\n", "Epoch 2850, Training loss 2.428897, Validation loss 6.558900\n", "Epoch 2851, Training loss 2.428896, Validation loss 6.558925\n", "Epoch 2852, Training loss 2.428895, Validation loss 6.558941\n", "Epoch 2853, Training loss 2.428894, Validation loss 6.558961\n", "Epoch 2854, Training loss 2.428894, Validation loss 6.558967\n", "Epoch 2855, Training loss 2.428892, Validation loss 6.559002\n", "Epoch 2856, Training loss 2.428892, Validation loss 6.559012\n", "Epoch 2857, Training loss 2.428890, Validation loss 6.559053\n", "Epoch 2858, Training loss 2.428889, Validation loss 6.559083\n", "Epoch 2859, Training loss 2.428888, Validation loss 6.559099\n", "Epoch 2860, Training loss 2.428887, Validation loss 6.559114\n", "Epoch 2861, Training loss 2.428886, Validation loss 6.559139\n", "Epoch 2862, Training loss 2.428883, Validation loss 6.559150\n", "Epoch 2863, Training loss 2.428882, Validation loss 6.559180\n", "Epoch 2864, Training loss 2.428882, Validation loss 6.559196\n", "Epoch 2865, Training loss 2.428880, Validation loss 6.559206\n", "Epoch 2866, Training loss 2.428880, Validation loss 6.559247\n", "Epoch 2867, Training loss 2.428879, Validation loss 6.559267\n", "Epoch 2868, Training loss 2.428879, Validation loss 6.559287\n", "Epoch 2869, Training loss 2.428876, Validation loss 6.559308\n", "Epoch 2870, Training loss 2.428876, Validation loss 6.559328\n", "Epoch 2871, Training loss 2.428875, Validation loss 6.559349\n", "Epoch 2872, Training loss 2.428874, Validation loss 6.559359\n", "Epoch 2873, Training loss 
2.428873, Validation loss 6.559379\n", "Epoch 2874, Training loss 2.428872, Validation loss 6.559400\n", "Epoch 2875, Training loss 2.428871, Validation loss 6.559420\n", "Epoch 2876, Training loss 2.428870, Validation loss 6.559451\n", "Epoch 2877, Training loss 2.428869, Validation loss 6.559466\n", "Epoch 2878, Training loss 2.428868, Validation loss 6.559491\n", "Epoch 2879, Training loss 2.428868, Validation loss 6.559502\n", "Epoch 2880, Training loss 2.428865, Validation loss 6.559532\n", "Epoch 2881, Training loss 2.428866, Validation loss 6.559547\n", "Epoch 2882, Training loss 2.428864, Validation loss 6.559563\n", "Epoch 2883, Training loss 2.428863, Validation loss 6.559574\n", "Epoch 2884, Training loss 2.428863, Validation loss 6.559608\n", "Epoch 2885, Training loss 2.428861, Validation loss 6.559629\n", "Epoch 2886, Training loss 2.428861, Validation loss 6.559654\n", "Epoch 2887, Training loss 2.428858, Validation loss 6.559670\n", "Epoch 2888, Training loss 2.428859, Validation loss 6.559695\n", "Epoch 2889, Training loss 2.428857, Validation loss 6.559715\n", "Epoch 2890, Training loss 2.428857, Validation loss 6.559736\n", "Epoch 2891, Training loss 2.428855, Validation loss 6.559741\n", "Epoch 2892, Training loss 2.428855, Validation loss 6.559767\n", "Epoch 2893, Training loss 2.428854, Validation loss 6.559783\n", "Epoch 2894, Training loss 2.428852, Validation loss 6.559807\n", "Epoch 2895, Training loss 2.428853, Validation loss 6.559823\n", "Epoch 2896, Training loss 2.428850, Validation loss 6.559848\n", "Epoch 2897, Training loss 2.428850, Validation loss 6.559864\n", "Epoch 2898, Training loss 2.428850, Validation loss 6.559894\n", "Epoch 2899, Training loss 2.428848, Validation loss 6.559909\n", "Epoch 2900, Training loss 2.428848, Validation loss 6.559925\n", "Epoch 2901, Training loss 2.428847, Validation loss 6.559950\n", "Epoch 2902, Training loss 2.428846, Validation loss 6.559970\n", "Epoch 2903, Training loss 2.428845, Validation loss 6.559996\n", "Epoch 2904, Training loss 2.428844, Validation loss 6.559996\n", "Epoch 2905, Training loss 2.428843, Validation loss 6.560031\n", "Epoch 2906, Training loss 2.428843, Validation loss 6.560042\n", "Epoch 2907, Training loss 2.428841, Validation loss 6.560062\n", "Epoch 2908, Training loss 2.428840, Validation loss 6.560083\n", "Epoch 2909, Training loss 2.428840, Validation loss 6.560103\n", "Epoch 2910, Training loss 2.428838, Validation loss 6.560109\n", "Epoch 2911, Training loss 2.428838, Validation loss 6.560129\n", "Epoch 2912, Training loss 2.428836, Validation loss 6.560154\n", "Epoch 2913, Training loss 2.428835, Validation loss 6.560175\n", "Epoch 2914, Training loss 2.428836, Validation loss 6.560195\n", "Epoch 2915, Training loss 2.428835, Validation loss 6.560215\n", "Epoch 2916, Training loss 2.428834, Validation loss 6.560246\n", "Epoch 2917, Training loss 2.428834, Validation loss 6.560256\n", "Epoch 2918, Training loss 2.428833, Validation loss 6.560287\n", "Epoch 2919, Training loss 2.428831, Validation loss 6.560292\n", "Epoch 2920, Training loss 2.428830, Validation loss 6.560302\n", "Epoch 2921, Training loss 2.428828, Validation loss 6.560332\n", "Epoch 2922, Training loss 2.428828, Validation loss 6.560343\n", "Epoch 2923, Training loss 2.428828, Validation loss 6.560359\n", "Epoch 2924, Training loss 2.428827, Validation loss 6.560389\n", "Epoch 2925, Training loss 2.428827, Validation loss 6.560399\n", "Epoch 2926, Training loss 2.428826, Validation loss 6.560424\n", "Epoch 2927, 
Training loss 2.428825, Validation loss 6.560444\n", "...\n", "Epoch 3000, Training loss 2.428774, Validation loss 6.561699\n", "...\n", "Epoch 3500, Training loss 2.428649, Validation loss 6.566672\n", "...\n", "Epoch 4000, Training loss 2.428633, Validation loss 6.568428\n", "...\n",
"Epoch 4053, Training loss 2.428632, Validation loss 6.568535\n", "Epoch 4054, Training loss 2.428633, Validation loss 6.568535\n", "Epoch 4055, Training loss 2.428632, Validation loss 6.568535\n", "Epoch 4056, Training loss 2.428633, Validation loss 6.568545\n", "Epoch 4057, Training loss 2.428633, Validation loss 6.568550\n", "Epoch 4058, Training loss 2.428633, Validation loss 6.568550\n", "Epoch 4059, Training loss 2.428632, Validation loss 6.568560\n", "Epoch 4060, Training loss 2.428632, Validation loss 6.568560\n", "Epoch 4061, Training loss 2.428633, Validation loss 6.568560\n", "Epoch 4062, Training loss 2.428633, Validation loss 6.568565\n", "Epoch 4063, Training loss 2.428632, Validation loss 6.568575\n", "Epoch 4064, Training loss 2.428633, Validation loss 6.568575\n", "Epoch 4065, Training loss 2.428632, Validation loss 6.568575\n", "Epoch 4066, Training loss 2.428632, Validation loss 6.568571\n", "Epoch 4067, Training loss 2.428634, Validation loss 6.568571\n", "Epoch 4068, Training loss 2.428632, Validation loss 6.568575\n", "Epoch 4069, Training loss 2.428633, Validation loss 6.568575\n", "Epoch 4070, Training loss 2.428632, Validation loss 6.568585\n", "Epoch 4071, Training loss 2.428633, Validation loss 6.568571\n", "Epoch 4072, Training loss 2.428632, Validation loss 6.568571\n", "Epoch 4073, Training loss 2.428632, Validation loss 6.568581\n", "Epoch 4074, Training loss 2.428634, Validation loss 6.568585\n", "Epoch 4075, Training loss 2.428632, Validation loss 6.568585\n", "Epoch 4076, Training loss 2.428633, Validation loss 6.568595\n", "Epoch 4077, Training loss 2.428633, Validation loss 6.568595\n", "Epoch 4078, Training loss 2.428633, Validation loss 6.568581\n", "Epoch 4079, Training loss 2.428633, Validation loss 6.568591\n", "Epoch 4080, Training loss 2.428632, Validation loss 6.568591\n", "Epoch 4081, Training loss 2.428633, Validation loss 6.568595\n", "Epoch 4082, Training loss 2.428633, Validation loss 6.568595\n", "Epoch 4083, Training loss 2.428632, Validation loss 6.568591\n", "Epoch 4084, Training loss 2.428634, Validation loss 6.568591\n", "Epoch 4085, Training loss 2.428632, Validation loss 6.568591\n", "Epoch 4086, Training loss 2.428633, Validation loss 6.568601\n", "Epoch 4087, Training loss 2.428632, Validation loss 6.568606\n", "Epoch 4088, Training loss 2.428632, Validation loss 6.568606\n", "Epoch 4089, Training loss 2.428633, Validation loss 6.568601\n", "Epoch 4090, Training loss 2.428632, Validation loss 6.568601\n", "Epoch 4091, Training loss 2.428633, Validation loss 6.568601\n", "Epoch 4092, Training loss 2.428632, Validation loss 6.568601\n", "Epoch 4093, Training loss 2.428633, Validation loss 6.568616\n", "Epoch 4094, Training loss 2.428633, Validation loss 6.568616\n", "Epoch 4095, Training loss 2.428631, Validation loss 6.568616\n", "Epoch 4096, Training loss 2.428632, Validation loss 6.568612\n", "Epoch 4097, Training loss 2.428632, Validation loss 6.568612\n", "Epoch 4098, Training loss 2.428632, Validation loss 6.568612\n", "Epoch 4099, Training loss 2.428632, Validation loss 6.568622\n", "Epoch 4100, Training loss 2.428632, Validation loss 6.568626\n", "Epoch 4101, Training loss 2.428632, Validation loss 6.568626\n", "Epoch 4102, Training loss 2.428632, Validation loss 6.568626\n", "Epoch 4103, Training loss 2.428633, Validation loss 6.568622\n", "Epoch 4104, Training loss 2.428634, Validation loss 6.568622\n", "Epoch 4105, Training loss 2.428632, Validation loss 6.568622\n", "Epoch 4106, Training loss 2.428632, Validation loss 
6.568636\n", "Epoch 4107, Training loss 2.428632, Validation loss 6.568636\n", "Epoch 4108, Training loss 2.428632, Validation loss 6.568636\n", "Epoch 4109, Training loss 2.428634, Validation loss 6.568632\n", "Epoch 4110, Training loss 2.428631, Validation loss 6.568632\n", "Epoch 4111, Training loss 2.428632, Validation loss 6.568632\n", "Epoch 4112, Training loss 2.428632, Validation loss 6.568647\n", "Epoch 4113, Training loss 2.428632, Validation loss 6.568647\n", "Epoch 4114, Training loss 2.428632, Validation loss 6.568647\n", "Epoch 4115, Training loss 2.428631, Validation loss 6.568642\n", "Epoch 4116, Training loss 2.428633, Validation loss 6.568642\n", "Epoch 4117, Training loss 2.428632, Validation loss 6.568642\n", "Epoch 4118, Training loss 2.428632, Validation loss 6.568642\n", "Epoch 4119, Training loss 2.428632, Validation loss 6.568657\n", "Epoch 4120, Training loss 2.428633, Validation loss 6.568657\n", "Epoch 4121, Training loss 2.428632, Validation loss 6.568657\n", "Epoch 4122, Training loss 2.428632, Validation loss 6.568653\n", "Epoch 4123, Training loss 2.428633, Validation loss 6.568653\n", "Epoch 4124, Training loss 2.428632, Validation loss 6.568653\n", "Epoch 4125, Training loss 2.428632, Validation loss 6.568657\n", "Epoch 4126, Training loss 2.428632, Validation loss 6.568667\n", "Epoch 4127, Training loss 2.428631, Validation loss 6.568653\n", "Epoch 4128, Training loss 2.428632, Validation loss 6.568653\n", "Epoch 4129, Training loss 2.428631, Validation loss 6.568663\n", "Epoch 4130, Training loss 2.428632, Validation loss 6.568663\n", "Epoch 4131, Training loss 2.428632, Validation loss 6.568667\n", "Epoch 4132, Training loss 2.428632, Validation loss 6.568677\n", "Epoch 4133, Training loss 2.428632, Validation loss 6.568677\n", "Epoch 4134, Training loss 2.428630, Validation loss 6.568663\n", "Epoch 4135, Training loss 2.428632, Validation loss 6.568673\n", "Epoch 4136, Training loss 2.428632, Validation loss 6.568673\n", "Epoch 4137, Training loss 2.428632, Validation loss 6.568673\n", "Epoch 4138, Training loss 2.428632, Validation loss 6.568677\n", "Epoch 4139, Training loss 2.428632, Validation loss 6.568688\n", "Epoch 4140, Training loss 2.428632, Validation loss 6.568673\n", "Epoch 4141, Training loss 2.428632, Validation loss 6.568673\n", "Epoch 4142, Training loss 2.428632, Validation loss 6.568683\n", "Epoch 4143, Training loss 2.428632, Validation loss 6.568683\n", "Epoch 4144, Training loss 2.428632, Validation loss 6.568688\n", "Epoch 4145, Training loss 2.428632, Validation loss 6.568678\n", "Epoch 4146, Training loss 2.428633, Validation loss 6.568683\n", "Epoch 4147, Training loss 2.428632, Validation loss 6.568683\n", "Epoch 4148, Training loss 2.428632, Validation loss 6.568693\n", "Epoch 4149, Training loss 2.428632, Validation loss 6.568693\n", "Epoch 4150, Training loss 2.428632, Validation loss 6.568698\n", "Epoch 4151, Training loss 2.428632, Validation loss 6.568698\n", "Epoch 4152, Training loss 2.428632, Validation loss 6.568708\n", "Epoch 4153, Training loss 2.428632, Validation loss 6.568708\n", "Epoch 4154, Training loss 2.428631, Validation loss 6.568693\n", "Epoch 4155, Training loss 2.428632, Validation loss 6.568704\n", "Epoch 4156, Training loss 2.428632, Validation loss 6.568704\n", "Epoch 4157, Training loss 2.428632, Validation loss 6.568708\n", "Epoch 4158, Training loss 2.428632, Validation loss 6.568708\n", "Epoch 4159, Training loss 2.428632, Validation loss 6.568704\n", "Epoch 4160, Training loss 2.428632, 
Validation loss 6.568704\n", "Epoch 4161, Training loss 2.428632, Validation loss 6.568704\n", "Epoch 4162, Training loss 2.428632, Validation loss 6.568714\n", "Epoch 4163, Training loss 2.428631, Validation loss 6.568718\n", "Epoch 4164, Training loss 2.428632, Validation loss 6.568699\n", "Epoch 4165, Training loss 2.428632, Validation loss 6.568714\n", "Epoch 4166, Training loss 2.428632, Validation loss 6.568714\n", "Epoch 4167, Training loss 2.428632, Validation loss 6.568714\n", "Epoch 4168, Training loss 2.428632, Validation loss 6.568724\n", "Epoch 4169, Training loss 2.428632, Validation loss 6.568729\n", "Epoch 4170, Training loss 2.428631, Validation loss 6.568729\n", "Epoch 4171, Training loss 2.428632, Validation loss 6.568724\n", "Epoch 4172, Training loss 2.428632, Validation loss 6.568724\n", "Epoch 4173, Training loss 2.428631, Validation loss 6.568724\n", "Epoch 4174, Training loss 2.428632, Validation loss 6.568724\n", "Epoch 4175, Training loss 2.428631, Validation loss 6.568744\n", "Epoch 4176, Training loss 2.428632, Validation loss 6.568729\n", "Epoch 4177, Training loss 2.428632, Validation loss 6.568744\n", "Epoch 4178, Training loss 2.428631, Validation loss 6.568744\n", "Epoch 4179, Training loss 2.428632, Validation loss 6.568739\n", "Epoch 4180, Training loss 2.428632, Validation loss 6.568744\n", "Epoch 4181, Training loss 2.428631, Validation loss 6.568739\n", "Epoch 4182, Training loss 2.428632, Validation loss 6.568744\n", "Epoch 4183, Training loss 2.428632, Validation loss 6.568754\n", "Epoch 4184, Training loss 2.428632, Validation loss 6.568759\n", "Epoch 4185, Training loss 2.428632, Validation loss 6.568749\n", "Epoch 4186, Training loss 2.428632, Validation loss 6.568754\n", "Epoch 4187, Training loss 2.428631, Validation loss 6.568749\n", "Epoch 4188, Training loss 2.428632, Validation loss 6.568754\n", "Epoch 4189, Training loss 2.428632, Validation loss 6.568749\n", "Epoch 4190, Training loss 2.428632, Validation loss 6.568754\n", "Epoch 4191, Training loss 2.428632, Validation loss 6.568745\n", "Epoch 4192, Training loss 2.428632, Validation loss 6.568769\n", "Epoch 4193, Training loss 2.428631, Validation loss 6.568759\n", "Epoch 4194, Training loss 2.428632, Validation loss 6.568765\n", "Epoch 4195, Training loss 2.428631, Validation loss 6.568759\n", "Epoch 4196, Training loss 2.428631, Validation loss 6.568765\n", "Epoch 4197, Training loss 2.428632, Validation loss 6.568774\n", "Epoch 4198, Training loss 2.428632, Validation loss 6.568769\n", "Epoch 4199, Training loss 2.428632, Validation loss 6.568774\n", "Epoch 4200, Training loss 2.428631, Validation loss 6.568765\n", "Epoch 4201, Training loss 2.428632, Validation loss 6.568770\n", "Epoch 4202, Training loss 2.428632, Validation loss 6.568765\n", "Epoch 4203, Training loss 2.428632, Validation loss 6.568770\n", "Epoch 4204, Training loss 2.428632, Validation loss 6.568765\n", "Epoch 4205, Training loss 2.428632, Validation loss 6.568770\n", "Epoch 4206, Training loss 2.428632, Validation loss 6.568779\n", "Epoch 4207, Training loss 2.428631, Validation loss 6.568785\n", "Epoch 4208, Training loss 2.428632, Validation loss 6.568775\n", "Epoch 4209, Training loss 2.428631, Validation loss 6.568780\n", "Epoch 4210, Training loss 2.428632, Validation loss 6.568775\n", "Epoch 4211, Training loss 2.428632, Validation loss 6.568780\n", "Epoch 4212, Training loss 2.428632, Validation loss 6.568775\n", "Epoch 4213, Training loss 2.428632, Validation loss 6.568775\n", "Epoch 4214, Training loss 
2.428632, Validation loss 6.568789\n", "Epoch 4215, Training loss 2.428632, Validation loss 6.568795\n", "Epoch 4216, Training loss 2.428631, Validation loss 6.568785\n", "Epoch 4217, Training loss 2.428632, Validation loss 6.568790\n", "Epoch 4218, Training loss 2.428631, Validation loss 6.568785\n", "Epoch 4219, Training loss 2.428632, Validation loss 6.568790\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 4220, Training loss 2.428632, Validation loss 6.568785\n", "Epoch 4221, Training loss 2.428631, Validation loss 6.568786\n", "Epoch 4222, Training loss 2.428632, Validation loss 6.568800\n", "Epoch 4223, Training loss 2.428632, Validation loss 6.568790\n", "Epoch 4224, Training loss 2.428632, Validation loss 6.568795\n", "Epoch 4225, Training loss 2.428631, Validation loss 6.568790\n", "Epoch 4226, Training loss 2.428632, Validation loss 6.568795\n", "Epoch 4227, Training loss 2.428631, Validation loss 6.568790\n", "Epoch 4228, Training loss 2.428633, Validation loss 6.568795\n", "Epoch 4229, Training loss 2.428631, Validation loss 6.568786\n", "Epoch 4230, Training loss 2.428631, Validation loss 6.568810\n", "Epoch 4231, Training loss 2.428632, Validation loss 6.568800\n", "Epoch 4232, Training loss 2.428632, Validation loss 6.568806\n", "Epoch 4233, Training loss 2.428632, Validation loss 6.568800\n", "Epoch 4234, Training loss 2.428632, Validation loss 6.568806\n", "Epoch 4235, Training loss 2.428632, Validation loss 6.568800\n", "Epoch 4236, Training loss 2.428631, Validation loss 6.568801\n", "Epoch 4237, Training loss 2.428633, Validation loss 6.568815\n", "Epoch 4238, Training loss 2.428632, Validation loss 6.568820\n", "Epoch 4239, Training loss 2.428632, Validation loss 6.568810\n", "Epoch 4240, Training loss 2.428632, Validation loss 6.568816\n", "Epoch 4241, Training loss 2.428633, Validation loss 6.568810\n", "Epoch 4242, Training loss 2.428631, Validation loss 6.568816\n", "Epoch 4243, Training loss 2.428631, Validation loss 6.568810\n", "Epoch 4244, Training loss 2.428632, Validation loss 6.568811\n", "Epoch 4245, Training loss 2.428631, Validation loss 6.568826\n", "Epoch 4246, Training loss 2.428631, Validation loss 6.568816\n", "Epoch 4247, Training loss 2.428632, Validation loss 6.568821\n", "Epoch 4248, Training loss 2.428632, Validation loss 6.568816\n", "Epoch 4249, Training loss 2.428631, Validation loss 6.568821\n", "Epoch 4250, Training loss 2.428632, Validation loss 6.568816\n", "Epoch 4251, Training loss 2.428632, Validation loss 6.568821\n", "Epoch 4252, Training loss 2.428631, Validation loss 6.568811\n", "Epoch 4253, Training loss 2.428632, Validation loss 6.568836\n", "Epoch 4254, Training loss 2.428631, Validation loss 6.568826\n", "Epoch 4255, Training loss 2.428631, Validation loss 6.568831\n", "Epoch 4256, Training loss 2.428631, Validation loss 6.568826\n", "Epoch 4257, Training loss 2.428632, Validation loss 6.568831\n", "Epoch 4258, Training loss 2.428632, Validation loss 6.568826\n", "Epoch 4259, Training loss 2.428633, Validation loss 6.568827\n", "Epoch 4260, Training loss 2.428632, Validation loss 6.568841\n", "Epoch 4261, Training loss 2.428632, Validation loss 6.568841\n", "Epoch 4262, Training loss 2.428632, Validation loss 6.568836\n", "Epoch 4263, Training loss 2.428632, Validation loss 6.568841\n", "Epoch 4264, Training loss 2.428632, Validation loss 6.568836\n", "Epoch 4265, Training loss 2.428631, Validation loss 6.568841\n", "Epoch 4266, Training loss 2.428632, Validation loss 6.568836\n", "Epoch 4267, Training 
loss 2.428631, Validation loss 6.568837\n", "Epoch 4268, Training loss 2.428632, Validation loss 6.568851\n", "Epoch 4269, Training loss 2.428631, Validation loss 6.568841\n", "Epoch 4270, Training loss 2.428632, Validation loss 6.568847\n", "Epoch 4271, Training loss 2.428632, Validation loss 6.568841\n", "Epoch 4272, Training loss 2.428632, Validation loss 6.568847\n", "Epoch 4273, Training loss 2.428632, Validation loss 6.568841\n", "Epoch 4274, Training loss 2.428631, Validation loss 6.568842\n", "Epoch 4275, Training loss 2.428632, Validation loss 6.568856\n", "Epoch 4276, Training loss 2.428632, Validation loss 6.568861\n", "Epoch 4277, Training loss 2.428632, Validation loss 6.568851\n", "Epoch 4278, Training loss 2.428631, Validation loss 6.568857\n", "Epoch 4279, Training loss 2.428632, Validation loss 6.568851\n", "Epoch 4280, Training loss 2.428632, Validation loss 6.568857\n", "Epoch 4281, Training loss 2.428632, Validation loss 6.568851\n", "Epoch 4282, Training loss 2.428631, Validation loss 6.568852\n", "Epoch 4283, Training loss 2.428632, Validation loss 6.568867\n", "Epoch 4284, Training loss 2.428632, Validation loss 6.568867\n", "Epoch 4285, Training loss 2.428631, Validation loss 6.568862\n", "Epoch 4286, Training loss 2.428631, Validation loss 6.568867\n", "Epoch 4287, Training loss 2.428631, Validation loss 6.568862\n", "Epoch 4288, Training loss 2.428632, Validation loss 6.568867\n", "Epoch 4289, Training loss 2.428631, Validation loss 6.568862\n", "Epoch 4290, Training loss 2.428631, Validation loss 6.568862\n", "Epoch 4291, Training loss 2.428631, Validation loss 6.568877\n", "Epoch 4292, Training loss 2.428631, Validation loss 6.568867\n", "Epoch 4293, Training loss 2.428632, Validation loss 6.568872\n", "Epoch 4294, Training loss 2.428631, Validation loss 6.568867\n", "Epoch 4295, Training loss 2.428631, Validation loss 6.568872\n", "Epoch 4296, Training loss 2.428632, Validation loss 6.568867\n", "Epoch 4297, Training loss 2.428632, Validation loss 6.568868\n", "Epoch 4298, Training loss 2.428631, Validation loss 6.568882\n", "Epoch 4299, Training loss 2.428632, Validation loss 6.568882\n", "Epoch 4300, Training loss 2.428632, Validation loss 6.568877\n", "Epoch 4301, Training loss 2.428630, Validation loss 6.568882\n", "Epoch 4302, Training loss 2.428633, Validation loss 6.568877\n", "Epoch 4303, Training loss 2.428631, Validation loss 6.568882\n", "Epoch 4304, Training loss 2.428632, Validation loss 6.568877\n", "Epoch 4305, Training loss 2.428631, Validation loss 6.568878\n", "Epoch 4306, Training loss 2.428633, Validation loss 6.568872\n", "Epoch 4307, Training loss 2.428631, Validation loss 6.568892\n", "Epoch 4308, Training loss 2.428632, Validation loss 6.568888\n", "Epoch 4309, Training loss 2.428631, Validation loss 6.568892\n", "Epoch 4310, Training loss 2.428631, Validation loss 6.568888\n", "Epoch 4311, Training loss 2.428632, Validation loss 6.568892\n", "Epoch 4312, Training loss 2.428631, Validation loss 6.568883\n", "Epoch 4313, Training loss 2.428631, Validation loss 6.568897\n", "Epoch 4314, Training loss 2.428631, Validation loss 6.568902\n", "Epoch 4315, Training loss 2.428632, Validation loss 6.568892\n", "Epoch 4316, Training loss 2.428631, Validation loss 6.568898\n", "Epoch 4317, Training loss 2.428632, Validation loss 6.568892\n", "Epoch 4318, Training loss 2.428631, Validation loss 6.568898\n", "Epoch 4319, Training loss 2.428631, Validation loss 6.568892\n", "Epoch 4320, Training loss 2.428631, Validation loss 6.568893\n", "Epoch 4321, 
Training loss 2.428631, Validation loss 6.568888\n", "Epoch 4322, Training loss 2.428631, Validation loss 6.568908\n", "Epoch 4323, Training loss 2.428632, Validation loss 6.568903\n", "Epoch 4324, Training loss 2.428632, Validation loss 6.568908\n", "Epoch 4325, Training loss 2.428631, Validation loss 6.568903\n", "Epoch 4326, Training loss 2.428631, Validation loss 6.568908\n", "Epoch 4327, Training loss 2.428632, Validation loss 6.568903\n", "Epoch 4328, Training loss 2.428631, Validation loss 6.568903\n", "Epoch 4329, Training loss 2.428631, Validation loss 6.568918\n", "Epoch 4330, Training loss 2.428630, Validation loss 6.568918\n", "Epoch 4331, Training loss 2.428632, Validation loss 6.568913\n", "Epoch 4332, Training loss 2.428631, Validation loss 6.568918\n", "Epoch 4333, Training loss 2.428632, Validation loss 6.568913\n", "Epoch 4334, Training loss 2.428631, Validation loss 6.568928\n", "Epoch 4335, Training loss 2.428632, Validation loss 6.568918\n", "Epoch 4336, Training loss 2.428632, Validation loss 6.568933\n", "Epoch 4337, Training loss 2.428631, Validation loss 6.568928\n", "Epoch 4338, Training loss 2.428632, Validation loss 6.568943\n", "Epoch 4339, Training loss 2.428631, Validation loss 6.568918\n", "Epoch 4340, Training loss 2.428631, Validation loss 6.568933\n", "Epoch 4341, Training loss 2.428632, Validation loss 6.568938\n", "Epoch 4342, Training loss 2.428631, Validation loss 6.568933\n", "Epoch 4343, Training loss 2.428632, Validation loss 6.568948\n", "Epoch 4344, Training loss 2.428633, Validation loss 6.568938\n", "Epoch 4345, Training loss 2.428631, Validation loss 6.568948\n", "Epoch 4346, Training loss 2.428630, Validation loss 6.568948\n", "Epoch 4347, Training loss 2.428631, Validation loss 6.568953\n", "Epoch 4348, Training loss 2.428632, Validation loss 6.568948\n", "Epoch 4349, Training loss 2.428631, Validation loss 6.568944\n", "Epoch 4350, Training loss 2.428632, Validation loss 6.568948\n", "Epoch 4351, Training loss 2.428631, Validation loss 6.568948\n", "Epoch 4352, Training loss 2.428630, Validation loss 6.568944\n", "Epoch 4353, Training loss 2.428631, Validation loss 6.568948\n", "Epoch 4354, Training loss 2.428632, Validation loss 6.568944\n", "Epoch 4355, Training loss 2.428631, Validation loss 6.568958\n", "Epoch 4356, Training loss 2.428630, Validation loss 6.568944\n", "Epoch 4357, Training loss 2.428633, Validation loss 6.568958\n", "Epoch 4358, Training loss 2.428631, Validation loss 6.568958\n", "Epoch 4359, Training loss 2.428631, Validation loss 6.568963\n", "Epoch 4360, Training loss 2.428632, Validation loss 6.568958\n", "Epoch 4361, Training loss 2.428631, Validation loss 6.568954\n", "Epoch 4362, Training loss 2.428631, Validation loss 6.568958\n", "Epoch 4363, Training loss 2.428632, Validation loss 6.568958\n", "Epoch 4364, Training loss 2.428631, Validation loss 6.568958\n", "Epoch 4365, Training loss 2.428631, Validation loss 6.568958\n", "Epoch 4366, Training loss 2.428631, Validation loss 6.568954\n", "Epoch 4367, Training loss 2.428632, Validation loss 6.568958\n", "Epoch 4368, Training loss 2.428632, Validation loss 6.568954\n", "Epoch 4369, Training loss 2.428631, Validation loss 6.568968\n", "Epoch 4370, Training loss 2.428632, Validation loss 6.568958\n", "Epoch 4371, Training loss 2.428631, Validation loss 6.568954\n", "Epoch 4372, Training loss 2.428632, Validation loss 6.568968\n", "Epoch 4373, Training loss 2.428632, Validation loss 6.568973\n", "Epoch 4374, Training loss 2.428631, Validation loss 6.568968\n", 
"Epoch 4375, Training loss 2.428631, Validation loss 6.568964\n", "Epoch 4376, Training loss 2.428632, Validation loss 6.568968\n", "Epoch 4377, Training loss 2.428632, Validation loss 6.568968\n", "Epoch 4378, Training loss 2.428631, Validation loss 6.568964\n", "Epoch 4379, Training loss 2.428631, Validation loss 6.568968\n", "Epoch 4380, Training loss 2.428630, Validation loss 6.568964\n", "Epoch 4381, Training loss 2.428632, Validation loss 6.568968\n", "Epoch 4382, Training loss 2.428632, Validation loss 6.568968\n", "Epoch 4383, Training loss 2.428632, Validation loss 6.568979\n", "Epoch 4384, Training loss 2.428631, Validation loss 6.568968\n", "Epoch 4385, Training loss 2.428631, Validation loss 6.568964\n", "Epoch 4386, Training loss 2.428631, Validation loss 6.568979\n", "Epoch 4387, Training loss 2.428632, Validation loss 6.568964\n", "Epoch 4388, Training loss 2.428631, Validation loss 6.568979\n", "Epoch 4389, Training loss 2.428631, Validation loss 6.568979\n", "Epoch 4390, Training loss 2.428631, Validation loss 6.568974\n", "Epoch 4391, Training loss 2.428632, Validation loss 6.568979\n", "Epoch 4392, Training loss 2.428631, Validation loss 6.568994\n", "Epoch 4393, Training loss 2.428630, Validation loss 6.568979\n", "Epoch 4394, Training loss 2.428631, Validation loss 6.568974\n", "Epoch 4395, Training loss 2.428631, Validation loss 6.568989\n", "Epoch 4396, Training loss 2.428632, Validation loss 6.568979\n", "Epoch 4397, Training loss 2.428632, Validation loss 6.568994\n", "Epoch 4398, Training loss 2.428631, Validation loss 6.568989\n", "Epoch 4399, Training loss 2.428631, Validation loss 6.568974\n", "Epoch 4400, Training loss 2.428632, Validation loss 6.568989\n", "Epoch 4401, Training loss 2.428632, Validation loss 6.568989\n", "Epoch 4402, Training loss 2.428631, Validation loss 6.568989\n", "Epoch 4403, Training loss 2.428631, Validation loss 6.568989\n", "Epoch 4404, Training loss 2.428632, Validation loss 6.568994\n", "Epoch 4405, Training loss 2.428631, Validation loss 6.568989\n", "Epoch 4406, Training loss 2.428631, Validation loss 6.568985\n", "Epoch 4407, Training loss 2.428632, Validation loss 6.568989\n", "Epoch 4408, Training loss 2.428632, Validation loss 6.568989\n", "Epoch 4409, Training loss 2.428631, Validation loss 6.568985\n", "Epoch 4410, Training loss 2.428631, Validation loss 6.568989\n", "Epoch 4411, Training loss 2.428632, Validation loss 6.568985\n", "Epoch 4412, Training loss 2.428631, Validation loss 6.568999\n", "Epoch 4413, Training loss 2.428630, Validation loss 6.568985\n", "Epoch 4414, Training loss 2.428632, Validation loss 6.568999\n", "Epoch 4415, Training loss 2.428632, Validation loss 6.568999\n", "Epoch 4416, Training loss 2.428631, Validation loss 6.568985\n", "Epoch 4417, Training loss 2.428631, Validation loss 6.568999\n", "Epoch 4418, Training loss 2.428631, Validation loss 6.568995\n", "Epoch 4419, Training loss 2.428630, Validation loss 6.568999\n", "Epoch 4420, Training loss 2.428631, Validation loss 6.568999\n", "Epoch 4421, Training loss 2.428632, Validation loss 6.568999\n", "Epoch 4422, Training loss 2.428632, Validation loss 6.568999\n", "Epoch 4423, Training loss 2.428632, Validation loss 6.568995\n", "Epoch 4424, Training loss 2.428631, Validation loss 6.569009\n", "Epoch 4425, Training loss 2.428631, Validation loss 6.568995\n", "Epoch 4426, Training loss 2.428631, Validation loss 6.569009\n", "Epoch 4427, Training loss 2.428632, Validation loss 6.569009\n", "Epoch 4428, Training loss 2.428631, Validation loss 
6.569009\n", "Epoch 4429, Training loss 2.428631, Validation loss 6.569009\n", "Epoch 4430, Training loss 2.428631, Validation loss 6.569005\n", "Epoch 4431, Training loss 2.428632, Validation loss 6.569009\n", "Epoch 4432, Training loss 2.428631, Validation loss 6.569005\n", "Epoch 4433, Training loss 2.428631, Validation loss 6.569009\n", "Epoch 4434, Training loss 2.428632, Validation loss 6.569009\n", "Epoch 4435, Training loss 2.428632, Validation loss 6.569005\n", "Epoch 4436, Training loss 2.428631, Validation loss 6.569009\n", "Epoch 4437, Training loss 2.428630, Validation loss 6.569005\n", "Epoch 4438, Training loss 2.428631, Validation loss 6.569020\n", "Epoch 4439, Training loss 2.428631, Validation loss 6.569009\n", "Epoch 4440, Training loss 2.428630, Validation loss 6.569005\n", "Epoch 4441, Training loss 2.428632, Validation loss 6.569020\n", "Epoch 4442, Training loss 2.428632, Validation loss 6.569005\n", "Epoch 4443, Training loss 2.428630, Validation loss 6.569020\n", "Epoch 4444, Training loss 2.428631, Validation loss 6.569015\n", "Epoch 4445, Training loss 2.428632, Validation loss 6.569005\n", "Epoch 4446, Training loss 2.428631, Validation loss 6.569020\n", "Epoch 4447, Training loss 2.428630, Validation loss 6.569015\n", "Epoch 4448, Training loss 2.428632, Validation loss 6.569020\n", "Epoch 4449, Training loss 2.428631, Validation loss 6.569015\n", "Epoch 4450, Training loss 2.428630, Validation loss 6.569020\n", "Epoch 4451, Training loss 2.428632, Validation loss 6.569020\n", "Epoch 4452, Training loss 2.428631, Validation loss 6.569030\n", "Epoch 4453, Training loss 2.428630, Validation loss 6.569020\n", "Epoch 4454, Training loss 2.428631, Validation loss 6.569015\n", "Epoch 4455, Training loss 2.428632, Validation loss 6.569030\n", "Epoch 4456, Training loss 2.428631, Validation loss 6.569015\n", "Epoch 4457, Training loss 2.428632, Validation loss 6.569030\n", "Epoch 4458, Training loss 2.428631, Validation loss 6.569030\n", "Epoch 4459, Training loss 2.428632, Validation loss 6.569026\n", "Epoch 4460, Training loss 2.428631, Validation loss 6.569030\n", "Epoch 4461, Training loss 2.428632, Validation loss 6.569045\n", "Epoch 4462, Training loss 2.428632, Validation loss 6.569030\n", "Epoch 4463, Training loss 2.428630, Validation loss 6.569026\n", "Epoch 4464, Training loss 2.428632, Validation loss 6.569040\n", "Epoch 4465, Training loss 2.428632, Validation loss 6.569030\n", "Epoch 4466, Training loss 2.428630, Validation loss 6.569040\n", "Epoch 4467, Training loss 2.428630, Validation loss 6.569040\n", "Epoch 4468, Training loss 2.428631, Validation loss 6.569026\n", "Epoch 4469, Training loss 2.428632, Validation loss 6.569026\n", "Epoch 4470, Training loss 2.428631, Validation loss 6.569040\n", "Epoch 4471, Training loss 2.428631, Validation loss 6.569040\n", "Epoch 4472, Training loss 2.428630, Validation loss 6.569040\n", "Epoch 4473, Training loss 2.428631, Validation loss 6.569036\n", "Epoch 4474, Training loss 2.428632, Validation loss 6.569040\n", "Epoch 4475, Training loss 2.428632, Validation loss 6.569036\n", "Epoch 4476, Training loss 2.428631, Validation loss 6.569040\n", "Epoch 4477, Training loss 2.428630, Validation loss 6.569040\n", "Epoch 4478, Training loss 2.428632, Validation loss 6.569036\n", "Epoch 4479, Training loss 2.428631, Validation loss 6.569040\n", "Epoch 4480, Training loss 2.428631, Validation loss 6.569036\n", "Epoch 4481, Training loss 2.428631, Validation loss 6.569050\n", "Epoch 4482, Training loss 2.428631, 
Validation loss 6.569036\n", "Epoch 4483, Training loss 2.428631, Validation loss 6.569050\n", "Epoch 4484, Training loss 2.428632, Validation loss 6.569050\n", "Epoch 4485, Training loss 2.428632, Validation loss 6.569036\n", "Epoch 4486, Training loss 2.428631, Validation loss 6.569050\n", "Epoch 4487, Training loss 2.428631, Validation loss 6.569046\n", "Epoch 4488, Training loss 2.428632, Validation loss 6.569050\n", "Epoch 4489, Training loss 2.428632, Validation loss 6.569050\n", "Epoch 4490, Training loss 2.428630, Validation loss 6.569050\n", "Epoch 4491, Training loss 2.428631, Validation loss 6.569050\n", "Epoch 4492, Training loss 2.428631, Validation loss 6.569046\n", "Epoch 4493, Training loss 2.428632, Validation loss 6.569050\n", "Epoch 4494, Training loss 2.428631, Validation loss 6.569046\n", "Epoch 4495, Training loss 2.428631, Validation loss 6.569061\n", "Epoch 4496, Training loss 2.428631, Validation loss 6.569061\n", "Epoch 4497, Training loss 2.428630, Validation loss 6.569046\n", "Epoch 4498, Training loss 2.428631, Validation loss 6.569061\n", "Epoch 4499, Training loss 2.428631, Validation loss 6.569056\n", "Epoch 4500, Training loss 2.428630, Validation loss 6.569061\n", "Epoch 4501, Training loss 2.428632, Validation loss 6.569056\n", "Epoch 4502, Training loss 2.428631, Validation loss 6.569056\n", "Epoch 4503, Training loss 2.428631, Validation loss 6.569061\n", "Epoch 4504, Training loss 2.428630, Validation loss 6.569056\n", "Epoch 4505, Training loss 2.428632, Validation loss 6.569061\n", "Epoch 4506, Training loss 2.428631, Validation loss 6.569056\n", "Epoch 4507, Training loss 2.428631, Validation loss 6.569071\n", "Epoch 4508, Training loss 2.428632, Validation loss 6.569061\n", "Epoch 4509, Training loss 2.428631, Validation loss 6.569056\n", "Epoch 4510, Training loss 2.428631, Validation loss 6.569071\n", "Epoch 4511, Training loss 2.428631, Validation loss 6.569056\n", "Epoch 4512, Training loss 2.428632, Validation loss 6.569071\n", "Epoch 4513, Training loss 2.428631, Validation loss 6.569066\n", "Epoch 4514, Training loss 2.428633, Validation loss 6.569071\n", "Epoch 4515, Training loss 2.428632, Validation loss 6.569071\n", "Epoch 4516, Training loss 2.428631, Validation loss 6.569066\n", "Epoch 4517, Training loss 2.428631, Validation loss 6.569071\n", "Epoch 4518, Training loss 2.428632, Validation loss 6.569066\n", "Epoch 4519, Training loss 2.428631, Validation loss 6.569071\n", "Epoch 4520, Training loss 2.428631, Validation loss 6.569066\n", "Epoch 4521, Training loss 2.428631, Validation loss 6.569081\n", "Epoch 4522, Training loss 2.428632, Validation loss 6.569071\n", "Epoch 4523, Training loss 2.428631, Validation loss 6.569081\n", "Epoch 4524, Training loss 2.428633, Validation loss 6.569081\n", "Epoch 4525, Training loss 2.428631, Validation loss 6.569066\n", "Epoch 4526, Training loss 2.428631, Validation loss 6.569066\n", "Epoch 4527, Training loss 2.428631, Validation loss 6.569081\n", "Epoch 4528, Training loss 2.428632, Validation loss 6.569066\n", "Epoch 4529, Training loss 2.428631, Validation loss 6.569081\n", "Epoch 4530, Training loss 2.428630, Validation loss 6.569077\n", "Epoch 4531, Training loss 2.428631, Validation loss 6.569081\n", "Epoch 4532, Training loss 2.428631, Validation loss 6.569077\n", "Epoch 4533, Training loss 2.428632, Validation loss 6.569091\n", "Epoch 4534, Training loss 2.428631, Validation loss 6.569081\n", "Epoch 4535, Training loss 2.428631, Validation loss 6.569077\n", "Epoch 4536, Training loss 
2.428630, Validation loss 6.569091\n", "Epoch 4537, Training loss 2.428631, Validation loss 6.569077\n", "Epoch 4538, Training loss 2.428632, Validation loss 6.569091\n", "Epoch 4539, Training loss 2.428631, Validation loss 6.569087\n", "Epoch 4540, Training loss 2.428632, Validation loss 6.569077\n", "Epoch 4541, Training loss 2.428631, Validation loss 6.569091\n", "Epoch 4542, Training loss 2.428631, Validation loss 6.569087\n", "Epoch 4543, Training loss 2.428631, Validation loss 6.569091\n", "Epoch 4544, Training loss 2.428631, Validation loss 6.569087\n", "Epoch 4545, Training loss 2.428631, Validation loss 6.569091\n", "Epoch 4546, Training loss 2.428631, Validation loss 6.569091\n", "Epoch 4547, Training loss 2.428632, Validation loss 6.569087\n", "Epoch 4548, Training loss 2.428632, Validation loss 6.569091\n", "Epoch 4549, Training loss 2.428631, Validation loss 6.569087\n", "Epoch 4550, Training loss 2.428630, Validation loss 6.569102\n", "Epoch 4551, Training loss 2.428631, Validation loss 6.569087\n", "Epoch 4552, Training loss 2.428632, Validation loss 6.569102\n", "Epoch 4553, Training loss 2.428632, Validation loss 6.569102\n", "Epoch 4554, Training loss 2.428631, Validation loss 6.569087\n", "Epoch 4555, Training loss 2.428630, Validation loss 6.569102\n", "Epoch 4556, Training loss 2.428632, Validation loss 6.569097\n", "Epoch 4557, Training loss 2.428630, Validation loss 6.569102\n", "Epoch 4558, Training loss 2.428632, Validation loss 6.569097\n", "Epoch 4559, Training loss 2.428631, Validation loss 6.569097\n", "Epoch 4560, Training loss 2.428630, Validation loss 6.569102\n", "Epoch 4561, Training loss 2.428631, Validation loss 6.569097\n", "Epoch 4562, Training loss 2.428631, Validation loss 6.569102\n", "Epoch 4563, Training loss 2.428631, Validation loss 6.569097\n", "Epoch 4564, Training loss 2.428630, Validation loss 6.569097\n", "Epoch 4565, Training loss 2.428632, Validation loss 6.569112\n", "Epoch 4566, Training loss 2.428632, Validation loss 6.569097\n", "Epoch 4567, Training loss 2.428630, Validation loss 6.569112\n", "Epoch 4568, Training loss 2.428631, Validation loss 6.569107\n", "Epoch 4569, Training loss 2.428630, Validation loss 6.569112\n", "Epoch 4570, Training loss 2.428631, Validation loss 6.569107\n", "Epoch 4571, Training loss 2.428631, Validation loss 6.569107\n", "Epoch 4572, Training loss 2.428632, Validation loss 6.569122\n", "Epoch 4573, Training loss 2.428631, Validation loss 6.569117\n", "Epoch 4574, Training loss 2.428631, Validation loss 6.569112\n", "Epoch 4575, Training loss 2.428632, Validation loss 6.569127\n", "Epoch 4576, Training loss 2.428632, Validation loss 6.569127\n", "Epoch 4577, Training loss 2.428631, Validation loss 6.569122\n", "Epoch 4578, Training loss 2.428631, Validation loss 6.569117\n", "Epoch 4579, Training loss 2.428631, Validation loss 6.569117\n", "Epoch 4580, Training loss 2.428631, Validation loss 6.569122\n", "Epoch 4581, Training loss 2.428632, Validation loss 6.569132\n", "Epoch 4582, Training loss 2.428631, Validation loss 6.569127\n", "Epoch 4583, Training loss 2.428631, Validation loss 6.569122\n", "Epoch 4584, Training loss 2.428632, Validation loss 6.569132\n", "Epoch 4585, Training loss 2.428631, Validation loss 6.569137\n", "Epoch 4586, Training loss 2.428631, Validation loss 6.569127\n", "Epoch 4587, Training loss 2.428630, Validation loss 6.569132\n", "Epoch 4588, Training loss 2.428631, Validation loss 6.569142\n", "Epoch 4589, Training loss 2.428631, Validation loss 6.569147\n", "Epoch 4590, 
Training loss 2.428631, Validation loss 6.569127\n", "Epoch 4591, Training loss 2.428632, Validation loss 6.569122\n", "Epoch 4592, Training loss 2.428631, Validation loss 6.569132\n", "Epoch 4593, Training loss 2.428630, Validation loss 6.569137\n", "Epoch 4594, Training loss 2.428630, Validation loss 6.569147\n", "Epoch 4595, Training loss 2.428632, Validation loss 6.569142\n", "Epoch 4596, Training loss 2.428632, Validation loss 6.569132\n", "Epoch 4597, Training loss 2.428631, Validation loss 6.569137\n", "Epoch 4598, Training loss 2.428630, Validation loss 6.569147\n", "Epoch 4599, Training loss 2.428630, Validation loss 6.569137\n", "Epoch 4600, Training loss 2.428631, Validation loss 6.569142\n", "Epoch 4601, Training loss 2.428631, Validation loss 6.569142\n", "Epoch 4602, Training loss 2.428632, Validation loss 6.569147\n", "Epoch 4603, Training loss 2.428632, Validation loss 6.569137\n", "Epoch 4604, Training loss 2.428631, Validation loss 6.569142\n", "Epoch 4605, Training loss 2.428631, Validation loss 6.569152\n", "Epoch 4606, Training loss 2.428631, Validation loss 6.569142\n", "Epoch 4607, Training loss 2.428630, Validation loss 6.569137\n", "Epoch 4608, Training loss 2.428631, Validation loss 6.569147\n", "Epoch 4609, Training loss 2.428631, Validation loss 6.569137\n", "Epoch 4610, Training loss 2.428631, Validation loss 6.569142\n", "Epoch 4611, Training loss 2.428631, Validation loss 6.569147\n", "Epoch 4612, Training loss 2.428631, Validation loss 6.569147\n", "Epoch 4613, Training loss 2.428631, Validation loss 6.569137\n", "Epoch 4614, Training loss 2.428631, Validation loss 6.569142\n", "Epoch 4615, Training loss 2.428632, Validation loss 6.569152\n", "Epoch 4616, Training loss 2.428631, Validation loss 6.569157\n", "Epoch 4617, Training loss 2.428631, Validation loss 6.569147\n", "Epoch 4618, Training loss 2.428632, Validation loss 6.569152\n", "Epoch 4619, Training loss 2.428631, Validation loss 6.569152\n", "Epoch 4620, Training loss 2.428630, Validation loss 6.569157\n", "Epoch 4621, Training loss 2.428630, Validation loss 6.569147\n", "Epoch 4622, Training loss 2.428631, Validation loss 6.569157\n", "Epoch 4623, Training loss 2.428632, Validation loss 6.569162\n", "Epoch 4624, Training loss 2.428632, Validation loss 6.569152\n", "Epoch 4625, Training loss 2.428632, Validation loss 6.569152\n", "Epoch 4626, Training loss 2.428631, Validation loss 6.569157\n", "Epoch 4627, Training loss 2.428631, Validation loss 6.569147\n", "Epoch 4628, Training loss 2.428631, Validation loss 6.569152\n", "Epoch 4629, Training loss 2.428632, Validation loss 6.569162\n", "Epoch 4630, Training loss 2.428631, Validation loss 6.569157\n", "Epoch 4631, Training loss 2.428631, Validation loss 6.569147\n", "Epoch 4632, Training loss 2.428631, Validation loss 6.569157\n", "Epoch 4633, Training loss 2.428631, Validation loss 6.569162\n", "Epoch 4634, Training loss 2.428630, Validation loss 6.569167\n", "Epoch 4635, Training loss 2.428630, Validation loss 6.569158\n", "Epoch 4636, Training loss 2.428631, Validation loss 6.569157\n", "Epoch 4637, Training loss 2.428632, Validation loss 6.569162\n", "Epoch 4638, Training loss 2.428632, Validation loss 6.569152\n", "Epoch 4639, Training loss 2.428631, Validation loss 6.569162\n", "Epoch 4640, Training loss 2.428631, Validation loss 6.569167\n", "Epoch 4641, Training loss 2.428632, Validation loss 6.569162\n", "Epoch 4642, Training loss 2.428632, Validation loss 6.569152\n", "Epoch 4643, Training loss 2.428632, Validation loss 6.569162\n", 
"Epoch 4644, Training loss 2.428631, Validation loss 6.569167\n", "Epoch 4645, Training loss 2.428631, Validation loss 6.569158\n", "Epoch 4646, Training loss 2.428630, Validation loss 6.569167\n", "Epoch 4647, Training loss 2.428630, Validation loss 6.569158\n", "Epoch 4648, Training loss 2.428630, Validation loss 6.569152\n", "Epoch 4649, Training loss 2.428631, Validation loss 6.569158\n", "Epoch 4650, Training loss 2.428631, Validation loss 6.569167\n", "Epoch 4651, Training loss 2.428631, Validation loss 6.569158\n", "Epoch 4652, Training loss 2.428631, Validation loss 6.569162\n", "Epoch 4653, Training loss 2.428631, Validation loss 6.569162\n", "Epoch 4654, Training loss 2.428631, Validation loss 6.569152\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 4655, Training loss 2.428630, Validation loss 6.569158\n", "Epoch 4656, Training loss 2.428631, Validation loss 6.569162\n", "Epoch 4657, Training loss 2.428632, Validation loss 6.569172\n", "Epoch 4658, Training loss 2.428632, Validation loss 6.569162\n", "Epoch 4659, Training loss 2.428631, Validation loss 6.569158\n", "Epoch 4660, Training loss 2.428630, Validation loss 6.569167\n", "Epoch 4661, Training loss 2.428630, Validation loss 6.569172\n", "Epoch 4662, Training loss 2.428630, Validation loss 6.569162\n", "Epoch 4663, Training loss 2.428631, Validation loss 6.569172\n", "Epoch 4664, Training loss 2.428631, Validation loss 6.569162\n", "Epoch 4665, Training loss 2.428631, Validation loss 6.569158\n", "Epoch 4666, Training loss 2.428630, Validation loss 6.569162\n", "Epoch 4667, Training loss 2.428631, Validation loss 6.569172\n", "Epoch 4668, Training loss 2.428631, Validation loss 6.569178\n", "Epoch 4669, Training loss 2.428631, Validation loss 6.569168\n", "Epoch 4670, Training loss 2.428632, Validation loss 6.569167\n", "Epoch 4671, Training loss 2.428631, Validation loss 6.569172\n", "Epoch 4672, Training loss 2.428631, Validation loss 6.569162\n", "Epoch 4673, Training loss 2.428630, Validation loss 6.569168\n", "Epoch 4674, Training loss 2.428630, Validation loss 6.569178\n", "Epoch 4675, Training loss 2.428630, Validation loss 6.569168\n", "Epoch 4676, Training loss 2.428631, Validation loss 6.569162\n", "Epoch 4677, Training loss 2.428631, Validation loss 6.569172\n", "Epoch 4678, Training loss 2.428632, Validation loss 6.569178\n", "Epoch 4679, Training loss 2.428632, Validation loss 6.569168\n", "Epoch 4680, Training loss 2.428631, Validation loss 6.569173\n", "Epoch 4681, Training loss 2.428630, Validation loss 6.569172\n", "Epoch 4682, Training loss 2.428630, Validation loss 6.569162\n", "Epoch 4683, Training loss 2.428632, Validation loss 6.569168\n", "Epoch 4684, Training loss 2.428630, Validation loss 6.569178\n", "Epoch 4685, Training loss 2.428630, Validation loss 6.569168\n", "Epoch 4686, Training loss 2.428630, Validation loss 6.569173\n", "Epoch 4687, Training loss 2.428631, Validation loss 6.569168\n", "Epoch 4688, Training loss 2.428631, Validation loss 6.569178\n", "Epoch 4689, Training loss 2.428632, Validation loss 6.569182\n", "Epoch 4690, Training loss 2.428632, Validation loss 6.569173\n", "Epoch 4691, Training loss 2.428631, Validation loss 6.569182\n", "Epoch 4692, Training loss 2.428631, Validation loss 6.569188\n", "Epoch 4693, Training loss 2.428631, Validation loss 6.569178\n", "Epoch 4694, Training loss 2.428630, Validation loss 6.569173\n", "Epoch 4695, Training loss 2.428631, Validation loss 6.569182\n", "Epoch 4696, Training loss 2.428631, Validation loss 
6.569173\n", "Epoch 4697, Training loss 2.428632, Validation loss 6.569178\n", "Epoch 4698, Training loss 2.428631, Validation loss 6.569188\n", "Epoch 4699, Training loss 2.428631, Validation loss 6.569182\n", "Epoch 4700, Training loss 2.428631, Validation loss 6.569173\n", "Epoch 4701, Training loss 2.428631, Validation loss 6.569182\n", "Epoch 4702, Training loss 2.428632, Validation loss 6.569188\n", "Epoch 4703, Training loss 2.428632, Validation loss 6.569178\n", "Epoch 4704, Training loss 2.428631, Validation loss 6.569183\n", "Epoch 4705, Training loss 2.428631, Validation loss 6.569182\n", "Epoch 4706, Training loss 2.428631, Validation loss 6.569188\n", "Epoch 4707, Training loss 2.428631, Validation loss 6.569178\n", "Epoch 4708, Training loss 2.428631, Validation loss 6.569188\n", "Epoch 4709, Training loss 2.428632, Validation loss 6.569193\n", "Epoch 4710, Training loss 2.428632, Validation loss 6.569183\n", "Epoch 4711, Training loss 2.428631, Validation loss 6.569178\n", "Epoch 4712, Training loss 2.428630, Validation loss 6.569188\n", "Epoch 4713, Training loss 2.428630, Validation loss 6.569178\n", "Epoch 4714, Training loss 2.428630, Validation loss 6.569183\n", "Epoch 4715, Training loss 2.428631, Validation loss 6.569193\n", "Epoch 4716, Training loss 2.428632, Validation loss 6.569188\n", "Epoch 4717, Training loss 2.428632, Validation loss 6.569178\n", "Epoch 4718, Training loss 2.428632, Validation loss 6.569183\n", "Epoch 4719, Training loss 2.428631, Validation loss 6.569193\n", "Epoch 4720, Training loss 2.428631, Validation loss 6.569183\n", "Epoch 4721, Training loss 2.428631, Validation loss 6.569188\n", "Epoch 4722, Training loss 2.428631, Validation loss 6.569188\n", "Epoch 4723, Training loss 2.428631, Validation loss 6.569178\n", "Epoch 4724, Training loss 2.428630, Validation loss 6.569183\n", "Epoch 4725, Training loss 2.428631, Validation loss 6.569188\n", "Epoch 4726, Training loss 2.428631, Validation loss 6.569198\n", "Epoch 4727, Training loss 2.428631, Validation loss 6.569193\n", "Epoch 4728, Training loss 2.428631, Validation loss 6.569183\n", "Epoch 4729, Training loss 2.428632, Validation loss 6.569193\n", "Epoch 4730, Training loss 2.428631, Validation loss 6.569198\n", "Epoch 4731, Training loss 2.428631, Validation loss 6.569188\n", "Epoch 4732, Training loss 2.428630, Validation loss 6.569193\n", "Epoch 4733, Training loss 2.428631, Validation loss 6.569193\n", "Epoch 4734, Training loss 2.428631, Validation loss 6.569183\n", "Epoch 4735, Training loss 2.428631, Validation loss 6.569188\n", "Epoch 4736, Training loss 2.428632, Validation loss 6.569198\n", "Epoch 4737, Training loss 2.428631, Validation loss 6.569203\n", "Epoch 4738, Training loss 2.428631, Validation loss 6.569193\n", "Epoch 4739, Training loss 2.428631, Validation loss 6.569193\n", "Epoch 4740, Training loss 2.428630, Validation loss 6.569198\n", "Epoch 4741, Training loss 2.428630, Validation loss 6.569188\n", "Epoch 4742, Training loss 2.428630, Validation loss 6.569193\n", "Epoch 4743, Training loss 2.428631, Validation loss 6.569203\n", "Epoch 4744, Training loss 2.428631, Validation loss 6.569193\n", "Epoch 4745, Training loss 2.428632, Validation loss 6.569188\n", "Epoch 4746, Training loss 2.428632, Validation loss 6.569198\n", "Epoch 4747, Training loss 2.428631, Validation loss 6.569203\n", "Epoch 4748, Training loss 2.428631, Validation loss 6.569193\n", "Epoch 4749, Training loss 2.428632, Validation loss 6.569199\n", "Epoch 4750, Training loss 2.428630, 
Validation loss 6.569198\n", "Epoch 4751, Training loss 2.428630, Validation loss 6.569188\n", "Epoch 4752, Training loss 2.428630, Validation loss 6.569193\n", "Epoch 4753, Training loss 2.428631, Validation loss 6.569203\n", "Epoch 4754, Training loss 2.428631, Validation loss 6.569208\n", "Epoch 4755, Training loss 2.428632, Validation loss 6.569203\n", "Epoch 4756, Training loss 2.428632, Validation loss 6.569193\n", "Epoch 4757, Training loss 2.428631, Validation loss 6.569203\n", "Epoch 4758, Training loss 2.428631, Validation loss 6.569208\n", "Epoch 4759, Training loss 2.428631, Validation loss 6.569199\n", "Epoch 4760, Training loss 2.428631, Validation loss 6.569208\n", "Epoch 4761, Training loss 2.428631, Validation loss 6.569213\n", "Epoch 4762, Training loss 2.428631, Validation loss 6.569203\n", "Epoch 4763, Training loss 2.428631, Validation loss 6.569199\n", "Epoch 4764, Training loss 2.428632, Validation loss 6.569208\n", "Epoch 4765, Training loss 2.428631, Validation loss 6.569213\n", "Epoch 4766, Training loss 2.428631, Validation loss 6.569203\n", "Epoch 4767, Training loss 2.428630, Validation loss 6.569213\n", "Epoch 4768, Training loss 2.428630, Validation loss 6.569203\n", "Epoch 4769, Training loss 2.428630, Validation loss 6.569199\n", "Epoch 4770, Training loss 2.428630, Validation loss 6.569203\n", "Epoch 4771, Training loss 2.428630, Validation loss 6.569194\n", "Epoch 4772, Training loss 2.428631, Validation loss 6.569203\n", "Epoch 4773, Training loss 2.428631, Validation loss 6.569209\n", "Epoch 4774, Training loss 2.428631, Validation loss 6.569208\n", "Epoch 4775, Training loss 2.428631, Validation loss 6.569199\n", "Epoch 4776, Training loss 2.428630, Validation loss 6.569203\n", "Epoch 4777, Training loss 2.428631, Validation loss 6.569213\n", "Epoch 4778, Training loss 2.428631, Validation loss 6.569203\n", "Epoch 4779, Training loss 2.428631, Validation loss 6.569209\n", "Epoch 4780, Training loss 2.428631, Validation loss 6.569203\n", "Epoch 4781, Training loss 2.428630, Validation loss 6.569213\n", "Epoch 4782, Training loss 2.428630, Validation loss 6.569203\n", "Epoch 4783, Training loss 2.428631, Validation loss 6.569209\n", "Epoch 4784, Training loss 2.428632, Validation loss 6.569219\n", "Epoch 4785, Training loss 2.428631, Validation loss 6.569213\n", "Epoch 4786, Training loss 2.428631, Validation loss 6.569203\n", "Epoch 4787, Training loss 2.428630, Validation loss 6.569209\n", "Epoch 4788, Training loss 2.428630, Validation loss 6.569219\n", "Epoch 4789, Training loss 2.428630, Validation loss 6.569209\n", "Epoch 4790, Training loss 2.428632, Validation loss 6.569214\n", "Epoch 4791, Training loss 2.428631, Validation loss 6.569213\n", "Epoch 4792, Training loss 2.428631, Validation loss 6.569203\n", "Epoch 4793, Training loss 2.428630, Validation loss 6.569209\n", "Epoch 4794, Training loss 2.428631, Validation loss 6.569214\n", "Epoch 4795, Training loss 2.428630, Validation loss 6.569223\n", "Epoch 4796, Training loss 2.428632, Validation loss 6.569219\n", "Epoch 4797, Training loss 2.428632, Validation loss 6.569209\n", "Epoch 4798, Training loss 2.428632, Validation loss 6.569219\n", "Epoch 4799, Training loss 2.428631, Validation loss 6.569223\n", "Epoch 4800, Training loss 2.428631, Validation loss 6.569214\n", "Epoch 4801, Training loss 2.428630, Validation loss 6.569219\n", "Epoch 4802, Training loss 2.428631, Validation loss 6.569219\n", "Epoch 4803, Training loss 2.428631, Validation loss 6.569209\n", "Epoch 4804, Training loss 
2.428632, Validation loss 6.569214\n", "Epoch 4805, Training loss 2.428631, Validation loss 6.569223\n", "Epoch 4806, Training loss 2.428630, Validation loss 6.569229\n", "Epoch 4807, Training loss 2.428630, Validation loss 6.569219\n", "Epoch 4808, Training loss 2.428630, Validation loss 6.569214\n", "Epoch 4809, Training loss 2.428632, Validation loss 6.569223\n", "Epoch 4810, Training loss 2.428632, Validation loss 6.569214\n", "Epoch 4811, Training loss 2.428632, Validation loss 6.569219\n", "Epoch 4812, Training loss 2.428632, Validation loss 6.569229\n", "Epoch 4813, Training loss 2.428631, Validation loss 6.569223\n", "Epoch 4814, Training loss 2.428631, Validation loss 6.569214\n", "Epoch 4815, Training loss 2.428630, Validation loss 6.569223\n", "Epoch 4816, Training loss 2.428631, Validation loss 6.569229\n", "Epoch 4817, Training loss 2.428631, Validation loss 6.569219\n", "Epoch 4818, Training loss 2.428631, Validation loss 6.569224\n", "Epoch 4819, Training loss 2.428631, Validation loss 6.569223\n", "Epoch 4820, Training loss 2.428630, Validation loss 6.569229\n", "Epoch 4821, Training loss 2.428630, Validation loss 6.569219\n", "Epoch 4822, Training loss 2.428631, Validation loss 6.569229\n", "Epoch 4823, Training loss 2.428632, Validation loss 6.569234\n", "Epoch 4824, Training loss 2.428632, Validation loss 6.569224\n", "Epoch 4825, Training loss 2.428631, Validation loss 6.569219\n", "Epoch 4826, Training loss 2.428631, Validation loss 6.569229\n", "Epoch 4827, Training loss 2.428631, Validation loss 6.569219\n", "Epoch 4828, Training loss 2.428631, Validation loss 6.569224\n", "Epoch 4829, Training loss 2.428630, Validation loss 6.569234\n", "Epoch 4830, Training loss 2.428630, Validation loss 6.569224\n", "Epoch 4831, Training loss 2.428630, Validation loss 6.569229\n", "Epoch 4832, Training loss 2.428631, Validation loss 6.569224\n", "Epoch 4833, Training loss 2.428630, Validation loss 6.569234\n", "Epoch 4834, Training loss 2.428630, Validation loss 6.569224\n", "Epoch 4835, Training loss 2.428630, Validation loss 6.569229\n", "Epoch 4836, Training loss 2.428632, Validation loss 6.569239\n", "Epoch 4837, Training loss 2.428632, Validation loss 6.569229\n", "Epoch 4838, Training loss 2.428631, Validation loss 6.569224\n", "Epoch 4839, Training loss 2.428631, Validation loss 6.569229\n", "Epoch 4840, Training loss 2.428631, Validation loss 6.569220\n", "Epoch 4841, Training loss 2.428631, Validation loss 6.569229\n", "Epoch 4842, Training loss 2.428630, Validation loss 6.569234\n", "Epoch 4843, Training loss 2.428631, Validation loss 6.569234\n", "Epoch 4844, Training loss 2.428631, Validation loss 6.569224\n", "Epoch 4845, Training loss 2.428631, Validation loss 6.569229\n", "Epoch 4846, Training loss 2.428631, Validation loss 6.569234\n", "Epoch 4847, Training loss 2.428631, Validation loss 6.569244\n", "Epoch 4848, Training loss 2.428631, Validation loss 6.569234\n", "Epoch 4849, Training loss 2.428630, Validation loss 6.569229\n", "Epoch 4850, Training loss 2.428631, Validation loss 6.569239\n", "Epoch 4851, Training loss 2.428631, Validation loss 6.569229\n", "Epoch 4852, Training loss 2.428631, Validation loss 6.569234\n", "Epoch 4853, Training loss 2.428631, Validation loss 6.569244\n", "Epoch 4854, Training loss 2.428631, Validation loss 6.569239\n", "Epoch 4855, Training loss 2.428631, Validation loss 6.569229\n", "Epoch 4856, Training loss 2.428630, Validation loss 6.569234\n", "Epoch 4857, Training loss 2.428631, Validation loss 6.569244\n", "Epoch 4858, 
Training loss 2.428631, Validation loss 6.569234\n", "Epoch 4859, Training loss 2.428631, Validation loss 6.569240\n", "Epoch 4860, Training loss 2.428630, Validation loss 6.569239\n", "Epoch 4861, Training loss 2.428630, Validation loss 6.569229\n", "Epoch 4862, Training loss 2.428630, Validation loss 6.569234\n", "Epoch 4863, Training loss 2.428632, Validation loss 6.569240\n", "Epoch 4864, Training loss 2.428632, Validation loss 6.569249\n", "Epoch 4865, Training loss 2.428632, Validation loss 6.569240\n", "Epoch 4866, Training loss 2.428632, Validation loss 6.569234\n", "Epoch 4867, Training loss 2.428631, Validation loss 6.569244\n", "Epoch 4868, Training loss 2.428631, Validation loss 6.569234\n", "Epoch 4869, Training loss 2.428630, Validation loss 6.569240\n", "Epoch 4870, Training loss 2.428631, Validation loss 6.569244\n", "Epoch 4871, Training loss 2.428632, Validation loss 6.569244\n", "Epoch 4872, Training loss 2.428632, Validation loss 6.569234\n", "Epoch 4873, Training loss 2.428632, Validation loss 6.569240\n", "Epoch 4874, Training loss 2.428631, Validation loss 6.569249\n", "Epoch 4875, Training loss 2.428630, Validation loss 6.569254\n", "Epoch 4876, Training loss 2.428630, Validation loss 6.569244\n", "Epoch 4877, Training loss 2.428631, Validation loss 6.569240\n", "Epoch 4878, Training loss 2.428631, Validation loss 6.569249\n", "Epoch 4879, Training loss 2.428631, Validation loss 6.569240\n", "Epoch 4880, Training loss 2.428631, Validation loss 6.569244\n", "Epoch 4881, Training loss 2.428631, Validation loss 6.569254\n", "Epoch 4882, Training loss 2.428630, Validation loss 6.569249\n", "Epoch 4883, Training loss 2.428630, Validation loss 6.569240\n", "Epoch 4884, Training loss 2.428632, Validation loss 6.569249\n", "Epoch 4885, Training loss 2.428632, Validation loss 6.569240\n", "Epoch 4886, Training loss 2.428631, Validation loss 6.569244\n", "Epoch 4887, Training loss 2.428630, Validation loss 6.569250\n", "Epoch 4888, Training loss 2.428630, Validation loss 6.569249\n", "Epoch 4889, Training loss 2.428630, Validation loss 6.569240\n", "Epoch 4890, Training loss 2.428631, Validation loss 6.569244\n", "Epoch 4891, Training loss 2.428632, Validation loss 6.569254\n", "Epoch 4892, Training loss 2.428632, Validation loss 6.569244\n", "Epoch 4893, Training loss 2.428632, Validation loss 6.569250\n", "Epoch 4894, Training loss 2.428630, Validation loss 6.569244\n", "Epoch 4895, Training loss 2.428630, Validation loss 6.569254\n", "Epoch 4896, Training loss 2.428630, Validation loss 6.569244\n", "Epoch 4897, Training loss 2.428631, Validation loss 6.569250\n", "Epoch 4898, Training loss 2.428630, Validation loss 6.569260\n", "Epoch 4899, Training loss 2.428630, Validation loss 6.569250\n", "Epoch 4900, Training loss 2.428631, Validation loss 6.569255\n", "Epoch 4901, Training loss 2.428631, Validation loss 6.569250\n", "Epoch 4902, Training loss 2.428631, Validation loss 6.569260\n", "Epoch 4903, Training loss 2.428631, Validation loss 6.569250\n", "Epoch 4904, Training loss 2.428632, Validation loss 6.569255\n", "Epoch 4905, Training loss 2.428631, Validation loss 6.569264\n", "Epoch 4906, Training loss 2.428631, Validation loss 6.569255\n", "Epoch 4907, Training loss 2.428631, Validation loss 6.569250\n", "Epoch 4908, Training loss 2.428629, Validation loss 6.569255\n", "Epoch 4909, Training loss 2.428629, Validation loss 6.569245\n", "Epoch 4910, Training loss 2.428631, Validation loss 6.569255\n", "Epoch 4911, Training loss 2.428632, Validation loss 6.569260\n", 
"Epoch 4912, Training loss 2.428631, Validation loss 6.569260\n", "Epoch 4913, Training loss 2.428631, Validation loss 6.569250\n", "Epoch 4914, Training loss 2.428631, Validation loss 6.569255\n", "Epoch 4915, Training loss 2.428630, Validation loss 6.569260\n", "Epoch 4916, Training loss 2.428630, Validation loss 6.569270\n", "Epoch 4917, Training loss 2.428630, Validation loss 6.569260\n", "Epoch 4918, Training loss 2.428631, Validation loss 6.569255\n", "Epoch 4919, Training loss 2.428632, Validation loss 6.569264\n", "Epoch 4920, Training loss 2.428632, Validation loss 6.569255\n", "Epoch 4921, Training loss 2.428631, Validation loss 6.569260\n", "Epoch 4922, Training loss 2.428631, Validation loss 6.569270\n", "Epoch 4923, Training loss 2.428631, Validation loss 6.569264\n", "Epoch 4924, Training loss 2.428631, Validation loss 6.569255\n", "Epoch 4925, Training loss 2.428631, Validation loss 6.569260\n", "Epoch 4926, Training loss 2.428630, Validation loss 6.569270\n", "Epoch 4927, Training loss 2.428630, Validation loss 6.569260\n", "Epoch 4928, Training loss 2.428631, Validation loss 6.569265\n", "Epoch 4929, Training loss 2.428631, Validation loss 6.569264\n", "Epoch 4930, Training loss 2.428632, Validation loss 6.569270\n", "Epoch 4931, Training loss 2.428632, Validation loss 6.569260\n", "Epoch 4932, Training loss 2.428632, Validation loss 6.569265\n", "Epoch 4933, Training loss 2.428630, Validation loss 6.569275\n", "Epoch 4934, Training loss 2.428630, Validation loss 6.569265\n", "Epoch 4935, Training loss 2.428631, Validation loss 6.569260\n", "Epoch 4936, Training loss 2.428630, Validation loss 6.569270\n", "Epoch 4937, Training loss 2.428630, Validation loss 6.569260\n", "Epoch 4938, Training loss 2.428632, Validation loss 6.569265\n", "Epoch 4939, Training loss 2.428632, Validation loss 6.569270\n", "Epoch 4940, Training loss 2.428631, Validation loss 6.569270\n", "Epoch 4941, Training loss 2.428631, Validation loss 6.569275\n", "Epoch 4942, Training loss 2.428631, Validation loss 6.569265\n", "Epoch 4943, Training loss 2.428630, Validation loss 6.569275\n", "Epoch 4944, Training loss 2.428630, Validation loss 6.569265\n", "Epoch 4945, Training loss 2.428631, Validation loss 6.569270\n", "Epoch 4946, Training loss 2.428631, Validation loss 6.569265\n", "Epoch 4947, Training loss 2.428630, Validation loss 6.569275\n", "Epoch 4948, Training loss 2.428630, Validation loss 6.569265\n", "Epoch 4949, Training loss 2.428631, Validation loss 6.569270\n", "Epoch 4950, Training loss 2.428631, Validation loss 6.569280\n", "Epoch 4951, Training loss 2.428631, Validation loss 6.569270\n", "Epoch 4952, Training loss 2.428631, Validation loss 6.569265\n", "Epoch 4953, Training loss 2.428631, Validation loss 6.569270\n", "Epoch 4954, Training loss 2.428630, Validation loss 6.569280\n", "Epoch 4955, Training loss 2.428630, Validation loss 6.569270\n", "Epoch 4956, Training loss 2.428631, Validation loss 6.569275\n", "Epoch 4957, Training loss 2.428631, Validation loss 6.569275\n", "Epoch 4958, Training loss 2.428631, Validation loss 6.569265\n", "Epoch 4959, Training loss 2.428632, Validation loss 6.569270\n", "Epoch 4960, Training loss 2.428631, Validation loss 6.569280\n", "Epoch 4961, Training loss 2.428630, Validation loss 6.569285\n", "Epoch 4962, Training loss 2.428630, Validation loss 6.569275\n", "Epoch 4963, Training loss 2.428630, Validation loss 6.569270\n", "Epoch 4964, Training loss 2.428631, Validation loss 6.569280\n", "Epoch 4965, Training loss 2.428631, Validation loss 
6.569270\n", "Epoch 4966, Training loss 2.428631, Validation loss 6.569275\n", "Epoch 4967, Training loss 2.428631, Validation loss 6.569285\n", "Epoch 4968, Training loss 2.428631, Validation loss 6.569275\n", "Epoch 4969, Training loss 2.428631, Validation loss 6.569270\n", "Epoch 4970, Training loss 2.428630, Validation loss 6.569275\n", "Epoch 4971, Training loss 2.428631, Validation loss 6.569285\n", "Epoch 4972, Training loss 2.428631, Validation loss 6.569275\n", "Epoch 4973, Training loss 2.428630, Validation loss 6.569280\n", "Epoch 4974, Training loss 2.428631, Validation loss 6.569290\n", "Epoch 4975, Training loss 2.428630, Validation loss 6.569285\n", "Epoch 4976, Training loss 2.428630, Validation loss 6.569275\n", "Epoch 4977, Training loss 2.428631, Validation loss 6.569280\n", "Epoch 4978, Training loss 2.428631, Validation loss 6.569290\n", "Epoch 4979, Training loss 2.428631, Validation loss 6.569280\n", "Epoch 4980, Training loss 2.428631, Validation loss 6.569285\n", "Epoch 4981, Training loss 2.428631, Validation loss 6.569285\n", "Epoch 4982, Training loss 2.428631, Validation loss 6.569275\n", "Epoch 4983, Training loss 2.428630, Validation loss 6.569280\n", "Epoch 4984, Training loss 2.428632, Validation loss 6.569285\n", "Epoch 4985, Training loss 2.428631, Validation loss 6.569295\n", "Epoch 4986, Training loss 2.428631, Validation loss 6.569285\n", "Epoch 4987, Training loss 2.428631, Validation loss 6.569280\n", "Epoch 4988, Training loss 2.428631, Validation loss 6.569290\n", "Epoch 4989, Training loss 2.428630, Validation loss 6.569295\n", "Epoch 4990, Training loss 2.428630, Validation loss 6.569285\n", "Epoch 4991, Training loss 2.428631, Validation loss 6.569291\n", "Epoch 4992, Training loss 2.428632, Validation loss 6.569290\n", "Epoch 4993, Training loss 2.428632, Validation loss 6.569280\n", "Epoch 4994, Training loss 2.428631, Validation loss 6.569285\n", "Epoch 4995, Training loss 2.428631, Validation loss 6.569295\n", "Epoch 4996, Training loss 2.428631, Validation loss 6.569285\n", "Epoch 4997, Training loss 2.428631, Validation loss 6.569291\n", "Epoch 4998, Training loss 2.428632, Validation loss 6.569290\n", "Epoch 4999, Training loss 2.428631, Validation loss 6.569295\n" ] }, { "data": { "text/plain": [ "(Parameter containing:\n", " tensor([[5.4878]], requires_grad=True), Parameter containing:\n", " tensor([-17.4613], requires_grad=True))" ] }, "execution_count": 128, "metadata": {}, "output_type": "execute_result" } ], "source": [ "import torch\n", "import torch.nn as nn\n", "\n", "model = nn.Linear(1, 1)\n", "\n", "learning_rate = 1e-2\n", "\n", "optimizer = optim.SGD(model.parameters(), lr=learning_rate)\n", "\n", "loss_fn = nn.MSELoss()\n", "\n", "nepochs = 5000\n", "\n", "for epoch in range(nepochs):\n", " # forward pass\n", " t_p_train = model(t_un_train)\n", " loss_train = loss_fn(t_p_train, t_c_train)\n", "\n", " with torch.no_grad():\n", " t_p_val = model(t_un_val)\n", " loss_val = loss_fn(t_p_val, t_c_val)\n", "\n", " print('Epoch %d, Training loss %f, Validation loss %f' % (epoch, float(loss_train), float(loss_val)))\n", " \n", " # backward pass\n", " optimizer.zero_grad()\n", " loss_train.backward() \n", " optimizer.step()\n", " \n", "model.weight, model.bias" ] }, { "cell_type": "code", "execution_count": 131, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "Sequential(\n", " (0): Linear(in_features=1, out_features=10, bias=True)\n", " (1): Tanh()\n", " (2): Linear(in_features=10, out_features=1, bias=True)\n", ")" ] }, 
"execution_count": 131, "metadata": {}, "output_type": "execute_result" } ], "source": [ "model = nn.Sequential(\n", " nn.Linear(1, 10),\n", " nn.Tanh(),\n", " nn.Linear(10, 1))\n", "model" ] }, { "cell_type": "code", "execution_count": 132, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "[torch.Size([10, 1]), torch.Size([10]), torch.Size([1, 10]), torch.Size([1])]" ] }, "execution_count": 132, "metadata": {}, "output_type": "execute_result" } ], "source": [ "[param.shape for param in model.parameters()]" ] }, { "cell_type": "code", "execution_count": 133, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "0.weight torch.Size([10, 1])\n", "0.bias torch.Size([10])\n", "2.weight torch.Size([1, 10])\n", "2.bias torch.Size([1])\n" ] } ], "source": [ "for name, param in model.named_parameters():\n", " print(name, param.shape)" ] }, { "cell_type": "code", "execution_count": 134, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "Sequential(\n", " (hidden_linear): Linear(in_features=1, out_features=10, bias=True)\n", " (hidden_activation): Tanh()\n", " (output_linear): Linear(in_features=10, out_features=1, bias=True)\n", ")" ] }, "execution_count": 134, "metadata": {}, "output_type": "execute_result" } ], "source": [ "from collections import OrderedDict\n", "\n", "model = nn.Sequential(OrderedDict([\n", " ('hidden_linear', nn.Linear(1, 10)),\n", " ('hidden_activation', nn.Tanh()),\n", " ('output_linear', nn.Linear(10, 1))\n", "]))\n", "\n", "model" ] }, { "cell_type": "code", "execution_count": 135, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "hidden_linear.weight torch.Size([10, 1])\n", "hidden_linear.bias torch.Size([10])\n", "output_linear.weight torch.Size([1, 10])\n", "output_linear.bias torch.Size([1])\n" ] } ], "source": [ "for name, param in model.named_parameters():\n", " print(name, param.shape)" ] }, { "cell_type": "code", "execution_count": 136, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "Parameter containing:\n", "tensor([[-0.0950],\n", " [ 0.5718],\n", " [-0.1405],\n", " [-0.3802],\n", " [ 0.8591],\n", " [-0.8780],\n", " [-0.1615],\n", " [-0.3330],\n", " [ 0.9726],\n", " [-0.6343]], requires_grad=True)" ] }, "execution_count": 136, "metadata": {}, "output_type": "execute_result" } ], "source": [ "model.hidden_linear.weight" ] }, { "cell_type": "code", "execution_count": 137, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Epoch 0, Training loss 188.202881, Validation loss 136.222977\n", "Epoch 1, Training loss 150.089111, Validation loss 91.237564\n", "Epoch 2, Training loss 125.055519, Validation loss 62.170696\n", "Epoch 3, Training loss 107.771858, Validation loss 42.034805\n", "Epoch 4, Training loss 95.981758, Validation loss 28.695263\n", "Epoch 5, Training loss 87.523048, Validation loss 20.033142\n", "Epoch 6, Training loss 80.909752, Validation loss 14.547966\n", "Epoch 7, Training loss 75.393440, Validation loss 11.149405\n", "Epoch 8, Training loss 70.702812, Validation loss 8.716105\n", "Epoch 9, Training loss 66.611610, Validation loss 6.617854\n", "Epoch 10, Training loss 62.936260, Validation loss 5.115901\n", "Epoch 11, Training loss 59.520077, Validation loss 3.834637\n", "Epoch 12, Training loss 56.252563, Validation loss 3.043854\n", "Epoch 13, Training loss 53.085567, Validation loss 2.144593\n", "Epoch 14, Training loss 50.177406, Validation loss 2.155064\n", "Epoch 15, Training loss 47.875084, Validation loss 
0.764214\n", "Epoch 16, Training loss 48.680031, Validation loss 5.711440\n", "Epoch 17, Training loss 70.819298, Validation loss 6.539827\n", "Epoch 18, Training loss 45.117634, Validation loss 1.705493\n", "Epoch 19, Training loss 43.745403, Validation loss 1.002231\n", "Epoch 20, Training loss 62.779263, Validation loss 22.550776\n", "Epoch 21, Training loss 109.944939, Validation loss 15.391956\n", "Epoch 22, Training loss 87.499474, Validation loss 5.273516\n", "Epoch 23, Training loss 86.618271, Validation loss 5.433077\n", "Epoch 24, Training loss 85.102760, Validation loss 0.968221\n", "Epoch 25, Training loss 76.580666, Validation loss 0.880462\n", "Epoch 26, Training loss 72.048180, Validation loss 0.838404\n", "Epoch 27, Training loss 69.421997, Validation loss 0.805673\n", "Epoch 28, Training loss 62.210373, Validation loss 0.736177\n", "Epoch 29, Training loss 59.585712, Validation loss 4.289258\n", "Epoch 30, Training loss 72.343330, Validation loss 3.939681\n", "Epoch 31, Training loss 69.813751, Validation loss 3.661477\n", "Epoch 32, Training loss 55.094055, Validation loss 0.636955\n", "Epoch 33, Training loss 52.274673, Validation loss 1.619797\n", "Epoch 34, Training loss 79.979195, Validation loss 30.219894\n", "Epoch 35, Training loss 108.668747, Validation loss 14.255305\n", "Epoch 36, Training loss 90.254997, Validation loss 7.105915\n", "Epoch 37, Training loss 81.673286, Validation loss 1.263896\n", "Epoch 38, Training loss 69.352409, Validation loss 1.787565\n", "Epoch 39, Training loss 67.121910, Validation loss 4.780434\n", "Epoch 40, Training loss 74.308563, Validation loss 2.865013\n", "Epoch 41, Training loss 73.233864, Validation loss 2.153057\n", "Epoch 42, Training loss 54.405811, Validation loss 0.759353\n", "Epoch 43, Training loss 55.133430, Validation loss 6.451164\n", "Epoch 44, Training loss 77.712212, Validation loss 6.876225\n", "Epoch 45, Training loss 53.713181, Validation loss 0.691898\n", "Epoch 46, Training loss 43.604458, Validation loss 0.508772\n", "Epoch 47, Training loss 41.269554, Validation loss 1.270763\n", "Epoch 48, Training loss 48.032284, Validation loss 5.719485\n", "Epoch 49, Training loss 75.484917, Validation loss 36.249390\n", "Epoch 50, Training loss 109.997620, Validation loss 16.181046\n", "Epoch 51, Training loss 90.736908, Validation loss 8.593626\n", "Epoch 52, Training loss 66.301231, Validation loss 1.291996\n", "Epoch 53, Training loss 66.447350, Validation loss 3.765539\n", "Epoch 54, Training loss 70.372856, Validation loss 2.293092\n", "Epoch 55, Training loss 62.951084, Validation loss 1.423502\n", "Epoch 56, Training loss 63.198341, Validation loss 1.914119\n", "Epoch 57, Training loss 56.967697, Validation loss 3.535026\n", "Epoch 58, Training loss 60.448292, Validation loss 4.247518\n", "Epoch 59, Training loss 83.741844, Validation loss 18.616880\n", "Epoch 60, Training loss 60.130928, Validation loss 3.645747\n", "Epoch 61, Training loss 54.595108, Validation loss 1.627113\n", "Epoch 62, Training loss 45.136234, Validation loss 1.051045\n", "Epoch 63, Training loss 42.040958, Validation loss 1.308910\n", "Epoch 64, Training loss 48.324543, Validation loss 9.004208\n", "Epoch 65, Training loss 86.159454, Validation loss 38.434875\n", "Epoch 66, Training loss 66.450066, Validation loss 10.269223\n", "Epoch 67, Training loss 61.564934, Validation loss 5.869538\n", "Epoch 68, Training loss 62.936230, Validation loss 1.013806\n", "Epoch 69, Training loss 51.716278, Validation loss 2.381294\n", "Epoch 70, Training 
loss 75.816269, Validation loss 19.148706\n", "Epoch 71, Training loss 60.622791, Validation loss 5.168776\n", "Epoch 72, Training loss 57.950203, Validation loss 1.010424\n", "Epoch 73, Training loss 52.307419, Validation loss 9.175933\n", "Epoch 74, Training loss 67.611069, Validation loss 3.528175\n", "Epoch 75, Training loss 45.746189, Validation loss 0.829644\n", "Epoch 76, Training loss 46.992420, Validation loss 3.070091\n", "Epoch 77, Training loss 39.243313, Validation loss 1.339633\n", "Epoch 78, Training loss 26.990831, Validation loss 0.155706\n", "Epoch 79, Training loss 44.985210, Validation loss 22.084705\n", "Epoch 80, Training loss 140.068970, Validation loss 98.133278\n", "Epoch 81, Training loss 118.656036, Validation loss 35.475739\n", "Epoch 82, Training loss 66.085724, Validation loss 4.641664\n", "Epoch 83, Training loss 62.563957, Validation loss 0.867647\n", "Epoch 84, Training loss 59.866032, Validation loss 0.895948\n", "Epoch 85, Training loss 65.109222, Validation loss 1.823038\n", "Epoch 86, Training loss 51.824162, Validation loss 13.114482\n", "Epoch 87, Training loss 83.479126, Validation loss 33.456787\n", "Epoch 88, Training loss 73.680527, Validation loss 12.505135\n", "Epoch 89, Training loss 70.435104, Validation loss 1.182821\n", "Epoch 90, Training loss 71.140129, Validation loss 6.281937\n", "Epoch 91, Training loss 63.649334, Validation loss 3.879307\n", "Epoch 92, Training loss 53.341606, Validation loss 11.221026\n", "Epoch 93, Training loss 66.935478, Validation loss 14.394190\n", "Epoch 94, Training loss 60.886833, Validation loss 0.892655\n", "Epoch 95, Training loss 44.398464, Validation loss 5.035343\n", "Epoch 96, Training loss 55.381866, Validation loss 8.758650\n", "Epoch 97, Training loss 72.161636, Validation loss 20.945646\n", "Epoch 98, Training loss 62.780399, Validation loss 2.774074\n", "Epoch 99, Training loss 57.693176, Validation loss 2.228899\n", "Epoch 100, Training loss 49.205479, Validation loss 1.090995\n", "Epoch 101, Training loss 25.733154, Validation loss 0.605259\n", "Epoch 102, Training loss 28.567255, Validation loss 11.722998\n", "Epoch 103, Training loss 100.000717, Validation loss 57.765408\n", "Epoch 104, Training loss 111.190788, Validation loss 37.687496\n", "Epoch 105, Training loss 59.561481, Validation loss 19.396229\n", "Epoch 106, Training loss 57.900440, Validation loss 14.422561\n", "Epoch 107, Training loss 76.874557, Validation loss 2.293505\n", "Epoch 108, Training loss 62.418526, Validation loss 1.554102\n", "Epoch 109, Training loss 47.591389, Validation loss 3.106505\n", "Epoch 110, Training loss 68.639534, Validation loss 22.656759\n", "Epoch 111, Training loss 49.282101, Validation loss 0.836678\n", "Epoch 112, Training loss 47.802265, Validation loss 7.640678\n", "Epoch 113, Training loss 61.553417, Validation loss 2.550550\n", "Epoch 114, Training loss 42.272652, Validation loss 1.500478\n", "Epoch 115, Training loss 30.040598, Validation loss 2.379789\n", "Epoch 116, Training loss 67.994682, Validation loss 33.760410\n", "Epoch 117, Training loss 48.602020, Validation loss 2.709499\n", "Epoch 118, Training loss 39.010429, Validation loss 0.689597\n", "Epoch 119, Training loss 25.319740, Validation loss 6.079014\n", "Epoch 120, Training loss 29.407255, Validation loss 1.655716\n", "Epoch 121, Training loss 57.679092, Validation loss 37.563011\n", "Epoch 122, Training loss 72.639481, Validation loss 14.569146\n", "Epoch 123, Training loss 41.045956, Validation loss 0.648168\n", "Epoch 124, 
Training loss 36.422245, Validation loss 1.106288\n", "Epoch 125, Training loss 34.035793, Validation loss 4.153983\n", "Epoch 126, Training loss 16.762203, Validation loss 7.504107\n", "Epoch 127, Training loss 13.693490, Validation loss 6.818150\n", "Epoch 128, Training loss 11.972682, Validation loss 13.425880\n", "Epoch 129, Training loss 27.356056, Validation loss 7.566709\n", "Epoch 130, Training loss 121.439262, Validation loss 72.712173\n", "Epoch 131, Training loss 37.457455, Validation loss 19.578720\n", "Epoch 132, Training loss 51.632492, Validation loss 6.984236\n", "Epoch 133, Training loss 40.228813, Validation loss 4.104677\n", "Epoch 134, Training loss 15.372277, Validation loss 0.218190\n", "Epoch 135, Training loss 44.690418, Validation loss 51.018627\n", "Epoch 136, Training loss 82.666664, Validation loss 54.855400\n", "Epoch 137, Training loss 97.822487, Validation loss 9.706301\n", "Epoch 138, Training loss 82.150467, Validation loss 5.589798\n", "Epoch 139, Training loss 52.402649, Validation loss 8.421597\n", "Epoch 140, Training loss 39.936687, Validation loss 5.207246\n", "Epoch 141, Training loss 50.399399, Validation loss 1.617449\n", "Epoch 142, Training loss 34.754501, Validation loss 14.318795\n", "Epoch 143, Training loss 51.936378, Validation loss 14.048177\n", "Epoch 144, Training loss 104.084183, Validation loss 46.545895\n", "Epoch 145, Training loss 47.858685, Validation loss 23.048172\n", "Epoch 146, Training loss 51.843533, Validation loss 1.983348\n", "Epoch 147, Training loss 46.441929, Validation loss 2.795144\n", "Epoch 148, Training loss 42.758205, Validation loss 0.755481\n", "Epoch 149, Training loss 39.034386, Validation loss 1.775669\n", "Epoch 150, Training loss 39.810471, Validation loss 2.471927\n", "Epoch 151, Training loss 34.184357, Validation loss 1.060184\n", "Epoch 152, Training loss 38.021915, Validation loss 5.949709\n", "Epoch 153, Training loss 25.034809, Validation loss 1.284300\n", "Epoch 154, Training loss 71.483078, Validation loss 68.057541\n", "Epoch 155, Training loss 40.979176, Validation loss 9.935998\n", "Epoch 156, Training loss 76.494057, Validation loss 54.759197\n", "Epoch 157, Training loss 46.107635, Validation loss 4.339132\n", "Epoch 158, Training loss 37.477112, Validation loss 1.985982\n", "Epoch 159, Training loss 39.105511, Validation loss 6.102532\n", "Epoch 160, Training loss 34.030655, Validation loss 2.856300\n", "Epoch 161, Training loss 43.981998, Validation loss 10.805421\n", "Epoch 162, Training loss 29.222395, Validation loss 0.467854\n", "Epoch 163, Training loss 38.589378, Validation loss 13.657401\n", "Epoch 164, Training loss 25.249800, Validation loss 1.932532\n", "Epoch 165, Training loss 24.337599, Validation loss 10.132791\n", "Epoch 166, Training loss 31.664259, Validation loss 4.977683\n", "Epoch 167, Training loss 67.619545, Validation loss 26.857227\n", "Epoch 168, Training loss 23.754499, Validation loss 3.971027\n", "Epoch 169, Training loss 22.655516, Validation loss 0.707431\n", "Epoch 170, Training loss 30.962572, Validation loss 21.926662\n", "Epoch 171, Training loss 42.295425, Validation loss 12.824355\n", "Epoch 172, Training loss 91.616867, Validation loss 34.835369\n", "Epoch 173, Training loss 40.679382, Validation loss 5.268408\n", "Epoch 174, Training loss 31.344027, Validation loss 0.637842\n", "Epoch 175, Training loss 32.020359, Validation loss 13.407646\n", "Epoch 176, Training loss 52.059875, Validation loss 9.690411\n", "Epoch 177, Training loss 48.745232, Validation 
loss 26.712757\n", "Epoch 178, Training loss 48.534966, Validation loss 2.163431\n", "Epoch 179, Training loss 19.877708, Validation loss 1.041355\n", "Epoch 180, Training loss 21.132433, Validation loss 15.709188\n", "Epoch 181, Training loss 42.937286, Validation loss 4.825353\n", "Epoch 182, Training loss 53.461582, Validation loss 42.147942\n", "Epoch 183, Training loss 22.093172, Validation loss 0.398994\n", "Epoch 184, Training loss 53.332195, Validation loss 41.447304\n", "Epoch 185, Training loss 18.916389, Validation loss 0.214236\n", "Epoch 186, Training loss 42.256966, Validation loss 38.892761\n", "Epoch 187, Training loss 20.877779, Validation loss 0.107939\n", "Epoch 188, Training loss 54.534122, Validation loss 41.747375\n", "Epoch 189, Training loss 17.882389, Validation loss 1.458125\n", "Epoch 190, Training loss 32.477169, Validation loss 35.439907\n", "Epoch 191, Training loss 35.287922, Validation loss 12.893023\n", "Epoch 192, Training loss 97.834808, Validation loss 46.786503\n", "Epoch 193, Training loss 49.841408, Validation loss 28.322666\n", "Epoch 194, Training loss 39.605408, Validation loss 0.831564\n", "Epoch 195, Training loss 35.953609, Validation loss 1.215887\n", "Epoch 196, Training loss 26.564907, Validation loss 0.168763\n", "Epoch 197, Training loss 39.062096, Validation loss 38.804512\n", "Epoch 198, Training loss 37.067982, Validation loss 3.312617\n", "Epoch 199, Training loss 56.114605, Validation loss 45.140800\n", "Epoch 200, Training loss 16.560074, Validation loss 1.862582\n", "Epoch 201, Training loss 30.512438, Validation loss 36.655014\n", "Epoch 202, Training loss 42.767860, Validation loss 25.523722\n", "Epoch 203, Training loss 105.190063, Validation loss 50.266003\n", "Epoch 204, Training loss 55.226822, Validation loss 25.896603\n", "Epoch 205, Training loss 44.051586, Validation loss 0.855164\n", "Epoch 206, Training loss 38.701439, Validation loss 0.981959\n", "Epoch 207, Training loss 37.248146, Validation loss 2.565227\n", "Epoch 208, Training loss 31.758528, Validation loss 0.221515\n", "Epoch 209, Training loss 31.157368, Validation loss 4.594879\n", "Epoch 210, Training loss 32.976360, Validation loss 3.429501\n", "Epoch 211, Training loss 52.636204, Validation loss 12.871094\n", "Epoch 212, Training loss 26.567341, Validation loss 3.869768\n", "Epoch 213, Training loss 26.956455, Validation loss 0.482947\n", "Epoch 214, Training loss 32.669899, Validation loss 14.785099\n", "Epoch 215, Training loss 35.504002, Validation loss 4.678892\n", "Epoch 216, Training loss 43.344357, Validation loss 21.257896\n", "Epoch 217, Training loss 36.975319, Validation loss 6.096017\n", "Epoch 218, Training loss 55.468185, Validation loss 24.002811\n", "Epoch 219, Training loss 55.376633, Validation loss 4.462302\n", "Epoch 220, Training loss 30.634056, Validation loss 19.813473\n", "Epoch 221, Training loss 46.870419, Validation loss 6.600374\n", "Epoch 222, Training loss 54.354000, Validation loss 31.929436\n", "Epoch 223, Training loss 18.294641, Validation loss 4.994111\n", "Epoch 224, Training loss 16.564240, Validation loss 10.644855\n", "Epoch 225, Training loss 20.402826, Validation loss 0.892590\n", "Epoch 226, Training loss 49.642616, Validation loss 45.827824\n", "Epoch 227, Training loss 10.153070, Validation loss 9.513924\n", "Epoch 228, Training loss 16.540611, Validation loss 53.753967\n", "Epoch 229, Training loss 142.842117, Validation loss 133.952667\n", "Epoch 230, Training loss 140.536774, Validation loss 105.325859\n", "Epoch 
231, Training loss 51.767754, Validation loss 2.548903\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 232, Training loss 75.779999, Validation loss 66.181656\n", "Epoch 233, Training loss 49.276592, Validation loss 3.114651\n", "Epoch 234, Training loss 57.732765, Validation loss 5.054900\n", "Epoch 235, Training loss 42.667252, Validation loss 0.304515\n", "Epoch 236, Training loss 42.240395, Validation loss 3.882009\n", "Epoch 237, Training loss 50.448311, Validation loss 12.433903\n", "Epoch 238, Training loss 69.159485, Validation loss 8.507136\n", "Epoch 239, Training loss 52.538158, Validation loss 3.046782\n", "Epoch 240, Training loss 35.436962, Validation loss 2.210200\n", "Epoch 241, Training loss 39.319572, Validation loss 5.540761\n", "Epoch 242, Training loss 56.817757, Validation loss 10.055670\n", "Epoch 243, Training loss 31.915648, Validation loss 2.608111\n", "Epoch 244, Training loss 33.952797, Validation loss 2.927378\n", "Epoch 245, Training loss 45.910721, Validation loss 12.795069\n", "Epoch 246, Training loss 25.914827, Validation loss 0.867138\n", "Epoch 247, Training loss 25.890924, Validation loss 10.627375\n", "Epoch 248, Training loss 49.526424, Validation loss 27.620127\n", "Epoch 249, Training loss 88.138260, Validation loss 26.037930\n", "Epoch 250, Training loss 66.045830, Validation loss 15.144862\n", "Epoch 251, Training loss 56.069714, Validation loss 3.872609\n", "Epoch 252, Training loss 49.248440, Validation loss 16.001856\n", "Epoch 253, Training loss 32.939468, Validation loss 0.189991\n", "Epoch 254, Training loss 42.697109, Validation loss 15.051481\n", "Epoch 255, Training loss 27.330790, Validation loss 0.296701\n", "Epoch 256, Training loss 36.071030, Validation loss 16.448675\n", "Epoch 257, Training loss 20.720432, Validation loss 6.269170\n", "Epoch 258, Training loss 21.576187, Validation loss 0.138368\n", "Epoch 259, Training loss 37.128868, Validation loss 23.658079\n", "Epoch 260, Training loss 17.745440, Validation loss 6.157141\n", "Epoch 261, Training loss 16.423040, Validation loss 4.894213\n", "Epoch 262, Training loss 17.194422, Validation loss 16.405067\n", "Epoch 263, Training loss 43.417061, Validation loss 33.598572\n", "Epoch 264, Training loss 95.684822, Validation loss 41.874802\n", "Epoch 265, Training loss 62.213341, Validation loss 24.533224\n", "Epoch 266, Training loss 45.660385, Validation loss 0.982519\n", "Epoch 267, Training loss 41.417206, Validation loss 1.244154\n", "Epoch 268, Training loss 35.080063, Validation loss 0.834175\n", "Epoch 269, Training loss 32.161068, Validation loss 0.124771\n", "Epoch 270, Training loss 34.732376, Validation loss 6.241729\n", "Epoch 271, Training loss 28.876099, Validation loss 0.221656\n", "Epoch 272, Training loss 36.169716, Validation loss 11.507915\n", "Epoch 273, Training loss 25.954403, Validation loss 8.108714\n", "Epoch 274, Training loss 34.910492, Validation loss 8.980914\n", "Epoch 275, Training loss 58.543877, Validation loss 21.839834\n", "Epoch 276, Training loss 22.528044, Validation loss 4.045698\n", "Epoch 277, Training loss 20.933481, Validation loss 1.101960\n", "Epoch 278, Training loss 21.463192, Validation loss 22.174334\n", "Epoch 279, Training loss 34.351654, Validation loss 14.951490\n", "Epoch 280, Training loss 122.159889, Validation loss 81.444267\n", "Epoch 281, Training loss 38.658566, Validation loss 43.586983\n", "Epoch 282, Training loss 48.206474, Validation loss 2.703720\n", "Epoch 283, Training loss 39.871964, Validation 
loss 0.795329\n", "Epoch 284, Training loss 38.422169, Validation loss 1.722051\n", "Epoch 285, Training loss 43.432766, Validation loss 3.595771\n", "Epoch 286, Training loss 32.739765, Validation loss 2.548962\n", "Epoch 287, Training loss 36.592903, Validation loss 6.449761\n", "Epoch 288, Training loss 49.917240, Validation loss 9.332778\n", "Epoch 289, Training loss 35.475891, Validation loss 2.484617\n", "Epoch 290, Training loss 45.239147, Validation loss 12.740651\n", "Epoch 291, Training loss 25.851179, Validation loss 2.198431\n", "Epoch 292, Training loss 20.379223, Validation loss 3.290876\n", "Epoch 293, Training loss 8.714318, Validation loss 37.830826\n", "Epoch 294, Training loss 75.022636, Validation loss 58.051228\n", "Epoch 295, Training loss 159.541946, Validation loss 120.602791\n", "Epoch 296, Training loss 47.797390, Validation loss 72.927505\n", "Epoch 297, Training loss 107.440742, Validation loss 83.612381\n", "Epoch 298, Training loss 73.991348, Validation loss 4.536131\n", "Epoch 299, Training loss 63.351067, Validation loss 3.381332\n", "Epoch 300, Training loss 56.858784, Validation loss 63.311806\n", "Epoch 301, Training loss 46.030972, Validation loss 1.515953\n", "Epoch 302, Training loss 45.351604, Validation loss 1.027595\n", "Epoch 303, Training loss 43.572350, Validation loss 1.980765\n", "Epoch 304, Training loss 47.501587, Validation loss 2.806103\n", "Epoch 305, Training loss 39.008751, Validation loss 0.459046\n", "Epoch 306, Training loss 37.556850, Validation loss 0.540917\n", "Epoch 307, Training loss 36.230633, Validation loss 0.922534\n", "Epoch 308, Training loss 35.035900, Validation loss 0.470299\n", "Epoch 309, Training loss 34.598839, Validation loss 3.687891\n", "Epoch 310, Training loss 40.643818, Validation loss 7.182279\n", "Epoch 311, Training loss 57.168510, Validation loss 11.634871\n", "Epoch 312, Training loss 28.984745, Validation loss 5.933853\n", "Epoch 313, Training loss 37.044231, Validation loss 9.646644\n", "Epoch 314, Training loss 46.073238, Validation loss 15.849907\n", "Epoch 315, Training loss 24.174543, Validation loss 10.009675\n", "Epoch 316, Training loss 23.997524, Validation loss 9.400969\n", "Epoch 317, Training loss 58.246986, Validation loss 68.210258\n", "Epoch 318, Training loss 17.521654, Validation loss 38.478924\n", "Epoch 319, Training loss 73.554535, Validation loss 73.137482\n", "Epoch 320, Training loss 126.250175, Validation loss 70.848183\n", "Epoch 321, Training loss 74.592575, Validation loss 46.058987\n", "Epoch 322, Training loss 34.245995, Validation loss 1.002867\n", "Epoch 323, Training loss 39.051819, Validation loss 26.076437\n", "Epoch 324, Training loss 66.236496, Validation loss 6.409334\n", "Epoch 325, Training loss 30.788387, Validation loss 0.843072\n", "Epoch 326, Training loss 44.236454, Validation loss 34.023029\n", "Epoch 327, Training loss 21.919897, Validation loss 0.714438\n", "Epoch 328, Training loss 24.444828, Validation loss 25.117496\n", "Epoch 329, Training loss 53.161980, Validation loss 13.115885\n", "Epoch 330, Training loss 48.422729, Validation loss 42.416992\n", "Epoch 331, Training loss 26.559732, Validation loss 30.033060\n", "Epoch 332, Training loss 33.718811, Validation loss 7.343309\n", "Epoch 333, Training loss 48.165497, Validation loss 44.842503\n", "Epoch 334, Training loss 28.461878, Validation loss 33.712616\n", "Epoch 335, Training loss 31.664104, Validation loss 8.464482\n", "Epoch 336, Training loss 47.180805, Validation loss 45.871552\n", "Epoch 337, 
Training loss 29.748632, Validation loss 35.001251\n", "Epoch 338, Training loss 25.714991, Validation loss 1.176640\n", "Epoch 339, Training loss 50.154232, Validation loss 45.441277\n", "Epoch 340, Training loss 25.212942, Validation loss 31.315773\n", "Epoch 341, Training loss 35.060459, Validation loss 7.699202\n", "Epoch 342, Training loss 46.899517, Validation loss 46.499641\n", "Epoch 343, Training loss 25.705200, Validation loss 33.997471\n", "Epoch 344, Training loss 42.867279, Validation loss 10.228229\n", "Epoch 345, Training loss 49.139164, Validation loss 47.390430\n", "Epoch 346, Training loss 15.042575, Validation loss 13.406125\n", "Epoch 347, Training loss 19.012171, Validation loss 1.248621\n", "Epoch 348, Training loss 36.998222, Validation loss 45.769810\n", "Epoch 349, Training loss 15.895938, Validation loss 0.151599\n", "Epoch 350, Training loss 28.078112, Validation loss 41.545265\n", "Epoch 351, Training loss 41.985851, Validation loss 15.581511\n", "Epoch 352, Training loss 54.793816, Validation loss 55.458031\n", "Epoch 353, Training loss 22.236334, Validation loss 35.988800\n", "Epoch 354, Training loss 48.577389, Validation loss 5.321933\n", "Epoch 355, Training loss 34.951618, Validation loss 50.052826\n", "Epoch 356, Training loss 17.216650, Validation loss 1.877899\n", "Epoch 357, Training loss 40.621922, Validation loss 53.147678\n", "Epoch 358, Training loss 16.103558, Validation loss 16.366447\n", "Epoch 359, Training loss 21.380312, Validation loss 1.559353\n", "Epoch 360, Training loss 49.989239, Validation loss 58.474575\n", "Epoch 361, Training loss 22.045916, Validation loss 32.919735\n", "Epoch 362, Training loss 33.211121, Validation loss 10.388155\n", "Epoch 363, Training loss 43.255615, Validation loss 56.044167\n", "Epoch 364, Training loss 12.026979, Validation loss 15.291058\n", "Epoch 365, Training loss 14.568661, Validation loss 0.181796\n", "Epoch 366, Training loss 29.986315, Validation loss 52.103443\n", "Epoch 367, Training loss 39.965855, Validation loss 17.694658\n", "Epoch 368, Training loss 54.884426, Validation loss 64.524628\n", "Epoch 369, Training loss 24.000937, Validation loss 43.438965\n", "Epoch 370, Training loss 39.206882, Validation loss 1.494663\n", "Epoch 371, Training loss 21.012455, Validation loss 42.081871\n", "Epoch 372, Training loss 35.005348, Validation loss 0.540354\n", "Epoch 373, Training loss 9.115666, Validation loss 13.691843\n", "Epoch 374, Training loss 10.472707, Validation loss 1.941965\n", "Epoch 375, Training loss 25.781254, Validation loss 61.697708\n", "Epoch 376, Training loss 41.811428, Validation loss 5.473098\n", "Epoch 377, Training loss 39.424896, Validation loss 5.468519\n", "Epoch 378, Training loss 23.145550, Validation loss 56.538605\n", "Epoch 379, Training loss 31.179502, Validation loss 0.361096\n", "Epoch 380, Training loss 26.440594, Validation loss 3.821286\n", "Epoch 381, Training loss 9.874971, Validation loss 0.412758\n", "Epoch 382, Training loss 29.112020, Validation loss 76.077827\n", "Epoch 383, Training loss 29.454191, Validation loss 0.022819\n", "Epoch 384, Training loss 34.478737, Validation loss 10.811291\n", "Epoch 385, Training loss 25.032215, Validation loss 7.166121\n", "Epoch 386, Training loss 28.590616, Validation loss 1.819534\n", "Epoch 387, Training loss 40.271229, Validation loss 19.264463\n", "Epoch 388, Training loss 28.509720, Validation loss 14.712540\n", "Epoch 389, Training loss 17.340776, Validation loss 1.188064\n", "Epoch 390, Training loss 25.543444, 
Validation loss 88.886940\n", "Epoch 391, Training loss 69.458809, Validation loss 63.936295\n", "Epoch 392, Training loss 74.463448, Validation loss 27.431007\n", "Epoch 393, Training loss 29.067432, Validation loss 12.048976\n", "Epoch 394, Training loss 52.094978, Validation loss 4.281846\n", "Epoch 395, Training loss 16.452782, Validation loss 16.148310\n", "Epoch 396, Training loss 17.629173, Validation loss 5.933511\n", "Epoch 397, Training loss 12.617379, Validation loss 7.837291\n", "Epoch 398, Training loss 15.389823, Validation loss 78.134842\n", "Epoch 399, Training loss 112.993164, Validation loss 98.364777\n", "Epoch 400, Training loss 41.142368, Validation loss 23.878611\n", "Epoch 401, Training loss 38.405659, Validation loss 0.828503\n", "Epoch 402, Training loss 34.547344, Validation loss 51.214653\n", "Epoch 403, Training loss 39.254570, Validation loss 2.491954\n", "Epoch 404, Training loss 23.677151, Validation loss 27.314154\n", "Epoch 405, Training loss 15.852333, Validation loss 11.616129\n", "Epoch 406, Training loss 8.793417, Validation loss 0.207769\n", "Epoch 407, Training loss 45.027637, Validation loss 114.482285\n", "Epoch 408, Training loss 79.377785, Validation loss 34.663124\n", "Epoch 409, Training loss 58.254799, Validation loss 1.278180\n", "Epoch 410, Training loss 48.029125, Validation loss 1.171317\n", "Epoch 411, Training loss 42.781277, Validation loss 0.615607\n", "Epoch 412, Training loss 40.977932, Validation loss 0.607557\n", "Epoch 413, Training loss 39.608486, Validation loss 1.678879\n", "Epoch 414, Training loss 39.446770, Validation loss 0.025169\n", "Epoch 415, Training loss 44.196629, Validation loss 5.697085\n", "Epoch 416, Training loss 37.846905, Validation loss 0.000107\n", "Epoch 417, Training loss 43.788918, Validation loss 7.524011\n", "Epoch 418, Training loss 22.593975, Validation loss 2.051311\n", "Epoch 419, Training loss 27.712803, Validation loss 65.471336\n", "Epoch 420, Training loss 89.986618, Validation loss 59.887814\n", "Epoch 421, Training loss 38.308334, Validation loss 55.598518\n", "Epoch 422, Training loss 61.069874, Validation loss 3.322539\n", "Epoch 423, Training loss 57.035530, Validation loss 2.175529\n", "Epoch 424, Training loss 50.286526, Validation loss 1.444543\n", "Epoch 425, Training loss 13.180573, Validation loss 8.389235\n", "Epoch 426, Training loss 11.484343, Validation loss 16.819561\n", "Epoch 427, Training loss 10.670385, Validation loss 5.457948\n", "Epoch 428, Training loss 16.053450, Validation loss 58.524544\n", "Epoch 429, Training loss 62.000626, Validation loss 38.533688\n", "Epoch 430, Training loss 73.061760, Validation loss 102.291138\n", "Epoch 431, Training loss 27.717735, Validation loss 1.804701\n", "Epoch 432, Training loss 53.724480, Validation loss 85.089462\n", "Epoch 433, Training loss 18.259604, Validation loss 2.765673\n", "Epoch 434, Training loss 11.434959, Validation loss 68.852829\n", "Epoch 435, Training loss 58.186176, Validation loss 55.769241\n", "Epoch 436, Training loss 83.345345, Validation loss 115.517097\n", "Epoch 437, Training loss 16.518278, Validation loss 38.997032\n", "Epoch 438, Training loss 35.749031, Validation loss 18.215260\n", "Epoch 439, Training loss 58.167870, Validation loss 72.794998\n", "Epoch 440, Training loss 27.533468, Validation loss 53.655640\n", "Epoch 441, Training loss 66.348068, Validation loss 47.843258\n", "Epoch 442, Training loss 75.025475, Validation loss 66.969254\n", "Epoch 443, Training loss 20.232737, Validation loss 
40.620796\n", "Epoch 444, Training loss 38.433277, Validation loss 7.143627\n", "Epoch 445, Training loss 43.261150, Validation loss 55.580956\n", "Epoch 446, Training loss 25.915512, Validation loss 0.839074\n", "Epoch 447, Training loss 35.084541, Validation loss 50.741440\n", "Epoch 448, Training loss 18.160593, Validation loss 0.358871\n", "Epoch 449, Training loss 29.691891, Validation loss 49.831829\n", "Epoch 450, Training loss 34.415703, Validation loss 0.814875\n", "Epoch 451, Training loss 14.281769, Validation loss 42.354343\n", "Epoch 452, Training loss 30.099306, Validation loss 20.354542\n", "Epoch 453, Training loss 49.924877, Validation loss 66.877884\n", "Epoch 454, Training loss 12.603898, Validation loss 3.599426\n", "Epoch 455, Training loss 14.218846, Validation loss 29.876402\n", "Epoch 456, Training loss 23.523329, Validation loss 2.887733\n", "Epoch 457, Training loss 39.628216, Validation loss 64.125160\n", "Epoch 458, Training loss 19.130424, Validation loss 2.442740\n", "Epoch 459, Training loss 31.685608, Validation loss 63.196972\n", "Epoch 460, Training loss 49.783428, Validation loss 39.658810\n", "Epoch 461, Training loss 59.182068, Validation loss 74.083878\n", "Epoch 462, Training loss 34.884762, Validation loss 56.248684\n", "Epoch 463, Training loss 29.651270, Validation loss 9.148705\n", "Epoch 464, Training loss 43.057957, Validation loss 63.553944\n", "Epoch 465, Training loss 19.300592, Validation loss 1.768371\n", "Epoch 466, Training loss 22.521582, Validation loss 54.202221\n", "Epoch 467, Training loss 52.627018, Validation loss 57.626835\n", "Epoch 468, Training loss 54.725819, Validation loss 69.824806\n", "Epoch 469, Training loss 38.865402, Validation loss 55.313530\n", "Epoch 470, Training loss 12.580521, Validation loss 11.027719\n", "Epoch 471, Training loss 11.407351, Validation loss 18.625309\n", "Epoch 472, Training loss 11.704053, Validation loss 2.430834\n", "Epoch 473, Training loss 15.717930, Validation loss 40.555984\n", "Epoch 474, Training loss 27.397310, Validation loss 10.554416\n", "Epoch 475, Training loss 39.354874, Validation loss 68.634689\n", "Epoch 476, Training loss 37.756283, Validation loss 1.952485\n", "Epoch 477, Training loss 20.004597, Validation loss 2.517909\n", "Epoch 478, Training loss 15.131277, Validation loss 37.403671\n", "Epoch 479, Training loss 47.462051, Validation loss 18.303637\n", "Epoch 480, Training loss 24.698439, Validation loss 35.381359\n", "Epoch 481, Training loss 32.593765, Validation loss 5.752198\n", "Epoch 482, Training loss 29.601234, Validation loss 0.321716\n", "Epoch 483, Training loss 32.332241, Validation loss 12.059843\n", "Epoch 484, Training loss 24.397844, Validation loss 1.371361\n", "Epoch 485, Training loss 26.084641, Validation loss 16.082611\n", "Epoch 486, Training loss 31.266491, Validation loss 8.009584\n", "Epoch 487, Training loss 32.468773, Validation loss 28.051119\n", "Epoch 488, Training loss 19.670639, Validation loss 80.431931\n", "Epoch 489, Training loss 76.698692, Validation loss 27.206100\n", "Epoch 490, Training loss 41.259727, Validation loss 30.592096\n", "Epoch 491, Training loss 31.002378, Validation loss 41.035580\n", "Epoch 492, Training loss 28.527620, Validation loss 20.089811\n", "Epoch 493, Training loss 7.381019, Validation loss 28.396112\n", "Epoch 494, Training loss 6.148661, Validation loss 13.707011\n", "Epoch 495, Training loss 7.070354, Validation loss 25.129822\n", "Epoch 496, Training loss 20.255722, Validation loss 3.556656\n", "Epoch 
497, Training loss 61.997990, Validation loss 118.711823\n", "Epoch 498, Training loss 36.366695, Validation loss 8.789732\n", "Epoch 499, Training loss 26.564434, Validation loss 6.624624\n", "Epoch 500, Training loss 27.082666, Validation loss 0.158440\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 501, Training loss 33.792725, Validation loss 15.058035\n", "Epoch 502, Training loss 22.999510, Validation loss 1.917233\n", "Epoch 503, Training loss 26.154413, Validation loss 16.972994\n", "Epoch 504, Training loss 36.225079, Validation loss 17.544306\n", "Epoch 505, Training loss 44.739883, Validation loss 27.032108\n", "Epoch 506, Training loss 22.614887, Validation loss 17.033224\n", "Epoch 507, Training loss 27.514246, Validation loss 6.954543\n", "Epoch 508, Training loss 34.052082, Validation loss 27.799284\n", "Epoch 509, Training loss 23.210176, Validation loss 0.854998\n", "Epoch 510, Training loss 46.492729, Validation loss 32.972263\n", "Epoch 511, Training loss 25.235857, Validation loss 24.412657\n", "Epoch 512, Training loss 38.442402, Validation loss 20.609032\n", "Epoch 513, Training loss 40.567562, Validation loss 34.149380\n", "Epoch 514, Training loss 22.353720, Validation loss 18.765388\n", "Epoch 515, Training loss 51.071659, Validation loss 5.401290\n", "Epoch 516, Training loss 47.510014, Validation loss 41.184189\n", "Epoch 517, Training loss 27.008287, Validation loss 26.131903\n", "Epoch 518, Training loss 46.641083, Validation loss 2.342072\n", "Epoch 519, Training loss 15.746004, Validation loss 15.911375\n", "Epoch 520, Training loss 32.671375, Validation loss 6.361736\n", "Epoch 521, Training loss 68.008949, Validation loss 53.950584\n", "Epoch 522, Training loss 19.205702, Validation loss 24.545385\n", "Epoch 523, Training loss 83.711960, Validation loss 80.442184\n", "Epoch 524, Training loss 43.961456, Validation loss 31.447292\n", "Epoch 525, Training loss 41.824856, Validation loss 1.053161\n", "Epoch 526, Training loss 35.643154, Validation loss 1.285066\n", "Epoch 527, Training loss 33.709511, Validation loss 0.050582\n", "Epoch 528, Training loss 35.379066, Validation loss 5.247728\n", "Epoch 529, Training loss 27.379818, Validation loss 2.129989\n", "Epoch 530, Training loss 26.359686, Validation loss 7.283662\n", "Epoch 531, Training loss 30.520712, Validation loss 3.004439\n", "Epoch 532, Training loss 38.587780, Validation loss 15.722445\n", "Epoch 533, Training loss 35.461624, Validation loss 14.195788\n", "Epoch 534, Training loss 30.090078, Validation loss 13.159118\n", "Epoch 535, Training loss 27.648323, Validation loss 0.502773\n", "Epoch 536, Training loss 38.783154, Validation loss 21.935839\n", "Epoch 537, Training loss 25.968584, Validation loss 17.935608\n", "Epoch 538, Training loss 39.721130, Validation loss 27.684561\n", "Epoch 539, Training loss 40.309593, Validation loss 27.946930\n", "Epoch 540, Training loss 32.085064, Validation loss 22.583761\n", "Epoch 541, Training loss 23.773037, Validation loss 0.544181\n", "Epoch 542, Training loss 42.324738, Validation loss 30.307945\n", "Epoch 543, Training loss 30.793989, Validation loss 24.597527\n", "Epoch 544, Training loss 28.070444, Validation loss 5.844359\n", "Epoch 545, Training loss 42.737434, Validation loss 32.342392\n", "Epoch 546, Training loss 30.538023, Validation loss 25.975777\n", "Epoch 547, Training loss 29.500246, Validation loss 6.500345\n", "Epoch 548, Training loss 47.673904, Validation loss 34.589119\n", "Epoch 549, Training loss 34.871269, 
Validation loss 27.505531\n", "Epoch 550, Training loss 22.731823, Validation loss 1.908493\n", "Epoch 551, Training loss 37.851341, Validation loss 30.787136\n", "Epoch 552, Training loss 19.971989, Validation loss 22.563145\n", "Epoch 553, Training loss 35.036434, Validation loss 18.950003\n", "Epoch 554, Training loss 44.372993, Validation loss 45.060871\n", "Epoch 555, Training loss 23.530357, Validation loss 28.920845\n", "Epoch 556, Training loss 27.107143, Validation loss 21.706369\n", "Epoch 557, Training loss 48.173080, Validation loss 115.684212\n", "Epoch 558, Training loss 66.390884, Validation loss 35.222492\n", "Epoch 559, Training loss 45.122311, Validation loss 1.058616\n", "Epoch 560, Training loss 41.154778, Validation loss 1.262471\n", "Epoch 561, Training loss 36.575596, Validation loss 1.554519\n", "Epoch 562, Training loss 22.754656, Validation loss 0.958644\n", "Epoch 563, Training loss 40.853607, Validation loss 133.349945\n", "Epoch 564, Training loss 159.546539, Validation loss 165.078186\n", "Epoch 565, Training loss 66.520279, Validation loss 50.219646\n", "Epoch 566, Training loss 25.462946, Validation loss 3.095728\n", "Epoch 567, Training loss 67.614502, Validation loss 133.379990\n", "Epoch 568, Training loss 34.770576, Validation loss 20.793236\n", "Epoch 569, Training loss 21.460367, Validation loss 6.822656\n", "Epoch 570, Training loss 21.968239, Validation loss 1.074276\n", "Epoch 571, Training loss 37.889248, Validation loss 29.155888\n", "Epoch 572, Training loss 18.526667, Validation loss 18.408997\n", "Epoch 573, Training loss 19.476912, Validation loss 2.208320\n", "Epoch 574, Training loss 29.198713, Validation loss 32.442387\n", "Epoch 575, Training loss 38.650066, Validation loss 20.203663\n", "Epoch 576, Training loss 40.496353, Validation loss 39.622116\n", "Epoch 577, Training loss 15.762302, Validation loss 19.307135\n", "Epoch 578, Training loss 15.439899, Validation loss 12.406163\n", "Epoch 579, Training loss 16.986393, Validation loss 31.109192\n", "Epoch 580, Training loss 25.503172, Validation loss 6.515719\n", "Epoch 581, Training loss 49.960712, Validation loss 49.659386\n", "Epoch 582, Training loss 29.820614, Validation loss 9.589415\n", "Epoch 583, Training loss 30.336761, Validation loss 42.628086\n", "Epoch 584, Training loss 43.744892, Validation loss 18.890295\n", "Epoch 585, Training loss 43.046333, Validation loss 49.756493\n", "Epoch 586, Training loss 14.160836, Validation loss 7.037714\n", "Epoch 587, Training loss 17.252279, Validation loss 36.237598\n", "Epoch 588, Training loss 23.200548, Validation loss 8.237538\n", "Epoch 589, Training loss 28.585365, Validation loss 50.378639\n", "Epoch 590, Training loss 46.975754, Validation loss 14.294964\n", "Epoch 591, Training loss 48.032913, Validation loss 57.675846\n", "Epoch 592, Training loss 12.971696, Validation loss 0.937576\n", "Epoch 593, Training loss 38.460987, Validation loss 85.891808\n", "Epoch 594, Training loss 44.287258, Validation loss 4.745943\n", "Epoch 595, Training loss 44.159676, Validation loss 47.581436\n", "Epoch 596, Training loss 12.893234, Validation loss 1.748327\n", "Epoch 597, Training loss 15.007483, Validation loss 50.972797\n", "Epoch 598, Training loss 22.392046, Validation loss 17.425232\n", "Epoch 599, Training loss 28.679102, Validation loss 96.122849\n", "Epoch 600, Training loss 47.543114, Validation loss 28.269625\n", "Epoch 601, Training loss 36.034168, Validation loss 35.457458\n", "Epoch 602, Training loss 7.373916, Validation loss 
12.309338\n", "Epoch 603, Training loss 5.772570, Validation loss 38.147228\n", "Epoch 604, Training loss 11.154088, Validation loss 4.419096\n", "Epoch 605, Training loss 12.534155, Validation loss 65.521225\n", "Epoch 606, Training loss 24.072351, Validation loss 16.132362\n", "Epoch 607, Training loss 31.890762, Validation loss 104.615929\n", "Epoch 608, Training loss 44.505833, Validation loss 23.969719\n", "Epoch 609, Training loss 35.682739, Validation loss 40.558723\n", "Epoch 610, Training loss 22.485481, Validation loss 7.944152\n", "Epoch 611, Training loss 13.755894, Validation loss 39.500271\n", "Epoch 612, Training loss 12.487520, Validation loss 6.762916\n", "Epoch 613, Training loss 14.526956, Validation loss 83.823288\n", "Epoch 614, Training loss 36.293686, Validation loss 10.965183\n", "Epoch 615, Training loss 12.473163, Validation loss 28.571835\n", "Epoch 616, Training loss 7.025993, Validation loss 0.148857\n", "Epoch 617, Training loss 10.693221, Validation loss 46.935284\n", "Epoch 618, Training loss 14.392536, Validation loss 2.075433\n", "Epoch 619, Training loss 17.191282, Validation loss 62.120628\n", "Epoch 620, Training loss 22.674177, Validation loss 0.466242\n", "Epoch 621, Training loss 36.907848, Validation loss 149.399170\n", "Epoch 622, Training loss 56.383503, Validation loss 31.861504\n", "Epoch 623, Training loss 38.425472, Validation loss 11.380044\n", "Epoch 624, Training loss 21.024338, Validation loss 56.872856\n", "Epoch 625, Training loss 26.874065, Validation loss 7.566489\n", "Epoch 626, Training loss 30.141432, Validation loss 118.427444\n", "Epoch 627, Training loss 108.852219, Validation loss 90.488960\n", "Epoch 628, Training loss 50.793339, Validation loss 103.906647\n", "Epoch 629, Training loss 45.282104, Validation loss 4.029670\n", "Epoch 630, Training loss 37.716530, Validation loss 3.089066\n", "Epoch 631, Training loss 33.504486, Validation loss 0.088238\n", "Epoch 632, Training loss 28.486599, Validation loss 5.733911\n", "Epoch 633, Training loss 5.826125, Validation loss 0.156021\n", "Epoch 634, Training loss 6.942131, Validation loss 19.861603\n", "Epoch 635, Training loss 7.206227, Validation loss 0.354024\n", "Epoch 636, Training loss 9.220537, Validation loss 31.359549\n", "Epoch 637, Training loss 9.296906, Validation loss 1.143404\n", "Epoch 638, Training loss 12.968779, Validation loss 50.579048\n", "Epoch 639, Training loss 14.485423, Validation loss 1.728449\n", "Epoch 640, Training loss 15.783423, Validation loss 60.113430\n", "Epoch 641, Training loss 18.105858, Validation loss 0.185341\n", "Epoch 642, Training loss 39.818043, Validation loss 152.154175\n", "Epoch 643, Training loss 46.286499, Validation loss 19.286074\n", "Epoch 644, Training loss 27.433193, Validation loss 11.218178\n", "Epoch 645, Training loss 9.511884, Validation loss 54.619686\n", "Epoch 646, Training loss 16.303671, Validation loss 6.282052\n", "Epoch 647, Training loss 12.243665, Validation loss 46.544937\n", "Epoch 648, Training loss 12.733078, Validation loss 1.987065\n", "Epoch 649, Training loss 11.855659, Validation loss 37.244419\n", "Epoch 650, Training loss 9.067559, Validation loss 0.330118\n", "Epoch 651, Training loss 11.765249, Validation loss 43.394512\n", "Epoch 652, Training loss 11.221633, Validation loss 1.002309\n", "Epoch 653, Training loss 12.204603, Validation loss 38.116428\n", "Epoch 654, Training loss 9.028028, Validation loss 0.269124\n", "Epoch 655, Training loss 11.300458, Validation loss 41.661358\n", "Epoch 656, 
Training loss 10.759435, Validation loss 1.003477\n", "Epoch 657, Training loss 12.317498, Validation loss 40.645874\n", "Epoch 658, Training loss 9.757420, Validation loss 0.460163\n", "Epoch 659, Training loss 11.221479, Validation loss 39.032219\n", "Epoch 660, Training loss 9.822432, Validation loss 0.727022\n", "Epoch 661, Training loss 12.089879, Validation loss 43.152405\n", "Epoch 662, Training loss 10.691300, Validation loss 0.814905\n", "Epoch 663, Training loss 11.322901, Validation loss 36.591904\n", "Epoch 664, Training loss 8.939461, Validation loss 0.471187\n", "Epoch 665, Training loss 11.540578, Validation loss 43.775433\n", "Epoch 666, Training loss 11.087568, Validation loss 1.124731\n", "Epoch 667, Training loss 11.478027, Validation loss 36.592518\n", "Epoch 668, Training loss 8.801947, Validation loss 0.456213\n", "Epoch 669, Training loss 11.142157, Validation loss 43.077888\n", "Epoch 670, Training loss 10.876488, Validation loss 1.167559\n", "Epoch 671, Training loss 11.457314, Validation loss 37.516209\n", "Epoch 672, Training loss 9.013797, Validation loss 0.577465\n", "Epoch 673, Training loss 10.955992, Validation loss 42.375607\n", "Epoch 674, Training loss 10.565631, Validation loss 1.131929\n", "Epoch 675, Training loss 11.255712, Validation loss 38.112278\n", "Epoch 676, Training loss 9.172834, Validation loss 0.714093\n", "Epoch 677, Training loss 10.799375, Validation loss 41.908962\n", "Epoch 678, Training loss 10.307647, Validation loss 1.123594\n", "Epoch 679, Training loss 10.955499, Validation loss 38.327633\n", "Epoch 680, Training loss 9.213286, Validation loss 0.833600\n", "Epoch 681, Training loss 10.593033, Validation loss 41.677181\n", "Epoch 682, Training loss 10.105482, Validation loss 1.149819\n", "Epoch 683, Training loss 10.588325, Validation loss 38.211670\n", "Epoch 684, Training loss 9.128176, Validation loss 0.921269\n", "Epoch 685, Training loss 10.302027, Validation loss 41.569450\n", "Epoch 686, Training loss 9.906311, Validation loss 1.181555\n", "Epoch 687, Training loss 10.161787, Validation loss 37.869038\n", "Epoch 688, Training loss 8.937228, Validation loss 0.963430\n", "Epoch 689, Training loss 9.923927, Validation loss 41.355625\n", "Epoch 690, Training loss 9.627297, Validation loss 1.165796\n", "Epoch 691, Training loss 9.705654, Validation loss 37.497025\n", "Epoch 692, Training loss 8.684975, Validation loss 0.945702\n", "Epoch 693, Training loss 9.500686, Validation loss 40.734276\n", "Epoch 694, Training loss 9.205831, Validation loss 1.062758\n", "Epoch 695, Training loss 9.287020, Validation loss 37.342953\n", "Epoch 696, Training loss 8.430835, Validation loss 0.873989\n", "Epoch 697, Training loss 9.107669, Validation loss 39.589294\n", "Epoch 698, Training loss 8.679281, Validation loss 0.897348\n", "Epoch 699, Training loss 8.950351, Validation loss 37.354042\n", "Epoch 700, Training loss 8.184861, Validation loss 0.771480\n", "Epoch 701, Training loss 8.793702, Validation loss 38.205734\n", "Epoch 702, Training loss 8.171039, Validation loss 0.731493\n", "Epoch 703, Training loss 8.678603, Validation loss 37.094109\n", "Epoch 704, Training loss 7.900351, Validation loss 0.653907\n", "Epoch 705, Training loss 8.552493, Validation loss 36.994228\n", "Epoch 706, Training loss 7.761649, Validation loss 0.596057\n", "Epoch 707, Training loss 8.452141, Validation loss 36.414326\n", "Epoch 708, Training loss 7.584990, Validation loss 0.538302\n", "Epoch 709, Training loss 8.355862, Validation loss 36.015327\n", "Epoch 
710, Training loss 7.435797, Validation loss 0.487119\n", "Epoch 711, Training loss 8.267857, Validation loss 35.562492\n", "Epoch 712, Training loss 7.292552, Validation loss 0.439862\n", "Epoch 713, Training loss 8.190248, Validation loss 35.135700\n", "Epoch 714, Training loss 7.163505, Validation loss 0.398488\n", "Epoch 715, Training loss 8.117473, Validation loss 34.720303\n", "Epoch 716, Training loss 7.044286, Validation loss 0.360993\n", "Epoch 717, Training loss 8.053308, Validation loss 34.322601\n", "Epoch 718, Training loss 6.936958, Validation loss 0.328299\n", "Epoch 719, Training loss 7.994324, Validation loss 33.945602\n", "Epoch 720, Training loss 6.839259, Validation loss 0.299235\n", "Epoch 721, Training loss 7.941756, Validation loss 33.589752\n", "Epoch 722, Training loss 6.751310, Validation loss 0.273824\n", "Epoch 723, Training loss 7.894009, Validation loss 33.256294\n", "Epoch 724, Training loss 6.671659, Validation loss 0.251403\n", "Epoch 725, Training loss 7.851204, Validation loss 32.944523\n", "Epoch 726, Training loss 6.599725, Validation loss 0.231722\n", "Epoch 727, Training loss 7.812306, Validation loss 32.653755\n", "Epoch 728, Training loss 6.534360, Validation loss 0.214320\n", "Epoch 729, Training loss 7.777192, Validation loss 32.382736\n", "Epoch 730, Training loss 6.474928, Validation loss 0.198947\n", "Epoch 731, Training loss 7.745055, Validation loss 32.129990\n", "Epoch 732, Training loss 6.420486, Validation loss 0.185268\n", "Epoch 733, Training loss 7.715753, Validation loss 31.893980\n", "Epoch 734, Training loss 6.370510, Validation loss 0.173089\n", "Epoch 735, Training loss 7.688674, Validation loss 31.673222\n", "Epoch 736, Training loss 6.324281, Validation loss 0.162176\n", "Epoch 737, Training loss 7.663684, Validation loss 31.466194\n", "Epoch 738, Training loss 6.281400, Validation loss 0.152389\n", "Epoch 739, Training loss 7.640303, Validation loss 31.271629\n", "Epoch 740, Training loss 6.241337, Validation loss 0.143562\n", "Epoch 741, Training loss 7.618533, Validation loss 31.088375\n", "Epoch 742, Training loss 6.203819, Validation loss 0.135604\n", "Epoch 743, Training loss 7.597867, Validation loss 30.915260\n", "Epoch 744, Training loss 6.168420, Validation loss 0.128388\n", "Epoch 745, Training loss 7.578489, Validation loss 30.751442\n", "Epoch 746, Training loss 6.135022, Validation loss 0.121864\n", "Epoch 747, Training loss 7.559835, Validation loss 30.595989\n", "Epoch 748, Training loss 6.103202, Validation loss 0.115929\n", "Epoch 749, Training loss 7.542264, Validation loss 30.448267\n", "Epoch 750, Training loss 6.073010, Validation loss 0.110559\n", "Epoch 751, Training loss 7.525099, Validation loss 30.307415\n", "Epoch 752, Training loss 6.043994, Validation loss 0.105666\n", "Epoch 753, Training loss 7.508903, Validation loss 30.173077\n", "Epoch 754, Training loss 6.016316, Validation loss 0.101244\n", "Epoch 755, Training loss 7.492840, Validation loss 30.044455\n", "Epoch 756, Training loss 5.989521, Validation loss 0.097218\n", "Epoch 757, Training loss 7.477777, Validation loss 29.921490\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 758, Training loss 5.963907, Validation loss 0.093596\n", "Epoch 759, Training loss 7.462490, Validation loss 29.803152\n", "Epoch 760, Training loss 5.938882, Validation loss 0.090307\n", "Epoch 761, Training loss 7.448397, Validation loss 29.689976\n", "Epoch 762, Training loss 5.914982, Validation loss 0.087368\n", "Epoch 763, Training loss 
7.433597, Validation loss 29.580393\n", "Epoch 764, Training loss 5.891379, Validation loss 0.084717\n", "Epoch 765, Training loss 7.420451, Validation loss 29.475752\n", "Epoch 766, Training loss 5.868964, Validation loss 0.082371\n", "Epoch 767, Training loss 7.405783, Validation loss 29.373480\n", "Epoch 768, Training loss 5.846460, Validation loss 0.080281\n", "Epoch 769, Training loss 7.393759, Validation loss 29.276485\n", "Epoch 770, Training loss 5.825420, Validation loss 0.078450\n", "Epoch 771, Training loss 7.378717, Validation loss 29.180252\n", "Epoch 772, Training loss 5.803681, Validation loss 0.076864\n", "Epoch 773, Training loss 7.368289, Validation loss 29.090553\n", "Epoch 774, Training loss 5.784067, Validation loss 0.075475\n", "Epoch 775, Training loss 7.352008, Validation loss 28.998819\n", "Epoch 776, Training loss 5.762666, Validation loss 0.074370\n", "Epoch 777, Training loss 7.344203, Validation loss 28.916538\n", "Epoch 778, Training loss 5.744689, Validation loss 0.073330\n", "Epoch 779, Training loss 7.324984, Validation loss 28.827160\n", "Epoch 780, Training loss 5.723008, Validation loss 0.072728\n", "Epoch 781, Training loss 7.322224, Validation loss 28.753975\n", "Epoch 782, Training loss 5.707390, Validation loss 0.071892\n", "Epoch 783, Training loss 7.296438, Validation loss 28.663023\n", "Epoch 784, Training loss 5.684138, Validation loss 0.071919\n", "Epoch 785, Training loss 7.303893, Validation loss 28.603157\n", "Epoch 786, Training loss 5.672472, Validation loss 0.070995\n", "Epoch 787, Training loss 7.263784, Validation loss 28.502916\n", "Epoch 788, Training loss 5.645159, Validation loss 0.072034\n", "Epoch 789, Training loss 7.292970, Validation loss 28.466742\n", "Epoch 790, Training loss 5.641004, Validation loss 0.070352\n", "Epoch 791, Training loss 7.221300, Validation loss 28.340471\n", "Epoch 792, Training loss 5.604286, Validation loss 0.073414\n", "Epoch 793, Training loss 7.298248, Validation loss 28.352285\n", "Epoch 794, Training loss 5.615631, Validation loss 0.069384\n", "Epoch 795, Training loss 7.155879, Validation loss 28.161469\n", "Epoch 796, Training loss 5.557855, Validation loss 0.077087\n", "Epoch 797, Training loss 7.340595, Validation loss 28.278246\n", "Epoch 798, Training loss 5.603045, Validation loss 0.066886\n", "Epoch 799, Training loss 7.037813, Validation loss 27.933197\n", "Epoch 800, Training loss 5.498729, Validation loss 0.086030\n", "Epoch 801, Training loss 7.470628, Validation loss 28.287867\n", "Epoch 802, Training loss 5.621630, Validation loss 0.060902\n", "Epoch 803, Training loss 6.803507, Validation loss 27.577587\n", "Epoch 804, Training loss 5.415148, Validation loss 0.108892\n", "Epoch 805, Training loss 7.805369, Validation loss 28.462618\n", "Epoch 806, Training loss 5.726019, Validation loss 0.052983\n", "Epoch 807, Training loss 6.359599, Validation loss 26.940634\n", "Epoch 808, Training loss 5.288883, Validation loss 0.166409\n", "Epoch 809, Training loss 8.493363, Validation loss 28.801378\n", "Epoch 810, Training loss 6.023999, Validation loss 0.073018\n", "Epoch 811, Training loss 5.803058, Validation loss 26.015696\n", "Epoch 812, Training loss 5.028078, Validation loss 0.269966\n", "Epoch 813, Training loss 8.906699, Validation loss 28.549423\n", "Epoch 814, Training loss 6.236202, Validation loss 0.105061\n", "Epoch 815, Training loss 5.589180, Validation loss 25.492640\n", "Epoch 816, Training loss 4.815384, Validation loss 0.332908\n", "Epoch 817, Training loss 8.616336, 
Validation loss 27.937098\n", "Epoch 818, Training loss 6.094061, Validation loss 0.074158\n", "Epoch 819, Training loss 5.621906, Validation loss 25.478970\n", "Epoch 820, Training loss 4.889806, Validation loss 0.324735\n", "Epoch 821, Training loss 8.824062, Validation loss 28.216688\n", "Epoch 822, Training loss 6.162384, Validation loss 0.087523\n", "Epoch 823, Training loss 5.546257, Validation loss 25.347048\n", "Epoch 824, Training loss 4.777040, Validation loss 0.355441\n", "Epoch 825, Training loss 8.618803, Validation loss 27.868124\n", "Epoch 826, Training loss 6.048661, Validation loss 0.066893\n", "Epoch 827, Training loss 5.563971, Validation loss 25.348890\n", "Epoch 828, Training loss 4.821342, Validation loss 0.352040\n", "Epoch 829, Training loss 8.761008, Validation loss 28.045935\n", "Epoch 830, Training loss 6.088619, Validation loss 0.073907\n", "Epoch 831, Training loss 5.506049, Validation loss 25.245483\n", "Epoch 832, Training loss 4.733177, Validation loss 0.378162\n", "Epoch 833, Training loss 8.601940, Validation loss 27.772127\n", "Epoch 834, Training loss 5.995631, Validation loss 0.059314\n", "Epoch 835, Training loss 5.513782, Validation loss 25.238810\n", "Epoch 836, Training loss 4.760314, Validation loss 0.378018\n", "Epoch 837, Training loss 8.704998, Validation loss 27.892494\n", "Epoch 838, Training loss 6.018751, Validation loss 0.062956\n", "Epoch 839, Training loss 5.467178, Validation loss 25.156378\n", "Epoch 840, Training loss 4.687228, Validation loss 0.401144\n", "Epoch 841, Training loss 8.573495, Validation loss 27.663910\n", "Epoch 842, Training loss 5.938260, Validation loss 0.052263\n", "Epoch 843, Training loss 5.469514, Validation loss 25.147240\n", "Epoch 844, Training loss 4.703959, Validation loss 0.402938\n", "Epoch 845, Training loss 8.651232, Validation loss 27.748285\n", "Epoch 846, Training loss 5.950603, Validation loss 0.054035\n", "Epoch 847, Training loss 5.430514, Validation loss 25.080822\n", "Epoch 848, Training loss 4.640649, Validation loss 0.424030\n", "Epoch 849, Training loss 8.537115, Validation loss 27.547998\n", "Epoch 850, Training loss 5.878161, Validation loss 0.046122\n", "Epoch 851, Training loss 5.429820, Validation loss 25.072134\n", "Epoch 852, Training loss 4.650631, Validation loss 0.427027\n", "Epoch 853, Training loss 8.597224, Validation loss 27.607958\n", "Epoch 854, Training loss 5.883088, Validation loss 0.046839\n", "Epoch 855, Training loss 5.396325, Validation loss 25.018652\n", "Epoch 856, Training loss 4.594072, Validation loss 0.446642\n", "Epoch 857, Training loss 8.494679, Validation loss 27.426365\n", "Epoch 858, Training loss 5.816151, Validation loss 0.041082\n", "Epoch 859, Training loss 5.393920, Validation loss 25.011824\n", "Epoch 860, Training loss 4.599407, Validation loss 0.450406\n", "Epoch 861, Training loss 8.541696, Validation loss 27.468531\n", "Epoch 862, Training loss 5.815668, Validation loss 0.041223\n", "Epoch 863, Training loss 5.364671, Validation loss 24.969236\n", "Epoch 864, Training loss 4.547820, Validation loss 0.468869\n", "Epoch 865, Training loss 8.447283, Validation loss 27.299620\n", "Epoch 866, Training loss 5.752754, Validation loss 0.037248\n", "Epoch 867, Training loss 5.361246, Validation loss 24.964647\n", "Epoch 868, Training loss 4.549717, Validation loss 0.473154\n", "Epoch 869, Training loss 8.483911, Validation loss 27.327820\n", "Epoch 870, Training loss 5.748040, Validation loss 0.037110\n", "Epoch 871, Training loss 5.335412, Validation loss 
24.931299\n", "Epoch 872, Training loss 4.502062, Validation loss 0.490649\n", "Epoch 873, Training loss 8.395741, Validation loss 27.168228\n", "Epoch 874, Training loss 5.688338, Validation loss 0.034683\n", "Epoch 875, Training loss 5.331336, Validation loss 24.928894\n", "Epoch 876, Training loss 4.501190, Validation loss 0.495335\n", "Epoch 877, Training loss 8.423498, Validation loss 27.184399\n", "Epoch 878, Training loss 5.680104, Validation loss 0.034467\n", "Epoch 879, Training loss 5.308385, Validation loss 24.903450\n", "Epoch 880, Training loss 4.456902, Validation loss 0.511948\n", "Epoch 881, Training loss 8.340639, Validation loss 27.032303\n", "Epoch 882, Training loss 5.623231, Validation loss 0.033418\n", "Epoch 883, Training loss 5.303802, Validation loss 24.902777\n", "Epoch 884, Training loss 4.453588, Validation loss 0.517000\n", "Epoch 885, Training loss 8.360274, Validation loss 27.037285\n", "Epoch 886, Training loss 5.611818, Validation loss 0.033277\n", "Epoch 887, Training loss 5.283329, Validation loss 24.883947\n", "Epoch 888, Training loss 4.412379, Validation loss 0.532776\n", "Epoch 889, Training loss 8.282500, Validation loss 26.892080\n", "Epoch 890, Training loss 5.557726, Validation loss 0.033465\n", "Epoch 891, Training loss 5.278263, Validation loss 24.884357\n", "Epoch 892, Training loss 4.406768, Validation loss 0.538208\n", "Epoch 893, Training loss 8.294366, Validation loss 26.886105\n", "Epoch 894, Training loss 5.543307, Validation loss 0.033534\n", "Epoch 895, Training loss 5.259980, Validation loss 24.870842\n", "Epoch 896, Training loss 4.368544, Validation loss 0.553137\n", "Epoch 897, Training loss 8.221750, Validation loss 26.747910\n", "Epoch 898, Training loss 5.492046, Validation loss 0.034819\n", "Epoch 899, Training loss 5.254383, Validation loss 24.871532\n", "Epoch 900, Training loss 4.360684, Validation loss 0.558995\n", "Epoch 901, Training loss 8.225985, Validation loss 26.730892\n", "Epoch 902, Training loss 5.474742, Validation loss 0.035229\n", "Epoch 903, Training loss 5.238024, Validation loss 24.862120\n", "Epoch 904, Training loss 4.325402, Validation loss 0.573071\n", "Epoch 905, Training loss 8.158757, Validation loss 26.600224\n", "Epoch 906, Training loss 5.426439, Validation loss 0.037465\n", "Epoch 907, Training loss 5.231879, Validation loss 24.862534\n", "Epoch 908, Training loss 4.315344, Validation loss 0.579395\n", "Epoch 909, Training loss 8.155540, Validation loss 26.572245\n", "Epoch 910, Training loss 5.406352, Validation loss 0.038340\n", "Epoch 911, Training loss 5.217208, Validation loss 24.855927\n", "Epoch 912, Training loss 4.282948, Validation loss 0.592613\n", "Epoch 913, Training loss 8.093832, Validation loss 26.449535\n", "Epoch 914, Training loss 5.361077, Validation loss 0.041379\n", "Epoch 915, Training loss 5.210475, Validation loss 24.855370\n", "Epoch 916, Training loss 4.270804, Validation loss 0.599438\n", "Epoch 917, Training loss 8.083534, Validation loss 26.411045\n", "Epoch 918, Training loss 5.338424, Validation loss 0.042832\n", "Epoch 919, Training loss 5.197261, Validation loss 24.850412\n", "Epoch 920, Training loss 4.241187, Validation loss 0.611809\n", "Epoch 921, Training loss 8.027293, Validation loss 26.296688\n", "Epoch 922, Training loss 5.296165, Validation loss 0.046525\n", "Epoch 923, Training loss 5.189949, Validation loss 24.848431\n", "Epoch 924, Training loss 4.227128, Validation loss 0.619145\n", "Epoch 925, Training loss 8.010474, Validation loss 26.248423\n", 
"Epoch 926, Training loss 5.271204, Validation loss 0.048651\n", "Epoch 927, Training loss 5.177940, Validation loss 24.843870\n", "Epoch 928, Training loss 4.200097, Validation loss 0.630716\n", "Epoch 929, Training loss 7.959422, Validation loss 26.142569\n", "Epoch 930, Training loss 5.231835, Validation loss 0.052869\n", "Epoch 931, Training loss 5.170122, Validation loss 24.840170\n", "Epoch 932, Training loss 4.184405, Validation loss 0.638528\n", "Epoch 933, Training loss 7.936930, Validation loss 26.085808\n", "Epoch 934, Training loss 5.204974, Validation loss 0.055727\n", "Epoch 935, Training loss 5.159074, Validation loss 24.835014\n", "Epoch 936, Training loss 4.159683, Validation loss 0.649382\n", "Epoch 937, Training loss 7.890501, Validation loss 25.988106\n", "Epoch 938, Training loss 5.168226, Validation loss 0.060369\n", "Epoch 939, Training loss 5.150834, Validation loss 24.829365\n", "Epoch 940, Training loss 4.142725, Validation loss 0.657597\n", "Epoch 941, Training loss 7.863435, Validation loss 25.924768\n", "Epoch 942, Training loss 5.139974, Validation loss 0.063971\n", "Epoch 943, Training loss 5.140541, Validation loss 24.822859\n", "Epoch 944, Training loss 4.119956, Validation loss 0.667838\n", "Epoch 945, Training loss 7.820835, Validation loss 25.834425\n", "Epoch 946, Training loss 5.105478, Validation loss 0.068974\n", "Epoch 947, Training loss 5.131994, Validation loss 24.815102\n", "Epoch 948, Training loss 4.102151, Validation loss 0.676358\n", "Epoch 949, Training loss 7.790409, Validation loss 25.766602\n", "Epoch 950, Training loss 5.076359, Validation loss 0.073293\n", "Epoch 951, Training loss 5.122235, Validation loss 24.806564\n", "Epoch 952, Training loss 4.080940, Validation loss 0.686118\n", "Epoch 953, Training loss 7.750748, Validation loss 25.682753\n", "Epoch 954, Training loss 5.043687, Validation loss 0.078635\n", "Epoch 955, Training loss 5.113511, Validation loss 24.796556\n", "Epoch 956, Training loss 4.062717, Validation loss 0.694825\n", "Epoch 957, Training loss 7.718199, Validation loss 25.612772\n", "Epoch 958, Training loss 5.014244, Validation loss 0.083603\n", "Epoch 959, Training loss 5.104135, Validation loss 24.785704\n", "Epoch 960, Training loss 4.042712, Validation loss 0.704223\n", "Epoch 961, Training loss 7.680632, Validation loss 25.534277\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 962, Training loss 4.983005, Validation loss 0.089282\n", "Epoch 963, Training loss 5.095335, Validation loss 24.773336\n", "Epoch 964, Training loss 4.024413, Validation loss 0.713026\n", "Epoch 965, Training loss 7.646944, Validation loss 25.464121\n", "Epoch 966, Training loss 4.953629, Validation loss 0.094823\n", "Epoch 967, Training loss 5.086253, Validation loss 24.760117\n", "Epoch 968, Training loss 4.005338, Validation loss 0.722155\n", "Epoch 969, Training loss 7.610851, Validation loss 25.390299\n", "Epoch 970, Training loss 4.923534, Validation loss 0.100848\n", "Epoch 971, Training loss 5.077470, Validation loss 24.745378\n", "Epoch 972, Training loss 3.987208, Validation loss 0.730973\n", "Epoch 973, Training loss 7.576739, Validation loss 25.321468\n", "Epoch 974, Training loss 4.894485, Validation loss 0.106889\n", "Epoch 975, Training loss 5.068609, Validation loss 24.729694\n", "Epoch 976, Training loss 3.968877, Validation loss 0.739910\n", "Epoch 977, Training loss 7.541708, Validation loss 25.251902\n", "Epoch 978, Training loss 4.865322, Validation loss 0.113258\n", "Epoch 979, Training 
loss 5.059925, Validation loss 24.712692\n", "Epoch 980, Training loss 3.951095, Validation loss 0.748679\n", "Epoch 981, Training loss 7.507665, Validation loss 25.185455\n", "Epoch 982, Training loss 4.836790, Validation loss 0.119736\n", "Epoch 983, Training loss 5.051256, Validation loss 24.694595\n", "Epoch 984, Training loss 3.933387, Validation loss 0.757469\n", "Epoch 985, Training loss 7.473421, Validation loss 25.119884\n", "Epoch 986, Training loss 4.808396, Validation loss 0.126456\n", "Epoch 987, Training loss 5.042718, Validation loss 24.675383\n", "Epoch 988, Training loss 3.916047, Validation loss 0.766163\n", "Epoch 989, Training loss 7.439720, Validation loss 25.056641\n", "Epoch 990, Training loss 4.780438, Validation loss 0.133323\n", "Epoch 991, Training loss 5.034255, Validation loss 24.655140\n", "Epoch 992, Training loss 3.898890, Validation loss 0.774830\n", "Epoch 993, Training loss 7.406108, Validation loss 24.994844\n", "Epoch 994, Training loss 4.752717, Validation loss 0.140391\n", "Epoch 995, Training loss 5.025905, Validation loss 24.633804\n", "Epoch 996, Training loss 3.882033, Validation loss 0.783430\n", "Epoch 997, Training loss 7.372884, Validation loss 24.935219\n", "Epoch 998, Training loss 4.725337, Validation loss 0.147616\n", "Epoch 999, Training loss 5.017658, Validation loss 24.611500\n", "Epoch 1000, Training loss 3.865397, Validation loss 0.791989\n", "Epoch 1001, Training loss 7.339838, Validation loss 24.877291\n", "Epoch 1002, Training loss 4.698221, Validation loss 0.155020\n", "Epoch 1003, Training loss 5.009546, Validation loss 24.588322\n", "Epoch 1004, Training loss 3.849026, Validation loss 0.800492\n", "Epoch 1005, Training loss 7.307092, Validation loss 24.821371\n", "Epoch 1006, Training loss 4.671402, Validation loss 0.162587\n", "Epoch 1007, Training loss 5.001568, Validation loss 24.564255\n", "Epoch 1008, Training loss 3.832910, Validation loss 0.808937\n", "Epoch 1009, Training loss 7.274592, Validation loss 24.767376\n", "Epoch 1010, Training loss 4.644831, Validation loss 0.170321\n", "Epoch 1011, Training loss 4.993721, Validation loss 24.539305\n", "Epoch 1012, Training loss 3.817030, Validation loss 0.817334\n", "Epoch 1013, Training loss 7.242318, Validation loss 24.715311\n", "Epoch 1014, Training loss 4.618500, Validation loss 0.178221\n", "Epoch 1015, Training loss 4.986044, Validation loss 24.513720\n", "Epoch 1016, Training loss 3.801403, Validation loss 0.825668\n", "Epoch 1017, Training loss 7.210284, Validation loss 24.665138\n", "Epoch 1018, Training loss 4.592413, Validation loss 0.186285\n", "Epoch 1019, Training loss 4.978513, Validation loss 24.487354\n", "Epoch 1020, Training loss 3.786003, Validation loss 0.833960\n", "Epoch 1021, Training loss 7.178426, Validation loss 24.616814\n", "Epoch 1022, Training loss 4.566526, Validation loss 0.194521\n", "Epoch 1023, Training loss 4.971158, Validation loss 24.460289\n", "Epoch 1024, Training loss 3.770860, Validation loss 0.842187\n", "Epoch 1025, Training loss 7.146797, Validation loss 24.570419\n", "Epoch 1026, Training loss 4.540854, Validation loss 0.202918\n", "Epoch 1027, Training loss 4.963986, Validation loss 24.432711\n", "Epoch 1028, Training loss 3.755937, Validation loss 0.850365\n", "Epoch 1029, Training loss 7.115311, Validation loss 24.525795\n", "Epoch 1030, Training loss 4.515358, Validation loss 0.211488\n", "Epoch 1031, Training loss 4.957007, Validation loss 24.404526\n", "Epoch 1032, Training loss 3.741253, Validation loss 0.858481\n", 
"Epoch 1033, Training loss 7.084004, Validation loss 24.483061\n", "Epoch 1034, Training loss 4.490029, Validation loss 0.220231\n", "Epoch 1035, Training loss 4.950221, Validation loss 24.375849\n", "Epoch 1036, Training loss 3.726769, Validation loss 0.866553\n", "Epoch 1037, Training loss 7.052761, Validation loss 24.441889\n", "Epoch 1038, Training loss 4.464833, Validation loss 0.229163\n", "Epoch 1039, Training loss 4.943648, Validation loss 24.346668\n", "Epoch 1040, Training loss 3.712535, Validation loss 0.874560\n", "Epoch 1041, Training loss 7.021694, Validation loss 24.402615\n", "Epoch 1042, Training loss 4.439805, Validation loss 0.238263\n", "Epoch 1043, Training loss 4.937306, Validation loss 24.317127\n", "Epoch 1044, Training loss 3.698512, Validation loss 0.882507\n", "Epoch 1045, Training loss 6.990677, Validation loss 24.364916\n", "Epoch 1046, Training loss 4.414879, Validation loss 0.247558\n", "Epoch 1047, Training loss 4.931192, Validation loss 24.287222\n", "Epoch 1048, Training loss 3.684697, Validation loss 0.890401\n", "Epoch 1049, Training loss 6.959714, Validation loss 24.328825\n", "Epoch 1050, Training loss 4.390054, Validation loss 0.257051\n", "Epoch 1051, Training loss 4.925318, Validation loss 24.256931\n", "Epoch 1052, Training loss 3.671107, Validation loss 0.898231\n", "Epoch 1053, Training loss 6.928822, Validation loss 24.294359\n", "Epoch 1054, Training loss 4.365343, Validation loss 0.266733\n", "Epoch 1055, Training loss 4.919699, Validation loss 24.226385\n", "Epoch 1056, Training loss 3.657717, Validation loss 0.905998\n", "Epoch 1057, Training loss 6.897900, Validation loss 24.261293\n", "Epoch 1058, Training loss 4.340680, Validation loss 0.276635\n", "Epoch 1059, Training loss 4.914336, Validation loss 24.195532\n", "Epoch 1060, Training loss 3.644522, Validation loss 0.913713\n", "Epoch 1061, Training loss 6.866967, Validation loss 24.229706\n", "Epoch 1062, Training loss 4.316102, Validation loss 0.286747\n", "Epoch 1063, Training loss 4.909268, Validation loss 24.164551\n", "Epoch 1064, Training loss 3.631541, Validation loss 0.921351\n", "Epoch 1065, Training loss 6.835995, Validation loss 24.199528\n", "Epoch 1066, Training loss 4.291558, Validation loss 0.297087\n", "Epoch 1067, Training loss 4.904467, Validation loss 24.133289\n", "Epoch 1068, Training loss 3.618752, Validation loss 0.928931\n", "Epoch 1069, Training loss 6.804950, Validation loss 24.170641\n", "Epoch 1070, Training loss 4.267061, Validation loss 0.307660\n", "Epoch 1071, Training loss 4.899980, Validation loss 24.101934\n", "Epoch 1072, Training loss 3.606148, Validation loss 0.936446\n", "Epoch 1073, Training loss 6.773772, Validation loss 24.142952\n", "Epoch 1074, Training loss 4.242577, Validation loss 0.318484\n", "Epoch 1075, Training loss 4.895799, Validation loss 24.070387\n", "Epoch 1076, Training loss 3.593750, Validation loss 0.943885\n", "Epoch 1077, Training loss 6.742509, Validation loss 24.116585\n", "Epoch 1078, Training loss 4.218121, Validation loss 0.329557\n", "Epoch 1079, Training loss 4.891948, Validation loss 24.038788\n", "Epoch 1080, Training loss 3.581536, Validation loss 0.951249\n", "Epoch 1081, Training loss 6.711064, Validation loss 24.091223\n", "Epoch 1082, Training loss 4.193661, Validation loss 0.340900\n", "Epoch 1083, Training loss 4.888427, Validation loss 24.007057\n", "Epoch 1084, Training loss 3.569510, Validation loss 0.958541\n", "Epoch 1085, Training loss 6.679437, Validation loss 24.066990\n", "Epoch 1086, Training loss 
4.169195, Validation loss 0.352520\n", "Epoch 1087, Training loss 4.885265, Validation loss 23.975285\n", "Epoch 1088, Training loss 3.557672, Validation loss 0.965751\n", "Epoch 1089, Training loss 6.647591, Validation loss 24.043766\n", "Epoch 1090, Training loss 4.144716, Validation loss 0.364432\n", "Epoch 1091, Training loss 4.882491, Validation loss 23.943590\n", "Epoch 1092, Training loss 3.546032, Validation loss 0.972868\n", "Epoch 1093, Training loss 6.615509, Validation loss 24.021494\n", "Epoch 1094, Training loss 4.120219, Validation loss 0.376646\n", "Epoch 1095, Training loss 4.880080, Validation loss 23.911760\n", "Epoch 1096, Training loss 3.534562, Validation loss 0.979910\n", "Epoch 1097, Training loss 6.583106, Validation loss 23.999962\n", "Epoch 1098, Training loss 4.095669, Validation loss 0.389190\n", "Epoch 1099, Training loss 4.878071, Validation loss 23.879961\n", "Epoch 1100, Training loss 3.523279, Validation loss 0.986852\n", "Epoch 1101, Training loss 6.550410, Validation loss 23.979265\n", "Epoch 1102, Training loss 4.071094, Validation loss 0.402061\n", "Epoch 1103, Training loss 4.876504, Validation loss 23.848263\n", "Epoch 1104, Training loss 3.512193, Validation loss 0.993694\n", "Epoch 1105, Training loss 6.517367, Validation loss 23.959263\n", "Epoch 1106, Training loss 4.046466, Validation loss 0.415288\n", "Epoch 1107, Training loss 4.875353, Validation loss 23.816528\n", "Epoch 1108, Training loss 3.501286, Validation loss 1.000437\n", "Epoch 1109, Training loss 6.483933, Validation loss 23.939884\n", "Epoch 1110, Training loss 4.021796, Validation loss 0.428882\n", "Epoch 1111, Training loss 4.874693, Validation loss 23.784927\n", "Epoch 1112, Training loss 3.490586, Validation loss 1.007054\n", "Epoch 1113, Training loss 6.450097, Validation loss 23.921095\n", "Epoch 1114, Training loss 3.997072, Validation loss 0.442865\n", "Epoch 1115, Training loss 4.874501, Validation loss 23.753338\n", "Epoch 1116, Training loss 3.480082, Validation loss 1.013556\n", "Epoch 1117, Training loss 6.415820, Validation loss 23.902800\n", "Epoch 1118, Training loss 3.972297, Validation loss 0.457245\n", "Epoch 1119, Training loss 4.874815, Validation loss 23.721865\n", "Epoch 1120, Training loss 3.469788, Validation loss 1.019928\n", "Epoch 1121, Training loss 6.381073, Validation loss 23.884888\n", "Epoch 1122, Training loss 3.947477, Validation loss 0.472043\n", "Epoch 1123, Training loss 4.875666, Validation loss 23.690510\n", "Epoch 1124, Training loss 3.459688, Validation loss 1.026163\n", "Epoch 1125, Training loss 6.345750, Validation loss 23.867153\n", "Epoch 1126, Training loss 3.922577, Validation loss 0.487304\n", "Epoch 1127, Training loss 4.877075, Validation loss 23.659277\n", "Epoch 1128, Training loss 3.449817, Validation loss 1.032243\n", "Epoch 1129, Training loss 6.309896, Validation loss 23.849636\n", "Epoch 1130, Training loss 3.897635, Validation loss 0.503017\n", "Epoch 1131, Training loss 4.879073, Validation loss 23.628126\n", "Epoch 1132, Training loss 3.440179, Validation loss 1.038162\n", "Epoch 1133, Training loss 6.273452, Validation loss 23.832321\n", "Epoch 1134, Training loss 3.872656, Validation loss 0.519214\n", "Epoch 1135, Training loss 4.881695, Validation loss 23.597206\n", "Epoch 1136, Training loss 3.430771, Validation loss 1.043900\n", "Epoch 1137, Training loss 6.236340, Validation loss 23.814764\n", "Epoch 1138, Training loss 3.847638, Validation loss 0.535929\n", "Epoch 1139, Training loss 4.884972, Validation loss 
23.566401\n", "Epoch 1140, Training loss 3.421635, Validation loss 1.049438\n", "Epoch 1141, Training loss 6.198604, Validation loss 23.797245\n", "Epoch 1142, Training loss 3.822604, Validation loss 0.553156\n", "Epoch 1143, Training loss 4.888924, Validation loss 23.535738\n", "Epoch 1144, Training loss 3.412762, Validation loss 1.054780\n", "Epoch 1145, Training loss 6.160117, Validation loss 23.779358\n", "Epoch 1146, Training loss 3.797547, Validation loss 0.570948\n", "Epoch 1147, Training loss 4.893607, Validation loss 23.505285\n", "Epoch 1148, Training loss 3.404189, Validation loss 1.059886\n", "Epoch 1149, Training loss 6.120894, Validation loss 23.761110\n", "Epoch 1150, Training loss 3.772511, Validation loss 0.589310\n", "Epoch 1151, Training loss 4.899068, Validation loss 23.475092\n", "Epoch 1152, Training loss 3.395945, Validation loss 1.064733\n", "Epoch 1153, Training loss 6.080903, Validation loss 23.742352\n", "Epoch 1154, Training loss 3.747502, Validation loss 0.608267\n", "Epoch 1155, Training loss 4.905309, Validation loss 23.445091\n", "Epoch 1156, Training loss 3.388032, Validation loss 1.069325\n", "Epoch 1157, Training loss 6.040082, Validation loss 23.722727\n", "Epoch 1158, Training loss 3.722569, Validation loss 0.627837\n", "Epoch 1159, Training loss 4.912385, Validation loss 23.415304\n", "Epoch 1160, Training loss 3.380501, Validation loss 1.073614\n", "Epoch 1161, Training loss 5.998422, Validation loss 23.702238\n", "Epoch 1162, Training loss 3.697736, Validation loss 0.648039\n", "Epoch 1163, Training loss 4.920333, Validation loss 23.385771\n", "Epoch 1164, Training loss 3.373380, Validation loss 1.077592\n", "Epoch 1165, Training loss 5.955918, Validation loss 23.680641\n", "Epoch 1166, Training loss 3.673070, Validation loss 0.668876\n", "Epoch 1167, Training loss 4.929187, Validation loss 23.356535\n", "Epoch 1168, Training loss 3.366713, Validation loss 1.081223\n", "Epoch 1169, Training loss 5.912540, Validation loss 23.657700\n", "Epoch 1170, Training loss 3.648601, Validation loss 0.690372\n", "Epoch 1171, Training loss 4.938952, Validation loss 23.327599\n", "Epoch 1172, Training loss 3.360542, Validation loss 1.084494\n", "Epoch 1173, Training loss 5.868330, Validation loss 23.633215\n", "Epoch 1174, Training loss 3.624421, Validation loss 0.712511\n", "Epoch 1175, Training loss 4.949632, Validation loss 23.298883\n", "Epoch 1176, Training loss 3.354912, Validation loss 1.087388\n", "Epoch 1177, Training loss 5.823300, Validation loss 23.606937\n", "Epoch 1178, Training loss 3.600602, Validation loss 0.735282\n", "Epoch 1179, Training loss 4.961257, Validation loss 23.270565\n", "Epoch 1180, Training loss 3.349885, Validation loss 1.089874\n", "Epoch 1181, Training loss 5.777536, Validation loss 23.578632\n", "Epoch 1182, Training loss 3.577253, Validation loss 0.758658\n", "Epoch 1183, Training loss 4.973740, Validation loss 23.242477\n", "Epoch 1184, Training loss 3.345498, Validation loss 1.091971\n", "Epoch 1185, Training loss 5.731105, Validation loss 23.548016\n", "Epoch 1186, Training loss 3.554471, Validation loss 0.782603\n", "Epoch 1187, Training loss 4.987042, Validation loss 23.214653\n", "Epoch 1188, Training loss 3.341816, Validation loss 1.093670\n", "Epoch 1189, Training loss 5.684160, Validation loss 23.514858\n", "Epoch 1190, Training loss 3.532397, Validation loss 0.807043\n", "Epoch 1191, Training loss 5.001061, Validation loss 23.187094\n", "Epoch 1192, Training loss 3.338896, Validation loss 1.094981\n", "Epoch 1193, 
Training loss 5.636908, Validation loss 23.478926\n", "Epoch 1194, Training loss 3.511174, Validation loss 0.831887\n", "Epoch 1195, Training loss 5.015607, Validation loss 23.159668\n", "Epoch 1196, Training loss 3.336765, Validation loss 1.095972\n", "Epoch 1197, Training loss 5.589608, Validation loss 23.440069\n", "Epoch 1198, Training loss 3.490962, Validation loss 0.857015\n", "Epoch 1199, Training loss 5.030445, Validation loss 23.132259\n", "Epoch 1200, Training loss 3.335462, Validation loss 1.096711\n", "Epoch 1201, Training loss 5.542574, Validation loss 23.398111\n", "Epoch 1202, Training loss 3.471953, Validation loss 0.882271\n", "Epoch 1203, Training loss 5.045337, Validation loss 23.104876\n", "Epoch 1204, Training loss 3.335026, Validation loss 1.097270\n", "Epoch 1205, Training loss 5.496219, Validation loss 23.353140\n", "Epoch 1206, Training loss 3.454320, Validation loss 0.907459\n", "Epoch 1207, Training loss 5.059857, Validation loss 23.077070\n", "Epoch 1208, Training loss 3.335430, Validation loss 1.097821\n", "Epoch 1209, Training loss 5.450957, Validation loss 23.305075\n", "Epoch 1210, Training loss 3.438256, Validation loss 0.932374\n", "Epoch 1211, Training loss 5.073645, Validation loss 23.048824\n", "Epoch 1212, Training loss 3.336668, Validation loss 1.098495\n", "Epoch 1213, Training loss 5.407274, Validation loss 23.254110\n", "Epoch 1214, Training loss 3.423936, Validation loss 0.956786\n", "Epoch 1215, Training loss 5.086208, Validation loss 23.019615\n", "Epoch 1216, Training loss 3.338700, Validation loss 1.099500\n", "Epoch 1217, Training loss 5.365651, Validation loss 23.200558\n", "Epoch 1218, Training loss 3.411556, Validation loss 0.980441\n", "Epoch 1219, Training loss 5.097123, Validation loss 22.989128\n", "Epoch 1220, Training loss 3.341485, Validation loss 1.101013\n", "Epoch 1221, Training loss 5.326475, Validation loss 23.144611\n", "Epoch 1222, Training loss 3.401230, Validation loss 1.003143\n", "Epoch 1223, Training loss 5.105946, Validation loss 22.956821\n", "Epoch 1224, Training loss 3.344980, Validation loss 1.103235\n", "Epoch 1225, Training loss 5.290102, Validation loss 23.086599\n", "Epoch 1226, Training loss 3.393146, Validation loss 1.024668\n", "Epoch 1227, Training loss 5.112382, Validation loss 22.922213\n", "Epoch 1228, Training loss 3.349185, Validation loss 1.106295\n", "Epoch 1229, Training loss 5.256714, Validation loss 23.026663\n", "Epoch 1230, Training loss 3.387401, Validation loss 1.044901\n", "Epoch 1231, Training loss 5.116183, Validation loss 22.884499\n", "Epoch 1232, Training loss 3.354187, Validation loss 1.110297\n", "Epoch 1233, Training loss 5.226372, Validation loss 22.964827\n", "Epoch 1234, Training loss 3.384133, Validation loss 1.063743\n", "Epoch 1235, Training loss 5.117256, Validation loss 22.842958\n", "Epoch 1236, Training loss 3.360094, Validation loss 1.115275\n", "Epoch 1237, Training loss 5.198947, Validation loss 22.900808\n", "Epoch 1238, Training loss 3.383491, Validation loss 1.081170\n", "Epoch 1239, Training loss 5.115613, Validation loss 22.796572\n", "Epoch 1240, Training loss 3.367177, Validation loss 1.121183\n", "Epoch 1241, Training loss 5.174170, Validation loss 22.833872\n", "Epoch 1242, Training loss 3.385672, Validation loss 1.097208\n", "Epoch 1243, Training loss 5.111359, Validation loss 22.744146\n", "Epoch 1244, Training loss 3.375806, Validation loss 1.127920\n", "Epoch 1245, Training loss 5.151591, Validation loss 22.762781\n", "Epoch 1246, Training loss 3.390936, 
Validation loss 1.111942\n", "Epoch 1247, Training loss 5.104599, Validation loss 22.683929\n", "Epoch 1248, Training loss 3.386526, Validation loss 1.135337\n", "Epoch 1249, Training loss 5.130692, Validation loss 22.685589\n", "Epoch 1250, Training loss 3.399768, Validation loss 1.125438\n", "Epoch 1251, Training loss 5.095431, Validation loss 22.613602\n", "Epoch 1252, Training loss 3.400102, Validation loss 1.143216\n", "Epoch 1253, Training loss 5.110755, Validation loss 22.599182\n", "Epoch 1254, Training loss 3.412826, Validation loss 1.137790\n", "Epoch 1255, Training loss 5.083754, Validation loss 22.529484\n", "Epoch 1256, Training loss 3.417602, Validation loss 1.151348\n", "Epoch 1257, Training loss 5.090856, Validation loss 22.498703\n", "Epoch 1258, Training loss 3.431224, Validation loss 1.149078\n", "Epoch 1259, Training loss 5.069153, Validation loss 22.425821\n", "Epoch 1260, Training loss 3.440608, Validation loss 1.159521\n", "Epoch 1261, Training loss 5.069683, Validation loss 22.376377\n", "Epoch 1262, Training loss 3.456745, Validation loss 1.159358\n", "Epoch 1263, Training loss 5.050590, Validation loss 22.292858\n", "Epoch 1264, Training loss 3.471601, Validation loss 1.167515\n", "Epoch 1265, Training loss 5.045111, Validation loss 22.218895\n", "Epoch 1266, Training loss 3.492393, Validation loss 1.168701\n", "Epoch 1267, Training loss 5.025647, Validation loss 22.113092\n", "Epoch 1268, Training loss 3.514685, Validation loss 1.175228\n", "Epoch 1269, Training loss 5.013133, Validation loss 22.001827\n", "Epoch 1270, Training loss 3.543589, Validation loss 1.177265\n", "Epoch 1271, Training loss 4.989026, Validation loss 21.852909\n", "Epoch 1272, Training loss 3.577457, Validation loss 1.182760\n", "Epoch 1273, Training loss 4.965289, Validation loss 21.676933\n", "Epoch 1274, Training loss 3.620852, Validation loss 1.185587\n", "Epoch 1275, Training loss 4.927980, Validation loss 21.442472\n", "Epoch 1276, Training loss 3.675137, Validation loss 1.190997\n", "Epoch 1277, Training loss 4.880942, Validation loss 21.140686\n", "Epoch 1278, Training loss 3.746391, Validation loss 1.195606\n", "Epoch 1279, Training loss 4.809131, Validation loss 20.728598\n", "Epoch 1280, Training loss 3.840270, Validation loss 1.203603\n", "Epoch 1281, Training loss 4.704850, Validation loss 20.172522\n", "Epoch 1282, Training loss 3.965367, Validation loss 1.214599\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1283, Training loss 4.545243, Validation loss 19.430742\n", "Epoch 1284, Training loss 4.122947, Validation loss 1.234282\n", "Epoch 1285, Training loss 4.321803, Validation loss 18.548204\n", "Epoch 1286, Training loss 4.285386, Validation loss 1.265616\n", "Epoch 1287, Training loss 4.070440, Validation loss 17.752331\n", "Epoch 1288, Training loss 4.375134, Validation loss 1.306970\n", "Epoch 1289, Training loss 3.902465, Validation loss 17.374872\n", "Epoch 1290, Training loss 4.372883, Validation loss 1.333792\n", "Epoch 1291, Training loss 3.854694, Validation loss 17.395788\n", "Epoch 1292, Training loss 4.363390, Validation loss 1.327768\n", "Epoch 1293, Training loss 3.854487, Validation loss 17.540775\n", "Epoch 1294, Training loss 4.353684, Validation loss 1.304486\n", "Epoch 1295, Training loss 3.869378, Validation loss 17.726860\n", "Epoch 1296, Training loss 4.338890, Validation loss 1.277957\n", "Epoch 1297, Training loss 3.895749, Validation loss 17.937157\n", "Epoch 1298, Training loss 4.322454, Validation loss 1.250056\n", "Epoch 
1299, Training loss 3.922723, Validation loss 18.147549\n", "Epoch 1300, Training loss 4.302906, Validation loss 1.225157\n", "Epoch 1301, Training loss 3.951964, Validation loss 18.356657\n", "Epoch 1302, Training loss 4.282921, Validation loss 1.202191\n", "Epoch 1303, Training loss 3.977236, Validation loss 18.552946\n", "Epoch 1304, Training loss 4.261102, Validation loss 1.183429\n", "Epoch 1305, Training loss 4.002140, Validation loss 18.740515\n", "Epoch 1306, Training loss 4.239950, Validation loss 1.166901\n", "Epoch 1307, Training loss 4.021754, Validation loss 18.911612\n", "Epoch 1308, Training loss 4.217580, Validation loss 1.154367\n", "Epoch 1309, Training loss 4.040874, Validation loss 19.073183\n", "Epoch 1310, Training loss 4.196852, Validation loss 1.143462\n", "Epoch 1311, Training loss 4.054330, Validation loss 19.217617\n", "Epoch 1312, Training loss 4.174827, Validation loss 1.136119\n", "Epoch 1313, Training loss 4.068251, Validation loss 19.354593\n", "Epoch 1314, Training loss 4.155634, Validation loss 1.129528\n", "Epoch 1315, Training loss 4.076116, Validation loss 19.474146\n", "Epoch 1316, Training loss 4.134075, Validation loss 1.126290\n", "Epoch 1317, Training loss 4.085958, Validation loss 19.590012\n", "Epoch 1318, Training loss 4.117385, Validation loss 1.122894\n", "Epoch 1319, Training loss 4.088804, Validation loss 19.686884\n", "Epoch 1320, Training loss 4.095459, Validation loss 1.122948\n", "Epoch 1321, Training loss 4.095973, Validation loss 19.786264\n", "Epoch 1322, Training loss 4.083012, Validation loss 1.121773\n", "Epoch 1323, Training loss 4.093889, Validation loss 19.861324\n", "Epoch 1324, Training loss 4.057966, Validation loss 1.124694\n", "Epoch 1325, Training loss 4.100281, Validation loss 19.950855\n", "Epoch 1326, Training loss 4.054206, Validation loss 1.124810\n", "Epoch 1327, Training loss 4.092054, Validation loss 20.000912\n", "Epoch 1328, Training loss 4.018225, Validation loss 1.130721\n", "Epoch 1329, Training loss 4.101517, Validation loss 20.093567\n", "Epoch 1330, Training loss 4.036318, Validation loss 1.130700\n", "Epoch 1331, Training loss 4.082317, Validation loss 20.103687\n", "Epoch 1332, Training loss 3.966157, Validation loss 1.141027\n", "Epoch 1333, Training loss 4.105549, Validation loss 20.233772\n", "Epoch 1334, Training loss 4.047421, Validation loss 1.137320\n", "Epoch 1335, Training loss 4.059656, Validation loss 20.153280\n", "Epoch 1336, Training loss 3.870672, Validation loss 1.157543\n", "Epoch 1337, Training loss 4.132974, Validation loss 20.430338\n", "Epoch 1338, Training loss 4.150254, Validation loss 1.139043\n", "Epoch 1339, Training loss 4.017955, Validation loss 20.112513\n", "Epoch 1340, Training loss 3.650121, Validation loss 1.185786\n", "Epoch 1341, Training loss 4.269245, Validation loss 20.896748\n", "Epoch 1342, Training loss 4.527927, Validation loss 1.123257\n", "Epoch 1343, Training loss 4.063216, Validation loss 20.187197\n", "Epoch 1344, Training loss 3.459424, Validation loss 1.168220\n", "Epoch 1345, Training loss 4.434628, Validation loss 21.430389\n", "Epoch 1346, Training loss 4.896938, Validation loss 1.121042\n", "Epoch 1347, Training loss 4.353559, Validation loss 20.866434\n", "Epoch 1348, Training loss 4.003387, Validation loss 1.005009\n", "Epoch 1349, Training loss 3.965352, Validation loss 20.404722\n", "Epoch 1350, Training loss 3.889529, Validation loss 1.238056\n", "Epoch 1351, Training loss 4.161294, Validation loss 20.650318\n", "Epoch 1352, Training loss 3.885475, 
Validation loss 1.139644\n", "Epoch 1353, Training loss 3.882244, Validation loss 20.345924\n", "Epoch 1354, Training loss 3.844216, Validation loss 1.298275\n", "Epoch 1355, Training loss 4.172743, Validation loss 20.718023\n", "Epoch 1356, Training loss 3.877802, Validation loss 1.142395\n", "Epoch 1357, Training loss 3.820519, Validation loss 20.308655\n", "Epoch 1358, Training loss 3.794166, Validation loss 1.343173\n", "Epoch 1359, Training loss 4.204235, Validation loss 20.824368\n", "Epoch 1360, Training loss 3.916379, Validation loss 1.135471\n", "Epoch 1361, Training loss 3.737245, Validation loss 20.208426\n", "Epoch 1362, Training loss 3.663925, Validation loss 1.401085\n", "Epoch 1363, Training loss 4.296763, Validation loss 21.082968\n", "Epoch 1364, Training loss 4.138241, Validation loss 1.110625\n", "Epoch 1365, Training loss 3.626645, Validation loss 19.983616\n", "Epoch 1366, Training loss 3.290395, Validation loss 1.490979\n", "Epoch 1367, Training loss 4.667365, Validation loss 21.994820\n", "Epoch 1368, Training loss 4.918585, Validation loss 1.053429\n", "Epoch 1369, Training loss 4.001684, Validation loss 20.877510\n", "Epoch 1370, Training loss 4.092531, Validation loss 1.244513\n", "Epoch 1371, Training loss 4.255609, Validation loss 20.853327\n", "Epoch 1372, Training loss 3.463473, Validation loss 1.114308\n", "Epoch 1373, Training loss 3.909583, Validation loss 20.984879\n", "Epoch 1374, Training loss 4.580344, Validation loss 1.439637\n", "Epoch 1375, Training loss 4.344680, Validation loss 20.999704\n", "Epoch 1376, Training loss 3.444878, Validation loss 1.064903\n", "Epoch 1377, Training loss 3.894629, Validation loss 21.052799\n", "Epoch 1378, Training loss 4.652046, Validation loss 1.479050\n", "Epoch 1379, Training loss 4.435720, Validation loss 21.218838\n", "Epoch 1380, Training loss 3.590867, Validation loss 1.013606\n", "Epoch 1381, Training loss 3.694249, Validation loss 20.699881\n", "Epoch 1382, Training loss 4.323381, Validation loss 1.553703\n", "Epoch 1383, Training loss 4.281088, Validation loss 20.881193\n", "Epoch 1384, Training loss 3.120266, Validation loss 1.144660\n", "Epoch 1385, Training loss 4.067545, Validation loss 21.603861\n", "Epoch 1386, Training loss 5.089019, Validation loss 1.547847\n", "Epoch 1387, Training loss 5.117969, Validation loss 22.774920\n", "Epoch 1388, Training loss 4.988876, Validation loss 0.791610\n", "Epoch 1389, Training loss 3.681765, Validation loss 20.704405\n", "Epoch 1390, Training loss 3.831534, Validation loss 1.476864\n", "Epoch 1391, Training loss 4.509178, Validation loss 21.513752\n", "Epoch 1392, Training loss 3.733366, Validation loss 1.003891\n", "Epoch 1393, Training loss 3.330435, Validation loss 20.157640\n", "Epoch 1394, Training loss 3.764752, Validation loss 1.785235\n", "Epoch 1395, Training loss 4.285597, Validation loss 21.142746\n", "Epoch 1396, Training loss 3.410012, Validation loss 1.155560\n", "Epoch 1397, Training loss 3.559503, Validation loss 20.662819\n", "Epoch 1398, Training loss 4.314045, Validation loss 1.727354\n", "Epoch 1399, Training loss 4.215469, Validation loss 20.944065\n", "Epoch 1400, Training loss 3.027871, Validation loss 1.227826\n", "Epoch 1401, Training loss 4.000429, Validation loss 21.624584\n", "Epoch 1402, Training loss 5.055074, Validation loss 1.665711\n", "Epoch 1403, Training loss 5.068847, Validation loss 22.929277\n", "Epoch 1404, Training loss 5.048196, Validation loss 0.873460\n", "Epoch 1405, Training loss 3.833161, Validation loss 21.195076\n", 
"Epoch 1406, Training loss 4.130022, Validation loss 1.453205\n", "Epoch 1407, Training loss 4.313011, Validation loss 21.189627\n", "Epoch 1408, Training loss 3.042279, Validation loss 1.176981\n", "Epoch 1409, Training loss 3.949703, Validation loss 21.643591\n", "Epoch 1410, Training loss 5.030315, Validation loss 1.733591\n", "Epoch 1411, Training loss 5.062259, Validation loss 23.034328\n", "Epoch 1412, Training loss 5.035407, Validation loss 0.899457\n", "Epoch 1413, Training loss 3.821547, Validation loss 21.302458\n", "Epoch 1414, Training loss 4.132109, Validation loss 1.494094\n", "Epoch 1415, Training loss 4.281759, Validation loss 21.228558\n", "Epoch 1416, Training loss 2.933358, Validation loss 1.230186\n", "Epoch 1417, Training loss 3.975338, Validation loss 21.789064\n", "Epoch 1418, Training loss 5.074176, Validation loss 1.788670\n", "Epoch 1419, Training loss 5.196279, Validation loss 23.468079\n", "Epoch 1420, Training loss 5.319405, Validation loss 0.920788\n", "Epoch 1421, Training loss 4.489898, Validation loss 22.779978\n", "Epoch 1422, Training loss 5.253202, Validation loss 1.306031\n", "Epoch 1423, Training loss 5.332819, Validation loss 23.558050\n", "Epoch 1424, Training loss 5.086411, Validation loss 0.752027\n", "Epoch 1425, Training loss 3.654962, Validation loss 21.241913\n", "Epoch 1426, Training loss 3.895218, Validation loss 1.634729\n", "Epoch 1427, Training loss 4.325224, Validation loss 21.544495\n", "Epoch 1428, Training loss 3.049469, Validation loss 1.223687\n", "Epoch 1429, Training loss 3.731840, Validation loss 21.468517\n", "Epoch 1430, Training loss 4.763937, Validation loss 1.916311\n", "Epoch 1431, Training loss 4.566389, Validation loss 22.354834\n", "Epoch 1432, Training loss 4.185431, Validation loss 1.118483\n", "Epoch 1433, Training loss 3.199829, Validation loss 20.101328\n", "Epoch 1434, Training loss 2.579835, Validation loss 2.154057\n", "Epoch 1435, Training loss 4.704980, Validation loss 22.932304\n", "Epoch 1436, Training loss 5.145938, Validation loss 1.381360\n", "Epoch 1437, Training loss 4.687677, Validation loss 23.357534\n", "Epoch 1438, Training loss 5.549652, Validation loss 1.412836\n", "Epoch 1439, Training loss 6.304076, Validation loss 26.093103\n", "Epoch 1440, Training loss 6.277809, Validation loss 0.743363\n", "Epoch 1441, Training loss 7.307718, Validation loss 31.231268\n", "Epoch 1442, Training loss 5.401064, Validation loss 0.882097\n", "Epoch 1443, Training loss 5.196189, Validation loss 22.490170\n", "Epoch 1444, Training loss 3.950226, Validation loss 0.651061\n", "Epoch 1445, Training loss 2.961954, Validation loss 19.895950\n", "Epoch 1446, Training loss 3.589876, Validation loss 2.360712\n", "Epoch 1447, Training loss 3.818654, Validation loss 20.710712\n", "Epoch 1448, Training loss 2.810826, Validation loss 1.688397\n", "Epoch 1449, Training loss 3.584954, Validation loss 21.043098\n", "Epoch 1450, Training loss 4.542312, Validation loss 2.157133\n", "Epoch 1451, Training loss 3.923113, Validation loss 21.240610\n", "Epoch 1452, Training loss 3.571910, Validation loss 1.602998\n", "Epoch 1453, Training loss 3.098476, Validation loss 19.914864\n", "Epoch 1454, Training loss 2.793103, Validation loss 2.300963\n", "Epoch 1455, Training loss 3.988935, Validation loss 21.610769\n", "Epoch 1456, Training loss 4.395540, Validation loss 1.799225\n", "Epoch 1457, Training loss 3.324392, Validation loss 20.391659\n", "Epoch 1458, Training loss 3.085118, Validation loss 2.088658\n", "Epoch 1459, Training loss 
3.663793, Validation loss 21.015657\n", "Epoch 1460, Training loss 3.759968, Validation loss 1.901386\n", "Epoch 1461, Training loss 3.081038, Validation loss 19.744387\n", "Epoch 1462, Training loss 2.271574, Validation loss 2.471711\n", "Epoch 1463, Training loss 4.005877, Validation loss 21.802113\n", "Epoch 1464, Training loss 4.759434, Validation loss 2.015405\n", "Epoch 1465, Training loss 4.072890, Validation loss 22.005978\n", "Epoch 1466, Training loss 4.577163, Validation loss 1.755270\n", "Epoch 1467, Training loss 3.733250, Validation loss 21.196714\n", "Epoch 1468, Training loss 3.420624, Validation loss 1.769081\n", "Epoch 1469, Training loss 3.322452, Validation loss 20.471296\n", "Epoch 1470, Training loss 2.996811, Validation loss 2.147840\n", "Epoch 1471, Training loss 3.512601, Validation loss 20.906897\n", "Epoch 1472, Training loss 3.717568, Validation loss 2.080323\n", "Epoch 1473, Training loss 3.062311, Validation loss 19.780798\n", "Epoch 1474, Training loss 2.159102, Validation loss 2.572323\n", "Epoch 1475, Training loss 3.820873, Validation loss 21.541147\n", "Epoch 1476, Training loss 4.629906, Validation loss 2.195593\n", "Epoch 1477, Training loss 3.848133, Validation loss 21.664278\n", "Epoch 1478, Training loss 4.255920, Validation loss 1.910849\n", "Epoch 1479, Training loss 3.359125, Validation loss 20.504332\n", "Epoch 1480, Training loss 2.623059, Validation loss 2.174689\n", "Epoch 1481, Training loss 3.845716, Validation loss 21.704540\n", "Epoch 1482, Training loss 4.508442, Validation loss 2.081836\n", "Epoch 1483, Training loss 3.599618, Validation loss 21.200991\n", "Epoch 1484, Training loss 3.596517, Validation loss 1.983080\n", "Epoch 1485, Training loss 3.135678, Validation loss 20.082308\n", "Epoch 1486, Training loss 2.257689, Validation loss 2.499019\n", "Epoch 1487, Training loss 3.842424, Validation loss 21.716364\n", "Epoch 1488, Training loss 4.635364, Validation loss 2.220439\n", "Epoch 1489, Training loss 3.876288, Validation loss 21.846638\n", "Epoch 1490, Training loss 4.287606, Validation loss 1.945491\n", "Epoch 1491, Training loss 3.350990, Validation loss 20.661768\n", "Epoch 1492, Training loss 2.691114, Validation loss 2.208049\n", "Epoch 1493, Training loss 3.710030, Validation loss 21.550171\n", "Epoch 1494, Training loss 4.261029, Validation loss 2.163939\n", "Epoch 1495, Training loss 3.255476, Validation loss 20.576534\n", "Epoch 1496, Training loss 2.815672, Validation loss 2.308847\n", "Epoch 1497, Training loss 3.433973, Validation loss 21.030455\n", "Epoch 1498, Training loss 3.771301, Validation loss 2.289080\n", "Epoch 1499, Training loss 2.964776, Validation loss 19.806946\n", "Epoch 1500, Training loss 1.982884, Validation loss 2.830920\n", "Epoch 1501, Training loss 3.502841, Validation loss 21.081009\n", "Epoch 1502, Training loss 4.303817, Validation loss 2.511890\n", "Epoch 1503, Training loss 3.235156, Validation loss 20.674936\n", "Epoch 1504, Training loss 3.324795, Validation loss 2.395850\n", "Epoch 1505, Training loss 2.929554, Validation loss 19.868046\n", "Epoch 1506, Training loss 2.202369, Validation loss 2.783746\n", "Epoch 1507, Training loss 3.617793, Validation loss 21.387918\n", "Epoch 1508, Training loss 4.445003, Validation loss 2.471227\n", "Epoch 1509, Training loss 3.501235, Validation loss 21.259825\n", "Epoch 1510, Training loss 3.771803, Validation loss 2.245669\n", "Epoch 1511, Training loss 2.979516, Validation loss 19.910856\n", "Epoch 1512, Training loss 1.940667, Validation loss 
2.873774\n", "Epoch 1513, Training loss 3.393580, Validation loss 20.930756\n", "Epoch 1514, Training loss 4.160619, Validation loss 2.625551\n", "Epoch 1515, Training loss 3.000305, Validation loss 20.264170\n", "Epoch 1516, Training loss 2.898251, Validation loss 2.650645\n", "Epoch 1517, Training loss 2.976029, Validation loss 20.200806\n", "Epoch 1518, Training loss 2.960563, Validation loss 2.700724\n", "Epoch 1519, Training loss 2.810830, Validation loss 19.849396\n", "Epoch 1520, Training loss 2.641928, Validation loss 2.886671\n", "Epoch 1521, Training loss 3.026646, Validation loss 20.319836\n", "Epoch 1522, Training loss 3.393333, Validation loss 2.749103\n", "Epoch 1523, Training loss 2.650739, Validation loss 19.304169\n", "Epoch 1524, Training loss 1.782136, Validation loss 3.334609\n", "Epoch 1525, Training loss 3.121417, Validation loss 20.356951\n", "Epoch 1526, Training loss 3.940208, Validation loss 2.915273\n", "Epoch 1527, Training loss 2.696128, Validation loss 19.627338\n", "Epoch 1528, Training loss 2.387998, Validation loss 3.061900\n", "Epoch 1529, Training loss 3.216150, Validation loss 20.703154\n", "Epoch 1530, Training loss 3.862973, Validation loss 2.744498\n", "Epoch 1531, Training loss 2.723150, Validation loss 19.641319\n", "Epoch 1532, Training loss 2.066766, Validation loss 3.123654\n", "Epoch 1533, Training loss 3.424378, Validation loss 21.106533\n", "Epoch 1534, Training loss 4.270907, Validation loss 2.740012\n", "Epoch 1535, Training loss 3.154091, Validation loss 20.745031\n", "Epoch 1536, Training loss 3.355119, Validation loss 2.616902\n", "Epoch 1537, Training loss 2.731452, Validation loss 19.606968\n", "Epoch 1538, Training loss 1.820122, Validation loss 3.261160\n", "Epoch 1539, Training loss 3.161520, Validation loss 20.560644\n", "Epoch 1540, Training loss 3.966764, Validation loss 2.943244\n", "Epoch 1541, Training loss 2.699930, Validation loss 19.773369\n", "Epoch 1542, Training loss 2.451808, Validation loss 3.096818\n", "Epoch 1543, Training loss 3.055089, Validation loss 20.499105\n", "Epoch 1544, Training loss 3.577334, Validation loss 2.879151\n", "Epoch 1545, Training loss 2.570391, Validation loss 19.283882\n", "Epoch 1546, Training loss 1.683896, Validation loss 3.558202\n", "Epoch 1547, Training loss 2.871357, Validation loss 19.950846\n", "Epoch 1548, Training loss 3.614013, Validation loss 3.184680\n", "Epoch 1549, Training loss 2.397010, Validation loss 18.973186\n", "Epoch 1550, Training loss 1.727111, Validation loss 3.714134\n", "Epoch 1551, Training loss 3.013340, Validation loss 20.226570\n", "Epoch 1552, Training loss 3.888650, Validation loss 3.161634\n", "Epoch 1553, Training loss 2.560289, Validation loss 19.518929\n", "Epoch 1554, Training loss 2.331912, Validation loss 3.316769\n", "Epoch 1555, Training loss 2.991144, Validation loss 20.386059\n", "Epoch 1556, Training loss 3.603229, Validation loss 3.029883\n", "Epoch 1557, Training loss 2.490138, Validation loss 19.199135\n", "Epoch 1558, Training loss 1.669033, Validation loss 3.680263\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1559, Training loss 2.856539, Validation loss 19.957199\n", "Epoch 1560, Training loss 3.637913, Validation loss 3.274216\n", "Epoch 1561, Training loss 2.357083, Validation loss 18.978867\n", "Epoch 1562, Training loss 1.746184, Validation loss 3.779813\n", "Epoch 1563, Training loss 3.010276, Validation loss 20.274897\n", "Epoch 1564, Training loss 3.899933, Validation loss 3.232160\n", "Epoch 1565, Training 
loss 2.544573, Validation loss 19.573702\n", "Epoch 1566, Training loss 2.387805, Validation loss 3.368933\n", "Epoch 1567, Training loss 2.831439, Validation loss 20.142277\n", "Epoch 1568, Training loss 3.329229, Validation loss 3.172372\n", "Epoch 1569, Training loss 2.379409, Validation loss 18.937799\n", "Epoch 1570, Training loss 1.512792, Validation loss 4.002896\n", "Epoch 1571, Training loss 2.388624, Validation loss 18.966249\n", "Epoch 1572, Training loss 2.963042, Validation loss 3.690866\n", "Epoch 1573, Training loss 2.073430, Validation loss 18.237715\n", "Epoch 1574, Training loss 1.473709, Validation loss 4.367939\n", "Epoch 1575, Training loss 2.458739, Validation loss 19.001280\n", "Epoch 1576, Training loss 3.269362, Validation loss 3.738612\n", "Epoch 1577, Training loss 2.073964, Validation loss 18.188154\n", "Epoch 1578, Training loss 1.399405, Validation loss 4.480791\n", "Epoch 1579, Training loss 2.180485, Validation loss 18.388893\n", "Epoch 1580, Training loss 2.802650, Validation loss 3.979746\n", "Epoch 1581, Training loss 1.948645, Validation loss 17.947327\n", "Epoch 1582, Training loss 1.502175, Validation loss 4.501919\n", "Epoch 1583, Training loss 2.560792, Validation loss 19.163906\n", "Epoch 1584, Training loss 3.499230, Validation loss 3.755034\n", "Epoch 1585, Training loss 2.103515, Validation loss 18.402050\n", "Epoch 1586, Training loss 1.566072, Validation loss 4.265531\n", "Epoch 1587, Training loss 2.670512, Validation loss 19.491961\n", "Epoch 1588, Training loss 3.569521, Validation loss 3.642209\n", "Epoch 1589, Training loss 2.160016, Validation loss 18.604923\n", "Epoch 1590, Training loss 1.645096, Validation loss 4.149814\n", "Epoch 1591, Training loss 2.786562, Validation loss 19.794632\n", "Epoch 1592, Training loss 3.693829, Validation loss 3.565529\n", "Epoch 1593, Training loss 2.247950, Validation loss 18.922394\n", "Epoch 1594, Training loss 1.885293, Validation loss 3.932382\n", "Epoch 1595, Training loss 2.898785, Validation loss 20.153280\n", "Epoch 1596, Training loss 3.757942, Validation loss 3.451221\n", "Epoch 1597, Training loss 2.334766, Validation loss 19.193991\n", "Epoch 1598, Training loss 1.987091, Validation loss 3.801087\n", "Epoch 1599, Training loss 2.886297, Validation loss 20.217352\n", "Epoch 1600, Training loss 3.666731, Validation loss 3.428002\n", "Epoch 1601, Training loss 2.290336, Validation loss 19.060251\n", "Epoch 1602, Training loss 1.724087, Validation loss 3.979625\n", "Epoch 1603, Training loss 2.852293, Validation loss 20.076948\n", "Epoch 1604, Training loss 3.690933, Validation loss 3.532409\n", "Epoch 1605, Training loss 2.252845, Validation loss 19.024937\n", "Epoch 1606, Training loss 1.820550, Validation loss 3.994496\n", "Epoch 1607, Training loss 2.847608, Validation loss 20.099482\n", "Epoch 1608, Training loss 3.667705, Validation loss 3.548485\n", "Epoch 1609, Training loss 2.236711, Validation loss 18.990767\n", "Epoch 1610, Training loss 1.748320, Validation loss 4.062819\n", "Epoch 1611, Training loss 2.804828, Validation loss 19.993908\n", "Epoch 1612, Training loss 3.620820, Validation loss 3.614430\n", "Epoch 1613, Training loss 2.182384, Validation loss 18.844109\n", "Epoch 1614, Training loss 1.648607, Validation loss 4.203035\n", "Epoch 1615, Training loss 2.700667, Validation loss 19.730707\n", "Epoch 1616, Training loss 3.507890, Validation loss 3.732690\n", "Epoch 1617, Training loss 2.086857, Validation loss 18.527479\n", "Epoch 1618, Training loss 1.457962, Validation 
loss 4.494007\n", "Epoch 1619, Training loss 2.352518, Validation loss 18.904394\n", "Epoch 1620, Training loss 3.069681, Validation loss 4.038711\n", "Epoch 1621, Training loss 1.899186, Validation loss 17.742325\n", "Epoch 1622, Training loss 1.223838, Validation loss 5.236461\n", "Epoch 1623, Training loss 1.493130, Validation loss 16.516754\n", "Epoch 1624, Training loss 1.244460, Validation loss 5.473438\n", "Epoch 1625, Training loss 1.971897, Validation loss 17.485048\n", "Epoch 1626, Training loss 2.911225, Validation loss 4.703843\n", "Epoch 1627, Training loss 1.627073, Validation loss 16.864882\n", "Epoch 1628, Training loss 1.129763, Validation loss 5.763474\n", "Epoch 1629, Training loss 1.329182, Validation loss 15.819125\n", "Epoch 1630, Training loss 1.090606, Validation loss 5.961350\n", "Epoch 1631, Training loss 1.583164, Validation loss 16.377069\n", "Epoch 1632, Training loss 2.375767, Validation loss 5.218410\n", "Epoch 1633, Training loss 1.437116, Validation loss 16.433983\n", "Epoch 1634, Training loss 1.282075, Validation loss 5.600484\n", "Epoch 1635, Training loss 2.057858, Validation loss 17.680225\n", "Epoch 1636, Training loss 3.023311, Validation loss 4.655960\n", "Epoch 1637, Training loss 1.640665, Validation loss 16.892836\n", "Epoch 1638, Training loss 1.126669, Validation loss 5.737253\n", "Epoch 1639, Training loss 1.325234, Validation loss 15.810306\n", "Epoch 1640, Training loss 1.127075, Validation loss 5.917530\n", "Epoch 1641, Training loss 1.715286, Validation loss 16.623329\n", "Epoch 1642, Training loss 2.625791, Validation loss 5.120226\n", "Epoch 1643, Training loss 1.433846, Validation loss 16.233562\n", "Epoch 1644, Training loss 1.064204, Validation loss 6.050484\n", "Epoch 1645, Training loss 1.291153, Validation loss 15.697990\n", "Epoch 1646, Training loss 1.393070, Validation loss 5.796315\n", "Epoch 1647, Training loss 2.056532, Validation loss 17.393854\n", "Epoch 1648, Training loss 3.113835, Validation loss 4.858436\n", "Epoch 1649, Training loss 1.553763, Validation loss 16.659483\n", "Epoch 1650, Training loss 1.104685, Validation loss 5.821150\n", "Epoch 1651, Training loss 1.351827, Validation loss 15.944535\n", "Epoch 1652, Training loss 1.429493, Validation loss 5.659041\n", "Epoch 1653, Training loss 2.077656, Validation loss 17.497265\n", "Epoch 1654, Training loss 3.106217, Validation loss 4.815659\n", "Epoch 1655, Training loss 1.561590, Validation loss 16.661098\n", "Epoch 1656, Training loss 1.097602, Validation loss 5.862647\n", "Epoch 1657, Training loss 1.306633, Validation loss 15.764023\n", "Epoch 1658, Training loss 1.274521, Validation loss 5.843421\n", "Epoch 1659, Training loss 1.977291, Validation loss 17.155293\n", "Epoch 1660, Training loss 2.998612, Validation loss 4.968308\n", "Epoch 1661, Training loss 1.493349, Validation loss 16.356623\n", "Epoch 1662, Training loss 1.062929, Validation loss 6.131917\n", "Epoch 1663, Training loss 1.196205, Validation loss 15.186810\n", "Epoch 1664, Training loss 0.977602, Validation loss 6.461478\n", "Epoch 1665, Training loss 1.255562, Validation loss 15.254313\n", "Epoch 1666, Training loss 1.798504, Validation loss 5.920553\n", "Epoch 1667, Training loss 1.567369, Validation loss 16.341892\n", "Epoch 1668, Training loss 2.369065, Validation loss 5.386581\n", "Epoch 1669, Training loss 1.373514, Validation loss 16.162050\n", "Epoch 1670, Training loss 1.169948, Validation loss 5.892663\n", "Epoch 1671, Training loss 1.789244, Validation loss 16.902407\n", "Epoch 
1672, Training loss 2.665865, Validation loss 5.065215\n", "Epoch 1673, Training loss 1.440729, Validation loss 16.185968\n", "Epoch 1674, Training loss 1.038925, Validation loss 6.222817\n", "Epoch 1675, Training loss 1.167508, Validation loss 15.114006\n", "Epoch 1676, Training loss 1.008765, Validation loss 6.441895\n", "Epoch 1677, Training loss 1.421143, Validation loss 15.632124\n", "Epoch 1678, Training loss 2.221570, Validation loss 5.700950\n", "Epoch 1679, Training loss 1.261538, Validation loss 15.722841\n", "Epoch 1680, Training loss 1.258397, Validation loss 6.038149\n", "Epoch 1681, Training loss 1.943025, Validation loss 17.104807\n", "Epoch 1682, Training loss 2.938188, Validation loss 5.057263\n", "Epoch 1683, Training loss 1.462127, Validation loss 16.212908\n", "Epoch 1684, Training loss 1.043415, Validation loss 6.311165\n", "Epoch 1685, Training loss 1.145551, Validation loss 14.820649\n", "Epoch 1686, Training loss 0.907170, Validation loss 6.932516\n", "Epoch 1687, Training loss 0.960256, Validation loss 13.967172\n", "Epoch 1688, Training loss 0.848610, Validation loss 7.289704\n", "Epoch 1689, Training loss 0.980584, Validation loss 14.016418\n", "Epoch 1690, Training loss 1.308782, Validation loss 6.826544\n", "Epoch 1691, Training loss 1.725149, Validation loss 15.957808\n", "Epoch 1692, Training loss 2.802531, Validation loss 5.706214\n", "Epoch 1693, Training loss 1.244996, Validation loss 15.381709\n", "Epoch 1694, Training loss 0.966400, Validation loss 6.854113\n", "Epoch 1695, Training loss 1.037936, Validation loss 14.218602\n", "Epoch 1696, Training loss 0.869081, Validation loss 7.456659\n", "Epoch 1697, Training loss 0.915990, Validation loss 13.157917\n", "Epoch 1698, Training loss 0.879785, Validation loss 8.428819\n", "Epoch 1699, Training loss 1.145566, Validation loss 11.722018\n", "Epoch 1700, Training loss 1.226470, Validation loss 10.347964\n", "Epoch 1701, Training loss 1.987498, Validation loss 9.853651\n", "Epoch 1702, Training loss 0.643760, Validation loss 10.671838\n", "Epoch 1703, Training loss 0.642227, Validation loss 10.510456\n", "Epoch 1704, Training loss 0.641489, Validation loss 10.611446\n", "Epoch 1705, Training loss 0.642840, Validation loss 10.593135\n", "Epoch 1706, Training loss 0.653342, Validation loss 10.394482\n", "Epoch 1707, Training loss 0.701029, Validation loss 11.041620\n", "Epoch 1708, Training loss 0.918151, Validation loss 9.599333\n", "Epoch 1709, Training loss 1.261063, Validation loss 12.903797\n", "Epoch 1710, Training loss 2.199543, Validation loss 7.941824\n", "Epoch 1711, Training loss 0.781484, Validation loss 13.003472\n", "Epoch 1712, Training loss 0.752899, Validation loss 8.738928\n", "Epoch 1713, Training loss 0.779368, Validation loss 12.544157\n", "Epoch 1714, Training loss 0.794371, Validation loss 9.172372\n", "Epoch 1715, Training loss 0.994726, Validation loss 11.535533\n", "Epoch 1716, Training loss 1.174797, Validation loss 10.759307\n", "Epoch 1717, Training loss 1.919889, Validation loss 9.786684\n", "Epoch 1718, Training loss 0.646169, Validation loss 11.030592\n", "Epoch 1719, Training loss 0.644869, Validation loss 10.333590\n", "Epoch 1720, Training loss 0.645832, Validation loss 11.071282\n", "Epoch 1721, Training loss 0.654626, Validation loss 10.107081\n", "Epoch 1722, Training loss 0.695909, Validation loss 11.466693\n", "Epoch 1723, Training loss 0.880836, Validation loss 9.358672\n", "Epoch 1724, Training loss 1.224939, Validation loss 13.236396\n", "Epoch 1725, Training loss 
2.143293, Validation loss 7.724856\n", "Epoch 1726, Training loss 0.822709, Validation loss 13.474882\n", "Epoch 1727, Training loss 0.780728, Validation loss 8.344075\n", "Epoch 1728, Training loss 0.809030, Validation loss 13.271461\n", "Epoch 1729, Training loss 0.766678, Validation loss 8.280403\n", "Epoch 1730, Training loss 0.794384, Validation loss 13.177095\n", "Epoch 1731, Training loss 0.774056, Validation loss 8.122804\n", "Epoch 1732, Training loss 0.886722, Validation loss 13.625176\n", "Epoch 1733, Training loss 1.216203, Validation loss 7.338114\n", "Epoch 1734, Training loss 1.610569, Validation loss 15.718598\n", "Epoch 1735, Training loss 2.654320, Validation loss 5.965765\n", "Epoch 1736, Training loss 1.221133, Validation loss 15.462883\n", "Epoch 1737, Training loss 0.976214, Validation loss 6.916403\n", "Epoch 1738, Training loss 1.067640, Validation loss 14.490754\n", "Epoch 1739, Training loss 0.899438, Validation loss 7.426108\n", "Epoch 1740, Training loss 0.978388, Validation loss 13.420922\n", "Epoch 1741, Training loss 0.964354, Validation loss 8.488644\n", "Epoch 1742, Training loss 1.345683, Validation loss 11.817493\n", "Epoch 1743, Training loss 1.080786, Validation loss 10.086744\n", "Epoch 1744, Training loss 1.733672, Validation loss 10.269459\n", "Epoch 1745, Training loss 0.676148, Validation loss 10.743203\n", "Epoch 1746, Training loss 0.786557, Validation loss 10.167714\n", "Epoch 1747, Training loss 1.043487, Validation loss 12.138511\n", "Epoch 1748, Training loss 1.801925, Validation loss 8.633101\n", "Epoch 1749, Training loss 0.732127, Validation loss 12.740287\n", "Epoch 1750, Training loss 0.815878, Validation loss 8.605846\n", "Epoch 1751, Training loss 1.112733, Validation loss 14.060615\n", "Epoch 1752, Training loss 1.908086, Validation loss 7.130629\n", "Epoch 1753, Training loss 0.996337, Validation loss 14.811268\n", "Epoch 1754, Training loss 1.119452, Validation loss 6.919318\n", "Epoch 1755, Training loss 1.662272, Validation loss 16.563578\n", "Epoch 1756, Training loss 2.596605, Validation loss 5.530230\n", "Epoch 1757, Training loss 1.409360, Validation loss 16.319986\n", "Epoch 1758, Training loss 1.056753, Validation loss 6.460067\n", "Epoch 1759, Training loss 1.187402, Validation loss 15.200438\n", "Epoch 1760, Training loss 0.944787, Validation loss 6.964106\n", "Epoch 1761, Training loss 1.019464, Validation loss 14.204185\n", "Epoch 1762, Training loss 0.883172, Validation loss 7.651283\n", "Epoch 1763, Training loss 0.977302, Validation loss 13.129611\n", "Epoch 1764, Training loss 1.018743, Validation loss 8.901180\n", "Epoch 1765, Training loss 1.500785, Validation loss 11.434724\n", "Epoch 1766, Training loss 0.882831, Validation loss 10.119214\n", "Epoch 1767, Training loss 1.350002, Validation loss 10.384330\n", "Epoch 1768, Training loss 0.930769, Validation loss 11.456065\n", "Epoch 1769, Training loss 1.543350, Validation loss 9.262296\n", "Epoch 1770, Training loss 0.794396, Validation loss 12.439012\n", "Epoch 1771, Training loss 1.177533, Validation loss 8.607434\n", "Epoch 1772, Training loss 1.240628, Validation loss 14.247339\n", "Epoch 1773, Training loss 2.116361, Validation loss 7.122502\n", "Epoch 1774, Training loss 0.969331, Validation loss 14.605790\n", "Epoch 1775, Training loss 0.876796, Validation loss 7.579855\n", "Epoch 1776, Training loss 0.959729, Validation loss 14.523372\n", "Epoch 1777, Training loss 0.923525, Validation loss 7.212566\n", "Epoch 1778, Training loss 1.231424, Validation loss 
15.379115\n", "Epoch 1779, Training loss 1.868553, Validation loss 6.165632\n", "Epoch 1780, Training loss 1.364131, Validation loss 16.321663\n", "Epoch 1781, Training loss 1.799439, Validation loss 5.716899\n", "Epoch 1782, Training loss 1.706274, Validation loss 17.373549\n", "Epoch 1783, Training loss 2.332544, Validation loss 5.089705\n", "Epoch 1784, Training loss 1.552872, Validation loss 17.154999\n", "Epoch 1785, Training loss 1.291016, Validation loss 5.506683\n", "Epoch 1786, Training loss 1.979781, Validation loss 17.888044\n", "Epoch 1787, Training loss 2.730048, Validation loss 4.788177\n", "Epoch 1788, Training loss 1.590566, Validation loss 16.909582\n", "Epoch 1789, Training loss 1.089870, Validation loss 6.076341\n", "Epoch 1790, Training loss 1.220794, Validation loss 15.444478\n", "Epoch 1791, Training loss 0.944759, Validation loss 6.663440\n", "Epoch 1792, Training loss 1.052703, Validation loss 14.784245\n", "Epoch 1793, Training loss 1.049738, Validation loss 6.690677\n", "Epoch 1794, Training loss 1.559582, Validation loss 15.988471\n", "Epoch 1795, Training loss 2.462365, Validation loss 5.731880\n", "Epoch 1796, Training loss 1.231429, Validation loss 15.620102\n", "Epoch 1797, Training loss 0.961913, Validation loss 6.731431\n", "Epoch 1798, Training loss 1.072883, Validation loss 14.912246\n", "Epoch 1799, Training loss 1.030372, Validation loss 6.690021\n", "Epoch 1800, Training loss 1.512528, Validation loss 15.895735\n", "Epoch 1801, Training loss 2.381300, Validation loss 5.776729\n", "Epoch 1802, Training loss 1.208590, Validation loss 15.574912\n", "Epoch 1803, Training loss 0.971343, Validation loss 6.645556\n", "Epoch 1804, Training loss 1.177621, Validation loss 15.296577\n", "Epoch 1805, Training loss 1.479454, Validation loss 6.218560\n", "Epoch 1806, Training loss 1.745364, Validation loss 16.687025\n", "Epoch 1807, Training loss 2.643205, Validation loss 5.433892\n", "Epoch 1808, Training loss 1.335350, Validation loss 15.894543\n", "Epoch 1809, Training loss 0.988890, Validation loss 6.711168\n", "Epoch 1810, Training loss 1.060282, Validation loss 14.507898\n", "Epoch 1811, Training loss 0.867724, Validation loss 7.400429\n", "Epoch 1812, Training loss 0.899893, Validation loss 13.427342\n", "Epoch 1813, Training loss 0.815461, Validation loss 8.212444\n", "Epoch 1814, Training loss 0.902121, Validation loss 12.284668\n", "Epoch 1815, Training loss 1.022883, Validation loss 9.695839\n", "Epoch 1816, Training loss 1.592304, Validation loss 10.509608\n", "Epoch 1817, Training loss 0.769881, Validation loss 10.751591\n", "Epoch 1818, Training loss 1.117060, Validation loss 9.815973\n", "Epoch 1819, Training loss 1.150385, Validation loss 12.506188\n", "Epoch 1820, Training loss 1.954724, Validation loss 8.345134\n", "Epoch 1821, Training loss 0.733315, Validation loss 12.777298\n", "Epoch 1822, Training loss 0.718360, Validation loss 8.869383\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1823, Training loss 0.752463, Validation loss 12.871406\n", "Epoch 1824, Training loss 0.846952, Validation loss 8.349653\n", "Epoch 1825, Training loss 1.175071, Validation loss 14.180986\n", "Epoch 1826, Training loss 1.997190, Validation loss 7.025384\n", "Epoch 1827, Training loss 0.957713, Validation loss 14.557492\n", "Epoch 1828, Training loss 0.969398, Validation loss 7.239316\n", "Epoch 1829, Training loss 1.371208, Validation loss 15.587436\n", "Epoch 1830, Training loss 2.168011, Validation loss 6.109621\n", "Epoch 1831, Training 
loss 1.171566, Validation loss 15.624527\n", "Epoch 1832, Training loss 1.049658, Validation loss 6.555319\n", "Epoch 1833, Training loss 1.485025, Validation loss 16.226818\n", "Epoch 1834, Training loss 2.204556, Validation loss 5.693777\n", "Epoch 1835, Training loss 1.278912, Validation loss 16.064791\n", "Epoch 1836, Training loss 1.142244, Validation loss 6.202730\n", "Epoch 1837, Training loss 1.680631, Validation loss 16.774227\n", "Epoch 1838, Training loss 2.461280, Validation loss 5.401408\n", "Epoch 1839, Training loss 1.335119, Validation loss 16.084913\n", "Epoch 1840, Training loss 0.993175, Validation loss 6.519165\n", "Epoch 1841, Training loss 1.116745, Validation loss 15.162028\n", "Epoch 1842, Training loss 1.081634, Validation loss 6.559360\n", "Epoch 1843, Training loss 1.583394, Validation loss 16.129330\n", "Epoch 1844, Training loss 2.436128, Validation loss 5.723709\n", "Epoch 1845, Training loss 1.218195, Validation loss 15.556543\n", "Epoch 1846, Training loss 0.941502, Validation loss 6.876034\n", "Epoch 1847, Training loss 1.016452, Validation loss 14.608644\n", "Epoch 1848, Training loss 0.923856, Validation loss 7.037514\n", "Epoch 1849, Training loss 1.226788, Validation loss 15.028277\n", "Epoch 1850, Training loss 1.896281, Validation loss 6.325038\n", "Epoch 1851, Training loss 1.200307, Validation loss 15.475754\n", "Epoch 1852, Training loss 1.564577, Validation loss 6.263789\n", "Epoch 1853, Training loss 1.626388, Validation loss 16.546989\n", "Epoch 1854, Training loss 2.422798, Validation loss 5.592981\n", "Epoch 1855, Training loss 1.277195, Validation loss 15.849144\n", "Epoch 1856, Training loss 0.966751, Validation loss 6.678167\n", "Epoch 1857, Training loss 1.078591, Validation loss 14.954230\n", "Epoch 1858, Training loss 1.084036, Validation loss 6.681724\n", "Epoch 1859, Training loss 1.576572, Validation loss 15.982947\n", "Epoch 1860, Training loss 2.440977, Validation loss 5.832497\n", "Epoch 1861, Training loss 1.178661, Validation loss 15.336428\n", "Epoch 1862, Training loss 0.917975, Validation loss 7.081775\n", "Epoch 1863, Training loss 0.962859, Validation loss 14.217435\n", "Epoch 1864, Training loss 0.831364, Validation loss 7.470695\n", "Epoch 1865, Training loss 0.913645, Validation loss 13.899392\n", "Epoch 1866, Training loss 1.081379, Validation loss 7.247139\n", "Epoch 1867, Training loss 1.528680, Validation loss 15.412815\n", "Epoch 1868, Training loss 2.448116, Validation loss 6.202423\n", "Epoch 1869, Training loss 1.082151, Validation loss 14.836801\n", "Epoch 1870, Training loss 0.879564, Validation loss 7.472214\n", "Epoch 1871, Training loss 0.907032, Validation loss 13.659358\n", "Epoch 1872, Training loss 0.799448, Validation loss 8.123549\n", "Epoch 1873, Training loss 0.829271, Validation loss 12.579033\n", "Epoch 1874, Training loss 0.848353, Validation loss 9.194908\n", "Epoch 1875, Training loss 1.148736, Validation loss 11.083939\n", "Epoch 1876, Training loss 1.120045, Validation loss 11.033483\n", "Epoch 1877, Training loss 1.811363, Validation loss 9.411529\n", "Epoch 1878, Training loss 0.660175, Validation loss 11.512391\n", "Epoch 1879, Training loss 0.697187, Validation loss 9.655783\n", "Epoch 1880, Training loss 0.831292, Validation loss 12.253823\n", "Epoch 1881, Training loss 1.304987, Validation loss 8.565061\n", "Epoch 1882, Training loss 1.127146, Validation loss 13.750037\n", "Epoch 1883, Training loss 1.905053, Validation loss 7.456195\n", "Epoch 1884, Training loss 0.878754, Validation 
loss 14.078150\n", "Epoch 1885, Training loss 0.915054, Validation loss 7.670912\n", "Epoch 1886, Training loss 1.253095, Validation loss 15.013952\n", "Epoch 1887, Training loss 2.003778, Validation loss 6.554221\n", "Epoch 1888, Training loss 1.066636, Validation loss 15.167181\n", "Epoch 1889, Training loss 1.067837, Validation loss 6.849607\n", "Epoch 1890, Training loss 1.523747, Validation loss 16.112720\n", "Epoch 1891, Training loss 2.316807, Validation loss 5.880631\n", "Epoch 1892, Training loss 1.197855, Validation loss 15.614935\n", "Epoch 1893, Training loss 0.940075, Validation loss 6.899966\n", "Epoch 1894, Training loss 1.044646, Validation loss 14.859603\n", "Epoch 1895, Training loss 1.073527, Validation loss 6.820826\n", "Epoch 1896, Training loss 1.533729, Validation loss 15.906727\n", "Epoch 1897, Training loss 2.364695, Validation loss 5.944461\n", "Epoch 1898, Training loss 1.155525, Validation loss 15.334427\n", "Epoch 1899, Training loss 0.909900, Validation loss 7.125154\n", "Epoch 1900, Training loss 0.961709, Validation loss 14.342237\n", "Epoch 1901, Training loss 0.867598, Validation loss 7.346519\n", "Epoch 1902, Training loss 1.071055, Validation loss 14.497274\n", "Epoch 1903, Training loss 1.571648, Validation loss 6.763095\n", "Epoch 1904, Training loss 1.346377, Validation loss 15.523507\n", "Epoch 1905, Training loss 2.075942, Validation loss 6.213984\n", "Epoch 1906, Training loss 1.126314, Validation loss 15.402340\n", "Epoch 1907, Training loss 1.097801, Validation loss 6.667561\n", "Epoch 1908, Training loss 1.569999, Validation loss 16.203085\n", "Epoch 1909, Training loss 2.362990, Validation loss 5.821190\n", "Epoch 1910, Training loss 1.188374, Validation loss 15.498816\n", "Epoch 1911, Training loss 0.920512, Validation loss 7.011155\n", "Epoch 1912, Training loss 0.986109, Validation loss 14.502447\n", "Epoch 1913, Training loss 0.934758, Validation loss 7.150043\n", "Epoch 1914, Training loss 1.269248, Validation loss 15.047282\n", "Epoch 1915, Training loss 1.987222, Validation loss 6.404259\n", "Epoch 1916, Training loss 1.076750, Validation loss 15.097846\n", "Epoch 1917, Training loss 1.160722, Validation loss 6.761499\n", "Epoch 1918, Training loss 1.610976, Validation loss 16.171743\n", "Epoch 1919, Training loss 2.437075, Validation loss 5.876201\n", "Epoch 1920, Training loss 1.178461, Validation loss 15.348433\n", "Epoch 1921, Training loss 0.911085, Validation loss 7.252543\n", "Epoch 1922, Training loss 0.939449, Validation loss 13.947300\n", "Epoch 1923, Training loss 0.804135, Validation loss 7.951716\n", "Epoch 1924, Training loss 0.813309, Validation loss 12.876199\n", "Epoch 1925, Training loss 0.766943, Validation loss 8.785187\n", "Epoch 1926, Training loss 0.851002, Validation loss 11.721709\n", "Epoch 1927, Training loss 0.992264, Validation loss 10.313665\n", "Epoch 1928, Training loss 1.548693, Validation loss 9.993137\n", "Epoch 1929, Training loss 0.762527, Validation loss 11.352671\n", "Epoch 1930, Training loss 1.096712, Validation loss 9.372240\n", "Epoch 1931, Training loss 1.145638, Validation loss 13.013311\n", "Epoch 1932, Training loss 1.908191, Validation loss 8.015963\n", "Epoch 1933, Training loss 0.769473, Validation loss 13.214046\n", "Epoch 1934, Training loss 0.753135, Validation loss 8.538493\n", "Epoch 1935, Training loss 0.846646, Validation loss 13.405124\n", "Epoch 1936, Training loss 1.138902, Validation loss 7.829137\n", "Epoch 1937, Training loss 1.370501, Validation loss 14.946300\n", "Epoch 
1938, Training loss 2.200501, Validation loss 6.684963\n", "Epoch 1939, Training loss 0.997605, Validation loss 14.655020\n", "Epoch 1940, Training loss 0.845326, Validation loss 7.699300\n", "Epoch 1941, Training loss 0.872417, Validation loss 13.853469\n", "Epoch 1942, Training loss 0.800965, Validation loss 7.856517\n", "Epoch 1943, Training loss 0.905159, Validation loss 13.825280\n", "Epoch 1944, Training loss 1.183534, Validation loss 7.395426\n", "Epoch 1945, Training loss 1.455026, Validation loss 15.319672\n", "Epoch 1946, Training loss 2.296839, Validation loss 6.400755\n", "Epoch 1947, Training loss 1.043697, Validation loss 14.829389\n", "Epoch 1948, Training loss 0.858816, Validation loss 7.570285\n", "Epoch 1949, Training loss 0.881696, Validation loss 13.831263\n", "Epoch 1950, Training loss 0.785774, Validation loss 7.905920\n", "Epoch 1951, Training loss 0.831607, Validation loss 13.470597\n", "Epoch 1952, Training loss 0.922208, Validation loss 7.759452\n", "Epoch 1953, Training loss 1.284685, Validation loss 14.622307\n", "Epoch 1954, Training loss 2.091636, Validation loss 6.745488\n", "Epoch 1955, Training loss 0.954912, Validation loss 14.493833\n", "Epoch 1956, Training loss 0.880559, Validation loss 7.459910\n", "Epoch 1957, Training loss 1.109247, Validation loss 14.688357\n", "Epoch 1958, Training loss 1.649080, Validation loss 6.758984\n", "Epoch 1959, Training loss 1.243465, Validation loss 15.386987\n", "Epoch 1960, Training loss 1.825925, Validation loss 6.402138\n", "Epoch 1961, Training loss 1.207350, Validation loss 15.620851\n", "Epoch 1962, Training loss 1.535735, Validation loss 6.372454\n", "Epoch 1963, Training loss 1.521549, Validation loss 16.352262\n", "Epoch 1964, Training loss 2.203014, Validation loss 5.856551\n", "Epoch 1965, Training loss 1.190609, Validation loss 15.708888\n", "Epoch 1966, Training loss 0.968443, Validation loss 6.789563\n", "Epoch 1967, Training loss 1.206992, Validation loss 15.373535\n", "Epoch 1968, Training loss 1.635902, Validation loss 6.368152\n", "Epoch 1969, Training loss 1.358123, Validation loss 15.901987\n", "Epoch 1970, Training loss 1.931066, Validation loss 6.108801\n", "Epoch 1971, Training loss 1.201087, Validation loss 15.760814\n", "Epoch 1972, Training loss 1.311953, Validation loss 6.421900\n", "Epoch 1973, Training loss 1.641183, Validation loss 16.547550\n", "Epoch 1974, Training loss 2.371983, Validation loss 5.761636\n", "Epoch 1975, Training loss 1.210988, Validation loss 15.586431\n", "Epoch 1976, Training loss 0.915791, Validation loss 7.211170\n", "Epoch 1977, Training loss 0.939862, Validation loss 14.094127\n", "Epoch 1978, Training loss 0.798269, Validation loss 7.858292\n", "Epoch 1979, Training loss 0.802941, Validation loss 13.213839\n", "Epoch 1980, Training loss 0.744731, Validation loss 8.255027\n", "Epoch 1981, Training loss 0.789410, Validation loss 13.022106\n", "Epoch 1982, Training loss 0.929304, Validation loss 8.036937\n", "Epoch 1983, Training loss 1.270090, Validation loss 14.306899\n", "Epoch 1984, Training loss 2.069675, Validation loss 6.981104\n", "Epoch 1985, Training loss 0.898304, Validation loss 14.132913\n", "Epoch 1986, Training loss 0.821061, Validation loss 7.813734\n", "Epoch 1987, Training loss 0.954616, Validation loss 14.043756\n", "Epoch 1988, Training loss 1.311095, Validation loss 7.276778\n", "Epoch 1989, Training loss 1.386713, Validation loss 15.250756\n", "Epoch 1990, Training loss 2.166613, Validation loss 6.485138\n", "Epoch 1991, Training loss 1.010789, 
Validation loss 14.772713\n", "Epoch 1992, Training loss 0.851979, Validation loss 7.513112\n", "Epoch 1993, Training loss 0.935369, Validation loss 14.185230\n", "Epoch 1994, Training loss 1.085410, Validation loss 7.298798\n", "Epoch 1995, Training loss 1.435591, Validation loss 15.310302\n", "Epoch 1996, Training loss 2.226872, Validation loss 6.415857\n", "Epoch 1997, Training loss 1.017047, Validation loss 14.723892\n", "Epoch 1998, Training loss 0.838036, Validation loss 7.649049\n", "Epoch 1999, Training loss 0.860231, Validation loss 13.757263\n", "Epoch 2000, Training loss 0.814576, Validation loss 7.843290\n", "Epoch 2001, Training loss 0.986378, Validation loss 13.925228\n", "Epoch 2002, Training loss 1.462335, Validation loss 7.248359\n", "Epoch 2003, Training loss 1.230151, Validation loss 14.897462\n", "Epoch 2004, Training loss 1.911261, Validation loss 6.705752\n", "Epoch 2005, Training loss 1.013892, Validation loss 14.851079\n", "Epoch 2006, Training loss 1.080262, Validation loss 7.103899\n", "Epoch 2007, Training loss 1.454224, Validation loss 15.659546\n", "Epoch 2008, Training loss 2.201145, Validation loss 6.264261\n", "Epoch 2009, Training loss 1.059391, Validation loss 15.003463\n", "Epoch 2010, Training loss 0.859260, Validation loss 7.467719\n", "Epoch 2011, Training loss 0.906088, Validation loss 14.083794\n", "Epoch 2012, Training loss 0.921949, Validation loss 7.517830\n", "Epoch 2013, Training loss 1.232476, Validation loss 14.721223\n", "Epoch 2014, Training loss 1.927358, Validation loss 6.735042\n", "Epoch 2015, Training loss 0.975525, Validation loss 14.643030\n", "Epoch 2016, Training loss 0.992325, Validation loss 7.294531\n", "Epoch 2017, Training loss 1.337590, Validation loss 15.262608\n", "Epoch 2018, Training loss 2.045158, Validation loss 6.481330\n", "Epoch 2019, Training loss 1.012684, Validation loss 14.875121\n", "Epoch 2020, Training loss 0.897834, Validation loss 7.355424\n", "Epoch 2021, Training loss 1.107551, Validation loss 14.756696\n", "Epoch 2022, Training loss 1.577471, Validation loss 6.813769\n", "Epoch 2023, Training loss 1.210708, Validation loss 15.276272\n", "Epoch 2024, Training loss 1.731696, Validation loss 6.573186\n", "Epoch 2025, Training loss 1.154842, Validation loss 15.403081\n", "Epoch 2026, Training loss 1.455879, Validation loss 6.631810\n", "Epoch 2027, Training loss 1.406716, Validation loss 15.953583\n", "Epoch 2028, Training loss 2.021761, Validation loss 6.191773\n", "Epoch 2029, Training loss 1.102956, Validation loss 15.416178\n", "Epoch 2030, Training loss 0.966106, Validation loss 7.043806\n", "Epoch 2031, Training loss 1.237871, Validation loss 15.316458\n", "Epoch 2032, Training loss 1.767761, Validation loss 6.500986\n", "Epoch 2033, Training loss 1.112042, Validation loss 15.310923\n", "Epoch 2034, Training loss 1.301611, Validation loss 6.772158\n", "Epoch 2035, Training loss 1.456260, Validation loss 15.956321\n", "Epoch 2036, Training loss 2.115428, Validation loss 6.192155\n", "Epoch 2037, Training loss 1.086110, Validation loss 15.252013\n", "Epoch 2038, Training loss 0.872099, Validation loss 7.423163\n", "Epoch 2039, Training loss 0.934477, Validation loss 14.297111\n", "Epoch 2040, Training loss 0.994043, Validation loss 7.417320\n", "Epoch 2041, Training loss 1.297206, Validation loss 14.995857\n", "Epoch 2042, Training loss 1.979440, Validation loss 6.658154\n", "Epoch 2043, Training loss 0.968700, Validation loss 14.667974\n", "Epoch 2044, Training loss 0.855089, Validation loss 7.629712\n", 
"Epoch 2045, Training loss 0.995610, Validation loss 14.344762\n", "Epoch 2046, Training loss 1.328313, Validation loss 7.198955\n", "Epoch 2047, Training loss 1.281062, Validation loss 15.168003\n", "Epoch 2048, Training loss 1.922766, Validation loss 6.646592\n", "Epoch 2049, Training loss 0.992978, Validation loss 14.864473\n", "Epoch 2050, Training loss 0.913513, Validation loss 7.442533\n", "Epoch 2051, Training loss 1.145603, Validation loss 14.856627\n", "Epoch 2052, Training loss 1.655509, Validation loss 6.835378\n", "Epoch 2053, Training loss 1.075218, Validation loss 15.042190\n", "Epoch 2054, Training loss 1.346272, Validation loss 6.949472\n", "Epoch 2055, Training loss 1.336683, Validation loss 15.624758\n", "Epoch 2056, Training loss 1.935091, Validation loss 6.454286\n", "Epoch 2057, Training loss 1.039762, Validation loss 15.173414\n", "Epoch 2058, Training loss 0.919849, Validation loss 7.346622\n", "Epoch 2059, Training loss 1.132028, Validation loss 14.947033\n", "Epoch 2060, Training loss 1.577581, Validation loss 6.837310\n", "Epoch 2061, Training loss 1.118312, Validation loss 15.194471\n", "Epoch 2062, Training loss 1.452047, Validation loss 6.853607\n", "Epoch 2063, Training loss 1.242516, Validation loss 15.558634\n", "Epoch 2064, Training loss 1.716174, Validation loss 6.591503\n", "Epoch 2065, Training loss 1.093041, Validation loss 15.420022\n", "Epoch 2066, Training loss 1.199208, Validation loss 6.956153\n", "Epoch 2067, Training loss 1.386809, Validation loss 15.842360\n", "Epoch 2068, Training loss 1.974054, Validation loss 6.384395\n", "Epoch 2069, Training loss 1.048393, Validation loss 15.249245\n", "Epoch 2070, Training loss 0.861083, Validation loss 7.597077\n", "Epoch 2071, Training loss 0.936351, Validation loss 14.363963\n", "Epoch 2072, Training loss 1.053782, Validation loss 7.484451\n", "Epoch 2073, Training loss 1.263296, Validation loss 15.030257\n", "Epoch 2074, Training loss 1.873526, Validation loss 6.793503\n", "Epoch 2075, Training loss 0.953941, Validation loss 14.743025\n", "Epoch 2076, Training loss 0.835404, Validation loss 7.848421\n", "Epoch 2077, Training loss 0.933726, Validation loss 14.201784\n", "Epoch 2078, Training loss 1.154757, Validation loss 7.527117\n", "Epoch 2079, Training loss 1.228770, Validation loss 14.962362\n", "Epoch 2080, Training loss 1.816348, Validation loss 6.895328\n", "Epoch 2081, Training loss 0.944603, Validation loss 14.745373\n", "Epoch 2082, Training loss 0.853432, Validation loss 7.824052\n", "Epoch 2083, Training loss 0.993569, Validation loss 14.414246\n", "Epoch 2084, Training loss 1.318635, Validation loss 7.357197\n", "Epoch 2085, Training loss 1.140206, Validation loss 14.992713\n", "Epoch 2086, Training loss 1.609046, Validation loss 7.009108\n", "Epoch 2087, Training loss 1.003561, Validation loss 15.039635\n", "Epoch 2088, Training loss 1.110715, Validation loss 7.358173\n", "Epoch 2089, Training loss 1.268940, Validation loss 15.446454\n", "Epoch 2090, Training loss 1.803831, Validation loss 6.743443\n", "Epoch 2091, Training loss 0.984863, Validation loss 15.115900\n", "Epoch 2092, Training loss 0.861572, Validation loss 7.764314\n", "Epoch 2093, Training loss 0.985619, Validation loss 14.572658\n", "Epoch 2094, Training loss 1.245613, Validation loss 7.397869\n", "Epoch 2095, Training loss 1.151009, Validation loss 15.115788\n", "Epoch 2096, Training loss 1.605062, Validation loss 7.021733\n", "Epoch 2097, Training loss 0.982730, Validation loss 15.108081\n", "Epoch 2098, Training loss 
1.018121, Validation loss 7.530123\n", "Epoch 2099, Training loss 1.205666, Validation loss 15.323582\n", "Epoch 2100, Training loss 1.685415, Validation loss 6.913295\n", "Epoch 2101, Training loss 0.969600, Validation loss 15.159135\n", "Epoch 2102, Training loss 0.897512, Validation loss 7.750131\n", "Epoch 2103, Training loss 1.064134, Validation loss 14.908031\n", "Epoch 2104, Training loss 1.420372, Validation loss 7.239420\n", "Epoch 2105, Training loss 1.023567, Validation loss 15.161042\n", "Epoch 2106, Training loss 1.228102, Validation loss 7.358010\n", "Epoch 2107, Training loss 1.153999, Validation loss 15.481904\n", "Epoch 2108, Training loss 1.536027, Validation loss 7.027973\n", "Epoch 2109, Training loss 1.007858, Validation loss 15.436599\n", "Epoch 2110, Training loss 1.045218, Validation loss 7.494368\n", "Epoch 2111, Training loss 1.191521, Validation loss 15.549868\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 2112, Training loss 1.609407, Validation loss 6.968179\n", "Epoch 2113, Training loss 0.979162, Validation loss 15.404468\n", "Epoch 2114, Training loss 0.916729, Validation loss 7.747438\n", "Epoch 2115, Training loss 1.078879, Validation loss 15.150620\n", "Epoch 2116, Training loss 1.417175, Validation loss 7.254855\n", "Epoch 2117, Training loss 0.999412, Validation loss 15.313776\n", "Epoch 2118, Training loss 1.124530, Validation loss 7.513968\n", "Epoch 2119, Training loss 1.151864, Validation loss 15.551125\n", "Epoch 2120, Training loss 1.525919, Validation loss 7.102480\n", "Epoch 2121, Training loss 0.980976, Validation loss 15.480667\n", "Epoch 2122, Training loss 0.967605, Validation loss 7.700116\n", "Epoch 2123, Training loss 1.122434, Validation loss 15.402038\n", "Epoch 2124, Training loss 1.484459, Validation loss 7.189229\n", "Epoch 2125, Training loss 0.971530, Validation loss 15.413606\n", "Epoch 2126, Training loss 0.986975, Validation loss 7.717282\n", "Epoch 2127, Training loss 1.124702, Validation loss 15.424454\n", "Epoch 2128, Training loss 1.489887, Validation loss 7.210814\n", "Epoch 2129, Training loss 0.962879, Validation loss 15.417302\n", "Epoch 2130, Training loss 0.961224, Validation loss 7.784621\n", "Epoch 2131, Training loss 1.103472, Validation loss 15.360763\n", "Epoch 2132, Training loss 1.453902, Validation loss 7.280669\n", "Epoch 2133, Training loss 0.961899, Validation loss 15.400248\n", "Epoch 2134, Training loss 0.987720, Validation loss 7.771513\n", "Epoch 2135, Training loss 1.111876, Validation loss 15.421732\n", "Epoch 2136, Training loss 1.466072, Validation loss 7.278072\n", "Epoch 2137, Training loss 0.956558, Validation loss 15.425341\n", "Epoch 2138, Training loss 0.962501, Validation loss 7.823831\n", "Epoch 2139, Training loss 1.095372, Validation loss 15.373272\n", "Epoch 2140, Training loss 1.437640, Validation loss 7.331870\n", "Epoch 2141, Training loss 0.956378, Validation loss 15.411991\n", "Epoch 2142, Training loss 0.984729, Validation loss 7.811263\n", "Epoch 2143, Training loss 1.101691, Validation loss 15.421987\n", "Epoch 2144, Training loss 1.446692, Validation loss 7.330596\n", "Epoch 2145, Training loss 0.952332, Validation loss 15.432038\n", "Epoch 2146, Training loss 0.966421, Validation loss 7.849872\n", "Epoch 2147, Training loss 1.090505, Validation loss 15.389360\n", "Epoch 2148, Training loss 1.427732, Validation loss 7.369707\n", "Epoch 2149, Training loss 0.951609, Validation loss 15.421330\n", "Epoch 2150, Training loss 0.980284, Validation loss 
7.846325\n", "Epoch 2151, Training loss 1.093085, Validation loss 15.416946\n", "Epoch 2152, Training loss 1.431247, Validation loss 7.375230\n", "Epoch 2153, Training loss 0.948664, Validation loss 15.432222\n", "Epoch 2154, Training loss 0.970228, Validation loss 7.871857\n", "Epoch 2155, Training loss 1.086390, Validation loss 15.399328\n", "Epoch 2156, Training loss 1.420079, Validation loss 7.401999\n", "Epoch 2157, Training loss 0.947488, Validation loss 15.425296\n", "Epoch 2158, Training loss 0.977046, Validation loss 7.875588\n", "Epoch 2159, Training loss 1.086313, Validation loss 15.411024\n", "Epoch 2160, Training loss 1.419663, Validation loss 7.412139\n", "Epoch 2161, Training loss 0.945370, Validation loss 15.429273\n", "Epoch 2162, Training loss 0.972761, Validation loss 7.891955\n", "Epoch 2163, Training loss 1.082429, Validation loss 15.402946\n", "Epoch 2164, Training loss 1.413295, Validation loss 7.430673\n", "Epoch 2165, Training loss 0.944032, Validation loss 15.425115\n", "Epoch 2166, Training loss 0.975468, Validation loss 7.899147\n", "Epoch 2167, Training loss 1.081138, Validation loss 15.406086\n", "Epoch 2168, Training loss 1.411129, Validation loss 7.442301\n", "Epoch 2169, Training loss 0.942415, Validation loss 15.425210\n", "Epoch 2170, Training loss 0.974067, Validation loss 7.910453\n", "Epoch 2171, Training loss 1.078658, Validation loss 15.402246\n", "Epoch 2172, Training loss 1.407144, Validation loss 7.456146\n", "Epoch 2173, Training loss 0.941125, Validation loss 15.422387\n", "Epoch 2174, Training loss 0.974998, Validation loss 7.918331\n", "Epoch 2175, Training loss 1.077064, Validation loss 15.401869\n", "Epoch 2176, Training loss 1.404589, Validation loss 7.467288\n", "Epoch 2177, Training loss 0.939816, Validation loss 15.420854\n", "Epoch 2178, Training loss 0.974706, Validation loss 7.926976\n", "Epoch 2179, Training loss 1.075226, Validation loss 15.399442\n", "Epoch 2180, Training loss 1.401680, Validation loss 7.478456\n", "Epoch 2181, Training loss 0.938658, Validation loss 15.418475\n", "Epoch 2182, Training loss 0.975052, Validation loss 7.934331\n", "Epoch 2183, Training loss 1.073684, Validation loss 15.397853\n", "Epoch 2184, Training loss 1.399243, Validation loss 7.488423\n", "Epoch 2185, Training loss 0.937559, Validation loss 15.416438\n", "Epoch 2186, Training loss 0.975118, Validation loss 7.941427\n", "Epoch 2187, Training loss 1.072171, Validation loss 15.395836\n", "Epoch 2188, Training loss 1.396845, Validation loss 7.497872\n", "Epoch 2189, Training loss 0.936555, Validation loss 15.414255\n", "Epoch 2190, Training loss 0.975332, Validation loss 7.947853\n", "Epoch 2191, Training loss 1.070787, Validation loss 15.394030\n", "Epoch 2192, Training loss 1.394641, Validation loss 7.506582\n", "Epoch 2193, Training loss 0.935622, Validation loss 15.412236\n", "Epoch 2194, Training loss 0.975499, Validation loss 7.953887\n", "Epoch 2195, Training loss 1.069461, Validation loss 15.392133\n", "Epoch 2196, Training loss 1.392516, Validation loss 7.514774\n", "Epoch 2197, Training loss 0.934759, Validation loss 15.410208\n", "Epoch 2198, Training loss 0.975716, Validation loss 7.959433\n", "Epoch 2199, Training loss 1.068222, Validation loss 15.390321\n", "Epoch 2200, Training loss 1.390513, Validation loss 7.522385\n", "Epoch 2201, Training loss 0.933957, Validation loss 15.408285\n", "Epoch 2202, Training loss 0.975920, Validation loss 7.964613\n", "Epoch 2203, Training loss 1.067035, Validation loss 15.388534\n", "Epoch 2204, 
Training loss 1.388567, Validation loss 7.529581\n", "Epoch 2205, Training loss 0.933214, Validation loss 15.406449\n", "Epoch 2206, Training loss 0.976158, Validation loss 7.969408\n", "Epoch 2207, Training loss 1.065910, Validation loss 15.386825\n", "Epoch 2208, Training loss 1.386700, Validation loss 7.536316\n", "Epoch 2209, Training loss 0.932526, Validation loss 15.404718\n", "Epoch 2210, Training loss 0.976403, Validation loss 7.973877\n", "Epoch 2211, Training loss 1.064833, Validation loss 15.385171\n", "Epoch 2212, Training loss 1.384885, Validation loss 7.542671\n", "Epoch 2213, Training loss 0.931885, Validation loss 15.403062\n", "Epoch 2214, Training loss 0.976659, Validation loss 7.978005\n", "Epoch 2215, Training loss 1.063803, Validation loss 15.383603\n", "Epoch 2216, Training loss 1.383122, Validation loss 7.548657\n", "Epoch 2217, Training loss 0.931291, Validation loss 15.401546\n", "Epoch 2218, Training loss 0.976927, Validation loss 7.981876\n", "Epoch 2219, Training loss 1.062806, Validation loss 15.382058\n", "Epoch 2220, Training loss 1.381389, Validation loss 7.554348\n", "Epoch 2221, Training loss 0.930739, Validation loss 15.400096\n", "Epoch 2222, Training loss 0.977212, Validation loss 7.985468\n", "Epoch 2223, Training loss 1.061850, Validation loss 15.380622\n", "Epoch 2224, Training loss 1.379699, Validation loss 7.559738\n", "Epoch 2225, Training loss 0.930217, Validation loss 15.398727\n", "Epoch 2226, Training loss 0.977489, Validation loss 7.988883\n", "Epoch 2227, Training loss 1.060912, Validation loss 15.379175\n", "Epoch 2228, Training loss 1.378019, Validation loss 7.564906\n", "Epoch 2229, Training loss 0.929736, Validation loss 15.397442\n", "Epoch 2230, Training loss 0.977803, Validation loss 7.992022\n", "Epoch 2231, Training loss 1.060011, Validation loss 15.377852\n", "Epoch 2232, Training loss 1.376377, Validation loss 7.569811\n", "Epoch 2233, Training loss 0.929281, Validation loss 15.396226\n", "Epoch 2234, Training loss 0.978100, Validation loss 7.994994\n", "Epoch 2235, Training loss 1.059127, Validation loss 15.376527\n", "Epoch 2236, Training loss 1.374748, Validation loss 7.574496\n", "Epoch 2237, Training loss 0.928859, Validation loss 15.395109\n", "Epoch 2238, Training loss 0.978422, Validation loss 7.997764\n", "Epoch 2239, Training loss 1.058273, Validation loss 15.375319\n", "Epoch 2240, Training loss 1.373144, Validation loss 7.578960\n", "Epoch 2241, Training loss 0.928467, Validation loss 15.394097\n", "Epoch 2242, Training loss 0.978747, Validation loss 8.000362\n", "Epoch 2243, Training loss 1.057432, Validation loss 15.374116\n", "Epoch 2244, Training loss 1.371545, Validation loss 7.583278\n", "Epoch 2245, Training loss 0.928092, Validation loss 15.393095\n", "Epoch 2246, Training loss 0.979080, Validation loss 8.002828\n", "Epoch 2247, Training loss 1.056606, Validation loss 15.372941\n", "Epoch 2248, Training loss 1.369958, Validation loss 7.587435\n", "Epoch 2249, Training loss 0.927738, Validation loss 15.392145\n", "Epoch 2250, Training loss 0.979415, Validation loss 8.005159\n", "Epoch 2251, Training loss 1.055794, Validation loss 15.371801\n", "Epoch 2252, Training loss 1.368380, Validation loss 7.591449\n", "Epoch 2253, Training loss 0.927407, Validation loss 15.391272\n", "Epoch 2254, Training loss 0.979763, Validation loss 8.007339\n", "Epoch 2255, Training loss 1.055001, Validation loss 15.370738\n", "Epoch 2256, Training loss 1.366815, Validation loss 7.595310\n", "Epoch 2257, Training loss 0.927095, 
Validation loss 15.390466\n", "Epoch 2258, Training loss 0.980114, Validation loss 8.009420\n", "Epoch 2259, Training loss 1.054214, Validation loss 15.369681\n", "Epoch 2260, Training loss 1.365250, Validation loss 7.599067\n", "Epoch 2261, Training loss 0.926800, Validation loss 15.389699\n", "Epoch 2262, Training loss 0.980472, Validation loss 8.011383\n", "Epoch 2263, Training loss 1.053441, Validation loss 15.368670\n", "Epoch 2264, Training loss 1.363697, Validation loss 7.602690\n", "Epoch 2265, Training loss 0.926520, Validation loss 15.388969\n", "Epoch 2266, Training loss 0.980832, Validation loss 8.013252\n", "Epoch 2267, Training loss 1.052673, Validation loss 15.367671\n", "Epoch 2268, Training loss 1.362139, Validation loss 7.606250\n", "Epoch 2269, Training loss 0.926252, Validation loss 15.388263\n", "Epoch 2270, Training loss 0.981202, Validation loss 8.015021\n", "Epoch 2271, Training loss 1.051919, Validation loss 15.366743\n", "Epoch 2272, Training loss 1.360592, Validation loss 7.609686\n", "Epoch 2273, Training loss 0.926004, Validation loss 15.387639\n", "Epoch 2274, Training loss 0.981578, Validation loss 8.016695\n", "Epoch 2275, Training loss 1.051169, Validation loss 15.365819\n", "Epoch 2276, Training loss 1.359044, Validation loss 7.613042\n", "Epoch 2277, Training loss 0.925763, Validation loss 15.387012\n", "Epoch 2278, Training loss 0.981956, Validation loss 8.018290\n", "Epoch 2279, Training loss 1.050429, Validation loss 15.364916\n", "Epoch 2280, Training loss 1.357503, Validation loss 7.616314\n", "Epoch 2281, Training loss 0.925536, Validation loss 15.386451\n", "Epoch 2282, Training loss 0.982336, Validation loss 8.019826\n", "Epoch 2283, Training loss 1.049691, Validation loss 15.364035\n", "Epoch 2284, Training loss 1.355958, Validation loss 7.619503\n", "Epoch 2285, Training loss 0.925323, Validation loss 15.385907\n", "Epoch 2286, Training loss 0.982732, Validation loss 8.021256\n", "Epoch 2287, Training loss 1.048963, Validation loss 15.363200\n", "Epoch 2288, Training loss 1.354420, Validation loss 7.622641\n", "Epoch 2289, Training loss 0.925117, Validation loss 15.385386\n", "Epoch 2290, Training loss 0.983127, Validation loss 8.022649\n", "Epoch 2291, Training loss 1.048242, Validation loss 15.362383\n", "Epoch 2292, Training loss 1.352890, Validation loss 7.625710\n", "Epoch 2293, Training loss 0.924919, Validation loss 15.384904\n", "Epoch 2294, Training loss 0.983511, Validation loss 8.023991\n", "Epoch 2295, Training loss 1.047518, Validation loss 15.361540\n", "Epoch 2296, Training loss 1.351349, Validation loss 7.628730\n", "Epoch 2297, Training loss 0.924735, Validation loss 15.384455\n", "Epoch 2298, Training loss 0.983924, Validation loss 8.025235\n", "Epoch 2299, Training loss 1.046807, Validation loss 15.360782\n", "Epoch 2300, Training loss 1.349820, Validation loss 7.631661\n", "Epoch 2301, Training loss 0.924554, Validation loss 15.384007\n", "Epoch 2302, Training loss 0.984323, Validation loss 8.026448\n", "Epoch 2303, Training loss 1.046095, Validation loss 15.360007\n", "Epoch 2304, Training loss 1.348287, Validation loss 7.634584\n", "Epoch 2305, Training loss 0.924385, Validation loss 15.383589\n", "Epoch 2306, Training loss 0.984734, Validation loss 8.027620\n", "Epoch 2307, Training loss 1.045386, Validation loss 15.359248\n", "Epoch 2308, Training loss 1.346750, Validation loss 7.637440\n", "Epoch 2309, Training loss 0.924223, Validation loss 15.383202\n", "Epoch 2310, Training loss 0.985154, Validation loss 8.028711\n", 
"Epoch 2311, Training loss 1.044686, Validation loss 15.358562\n", "Epoch 2312, Training loss 1.345224, Validation loss 7.640261\n", "Epoch 2313, Training loss 0.924070, Validation loss 15.382844\n", "Epoch 2314, Training loss 0.985574, Validation loss 8.029779\n", "Epoch 2315, Training loss 1.043986, Validation loss 15.357838\n", "Epoch 2316, Training loss 1.343696, Validation loss 7.643022\n", "Epoch 2317, Training loss 0.923919, Validation loss 15.382468\n", "Epoch 2318, Training loss 0.985990, Validation loss 8.030812\n", "Epoch 2319, Training loss 1.043286, Validation loss 15.357114\n", "Epoch 2320, Training loss 1.342165, Validation loss 7.645767\n", "Epoch 2321, Training loss 0.923778, Validation loss 15.382133\n", "Epoch 2322, Training loss 0.986421, Validation loss 8.031788\n", "Epoch 2323, Training loss 1.042594, Validation loss 15.356463\n", "Epoch 2324, Training loss 1.340643, Validation loss 7.648458\n", "Epoch 2325, Training loss 0.923643, Validation loss 15.381806\n", "Epoch 2326, Training loss 0.986848, Validation loss 8.032734\n", "Epoch 2327, Training loss 1.041898, Validation loss 15.355761\n", "Epoch 2328, Training loss 1.339111, Validation loss 7.651134\n", "Epoch 2329, Training loss 0.923514, Validation loss 15.381484\n", "Epoch 2330, Training loss 0.987288, Validation loss 8.033629\n", "Epoch 2331, Training loss 1.041212, Validation loss 15.355143\n", "Epoch 2332, Training loss 1.337586, Validation loss 7.653778\n", "Epoch 2333, Training loss 0.923391, Validation loss 15.381203\n", "Epoch 2334, Training loss 0.987727, Validation loss 8.034486\n", "Epoch 2335, Training loss 1.040528, Validation loss 15.354528\n", "Epoch 2336, Training loss 1.336065, Validation loss 7.656366\n", "Epoch 2337, Training loss 0.923278, Validation loss 15.380967\n", "Epoch 2338, Training loss 0.988170, Validation loss 8.035315\n", "Epoch 2339, Training loss 1.039845, Validation loss 15.353917\n", "Epoch 2340, Training loss 1.334545, Validation loss 7.658946\n", "Epoch 2341, Training loss 0.923164, Validation loss 15.380693\n", "Epoch 2342, Training loss 0.988612, Validation loss 8.036117\n", "Epoch 2343, Training loss 1.039161, Validation loss 15.353302\n", "Epoch 2344, Training loss 1.333023, Validation loss 7.661490\n", "Epoch 2345, Training loss 0.923060, Validation loss 15.380443\n", "Epoch 2346, Training loss 0.989070, Validation loss 8.036860\n", "Epoch 2347, Training loss 1.038486, Validation loss 15.352750\n", "Epoch 2348, Training loss 1.331506, Validation loss 7.663984\n", "Epoch 2349, Training loss 0.922961, Validation loss 15.380230\n", "Epoch 2350, Training loss 0.989520, Validation loss 8.037590\n", "Epoch 2351, Training loss 1.037808, Validation loss 15.352167\n", "Epoch 2352, Training loss 1.329983, Validation loss 7.666496\n", "Epoch 2353, Training loss 0.922864, Validation loss 15.379992\n", "Epoch 2354, Training loss 0.989987, Validation loss 8.038288\n", "Epoch 2355, Training loss 1.037135, Validation loss 15.351625\n", "Epoch 2356, Training loss 1.328471, Validation loss 7.668953\n", "Epoch 2357, Training loss 0.922772, Validation loss 15.379786\n", "Epoch 2358, Training loss 0.990445, Validation loss 8.038959\n", "Epoch 2359, Training loss 1.036461, Validation loss 15.351068\n", "Epoch 2360, Training loss 1.326952, Validation loss 7.671406\n", "Epoch 2361, Training loss 0.922687, Validation loss 15.379578\n", "Epoch 2362, Training loss 0.990912, Validation loss 8.039610\n", "Epoch 2363, Training loss 1.035787, Validation loss 15.350540\n", "Epoch 2364, Training loss 
1.325432, Validation loss 7.673850\n", "Epoch 2365, Training loss 0.922606, Validation loss 15.379389\n", "Epoch 2366, Training loss 0.991390, Validation loss 8.040208\n", "Epoch 2367, Training loss 1.035118, Validation loss 15.350041\n", "Epoch 2368, Training loss 1.323915, Validation loss 7.676268\n", "Epoch 2369, Training loss 0.922530, Validation loss 15.379218\n", "Epoch 2370, Training loss 0.991866, Validation loss 8.040791\n", "Epoch 2371, Training loss 1.034451, Validation loss 15.349544\n", "Epoch 2372, Training loss 1.322405, Validation loss 7.678639\n", "Epoch 2373, Training loss 0.922458, Validation loss 15.379047\n", "Epoch 2374, Training loss 0.992346, Validation loss 8.041338\n", "Epoch 2375, Training loss 1.033791, Validation loss 15.349096\n", "Epoch 2376, Training loss 1.320901, Validation loss 7.680980\n", "Epoch 2377, Training loss 0.922389, Validation loss 15.378903\n", "Epoch 2378, Training loss 0.992817, Validation loss 8.041901\n", "Epoch 2379, Training loss 1.033119, Validation loss 15.348564\n", "Epoch 2380, Training loss 1.319380, Validation loss 7.683361\n", "Epoch 2381, Training loss 0.922321, Validation loss 15.378689\n", "Epoch 2382, Training loss 0.993308, Validation loss 8.042419\n", "Epoch 2383, Training loss 1.032454, Validation loss 15.348098\n", "Epoch 2384, Training loss 1.317869, Validation loss 7.685707\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 2385, Training loss 0.922264, Validation loss 15.378556\n", "Epoch 2386, Training loss 0.993807, Validation loss 8.042880\n", "Epoch 2387, Training loss 1.031795, Validation loss 15.347672\n", "Epoch 2388, Training loss 1.316365, Validation loss 7.687996\n", "Epoch 2389, Training loss 0.922211, Validation loss 15.378443\n", "Epoch 2390, Training loss 0.994306, Validation loss 8.043324\n", "Epoch 2391, Training loss 1.031134, Validation loss 15.347218\n", "Epoch 2392, Training loss 1.314854, Validation loss 7.690302\n", "Epoch 2393, Training loss 0.922160, Validation loss 15.378307\n", "Epoch 2394, Training loss 0.994805, Validation loss 8.043769\n", "Epoch 2395, Training loss 1.030479, Validation loss 15.346821\n", "Epoch 2396, Training loss 1.313348, Validation loss 7.692588\n", "Epoch 2397, Training loss 0.922112, Validation loss 15.378187\n", "Epoch 2398, Training loss 0.995306, Validation loss 8.044185\n", "Epoch 2399, Training loss 1.029821, Validation loss 15.346410\n", "Epoch 2400, Training loss 1.311846, Validation loss 7.694850\n", "Epoch 2401, Training loss 0.922069, Validation loss 15.378075\n", "Epoch 2402, Training loss 0.995816, Validation loss 8.044567\n", "Epoch 2403, Training loss 1.029164, Validation loss 15.345999\n", "Epoch 2404, Training loss 1.310339, Validation loss 7.697106\n", "Epoch 2405, Training loss 0.922029, Validation loss 15.377954\n", "Epoch 2406, Training loss 0.996331, Validation loss 8.044940\n", "Epoch 2407, Training loss 1.028509, Validation loss 15.345613\n", "Epoch 2408, Training loss 1.308835, Validation loss 7.699353\n", "Epoch 2409, Training loss 0.921995, Validation loss 15.377850\n", "Epoch 2410, Training loss 0.996851, Validation loss 8.045282\n", "Epoch 2411, Training loss 1.027853, Validation loss 15.345223\n", "Epoch 2412, Training loss 1.307329, Validation loss 7.701578\n", "Epoch 2413, Training loss 0.921964, Validation loss 15.377741\n", "Epoch 2414, Training loss 0.997379, Validation loss 8.045597\n", "Epoch 2415, Training loss 1.027202, Validation loss 15.344855\n", "Epoch 2416, Training loss 1.305828, Validation loss 
7.703795\n", "Epoch 2417, Training loss 0.921936, Validation loss 15.377651\n", "Epoch 2418, Training loss 0.997904, Validation loss 8.045890\n", "Epoch 2419, Training loss 1.026553, Validation loss 15.344499\n", "Epoch 2420, Training loss 1.304332, Validation loss 7.705983\n", "Epoch 2421, Training loss 0.921911, Validation loss 15.377581\n", "Epoch 2422, Training loss 0.998429, Validation loss 8.046179\n", "Epoch 2423, Training loss 1.025902, Validation loss 15.344153\n", "Epoch 2424, Training loss 1.302830, Validation loss 7.708178\n", "Epoch 2425, Training loss 0.921890, Validation loss 15.377484\n", "Epoch 2426, Training loss 0.998968, Validation loss 8.046441\n", "Epoch 2427, Training loss 1.025251, Validation loss 15.343803\n", "Epoch 2428, Training loss 1.301330, Validation loss 7.710386\n", "Epoch 2429, Training loss 0.921868, Validation loss 15.377373\n", "Epoch 2430, Training loss 0.999500, Validation loss 8.046713\n", "Epoch 2431, Training loss 1.024601, Validation loss 15.343457\n", "Epoch 2432, Training loss 1.299831, Validation loss 7.712552\n", "Epoch 2433, Training loss 0.921856, Validation loss 15.377306\n", "Epoch 2434, Training loss 1.000042, Validation loss 8.046943\n", "Epoch 2435, Training loss 1.023951, Validation loss 15.343144\n", "Epoch 2436, Training loss 1.298327, Validation loss 7.714722\n", "Epoch 2437, Training loss 0.921851, Validation loss 15.377248\n", "Epoch 2438, Training loss 1.000602, Validation loss 8.047091\n", "Epoch 2439, Training loss 1.023309, Validation loss 15.342861\n", "Epoch 2440, Training loss 1.296838, Validation loss 7.716822\n", "Epoch 2441, Training loss 0.921843, Validation loss 15.377182\n", "Epoch 2442, Training loss 1.001147, Validation loss 8.047301\n", "Epoch 2443, Training loss 1.022662, Validation loss 15.342570\n", "Epoch 2444, Training loss 1.295336, Validation loss 7.718982\n", "Epoch 2445, Training loss 0.921841, Validation loss 15.377127\n", "Epoch 2446, Training loss 1.001706, Validation loss 8.047455\n", "Epoch 2447, Training loss 1.022015, Validation loss 15.342257\n", "Epoch 2448, Training loss 1.293839, Validation loss 7.721102\n", "Epoch 2449, Training loss 0.921840, Validation loss 15.377047\n", "Epoch 2450, Training loss 1.002269, Validation loss 8.047617\n", "Epoch 2451, Training loss 1.021370, Validation loss 15.341983\n", "Epoch 2452, Training loss 1.292341, Validation loss 7.723235\n", "Epoch 2453, Training loss 0.921844, Validation loss 15.376988\n", "Epoch 2454, Training loss 1.002836, Validation loss 8.047724\n", "Epoch 2455, Training loss 1.020727, Validation loss 15.341722\n", "Epoch 2456, Training loss 1.290846, Validation loss 7.725329\n", "Epoch 2457, Training loss 0.921854, Validation loss 15.376931\n", "Epoch 2458, Training loss 1.003412, Validation loss 8.047813\n", "Epoch 2459, Training loss 1.020082, Validation loss 15.341440\n", "Epoch 2460, Training loss 1.289346, Validation loss 7.727451\n", "Epoch 2461, Training loss 0.921862, Validation loss 15.376862\n", "Epoch 2462, Training loss 1.003987, Validation loss 8.047914\n", "Epoch 2463, Training loss 1.019441, Validation loss 15.341197\n", "Epoch 2464, Training loss 1.287854, Validation loss 7.729531\n", "Epoch 2465, Training loss 0.921877, Validation loss 15.376812\n", "Epoch 2466, Training loss 1.004565, Validation loss 8.047977\n", "Epoch 2467, Training loss 1.018795, Validation loss 15.340945\n", "Epoch 2468, Training loss 1.286354, Validation loss 7.731614\n", "Epoch 2469, Training loss 0.921896, Validation loss 15.376778\n", "Epoch 2470, 
Training loss 1.005155, Validation loss 8.048020\n", "Epoch 2471, Training loss 1.018155, Validation loss 15.340745\n", "Epoch 2472, Training loss 1.284857, Validation loss 7.733697\n", "Epoch 2473, Training loss 0.921920, Validation loss 15.376742\n", "Epoch 2474, Training loss 1.005751, Validation loss 8.048052\n", "Epoch 2475, Training loss 1.017513, Validation loss 15.340508\n", "Epoch 2476, Training loss 1.283361, Validation loss 7.735754\n", "Epoch 2477, Training loss 0.921946, Validation loss 15.376708\n", "Epoch 2478, Training loss 1.006348, Validation loss 8.048052\n", "Epoch 2479, Training loss 1.016871, Validation loss 15.340290\n", "Epoch 2480, Training loss 1.281858, Validation loss 7.737818\n", "Epoch 2481, Training loss 0.921976, Validation loss 15.376652\n", "Epoch 2482, Training loss 1.006957, Validation loss 8.048035\n", "Epoch 2483, Training loss 1.016232, Validation loss 15.340100\n", "Epoch 2484, Training loss 1.280364, Validation loss 7.739881\n", "Epoch 2485, Training loss 0.922005, Validation loss 15.376610\n", "Epoch 2486, Training loss 1.007559, Validation loss 8.048014\n", "Epoch 2487, Training loss 1.015593, Validation loss 15.339900\n", "Epoch 2488, Training loss 1.278869, Validation loss 7.741931\n", "Epoch 2489, Training loss 0.922038, Validation loss 15.376561\n", "Epoch 2490, Training loss 1.008166, Validation loss 8.047994\n", "Epoch 2491, Training loss 1.014950, Validation loss 15.339700\n", "Epoch 2492, Training loss 1.277368, Validation loss 7.743982\n", "Epoch 2493, Training loss 0.922077, Validation loss 15.376513\n", "Epoch 2494, Training loss 1.008783, Validation loss 8.047930\n", "Epoch 2495, Training loss 1.014310, Validation loss 15.339499\n", "Epoch 2496, Training loss 1.275871, Validation loss 7.745993\n", "Epoch 2497, Training loss 0.922122, Validation loss 15.376493\n", "Epoch 2498, Training loss 1.009407, Validation loss 8.047845\n", "Epoch 2499, Training loss 1.013669, Validation loss 15.339334\n", "Epoch 2500, Training loss 1.274367, Validation loss 7.748044\n", "Epoch 2501, Training loss 0.922167, Validation loss 15.376446\n", "Epoch 2502, Training loss 1.010036, Validation loss 8.047744\n", "Epoch 2503, Training loss 1.013029, Validation loss 15.339167\n", "Epoch 2504, Training loss 1.272864, Validation loss 7.750064\n", "Epoch 2505, Training loss 0.922218, Validation loss 15.376420\n", "Epoch 2506, Training loss 1.010674, Validation loss 8.047623\n", "Epoch 2507, Training loss 1.012390, Validation loss 15.339035\n", "Epoch 2508, Training loss 1.271362, Validation loss 7.752084\n", "Epoch 2509, Training loss 0.922269, Validation loss 15.376371\n", "Epoch 2510, Training loss 1.011312, Validation loss 8.047495\n", "Epoch 2511, Training loss 1.011748, Validation loss 15.338879\n", "Epoch 2512, Training loss 1.269858, Validation loss 7.754097\n", "Epoch 2513, Training loss 0.922328, Validation loss 15.376361\n", "Epoch 2514, Training loss 1.011964, Validation loss 8.047327\n", "Epoch 2515, Training loss 1.011110, Validation loss 15.338755\n", "Epoch 2516, Training loss 1.268351, Validation loss 7.756106\n", "Epoch 2517, Training loss 0.922390, Validation loss 15.376350\n", "Epoch 2518, Training loss 1.012618, Validation loss 8.047135\n", "Epoch 2519, Training loss 1.010471, Validation loss 15.338637\n", "Epoch 2520, Training loss 1.266848, Validation loss 7.758090\n", "Epoch 2521, Training loss 0.922455, Validation loss 15.376333\n", "Epoch 2522, Training loss 1.013277, Validation loss 8.046947\n", "Epoch 2523, Training loss 1.009834, 
Validation loss 15.338531\n", "Epoch 2524, Training loss 1.265346, Validation loss 7.760078\n", "Epoch 2525, Training loss 0.922519, Validation loss 15.376301\n", "Epoch 2526, Training loss 1.013931, Validation loss 8.046738\n", "Epoch 2527, Training loss 1.009195, Validation loss 15.338410\n", "Epoch 2528, Training loss 1.263839, Validation loss 7.762088\n", "Epoch 2529, Training loss 0.922585, Validation loss 15.376224\n", "Epoch 2530, Training loss 1.014593, Validation loss 8.046534\n", "Epoch 2531, Training loss 1.008550, Validation loss 15.338260\n", "Epoch 2532, Training loss 1.262323, Validation loss 7.764088\n", "Epoch 2533, Training loss 0.922658, Validation loss 15.376181\n", "Epoch 2534, Training loss 1.015270, Validation loss 8.046308\n", "Epoch 2535, Training loss 1.007907, Validation loss 15.338139\n", "Epoch 2536, Training loss 1.260810, Validation loss 7.766086\n", "Epoch 2537, Training loss 0.922736, Validation loss 15.376144\n", "Epoch 2538, Training loss 1.015951, Validation loss 8.046049\n", "Epoch 2539, Training loss 1.007267, Validation loss 15.338069\n", "Epoch 2540, Training loss 1.259296, Validation loss 7.768076\n", "Epoch 2541, Training loss 0.922816, Validation loss 15.376105\n", "Epoch 2542, Training loss 1.016641, Validation loss 8.045774\n", "Epoch 2543, Training loss 1.006624, Validation loss 15.337962\n", "Epoch 2544, Training loss 1.257778, Validation loss 7.770075\n", "Epoch 2545, Training loss 0.922896, Validation loss 15.376044\n", "Epoch 2546, Training loss 1.017324, Validation loss 8.045507\n", "Epoch 2547, Training loss 1.005978, Validation loss 15.337871\n", "Epoch 2548, Training loss 1.256255, Validation loss 7.772053\n", "Epoch 2549, Training loss 0.922984, Validation loss 15.376005\n", "Epoch 2550, Training loss 1.018028, Validation loss 8.045194\n", "Epoch 2551, Training loss 1.005338, Validation loss 15.337800\n", "Epoch 2552, Training loss 1.254736, Validation loss 7.774041\n", "Epoch 2553, Training loss 0.923074, Validation loss 15.375959\n", "Epoch 2554, Training loss 1.018730, Validation loss 8.044859\n", "Epoch 2555, Training loss 1.004693, Validation loss 15.337714\n", "Epoch 2556, Training loss 1.253210, Validation loss 7.776014\n", "Epoch 2557, Training loss 0.923171, Validation loss 15.375929\n", "Epoch 2558, Training loss 1.019448, Validation loss 8.044510\n", "Epoch 2559, Training loss 1.004051, Validation loss 15.337681\n", "Epoch 2560, Training loss 1.251687, Validation loss 7.777977\n", "Epoch 2561, Training loss 0.923271, Validation loss 15.375901\n", "Epoch 2562, Training loss 1.020163, Validation loss 8.044153\n", "Epoch 2563, Training loss 1.003404, Validation loss 15.337626\n", "Epoch 2564, Training loss 1.250155, Validation loss 7.779954\n", "Epoch 2565, Training loss 0.923372, Validation loss 15.375845\n", "Epoch 2566, Training loss 1.020891, Validation loss 8.043769\n", "Epoch 2567, Training loss 1.002759, Validation loss 15.337587\n", "Epoch 2568, Training loss 1.248622, Validation loss 7.781932\n", "Epoch 2569, Training loss 0.923478, Validation loss 15.375799\n", "Epoch 2570, Training loss 1.021620, Validation loss 8.043378\n", "Epoch 2571, Training loss 1.002109, Validation loss 15.337513\n", "Epoch 2572, Training loss 1.247082, Validation loss 7.783927\n", "Epoch 2573, Training loss 0.923585, Validation loss 15.375747\n", "Epoch 2574, Training loss 1.022362, Validation loss 8.042960\n", "Epoch 2575, Training loss 1.001460, Validation loss 15.337485\n", "Epoch 2576, Training loss 1.245541, Validation loss 7.785896\n", 
"Epoch 2577, Training loss 0.923698, Validation loss 15.375671\n", "Epoch 2578, Training loss 1.023109, Validation loss 8.042538\n", "Epoch 2579, Training loss 1.000810, Validation loss 15.337462\n", "Epoch 2580, Training loss 1.243994, Validation loss 7.787871\n", "Epoch 2581, Training loss 0.923818, Validation loss 15.375634\n", "Epoch 2582, Training loss 1.023869, Validation loss 8.042084\n", "Epoch 2583, Training loss 1.000160, Validation loss 15.337446\n", "Epoch 2584, Training loss 1.242450, Validation loss 7.789826\n", "Epoch 2585, Training loss 0.923940, Validation loss 15.375593\n", "Epoch 2586, Training loss 1.024630, Validation loss 8.041601\n", "Epoch 2587, Training loss 0.999510, Validation loss 15.337440\n", "Epoch 2588, Training loss 1.240898, Validation loss 7.791808\n", "Epoch 2589, Training loss 0.924063, Validation loss 15.375545\n", "Epoch 2590, Training loss 1.025395, Validation loss 8.041107\n", "Epoch 2591, Training loss 0.998859, Validation loss 15.337450\n", "Epoch 2592, Training loss 1.239345, Validation loss 7.793778\n", "Epoch 2593, Training loss 0.924190, Validation loss 15.375463\n", "Epoch 2594, Training loss 1.026170, Validation loss 8.040615\n", "Epoch 2595, Training loss 0.998203, Validation loss 15.337422\n", "Epoch 2596, Training loss 1.237784, Validation loss 7.795761\n", "Epoch 2597, Training loss 0.924322, Validation loss 15.375389\n", "Epoch 2598, Training loss 1.026951, Validation loss 8.040113\n", "Epoch 2599, Training loss 0.997545, Validation loss 15.337401\n", "Epoch 2600, Training loss 1.236218, Validation loss 7.797751\n", "Epoch 2601, Training loss 0.924456, Validation loss 15.375285\n", "Epoch 2602, Training loss 1.027743, Validation loss 8.039578\n", "Epoch 2603, Training loss 0.996887, Validation loss 15.337391\n", "Epoch 2604, Training loss 1.234647, Validation loss 7.799742\n", "Epoch 2605, Training loss 0.924593, Validation loss 15.375204\n", "Epoch 2606, Training loss 1.028540, Validation loss 8.039031\n", "Epoch 2607, Training loss 0.996228, Validation loss 15.337389\n", "Epoch 2608, Training loss 1.233077, Validation loss 7.801712\n", "Epoch 2609, Training loss 0.924738, Validation loss 15.375120\n", "Epoch 2610, Training loss 1.029350, Validation loss 8.038453\n", "Epoch 2611, Training loss 0.995568, Validation loss 15.337412\n", "Epoch 2612, Training loss 1.231496, Validation loss 7.803705\n", "Epoch 2613, Training loss 0.924885, Validation loss 15.375035\n", "Epoch 2614, Training loss 1.030165, Validation loss 8.037866\n", "Epoch 2615, Training loss 0.994904, Validation loss 15.337423\n", "Epoch 2616, Training loss 1.229912, Validation loss 7.805707\n", "Epoch 2617, Training loss 0.925035, Validation loss 15.374940\n", "Epoch 2618, Training loss 1.030983, Validation loss 8.037259\n", "Epoch 2619, Training loss 0.994242, Validation loss 15.337457\n", "Epoch 2620, Training loss 1.228325, Validation loss 7.807691\n", "Epoch 2621, Training loss 0.925190, Validation loss 15.374837\n", "Epoch 2622, Training loss 1.031816, Validation loss 8.036628\n", "Epoch 2623, Training loss 0.993575, Validation loss 15.337487\n", "Epoch 2624, Training loss 1.226728, Validation loss 7.809680\n", "Epoch 2625, Training loss 0.925351, Validation loss 15.374739\n", "Epoch 2626, Training loss 1.032659, Validation loss 8.035996\n", "Epoch 2627, Training loss 0.992905, Validation loss 15.337502\n", "Epoch 2628, Training loss 1.225126, Validation loss 7.811708\n", "Epoch 2629, Training loss 0.925511, Validation loss 15.374595\n", "Epoch 2630, Training loss 
1.033507, Validation loss 8.035321\n", "Epoch 2631, Training loss 0.992236, Validation loss 15.337545\n", "Epoch 2632, Training loss 1.223518, Validation loss 7.813692\n", "Epoch 2633, Training loss 0.925683, Validation loss 15.374516\n", "Epoch 2634, Training loss 1.034370, Validation loss 8.034632\n", "Epoch 2635, Training loss 0.991562, Validation loss 15.337593\n", "Epoch 2636, Training loss 1.221903, Validation loss 7.815704\n", "Epoch 2637, Training loss 0.925855, Validation loss 15.374376\n", "Epoch 2638, Training loss 1.035237, Validation loss 8.033953\n", "Epoch 2639, Training loss 0.990886, Validation loss 15.337633\n", "Epoch 2640, Training loss 1.220281, Validation loss 7.817741\n", "Epoch 2641, Training loss 0.926027, Validation loss 15.374215\n", "Epoch 2642, Training loss 1.036109, Validation loss 8.033253\n", "Epoch 2643, Training loss 0.990206, Validation loss 15.337646\n", "Epoch 2644, Training loss 1.218654, Validation loss 7.819778\n", "Epoch 2645, Training loss 0.926205, Validation loss 15.374068\n", "Epoch 2646, Training loss 1.036995, Validation loss 8.032520\n", "Epoch 2647, Training loss 0.989527, Validation loss 15.337693\n", "Epoch 2648, Training loss 1.217021, Validation loss 7.821802\n", "Epoch 2649, Training loss 0.926390, Validation loss 15.373919\n", "Epoch 2650, Training loss 1.037889, Validation loss 8.031777\n", "Epoch 2651, Training loss 0.988844, Validation loss 15.337744\n", "Epoch 2652, Training loss 1.215380, Validation loss 7.823862\n", "Epoch 2653, Training loss 0.926573, Validation loss 15.373739\n", "Epoch 2654, Training loss 1.038787, Validation loss 8.031028\n", "Epoch 2655, Training loss 0.988157, Validation loss 15.337775\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 2656, Training loss 1.213730, Validation loss 7.825916\n", "Epoch 2657, Training loss 0.926763, Validation loss 15.373541\n", "Epoch 2658, Training loss 1.039697, Validation loss 8.030269\n", "Epoch 2659, Training loss 0.987467, Validation loss 15.337803\n", "Epoch 2660, Training loss 1.212076, Validation loss 7.827977\n", "Epoch 2661, Training loss 0.926960, Validation loss 15.373369\n", "Epoch 2662, Training loss 1.040619, Validation loss 8.029460\n", "Epoch 2663, Training loss 0.986778, Validation loss 15.337868\n", "Epoch 2664, Training loss 1.210413, Validation loss 7.830028\n", "Epoch 2665, Training loss 0.927158, Validation loss 15.373158\n", "Epoch 2666, Training loss 1.041545, Validation loss 8.028666\n", "Epoch 2667, Training loss 0.986082, Validation loss 15.337887\n", "Epoch 2668, Training loss 1.208741, Validation loss 7.832120\n", "Epoch 2669, Training loss 0.927359, Validation loss 15.372933\n", "Epoch 2670, Training loss 1.042484, Validation loss 8.027835\n", "Epoch 2671, Training loss 0.985385, Validation loss 15.337932\n", "Epoch 2672, Training loss 1.207063, Validation loss 7.834206\n", "Epoch 2673, Training loss 0.927565, Validation loss 15.372696\n", "Epoch 2674, Training loss 1.043431, Validation loss 8.027001\n", "Epoch 2675, Training loss 0.984683, Validation loss 15.337977\n", "Epoch 2676, Training loss 1.205374, Validation loss 7.836287\n", "Epoch 2677, Training loss 0.927779, Validation loss 15.372488\n", "Epoch 2678, Training loss 1.044394, Validation loss 8.026126\n", "Epoch 2679, Training loss 0.983980, Validation loss 15.338056\n", "Epoch 2680, Training loss 1.203676, Validation loss 7.838412\n", "Epoch 2681, Training loss 0.927995, Validation loss 15.372251\n", "Epoch 2682, Training loss 1.045362, Validation loss 
8.025248\n", "Epoch 2683, Training loss 0.983271, Validation loss 15.338081\n", "Epoch 2684, Training loss 1.201970, Validation loss 7.840522\n", "Epoch 2685, Training loss 0.928216, Validation loss 15.371976\n", "Epoch 2686, Training loss 1.046342, Validation loss 8.024353\n", "Epoch 2687, Training loss 0.982558, Validation loss 15.338121\n", "Epoch 2688, Training loss 1.200250, Validation loss 7.842662\n", "Epoch 2689, Training loss 0.928440, Validation loss 15.371686\n", "Epoch 2690, Training loss 1.047334, Validation loss 8.023456\n", "Epoch 2691, Training loss 0.981838, Validation loss 15.338147\n", "Epoch 2692, Training loss 1.198518, Validation loss 7.844822\n", "Epoch 2693, Training loss 0.928670, Validation loss 15.371405\n", "Epoch 2694, Training loss 1.048342, Validation loss 8.022520\n", "Epoch 2695, Training loss 0.981119, Validation loss 15.338194\n", "Epoch 2696, Training loss 1.196780, Validation loss 7.846981\n", "Epoch 2697, Training loss 0.928903, Validation loss 15.371082\n", "Epoch 2698, Training loss 1.049352, Validation loss 8.021574\n", "Epoch 2699, Training loss 0.980397, Validation loss 15.338226\n", "Epoch 2700, Training loss 1.195038, Validation loss 7.849140\n", "Epoch 2701, Training loss 0.929142, Validation loss 15.370771\n", "Epoch 2702, Training loss 1.050374, Validation loss 8.020603\n", "Epoch 2703, Training loss 0.979669, Validation loss 15.338259\n", "Epoch 2704, Training loss 1.193279, Validation loss 7.851331\n", "Epoch 2705, Training loss 0.929382, Validation loss 15.370423\n", "Epoch 2706, Training loss 1.051403, Validation loss 8.019641\n", "Epoch 2707, Training loss 0.978935, Validation loss 15.338275\n", "Epoch 2708, Training loss 1.191512, Validation loss 7.853538\n", "Epoch 2709, Training loss 0.929629, Validation loss 15.370071\n", "Epoch 2710, Training loss 1.052449, Validation loss 8.018626\n", "Epoch 2711, Training loss 0.978203, Validation loss 15.338322\n", "Epoch 2712, Training loss 1.189740, Validation loss 7.855751\n", "Epoch 2713, Training loss 0.929876, Validation loss 15.369687\n", "Epoch 2714, Training loss 1.053497, Validation loss 8.017643\n", "Epoch 2715, Training loss 0.977462, Validation loss 15.338315\n", "Epoch 2716, Training loss 1.187950, Validation loss 7.857987\n", "Epoch 2717, Training loss 0.930128, Validation loss 15.369258\n", "Epoch 2718, Training loss 1.054559, Validation loss 8.016631\n", "Epoch 2719, Training loss 0.976714, Validation loss 15.338294\n", "Epoch 2720, Training loss 1.186149, Validation loss 7.860236\n", "Epoch 2721, Training loss 0.930385, Validation loss 15.368821\n", "Epoch 2722, Training loss 1.055632, Validation loss 8.015604\n", "Epoch 2723, Training loss 0.975968, Validation loss 15.338314\n", "Epoch 2724, Training loss 1.184345, Validation loss 7.862497\n", "Epoch 2725, Training loss 0.930647, Validation loss 15.368392\n", "Epoch 2726, Training loss 1.056712, Validation loss 8.014556\n", "Epoch 2727, Training loss 0.975213, Validation loss 15.338299\n", "Epoch 2728, Training loss 1.182519, Validation loss 7.864803\n", "Epoch 2729, Training loss 0.930912, Validation loss 15.367911\n", "Epoch 2730, Training loss 1.057808, Validation loss 8.013505\n", "Epoch 2731, Training loss 0.974451, Validation loss 15.338247\n", "Epoch 2732, Training loss 1.180687, Validation loss 7.867104\n", "Epoch 2733, Training loss 0.931176, Validation loss 15.367391\n", "Epoch 2734, Training loss 1.058902, Validation loss 8.012456\n", "Epoch 2735, Training loss 0.973689, Validation loss 15.338187\n", "Epoch 2736, 
Training loss 1.178849, Validation loss 7.869426\n", "Epoch 2737, Training loss 0.931446, Validation loss 15.366865\n", "Epoch 2738, Training loss 1.060010, Validation loss 8.011370\n", "Epoch 2739, Training loss 0.972923, Validation loss 15.338140\n", "Epoch 2740, Training loss 1.176999, Validation loss 7.871762\n", "Epoch 2741, Training loss 0.931720, Validation loss 15.366322\n", "Epoch 2742, Training loss 1.061124, Validation loss 8.010283\n", "Epoch 2743, Training loss 0.972152, Validation loss 15.338094\n", "Epoch 2744, Training loss 1.175138, Validation loss 7.874114\n", "Epoch 2745, Training loss 0.931996, Validation loss 15.365741\n", "Epoch 2746, Training loss 1.062247, Validation loss 8.009183\n", "Epoch 2747, Training loss 0.971376, Validation loss 15.338018\n", "Epoch 2748, Training loss 1.173266, Validation loss 7.876494\n", "Epoch 2749, Training loss 0.932273, Validation loss 15.365124\n", "Epoch 2750, Training loss 1.063375, Validation loss 8.008094\n", "Epoch 2751, Training loss 0.970598, Validation loss 15.337914\n", "Epoch 2752, Training loss 1.171393, Validation loss 7.878888\n", "Epoch 2753, Training loss 0.932553, Validation loss 15.364499\n", "Epoch 2754, Training loss 1.064508, Validation loss 8.007003\n", "Epoch 2755, Training loss 0.969812, Validation loss 15.337793\n", "Epoch 2756, Training loss 1.169501, Validation loss 7.881305\n", "Epoch 2757, Training loss 0.932833, Validation loss 15.363808\n", "Epoch 2758, Training loss 1.065647, Validation loss 8.005901\n", "Epoch 2759, Training loss 0.969023, Validation loss 15.337628\n", "Epoch 2760, Training loss 1.167603, Validation loss 7.883747\n", "Epoch 2761, Training loss 0.933117, Validation loss 15.363093\n", "Epoch 2762, Training loss 1.066793, Validation loss 8.004811\n", "Epoch 2763, Training loss 0.968228, Validation loss 15.337456\n", "Epoch 2764, Training loss 1.165695, Validation loss 7.886217\n", "Epoch 2765, Training loss 0.933398, Validation loss 15.362321\n", "Epoch 2766, Training loss 1.067941, Validation loss 8.003695\n", "Epoch 2767, Training loss 0.967431, Validation loss 15.337283\n", "Epoch 2768, Training loss 1.163781, Validation loss 7.888682\n", "Epoch 2769, Training loss 0.933683, Validation loss 15.361551\n", "Epoch 2770, Training loss 1.069094, Validation loss 8.002576\n", "Epoch 2771, Training loss 0.966632, Validation loss 15.337088\n", "Epoch 2772, Training loss 1.161860, Validation loss 7.891165\n", "Epoch 2773, Training loss 0.933971, Validation loss 15.360738\n", "Epoch 2774, Training loss 1.070250, Validation loss 8.001472\n", "Epoch 2775, Training loss 0.965828, Validation loss 15.336847\n", "Epoch 2776, Training loss 1.159934, Validation loss 7.893680\n", "Epoch 2777, Training loss 0.934255, Validation loss 15.359876\n", "Epoch 2778, Training loss 1.071404, Validation loss 8.000367\n", "Epoch 2779, Training loss 0.965021, Validation loss 15.336592\n", "Epoch 2780, Training loss 1.158000, Validation loss 7.896217\n", "Epoch 2781, Training loss 0.934539, Validation loss 15.358984\n", "Epoch 2782, Training loss 1.072557, Validation loss 7.999273\n", "Epoch 2783, Training loss 0.964211, Validation loss 15.336291\n", "Epoch 2784, Training loss 1.156065, Validation loss 7.898774\n", "Epoch 2785, Training loss 0.934819, Validation loss 15.358011\n", "Epoch 2786, Training loss 1.073706, Validation loss 7.998208\n", "Epoch 2787, Training loss 0.963398, Validation loss 15.335961\n", "Epoch 2788, Training loss 1.154129, Validation loss 7.901325\n", "Epoch 2789, Training loss 0.935098, 
Validation loss 15.357022\n", "Epoch 2790, Training loss 1.074852, Validation loss 7.997132\n", "Epoch 2791, Training loss 0.962587, Validation loss 15.335612\n", "Epoch 2792, Training loss 1.152193, Validation loss 7.903895\n", "Epoch 2793, Training loss 0.935375, Validation loss 15.355989\n", "Epoch 2794, Training loss 1.075989, Validation loss 7.996071\n", "Epoch 2795, Training loss 0.961772, Validation loss 15.335212\n", "Epoch 2796, Training loss 1.150260, Validation loss 7.906470\n", "Epoch 2797, Training loss 0.935647, Validation loss 15.354916\n", "Epoch 2798, Training loss 1.077118, Validation loss 7.995046\n", "Epoch 2799, Training loss 0.960955, Validation loss 15.334764\n", "Epoch 2800, Training loss 1.148326, Validation loss 7.909077\n", "Epoch 2801, Training loss 0.935914, Validation loss 15.353806\n", "Epoch 2802, Training loss 1.078236, Validation loss 7.994039\n", "Epoch 2803, Training loss 0.960139, Validation loss 15.334303\n", "Epoch 2804, Training loss 1.146394, Validation loss 7.911690\n", "Epoch 2805, Training loss 0.936176, Validation loss 15.352624\n", "Epoch 2806, Training loss 1.079346, Validation loss 7.993045\n", "Epoch 2807, Training loss 0.959323, Validation loss 15.333798\n", "Epoch 2808, Training loss 1.144470, Validation loss 7.914310\n", "Epoch 2809, Training loss 0.936432, Validation loss 15.351408\n", "Epoch 2810, Training loss 1.080439, Validation loss 7.992078\n", "Epoch 2811, Training loss 0.958511, Validation loss 15.333252\n", "Epoch 2812, Training loss 1.142556, Validation loss 7.916935\n", "Epoch 2813, Training loss 0.936677, Validation loss 15.350133\n", "Epoch 2814, Training loss 1.081509, Validation loss 7.991155\n", "Epoch 2815, Training loss 0.957701, Validation loss 15.332647\n", "Epoch 2816, Training loss 1.140659, Validation loss 7.919543\n", "Epoch 2817, Training loss 0.936919, Validation loss 15.348855\n", "Epoch 2818, Training loss 1.082565, Validation loss 7.990240\n", "Epoch 2819, Training loss 0.956896, Validation loss 15.332016\n", "Epoch 2820, Training loss 1.138772, Validation loss 7.922152\n", "Epoch 2821, Training loss 0.937149, Validation loss 15.347502\n", "Epoch 2822, Training loss 1.083595, Validation loss 7.989375\n", "Epoch 2823, Training loss 0.956096, Validation loss 15.331356\n", "Epoch 2824, Training loss 1.136908, Validation loss 7.924755\n", "Epoch 2825, Training loss 0.937368, Validation loss 15.346103\n", "Epoch 2826, Training loss 1.084596, Validation loss 7.988564\n", "Epoch 2827, Training loss 0.955299, Validation loss 15.330632\n", "Epoch 2828, Training loss 1.135059, Validation loss 7.927354\n", "Epoch 2829, Training loss 0.937576, Validation loss 15.344653\n", "Epoch 2830, Training loss 1.085572, Validation loss 7.987788\n", "Epoch 2831, Training loss 0.954506, Validation loss 15.329842\n", "Epoch 2832, Training loss 1.133229, Validation loss 7.929966\n", "Epoch 2833, Training loss 0.937771, Validation loss 15.343135\n", "Epoch 2834, Training loss 1.086520, Validation loss 7.987067\n", "Epoch 2835, Training loss 0.953722, Validation loss 15.329000\n", "Epoch 2836, Training loss 1.131423, Validation loss 7.932541\n", "Epoch 2837, Training loss 0.937953, Validation loss 15.341599\n", "Epoch 2838, Training loss 1.087431, Validation loss 7.986411\n", "Epoch 2839, Training loss 0.952946, Validation loss 15.328129\n", "Epoch 2840, Training loss 1.129650, Validation loss 7.935112\n", "Epoch 2841, Training loss 0.938120, Validation loss 15.340014\n", "Epoch 2842, Training loss 1.088303, Validation loss 7.985783\n", 
"Epoch 2843, Training loss 0.952180, Validation loss 15.327198\n", "Epoch 2844, Training loss 1.127910, Validation loss 7.937650\n", "Epoch 2845, Training loss 0.938274, Validation loss 15.338428\n", "Epoch 2846, Training loss 1.089141, Validation loss 7.985219\n", "Epoch 2847, Training loss 0.951427, Validation loss 15.326261\n", "Epoch 2848, Training loss 1.126198, Validation loss 7.940146\n", "Epoch 2849, Training loss 0.938411, Validation loss 15.336752\n", "Epoch 2850, Training loss 1.089936, Validation loss 7.984705\n", "Epoch 2851, Training loss 0.950682, Validation loss 15.325234\n", "Epoch 2852, Training loss 1.124525, Validation loss 7.942639\n", "Epoch 2853, Training loss 0.938530, Validation loss 15.335065\n", "Epoch 2854, Training loss 1.090688, Validation loss 7.984282\n", "Epoch 2855, Training loss 0.949950, Validation loss 15.324164\n", "Epoch 2856, Training loss 1.122891, Validation loss 7.945080\n", "Epoch 2857, Training loss 0.938635, Validation loss 15.333351\n", "Epoch 2858, Training loss 1.091399, Validation loss 7.983880\n", "Epoch 2859, Training loss 0.949233, Validation loss 15.323072\n", "Epoch 2860, Training loss 1.121297, Validation loss 7.947474\n", "Epoch 2861, Training loss 0.938722, Validation loss 15.331614\n", "Epoch 2862, Training loss 1.092061, Validation loss 7.983559\n", "Epoch 2863, Training loss 0.948527, Validation loss 15.321921\n", "Epoch 2864, Training loss 1.119742, Validation loss 7.949851\n", "Epoch 2865, Training loss 0.938791, Validation loss 15.329825\n", "Epoch 2866, Training loss 1.092677, Validation loss 7.983301\n", "Epoch 2867, Training loss 0.947837, Validation loss 15.320723\n", "Epoch 2868, Training loss 1.118233, Validation loss 7.952180\n", "Epoch 2869, Training loss 0.938840, Validation loss 15.328021\n", "Epoch 2870, Training loss 1.093243, Validation loss 7.983117\n", "Epoch 2871, Training loss 0.947163, Validation loss 15.319495\n", "Epoch 2872, Training loss 1.116771, Validation loss 7.954457\n", "Epoch 2873, Training loss 0.938871, Validation loss 15.326201\n", "Epoch 2874, Training loss 1.093760, Validation loss 7.982989\n", "Epoch 2875, Training loss 0.946508, Validation loss 15.318242\n", "Epoch 2876, Training loss 1.115365, Validation loss 7.956646\n", "Epoch 2877, Training loss 0.938887, Validation loss 15.324403\n", "Epoch 2878, Training loss 1.094226, Validation loss 7.982912\n", "Epoch 2879, Training loss 0.945869, Validation loss 15.316945\n", "Epoch 2880, Training loss 1.114001, Validation loss 7.958827\n", "Epoch 2881, Training loss 0.938880, Validation loss 15.322552\n", "Epoch 2882, Training loss 1.094641, Validation loss 7.982918\n", "Epoch 2883, Training loss 0.945242, Validation loss 15.315590\n", "Epoch 2884, Training loss 1.112684, Validation loss 7.960940\n", "Epoch 2885, Training loss 0.938854, Validation loss 15.320671\n", "Epoch 2886, Training loss 1.095001, Validation loss 7.982996\n", "Epoch 2887, Training loss 0.944634, Validation loss 15.314198\n", "Epoch 2888, Training loss 1.111415, Validation loss 7.963026\n", "Epoch 2889, Training loss 0.938809, Validation loss 15.318789\n", "Epoch 2890, Training loss 1.095317, Validation loss 7.983122\n", "Epoch 2891, Training loss 0.944045, Validation loss 15.312795\n", "Epoch 2892, Training loss 1.110201, Validation loss 7.965013\n", "Epoch 2893, Training loss 0.938751, Validation loss 15.316958\n", "Epoch 2894, Training loss 1.095585, Validation loss 7.983313\n", "Epoch 2895, Training loss 0.943474, Validation loss 15.311393\n", "Epoch 2896, Training loss 
1.109035, Validation loss 7.966947\n", "Epoch 2897, Training loss 0.938672, Validation loss 15.315075\n", "Epoch 2898, Training loss 1.095798, Validation loss 7.983578\n", "Epoch 2899, Training loss 0.942917, Validation loss 15.309921\n", "Epoch 2900, Training loss 1.107913, Validation loss 7.968865\n", "Epoch 2901, Training loss 0.938573, Validation loss 15.313188\n", "Epoch 2902, Training loss 1.095965, Validation loss 7.983885\n", "Epoch 2903, Training loss 0.942382, Validation loss 15.308428\n", "Epoch 2904, Training loss 1.106851, Validation loss 7.970674\n", "Epoch 2905, Training loss 0.938462, Validation loss 15.311337\n", "Epoch 2906, Training loss 1.096084, Validation loss 7.984250\n", "Epoch 2907, Training loss 0.941864, Validation loss 15.306938\n", "Epoch 2908, Training loss 1.105832, Validation loss 7.972433\n", "Epoch 2909, Training loss 0.938331, Validation loss 15.309459\n", "Epoch 2910, Training loss 1.096156, Validation loss 7.984683\n", "Epoch 2911, Training loss 0.941362, Validation loss 15.305421\n", "Epoch 2912, Training loss 1.104864, Validation loss 7.974127\n", "Epoch 2913, Training loss 0.938189, Validation loss 15.307625\n", "Epoch 2914, Training loss 1.096188, Validation loss 7.985138\n", "Epoch 2915, Training loss 0.940874, Validation loss 15.303885\n", "Epoch 2916, Training loss 1.103932, Validation loss 7.975791\n", "Epoch 2917, Training loss 0.938031, Validation loss 15.305779\n", "Epoch 2918, Training loss 1.096180, Validation loss 7.985666\n", "Epoch 2919, Training loss 0.940401, Validation loss 15.302327\n", "Epoch 2920, Training loss 1.103040, Validation loss 7.977390\n", "Epoch 2921, Training loss 0.937858, Validation loss 15.303926\n", "Epoch 2922, Training loss 1.096134, Validation loss 7.986210\n", "Epoch 2923, Training loss 0.939943, Validation loss 15.300762\n", "Epoch 2924, Training loss 1.102190, Validation loss 7.978954\n", "Epoch 2925, Training loss 0.937674, Validation loss 15.302091\n", "Epoch 2926, Training loss 1.096053, Validation loss 7.986831\n", "Epoch 2927, Training loss 0.939499, Validation loss 15.299184\n", "Epoch 2928, Training loss 1.101381, Validation loss 7.980447\n", "Epoch 2929, Training loss 0.937480, Validation loss 15.300277\n", "Epoch 2930, Training loss 1.095939, Validation loss 7.987458\n", "Epoch 2931, Training loss 0.939070, Validation loss 15.297613\n", "Epoch 2932, Training loss 1.100605, Validation loss 7.981887\n", "Epoch 2933, Training loss 0.937277, Validation loss 15.298493\n", "Epoch 2934, Training loss 1.095797, Validation loss 7.988098\n", "Epoch 2935, Training loss 0.938654, Validation loss 15.296027\n", "Epoch 2936, Training loss 1.099867, Validation loss 7.983273\n", "Epoch 2937, Training loss 0.937059, Validation loss 15.296694\n", "Epoch 2938, Training loss 1.095617, Validation loss 7.988822\n", "Epoch 2939, Training loss 0.938248, Validation loss 15.294406\n", "Epoch 2940, Training loss 1.099159, Validation loss 7.984630\n", "Epoch 2941, Training loss 0.936837, Validation loss 15.294938\n", "Epoch 2942, Training loss 1.095415, Validation loss 7.989552\n", "Epoch 2943, Training loss 0.937851, Validation loss 15.292803\n", "Epoch 2944, Training loss 1.098475, Validation loss 7.985964\n", "Epoch 2945, Training loss 0.936603, Validation loss 15.293145\n", "Epoch 2946, Training loss 1.095188, Validation loss 7.990316\n", "Epoch 2947, Training loss 0.937465, Validation loss 15.291178\n", "Epoch 2948, Training loss 1.097821, Validation loss 7.987243\n", "Epoch 2949, Training loss 0.936364, Validation loss 
15.291397\n", "Epoch 2950, Training loss 1.094943, Validation loss 7.991064\n", "Epoch 2951, Training loss 0.937093, Validation loss 15.289595\n", "Epoch 2952, Training loss 1.097195, Validation loss 7.988487\n", "Epoch 2953, Training loss 0.936115, Validation loss 15.289663\n", "Epoch 2954, Training loss 1.094666, Validation loss 7.991884\n", "Epoch 2955, Training loss 0.936726, Validation loss 15.287974\n", "Epoch 2956, Training loss 1.096588, Validation loss 7.989700\n", "Epoch 2957, Training loss 0.935864, Validation loss 15.287944\n", "Epoch 2958, Training loss 1.094380, Validation loss 7.992682\n", "Epoch 2959, Training loss 0.936369, Validation loss 15.286369\n", "Epoch 2960, Training loss 1.096008, Validation loss 7.990854\n", "Epoch 2961, Training loss 0.935607, Validation loss 15.286242\n", "Epoch 2962, Training loss 1.094075, Validation loss 7.993507\n", "Epoch 2963, Training loss 0.936021, Validation loss 15.284776\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 2964, Training loss 1.095442, Validation loss 7.991999\n", "Epoch 2965, Training loss 0.935346, Validation loss 15.284553\n", "Epoch 2966, Training loss 1.093758, Validation loss 7.994332\n", "Epoch 2967, Training loss 0.935680, Validation loss 15.283187\n", "Epoch 2968, Training loss 1.094899, Validation loss 7.993115\n", "Epoch 2969, Training loss 0.935081, Validation loss 15.282870\n", "Epoch 2970, Training loss 1.093426, Validation loss 7.995184\n", "Epoch 2971, Training loss 0.935343, Validation loss 15.281568\n", "Epoch 2972, Training loss 1.094368, Validation loss 7.994213\n", "Epoch 2973, Training loss 0.934810, Validation loss 15.281187\n", "Epoch 2974, Training loss 1.093080, Validation loss 7.996033\n", "Epoch 2975, Training loss 0.935012, Validation loss 15.279960\n", "Epoch 2976, Training loss 1.093853, Validation loss 7.995277\n", "Epoch 2977, Training loss 0.934540, Validation loss 15.279519\n", "Epoch 2978, Training loss 1.092729, Validation loss 7.996917\n", "Epoch 2979, Training loss 0.934684, Validation loss 15.278337\n", "Epoch 2980, Training loss 1.093345, Validation loss 7.996340\n", "Epoch 2981, Training loss 0.934263, Validation loss 15.277847\n", "Epoch 2982, Training loss 1.092366, Validation loss 7.997786\n", "Epoch 2983, Training loss 0.934364, Validation loss 15.276731\n", "Epoch 2984, Training loss 1.092854, Validation loss 7.997363\n", "Epoch 2985, Training loss 0.933987, Validation loss 15.276200\n", "Epoch 2986, Training loss 1.091999, Validation loss 7.998638\n", "Epoch 2987, Training loss 0.934050, Validation loss 15.275157\n", "Epoch 2988, Training loss 1.092375, Validation loss 7.998375\n", "Epoch 2989, Training loss 0.933711, Validation loss 15.274580\n", "Epoch 2990, Training loss 1.091624, Validation loss 7.999517\n", "Epoch 2991, Training loss 0.933740, Validation loss 15.273576\n", "Epoch 2992, Training loss 1.091905, Validation loss 7.999368\n", "Epoch 2993, Training loss 0.933427, Validation loss 15.272930\n", "Epoch 2994, Training loss 1.091237, Validation loss 8.000396\n", "Epoch 2995, Training loss 0.933433, Validation loss 15.271957\n", "Epoch 2996, Training loss 1.091450, Validation loss 8.000318\n", "Epoch 2997, Training loss 0.933148, Validation loss 15.271328\n", "Epoch 2998, Training loss 1.090851, Validation loss 8.001263\n", "Epoch 2999, Training loss 0.933129, Validation loss 15.270377\n", "Epoch 3000, Training loss 1.090997, Validation loss 8.001279\n", "Epoch 3001, Training loss 0.932866, Validation loss 15.269727\n", "Epoch 3002, Training 
loss 1.090458, Validation loss 8.002156\n", "Epoch 3003, Training loss 0.932828, Validation loss 15.268797\n", "Epoch 3004, Training loss 1.090549, Validation loss 8.002210\n", "Epoch 3005, Training loss 0.932586, Validation loss 15.268128\n", "Epoch 3006, Training loss 1.090069, Validation loss 8.003024\n", "Epoch 3007, Training loss 0.932529, Validation loss 15.267217\n", "Epoch 3008, Training loss 1.090108, Validation loss 8.003151\n", "Epoch 3009, Training loss 0.932305, Validation loss 15.266538\n", "Epoch 3010, Training loss 1.089673, Validation loss 8.003890\n", "Epoch 3011, Training loss 0.932235, Validation loss 15.265651\n", "Epoch 3012, Training loss 1.089673, Validation loss 8.004065\n", "Epoch 3013, Training loss 0.932026, Validation loss 15.264965\n", "Epoch 3014, Training loss 1.089279, Validation loss 8.004747\n", "Epoch 3015, Training loss 0.931943, Validation loss 15.264092\n", "Epoch 3016, Training loss 1.089243, Validation loss 8.004975\n", "Epoch 3017, Training loss 0.931744, Validation loss 15.263386\n", "Epoch 3018, Training loss 1.088880, Validation loss 8.005610\n", "Epoch 3019, Training loss 0.931649, Validation loss 15.262520\n", "Epoch 3020, Training loss 1.088813, Validation loss 8.005883\n", "Epoch 3021, Training loss 0.931465, Validation loss 15.261821\n", "Epoch 3022, Training loss 1.088488, Validation loss 8.006468\n", "Epoch 3023, Training loss 0.931361, Validation loss 15.260984\n", "Epoch 3024, Training loss 1.088392, Validation loss 8.006747\n", "Epoch 3025, Training loss 0.931185, Validation loss 15.260253\n", "Epoch 3026, Training loss 1.088086, Validation loss 8.007333\n", "Epoch 3027, Training loss 0.931076, Validation loss 15.259423\n", "Epoch 3028, Training loss 1.087978, Validation loss 8.007630\n", "Epoch 3029, Training loss 0.930904, Validation loss 15.258702\n", "Epoch 3030, Training loss 1.087686, Validation loss 8.008174\n", "Epoch 3031, Training loss 0.930791, Validation loss 15.257884\n", "Epoch 3032, Training loss 1.087562, Validation loss 8.008501\n", "Epoch 3033, Training loss 0.930626, Validation loss 15.257164\n", "Epoch 3034, Training loss 1.087289, Validation loss 8.009004\n", "Epoch 3035, Training loss 0.930510, Validation loss 15.256356\n", "Epoch 3036, Training loss 1.087153, Validation loss 8.009342\n", "Epoch 3037, Training loss 0.930349, Validation loss 15.255625\n", "Epoch 3038, Training loss 1.086890, Validation loss 8.009849\n", "Epoch 3039, Training loss 0.930228, Validation loss 15.254821\n", "Epoch 3040, Training loss 1.086744, Validation loss 8.010201\n", "Epoch 3041, Training loss 0.930068, Validation loss 15.254064\n", "Epoch 3042, Training loss 1.086492, Validation loss 8.010680\n", "Epoch 3043, Training loss 0.929947, Validation loss 15.253271\n", "Epoch 3044, Training loss 1.086339, Validation loss 8.011036\n", "Epoch 3045, Training loss 0.929792, Validation loss 15.252540\n", "Epoch 3046, Training loss 1.086091, Validation loss 8.011525\n", "Epoch 3047, Training loss 0.929669, Validation loss 15.251755\n", "Epoch 3048, Training loss 1.085935, Validation loss 8.011870\n", "Epoch 3049, Training loss 0.929518, Validation loss 15.251016\n", "Epoch 3050, Training loss 1.085700, Validation loss 8.012330\n", "Epoch 3051, Training loss 0.929392, Validation loss 15.250241\n", "Epoch 3052, Training loss 1.085535, Validation loss 8.012701\n", "Epoch 3053, Training loss 0.929244, Validation loss 15.249511\n", "Epoch 3054, Training loss 1.085307, Validation loss 8.013139\n", "Epoch 3055, Training loss 0.929114, Validation loss 
15.248733\n", "Epoch 3056, Training loss 1.085130, Validation loss 8.013525\n", "Epoch 3057, Training loss 0.928972, Validation loss 15.247987\n", "Epoch 3058, Training loss 1.084915, Validation loss 8.013961\n", "Epoch 3059, Training loss 0.928838, Validation loss 15.247227\n", "Epoch 3060, Training loss 1.084730, Validation loss 8.014339\n", "Epoch 3061, Training loss 0.928702, Validation loss 15.246500\n", "Epoch 3062, Training loss 1.084530, Validation loss 8.014748\n", "Epoch 3063, Training loss 0.928566, Validation loss 15.245751\n", "Epoch 3064, Training loss 1.084333, Validation loss 8.015142\n", "Epoch 3065, Training loss 0.928430, Validation loss 15.244995\n", "Epoch 3066, Training loss 1.084141, Validation loss 8.015517\n", "Epoch 3067, Training loss 0.928295, Validation loss 15.244275\n", "Epoch 3068, Training loss 1.083942, Validation loss 8.015924\n", "Epoch 3069, Training loss 0.928162, Validation loss 15.243512\n", "Epoch 3070, Training loss 1.083750, Validation loss 8.016318\n", "Epoch 3071, Training loss 0.928024, Validation loss 15.242756\n", "Epoch 3072, Training loss 1.083552, Validation loss 8.016716\n", "Epoch 3073, Training loss 0.927891, Validation loss 15.242014\n", "Epoch 3074, Training loss 1.083359, Validation loss 8.017093\n", "Epoch 3075, Training loss 0.927754, Validation loss 15.241265\n", "Epoch 3076, Training loss 1.083162, Validation loss 8.017499\n", "Epoch 3077, Training loss 0.927622, Validation loss 15.240531\n", "Epoch 3078, Training loss 1.082975, Validation loss 8.017884\n", "Epoch 3079, Training loss 0.927487, Validation loss 15.239782\n", "Epoch 3080, Training loss 1.082777, Validation loss 8.018271\n", "Epoch 3081, Training loss 0.927355, Validation loss 15.239056\n", "Epoch 3082, Training loss 1.082587, Validation loss 8.018654\n", "Epoch 3083, Training loss 0.927220, Validation loss 15.238321\n", "Epoch 3084, Training loss 1.082391, Validation loss 8.019044\n", "Epoch 3085, Training loss 0.927089, Validation loss 15.237580\n", "Epoch 3086, Training loss 1.082203, Validation loss 8.019431\n", "Epoch 3087, Training loss 0.926952, Validation loss 15.236832\n", "Epoch 3088, Training loss 1.082003, Validation loss 8.019820\n", "Epoch 3089, Training loss 0.926825, Validation loss 15.236105\n", "Epoch 3090, Training loss 1.081824, Validation loss 8.020180\n", "Epoch 3091, Training loss 0.926687, Validation loss 15.235386\n", "Epoch 3092, Training loss 1.081620, Validation loss 8.020569\n", "Epoch 3093, Training loss 0.926559, Validation loss 15.234659\n", "Epoch 3094, Training loss 1.081438, Validation loss 8.020928\n", "Epoch 3095, Training loss 0.926425, Validation loss 15.233925\n", "Epoch 3096, Training loss 1.081241, Validation loss 8.021302\n", "Epoch 3097, Training loss 0.926298, Validation loss 15.233207\n", "Epoch 3098, Training loss 1.081063, Validation loss 8.021673\n", "Epoch 3099, Training loss 0.926162, Validation loss 15.232491\n", "Epoch 3100, Training loss 1.080860, Validation loss 8.022054\n", "Epoch 3101, Training loss 0.926034, Validation loss 15.231746\n", "Epoch 3102, Training loss 1.080680, Validation loss 8.022412\n", "Epoch 3103, Training loss 0.925902, Validation loss 15.231037\n", "Epoch 3104, Training loss 1.080484, Validation loss 8.022793\n", "Epoch 3105, Training loss 0.925773, Validation loss 15.230300\n", "Epoch 3106, Training loss 1.080304, Validation loss 8.023151\n", "Epoch 3107, Training loss 0.925642, Validation loss 15.229604\n", "Epoch 3108, Training loss 1.080109, Validation loss 8.023534\n", "Epoch 3109, 
Training loss 0.925515, Validation loss 15.228869\n", "Epoch 3110, Training loss 1.079929, Validation loss 8.023863\n", "Epoch 3111, Training loss 0.925382, Validation loss 15.228162\n", "Epoch 3112, Training loss 1.079733, Validation loss 8.024232\n", "Epoch 3113, Training loss 0.925256, Validation loss 15.227439\n", "Epoch 3114, Training loss 1.079555, Validation loss 8.024584\n", "Epoch 3115, Training loss 0.925125, Validation loss 15.226734\n", "Epoch 3116, Training loss 1.079364, Validation loss 8.024947\n", "Epoch 3117, Training loss 0.924998, Validation loss 15.226015\n", "Epoch 3118, Training loss 1.079179, Validation loss 8.025311\n", "Epoch 3119, Training loss 0.924866, Validation loss 15.225282\n", "Epoch 3120, Training loss 1.078992, Validation loss 8.025651\n", "Epoch 3121, Training loss 0.924742, Validation loss 15.224607\n", "Epoch 3122, Training loss 1.078810, Validation loss 8.026012\n", "Epoch 3123, Training loss 0.924611, Validation loss 15.223887\n", "Epoch 3124, Training loss 1.078620, Validation loss 8.026370\n", "Epoch 3125, Training loss 0.924483, Validation loss 15.223176\n", "Epoch 3126, Training loss 1.078436, Validation loss 8.026736\n", "Epoch 3127, Training loss 0.924356, Validation loss 15.222465\n", "Epoch 3128, Training loss 1.078254, Validation loss 8.027072\n", "Epoch 3129, Training loss 0.924231, Validation loss 15.221768\n", "Epoch 3130, Training loss 1.078069, Validation loss 8.027418\n", "Epoch 3131, Training loss 0.924105, Validation loss 15.221071\n", "Epoch 3132, Training loss 1.077887, Validation loss 8.027770\n", "Epoch 3133, Training loss 0.923974, Validation loss 15.220348\n", "Epoch 3134, Training loss 1.077699, Validation loss 8.028120\n", "Epoch 3135, Training loss 0.923851, Validation loss 15.219656\n", "Epoch 3136, Training loss 1.077522, Validation loss 8.028455\n", "Epoch 3137, Training loss 0.923723, Validation loss 15.218945\n", "Epoch 3138, Training loss 1.077334, Validation loss 8.028796\n", "Epoch 3139, Training loss 0.923598, Validation loss 15.218247\n", "Epoch 3140, Training loss 1.077154, Validation loss 8.029137\n", "Epoch 3141, Training loss 0.923472, Validation loss 15.217544\n", "Epoch 3142, Training loss 1.076973, Validation loss 8.029491\n", "Epoch 3143, Training loss 0.923344, Validation loss 15.216846\n", "Epoch 3144, Training loss 1.076786, Validation loss 8.029832\n", "Epoch 3145, Training loss 0.923223, Validation loss 15.216139\n", "Epoch 3146, Training loss 1.076614, Validation loss 8.030138\n", "Epoch 3147, Training loss 0.923094, Validation loss 15.215446\n", "Epoch 3148, Training loss 1.076424, Validation loss 8.030505\n", "Epoch 3149, Training loss 0.922972, Validation loss 15.214749\n", "Epoch 3150, Training loss 1.076254, Validation loss 8.030817\n", "Epoch 3151, Training loss 0.922845, Validation loss 15.214060\n", "Epoch 3152, Training loss 1.076063, Validation loss 8.031166\n", "Epoch 3153, Training loss 0.922721, Validation loss 15.213348\n", "Epoch 3154, Training loss 1.075891, Validation loss 8.031486\n", "Epoch 3155, Training loss 0.922597, Validation loss 15.212659\n", "Epoch 3156, Training loss 1.075708, Validation loss 8.031818\n", "Epoch 3157, Training loss 0.922472, Validation loss 15.211969\n", "Epoch 3158, Training loss 1.075529, Validation loss 8.032157\n", "Epoch 3159, Training loss 0.922348, Validation loss 15.211273\n", "Epoch 3160, Training loss 1.075348, Validation loss 8.032488\n", "Epoch 3161, Training loss 0.922227, Validation loss 15.210591\n", "Epoch 3162, Training loss 1.075175, 
Validation loss 8.032797\n", "Epoch 3163, Training loss 0.922102, Validation loss 15.209917\n", "Epoch 3164, Training loss 1.074992, Validation loss 8.033117\n", "Epoch 3165, Training loss 0.921981, Validation loss 15.209239\n", "Epoch 3166, Training loss 1.074815, Validation loss 8.033442\n", "Epoch 3167, Training loss 0.921858, Validation loss 15.208539\n", "Epoch 3168, Training loss 1.074638, Validation loss 8.033768\n", "Epoch 3169, Training loss 0.921736, Validation loss 15.207860\n", "Epoch 3170, Training loss 1.074458, Validation loss 8.034093\n", "Epoch 3171, Training loss 0.921614, Validation loss 15.207179\n", "Epoch 3172, Training loss 1.074286, Validation loss 8.034410\n", "Epoch 3173, Training loss 0.921489, Validation loss 15.206490\n", "Epoch 3174, Training loss 1.074103, Validation loss 8.034733\n", "Epoch 3175, Training loss 0.921372, Validation loss 15.205819\n", "Epoch 3176, Training loss 1.073936, Validation loss 8.035037\n", "Epoch 3177, Training loss 0.921247, Validation loss 15.205137\n", "Epoch 3178, Training loss 1.073752, Validation loss 8.035351\n", "Epoch 3179, Training loss 0.921129, Validation loss 15.204463\n", "Epoch 3180, Training loss 1.073584, Validation loss 8.035675\n", "Epoch 3181, Training loss 0.921003, Validation loss 15.203788\n", "Epoch 3182, Training loss 1.073397, Validation loss 8.035991\n", "Epoch 3183, Training loss 0.920886, Validation loss 15.203099\n", "Epoch 3184, Training loss 1.073235, Validation loss 8.036283\n", "Epoch 3185, Training loss 0.920762, Validation loss 15.202425\n", "Epoch 3186, Training loss 1.073048, Validation loss 8.036605\n", "Epoch 3187, Training loss 0.920645, Validation loss 15.201747\n", "Epoch 3188, Training loss 1.072882, Validation loss 8.036902\n", "Epoch 3189, Training loss 0.920524, Validation loss 15.201091\n", "Epoch 3190, Training loss 1.072706, Validation loss 8.037212\n", "Epoch 3191, Training loss 0.920404, Validation loss 15.200409\n", "Epoch 3192, Training loss 1.072533, Validation loss 8.037528\n", "Epoch 3193, Training loss 0.920281, Validation loss 15.199728\n", "Epoch 3194, Training loss 1.072355, Validation loss 8.037843\n", "Epoch 3195, Training loss 0.920164, Validation loss 15.199046\n", "Epoch 3196, Training loss 1.072187, Validation loss 8.038136\n", "Epoch 3197, Training loss 0.920042, Validation loss 15.198372\n", "Epoch 3198, Training loss 1.072008, Validation loss 8.038437\n", "Epoch 3199, Training loss 0.919927, Validation loss 15.197723\n", "Epoch 3200, Training loss 1.071843, Validation loss 8.038737\n", "Epoch 3201, Training loss 0.919803, Validation loss 15.197045\n", "Epoch 3202, Training loss 1.071661, Validation loss 8.039058\n", "Epoch 3203, Training loss 0.919685, Validation loss 15.196371\n", "Epoch 3204, Training loss 1.071492, Validation loss 8.039365\n", "Epoch 3205, Training loss 0.919566, Validation loss 15.195701\n", "Epoch 3206, Training loss 1.071321, Validation loss 8.039662\n", "Epoch 3207, Training loss 0.919447, Validation loss 15.195023\n", "Epoch 3208, Training loss 1.071149, Validation loss 8.039948\n", "Epoch 3209, Training loss 0.919332, Validation loss 15.194378\n", "Epoch 3210, Training loss 1.070982, Validation loss 8.040244\n", "Epoch 3211, Training loss 0.919212, Validation loss 15.193719\n", "Epoch 3212, Training loss 1.070806, Validation loss 8.040528\n", "Epoch 3213, Training loss 0.919094, Validation loss 15.193048\n", "Epoch 3214, Training loss 1.070636, Validation loss 8.040828\n", "Epoch 3215, Training loss 0.918979, Validation loss 15.192392\n", 
"Epoch 3216, Training loss 1.070470, Validation loss 8.041126\n", "Epoch 3217, Training loss 0.918858, Validation loss 15.191732\n", "Epoch 3218, Training loss 1.070293, Validation loss 8.041425\n", "Epoch 3219, Training loss 0.918746, Validation loss 15.191077\n", "Epoch 3220, Training loss 1.070134, Validation loss 8.041677\n", "Epoch 3221, Training loss 0.918625, Validation loss 15.190433\n", "Epoch 3222, Training loss 1.069956, Validation loss 8.041984\n", "Epoch 3223, Training loss 0.918513, Validation loss 15.189780\n", "Epoch 3224, Training loss 1.069795, Validation loss 8.042267\n", "Epoch 3225, Training loss 0.918391, Validation loss 15.189121\n", "Epoch 3226, Training loss 1.069616, Validation loss 8.042562\n", "Epoch 3227, Training loss 0.918279, Validation loss 15.188451\n", "Epoch 3228, Training loss 1.069461, Validation loss 8.042827\n", "Epoch 3229, Training loss 0.918159, Validation loss 15.187810\n", "Epoch 3230, Training loss 1.069278, Validation loss 8.043135\n", "Epoch 3231, Training loss 0.918048, Validation loss 15.187143\n", "Epoch 3232, Training loss 1.069122, Validation loss 8.043396\n", "Epoch 3233, Training loss 0.917928, Validation loss 15.186506\n", "Epoch 3234, Training loss 1.068944, Validation loss 8.043696\n", "Epoch 3235, Training loss 0.917816, Validation loss 15.185843\n", "Epoch 3236, Training loss 1.068788, Validation loss 8.043963\n", "Epoch 3237, Training loss 0.917697, Validation loss 15.185202\n", "Epoch 3238, Training loss 1.068609, Validation loss 8.044259\n", "Epoch 3239, Training loss 0.917585, Validation loss 15.184539\n", "Epoch 3240, Training loss 1.068454, Validation loss 8.044511\n", "Epoch 3241, Training loss 0.917470, Validation loss 15.183921\n", "Epoch 3242, Training loss 1.068280, Validation loss 8.044795\n", "Epoch 3243, Training loss 0.917356, Validation loss 15.183280\n", "Epoch 3244, Training loss 1.068117, Validation loss 8.045072\n", "Epoch 3245, Training loss 0.917239, Validation loss 15.182602\n", "Epoch 3246, Training loss 1.067950, Validation loss 8.045350\n", "Epoch 3247, Training loss 0.917127, Validation loss 15.181961\n", "Epoch 3248, Training loss 1.067786, Validation loss 8.045610\n", "Epoch 3249, Training loss 0.917012, Validation loss 15.181339\n", "Epoch 3250, Training loss 1.067618, Validation loss 8.045889\n", "Epoch 3251, Training loss 0.916898, Validation loss 15.180695\n", "Epoch 3252, Training loss 1.067453, Validation loss 8.046160\n", "Epoch 3253, Training loss 0.916787, Validation loss 15.180064\n", "Epoch 3254, Training loss 1.067290, Validation loss 8.046422\n", "Epoch 3255, Training loss 0.916671, Validation loss 15.179401\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 3256, Training loss 1.067123, Validation loss 8.046698\n", "Epoch 3257, Training loss 0.916559, Validation loss 15.178754\n", "Epoch 3258, Training loss 1.066963, Validation loss 8.046956\n", "Epoch 3259, Training loss 0.916447, Validation loss 15.178143\n", "Epoch 3260, Training loss 1.066797, Validation loss 8.047230\n", "Epoch 3261, Training loss 0.916333, Validation loss 15.177505\n", "Epoch 3262, Training loss 1.066632, Validation loss 8.047501\n", "Epoch 3263, Training loss 0.916219, Validation loss 15.176849\n", "Epoch 3264, Training loss 1.066469, Validation loss 8.047761\n", "Epoch 3265, Training loss 0.916108, Validation loss 15.176212\n", "Epoch 3266, Training loss 1.066310, Validation loss 8.048009\n", "Epoch 3267, Training loss 0.915996, Validation loss 15.175591\n", "Epoch 3268, Training loss 1.066140, 
Validation loss 8.048297\n", "Epoch 3269, Training loss 0.915882, Validation loss 15.174932\n", "Epoch 3270, Training loss 1.065982, Validation loss 8.048554\n", "Epoch 3271, Training loss 0.915769, Validation loss 15.174287\n", "Epoch 3272, Training loss 1.065816, Validation loss 8.048819\n", "Epoch 3273, Training loss 0.915659, Validation loss 15.173664\n", "Epoch 3274, Training loss 1.065655, Validation loss 8.049076\n", "Epoch 3275, Training loss 0.915545, Validation loss 15.173024\n", "Epoch 3276, Training loss 1.065493, Validation loss 8.049330\n", "Epoch 3277, Training loss 0.915435, Validation loss 15.172394\n", "Epoch 3278, Training loss 1.065332, Validation loss 8.049604\n", "Epoch 3279, Training loss 0.915320, Validation loss 15.171750\n", "Epoch 3280, Training loss 1.065167, Validation loss 8.049866\n", "Epoch 3281, Training loss 0.915209, Validation loss 15.171109\n", "Epoch 3282, Training loss 1.065008, Validation loss 8.050113\n", "Epoch 3283, Training loss 0.915101, Validation loss 15.170484\n", "Epoch 3284, Training loss 1.064852, Validation loss 8.050349\n", "Epoch 3285, Training loss 0.914991, Validation loss 15.169871\n", "Epoch 3286, Training loss 1.064687, Validation loss 8.050612\n", "Epoch 3287, Training loss 0.914879, Validation loss 15.169243\n", "Epoch 3288, Training loss 1.064526, Validation loss 8.050866\n", "Epoch 3289, Training loss 0.914769, Validation loss 15.168612\n", "Epoch 3290, Training loss 1.064367, Validation loss 8.051123\n", "Epoch 3291, Training loss 0.914660, Validation loss 15.167994\n", "Epoch 3292, Training loss 1.064211, Validation loss 8.051367\n", "Epoch 3293, Training loss 0.914547, Validation loss 15.167361\n", "Epoch 3294, Training loss 1.064044, Validation loss 8.051615\n", "Epoch 3295, Training loss 0.914439, Validation loss 15.166724\n", "Epoch 3296, Training loss 1.063891, Validation loss 8.051855\n", "Epoch 3297, Training loss 0.914329, Validation loss 15.166109\n", "Epoch 3298, Training loss 1.063728, Validation loss 8.052097\n", "Epoch 3299, Training loss 0.914220, Validation loss 15.165487\n", "Epoch 3300, Training loss 1.063572, Validation loss 8.052358\n", "Epoch 3301, Training loss 0.914109, Validation loss 15.164865\n", "Epoch 3302, Training loss 1.063407, Validation loss 8.052598\n", "Epoch 3303, Training loss 0.914001, Validation loss 15.164243\n", "Epoch 3304, Training loss 1.063253, Validation loss 8.052843\n", "Epoch 3305, Training loss 0.913893, Validation loss 15.163629\n", "Epoch 3306, Training loss 1.063096, Validation loss 8.053087\n", "Epoch 3307, Training loss 0.913782, Validation loss 15.162999\n", "Epoch 3308, Training loss 1.062935, Validation loss 8.053340\n", "Epoch 3309, Training loss 0.913674, Validation loss 15.162362\n", "Epoch 3310, Training loss 1.062781, Validation loss 8.053567\n", "Epoch 3311, Training loss 0.913565, Validation loss 15.161747\n", "Epoch 3312, Training loss 1.062620, Validation loss 8.053806\n", "Epoch 3313, Training loss 0.913456, Validation loss 15.161133\n", "Epoch 3314, Training loss 1.062462, Validation loss 8.054052\n", "Epoch 3315, Training loss 0.913349, Validation loss 15.160510\n", "Epoch 3316, Training loss 1.062308, Validation loss 8.054288\n", "Epoch 3317, Training loss 0.913242, Validation loss 15.159910\n", "Epoch 3318, Training loss 1.062151, Validation loss 8.054520\n", "Epoch 3319, Training loss 0.913135, Validation loss 15.159303\n", "Epoch 3320, Training loss 1.061998, Validation loss 8.054747\n", "Epoch 3321, Training loss 0.913027, Validation loss 15.158673\n", 
"Epoch 3322, Training loss 1.061841, Validation loss 8.054989\n", "Epoch 3323, Training loss 0.912919, Validation loss 15.158066\n", "Epoch 3324, Training loss 1.061682, Validation loss 8.055243\n", "Epoch 3325, Training loss 0.912811, Validation loss 15.157455\n", "Epoch 3326, Training loss 1.061530, Validation loss 8.055458\n", "Epoch 3327, Training loss 0.912703, Validation loss 15.156837\n", "Epoch 3328, Training loss 1.061368, Validation loss 8.055707\n", "Epoch 3329, Training loss 0.912597, Validation loss 15.156208\n", "Epoch 3330, Training loss 1.061218, Validation loss 8.055927\n", "Epoch 3331, Training loss 0.912489, Validation loss 15.155622\n", "Epoch 3332, Training loss 1.061058, Validation loss 8.056165\n", "Epoch 3333, Training loss 0.912386, Validation loss 15.155012\n", "Epoch 3334, Training loss 1.060913, Validation loss 8.056381\n", "Epoch 3335, Training loss 0.912275, Validation loss 15.154386\n", "Epoch 3336, Training loss 1.060748, Validation loss 8.056623\n", "Epoch 3337, Training loss 0.912171, Validation loss 15.153760\n", "Epoch 3338, Training loss 1.060602, Validation loss 8.056845\n", "Epoch 3339, Training loss 0.912061, Validation loss 15.153172\n", "Epoch 3340, Training loss 1.060437, Validation loss 8.057094\n", "Epoch 3341, Training loss 0.911959, Validation loss 15.152542\n", "Epoch 3342, Training loss 1.060295, Validation loss 8.057298\n", "Epoch 3343, Training loss 0.911849, Validation loss 15.151949\n", "Epoch 3344, Training loss 1.060130, Validation loss 8.057526\n", "Epoch 3345, Training loss 0.911747, Validation loss 15.151335\n", "Epoch 3346, Training loss 1.059988, Validation loss 8.057741\n", "Epoch 3347, Training loss 0.911641, Validation loss 15.150750\n", "Epoch 3348, Training loss 1.059828, Validation loss 8.057977\n", "Epoch 3349, Training loss 0.911539, Validation loss 15.150146\n", "Epoch 3350, Training loss 1.059683, Validation loss 8.058187\n", "Epoch 3351, Training loss 0.911431, Validation loss 15.149528\n", "Epoch 3352, Training loss 1.059526, Validation loss 8.058412\n", "Epoch 3353, Training loss 0.911325, Validation loss 15.148928\n", "Epoch 3354, Training loss 1.059372, Validation loss 8.058651\n", "Epoch 3355, Training loss 0.911219, Validation loss 15.148306\n", "Epoch 3356, Training loss 1.059219, Validation loss 8.058872\n", "Epoch 3357, Training loss 0.911113, Validation loss 15.147688\n", "Epoch 3358, Training loss 1.059067, Validation loss 8.059089\n", "Epoch 3359, Training loss 0.911009, Validation loss 15.147084\n", "Epoch 3360, Training loss 1.058913, Validation loss 8.059310\n", "Epoch 3361, Training loss 0.910901, Validation loss 15.146455\n", "Epoch 3362, Training loss 1.058761, Validation loss 8.059541\n", "Epoch 3363, Training loss 0.910797, Validation loss 15.145855\n", "Epoch 3364, Training loss 1.058610, Validation loss 8.059757\n", "Epoch 3365, Training loss 0.910697, Validation loss 15.145278\n", "Epoch 3366, Training loss 1.058467, Validation loss 8.059947\n", "Epoch 3367, Training loss 0.910590, Validation loss 15.144692\n", "Epoch 3368, Training loss 1.058305, Validation loss 8.060183\n", "Epoch 3369, Training loss 0.910489, Validation loss 15.144086\n", "Epoch 3370, Training loss 1.058163, Validation loss 8.060385\n", "Epoch 3371, Training loss 0.910382, Validation loss 15.143486\n", "Epoch 3372, Training loss 1.058005, Validation loss 8.060608\n", "Epoch 3373, Training loss 0.910281, Validation loss 15.142872\n", "Epoch 3374, Training loss 1.057864, Validation loss 8.060812\n", "Epoch 3375, Training loss 
0.910173, Validation loss 15.142290\n", "Epoch 3376, Training loss 1.057700, Validation loss 8.061041\n", "Epoch 3377, Training loss 0.910075, Validation loss 15.141686\n", "Epoch 3378, Training loss 1.057563, Validation loss 8.061247\n", "Epoch 3379, Training loss 0.909966, Validation loss 15.141079\n", "Epoch 3380, Training loss 1.057400, Validation loss 8.061484\n", "Epoch 3381, Training loss 0.909866, Validation loss 15.140472\n", "Epoch 3382, Training loss 1.057261, Validation loss 8.061673\n", "Epoch 3383, Training loss 0.909762, Validation loss 15.139887\n", "Epoch 3384, Training loss 1.057107, Validation loss 8.061880\n", "Epoch 3385, Training loss 0.909660, Validation loss 15.139302\n", "Epoch 3386, Training loss 1.056959, Validation loss 8.062102\n", "Epoch 3387, Training loss 0.909557, Validation loss 15.138696\n", "Epoch 3388, Training loss 1.056812, Validation loss 8.062289\n", "Epoch 3389, Training loss 0.909454, Validation loss 15.138110\n", "Epoch 3390, Training loss 1.056662, Validation loss 8.062499\n", "Epoch 3391, Training loss 0.909352, Validation loss 15.137518\n", "Epoch 3392, Training loss 1.056513, Validation loss 8.062720\n", "Epoch 3393, Training loss 0.909249, Validation loss 15.136904\n", "Epoch 3394, Training loss 1.056366, Validation loss 8.062921\n", "Epoch 3395, Training loss 0.909146, Validation loss 15.136312\n", "Epoch 3396, Training loss 1.056216, Validation loss 8.063118\n", "Epoch 3397, Training loss 0.909046, Validation loss 15.135734\n", "Epoch 3398, Training loss 1.056072, Validation loss 8.063313\n", "Epoch 3399, Training loss 0.908942, Validation loss 15.135145\n", "Epoch 3400, Training loss 1.055916, Validation loss 8.063536\n", "Epoch 3401, Training loss 0.908842, Validation loss 15.134549\n", "Epoch 3402, Training loss 1.055777, Validation loss 8.063720\n", "Epoch 3403, Training loss 0.908740, Validation loss 15.133980\n", "Epoch 3404, Training loss 1.055623, Validation loss 8.063930\n", "Epoch 3405, Training loss 0.908639, Validation loss 15.133388\n", "Epoch 3406, Training loss 1.055480, Validation loss 8.064127\n", "Epoch 3407, Training loss 0.908539, Validation loss 15.132810\n", "Epoch 3408, Training loss 1.055334, Validation loss 8.064323\n", "Epoch 3409, Training loss 0.908439, Validation loss 15.132225\n", "Epoch 3410, Training loss 1.055188, Validation loss 8.064529\n", "Epoch 3411, Training loss 0.908336, Validation loss 15.131625\n", "Epoch 3412, Training loss 1.055040, Validation loss 8.064722\n", "Epoch 3413, Training loss 0.908236, Validation loss 15.131033\n", "Epoch 3414, Training loss 1.054894, Validation loss 8.064926\n", "Epoch 3415, Training loss 0.908133, Validation loss 15.130447\n", "Epoch 3416, Training loss 1.054745, Validation loss 8.065123\n", "Epoch 3417, Training loss 0.908034, Validation loss 15.129856\n", "Epoch 3418, Training loss 1.054600, Validation loss 8.065318\n", "Epoch 3419, Training loss 0.907931, Validation loss 15.129264\n", "Epoch 3420, Training loss 1.054454, Validation loss 8.065511\n", "Epoch 3421, Training loss 0.907832, Validation loss 15.128694\n", "Epoch 3422, Training loss 1.054309, Validation loss 8.065699\n", "Epoch 3423, Training loss 0.907734, Validation loss 15.128116\n", "Epoch 3424, Training loss 1.054169, Validation loss 8.065889\n", "Epoch 3425, Training loss 0.907633, Validation loss 15.127527\n", "Epoch 3426, Training loss 1.054021, Validation loss 8.066071\n", "Epoch 3427, Training loss 0.907533, Validation loss 15.126946\n", "Epoch 3428, Training loss 1.053877, Validation loss 
8.066261\n", "Epoch 3429, Training loss 0.907435, Validation loss 15.126362\n", "Epoch 3430, Training loss 1.053735, Validation loss 8.066444\n", "Epoch 3431, Training loss 0.907334, Validation loss 15.125788\n", "Epoch 3432, Training loss 1.053586, Validation loss 8.066640\n", "Epoch 3433, Training loss 0.907238, Validation loss 15.125221\n", "Epoch 3434, Training loss 1.053448, Validation loss 8.066831\n", "Epoch 3435, Training loss 0.907136, Validation loss 15.124651\n", "Epoch 3436, Training loss 1.053296, Validation loss 8.067024\n", "Epoch 3437, Training loss 0.907041, Validation loss 15.124074\n", "Epoch 3438, Training loss 1.053164, Validation loss 8.067193\n", "Epoch 3439, Training loss 0.906936, Validation loss 15.123482\n", "Epoch 3440, Training loss 1.053008, Validation loss 8.067406\n", "Epoch 3441, Training loss 0.906841, Validation loss 15.122897\n", "Epoch 3442, Training loss 1.052875, Validation loss 8.067570\n", "Epoch 3443, Training loss 0.906740, Validation loss 15.122334\n", "Epoch 3444, Training loss 1.052723, Validation loss 8.067766\n", "Epoch 3445, Training loss 0.906643, Validation loss 15.121738\n", "Epoch 3446, Training loss 1.052586, Validation loss 8.067940\n", "Epoch 3447, Training loss 0.906542, Validation loss 15.121165\n", "Epoch 3448, Training loss 1.052440, Validation loss 8.068143\n", "Epoch 3449, Training loss 0.906446, Validation loss 15.120610\n", "Epoch 3450, Training loss 1.052301, Validation loss 8.068312\n", "Epoch 3451, Training loss 0.906348, Validation loss 15.120025\n", "Epoch 3452, Training loss 1.052156, Validation loss 8.068508\n", "Epoch 3453, Training loss 0.906247, Validation loss 15.119426\n", "Epoch 3454, Training loss 1.052011, Validation loss 8.068704\n", "Epoch 3455, Training loss 0.906148, Validation loss 15.118834\n", "Epoch 3456, Training loss 1.051871, Validation loss 8.068886\n", "Epoch 3457, Training loss 0.906047, Validation loss 15.118241\n", "Epoch 3458, Training loss 1.051720, Validation loss 8.069078\n", "Epoch 3459, Training loss 0.905953, Validation loss 15.117667\n", "Epoch 3460, Training loss 1.051590, Validation loss 8.069242\n", "Epoch 3461, Training loss 0.905850, Validation loss 15.117108\n", "Epoch 3462, Training loss 1.051434, Validation loss 8.069442\n", "Epoch 3463, Training loss 0.905759, Validation loss 15.116520\n", "Epoch 3464, Training loss 1.051312, Validation loss 8.069584\n", "Epoch 3465, Training loss 0.905657, Validation loss 15.115969\n", "Epoch 3466, Training loss 1.051153, Validation loss 8.069798\n", "Epoch 3467, Training loss 0.905563, Validation loss 15.115376\n", "Epoch 3468, Training loss 1.051028, Validation loss 8.069950\n", "Epoch 3469, Training loss 0.905465, Validation loss 15.114837\n", "Epoch 3470, Training loss 1.050877, Validation loss 8.070136\n", "Epoch 3471, Training loss 0.905367, Validation loss 15.114244\n", "Epoch 3472, Training loss 1.050738, Validation loss 8.070323\n", "Epoch 3473, Training loss 0.905268, Validation loss 15.113660\n", "Epoch 3474, Training loss 1.050593, Validation loss 8.070509\n", "Epoch 3475, Training loss 0.905173, Validation loss 15.113090\n", "Epoch 3476, Training loss 1.050459, Validation loss 8.070662\n", "Epoch 3477, Training loss 0.905075, Validation loss 15.112546\n", "Epoch 3478, Training loss 1.050310, Validation loss 8.070855\n", "Epoch 3479, Training loss 0.904980, Validation loss 15.111965\n", "Epoch 3480, Training loss 1.050178, Validation loss 8.071029\n", "Epoch 3481, Training loss 0.904881, Validation loss 15.111380\n", "Epoch 3482, 
Training loss 1.050031, Validation loss 8.071210\n", "Epoch 3483, Training loss 0.904783, Validation loss 15.110795\n", "Epoch 3484, Training loss 1.049893, Validation loss 8.071363\n", "Epoch 3485, Training loss 0.904691, Validation loss 15.110255\n", "Epoch 3486, Training loss 1.049758, Validation loss 8.071534\n", "Epoch 3487, Training loss 0.904591, Validation loss 15.109678\n", "Epoch 3488, Training loss 1.049616, Validation loss 8.071712\n", "Epoch 3489, Training loss 0.904498, Validation loss 15.109116\n", "Epoch 3490, Training loss 1.049479, Validation loss 8.071863\n", "Epoch 3491, Training loss 0.904400, Validation loss 15.108553\n", "Epoch 3492, Training loss 1.049335, Validation loss 8.072071\n", "Epoch 3493, Training loss 0.904306, Validation loss 15.107976\n", "Epoch 3494, Training loss 1.049203, Validation loss 8.072222\n", "Epoch 3495, Training loss 0.904207, Validation loss 15.107394\n", "Epoch 3496, Training loss 1.049057, Validation loss 8.072397\n", "Epoch 3497, Training loss 0.904113, Validation loss 15.106843\n", "Epoch 3498, Training loss 1.048924, Validation loss 8.072576\n", "Epoch 3499, Training loss 0.904018, Validation loss 15.106281\n", "Epoch 3500, Training loss 1.048783, Validation loss 8.072741\n", "Epoch 3501, Training loss 0.903921, Validation loss 15.105685\n", "Epoch 3502, Training loss 1.048645, Validation loss 8.072898\n", "Epoch 3503, Training loss 0.903828, Validation loss 15.105156\n", "Epoch 3504, Training loss 1.048508, Validation loss 8.073060\n", "Epoch 3505, Training loss 0.903732, Validation loss 15.104593\n", "Epoch 3506, Training loss 1.048369, Validation loss 8.073223\n", "Epoch 3507, Training loss 0.903637, Validation loss 15.104031\n", "Epoch 3508, Training loss 1.048232, Validation loss 8.073385\n", "Epoch 3509, Training loss 0.903542, Validation loss 15.103461\n", "Epoch 3510, Training loss 1.048093, Validation loss 8.073566\n", "Epoch 3511, Training loss 0.903446, Validation loss 15.102884\n", "Epoch 3512, Training loss 1.047955, Validation loss 8.073733\n", "Epoch 3513, Training loss 0.903351, Validation loss 15.102322\n", "Epoch 3514, Training loss 1.047818, Validation loss 8.073895\n", "Epoch 3515, Training loss 0.903256, Validation loss 15.101752\n", "Epoch 3516, Training loss 1.047680, Validation loss 8.074051\n", "Epoch 3517, Training loss 0.903162, Validation loss 15.101190\n", "Epoch 3518, Training loss 1.047546, Validation loss 8.074204\n", "Epoch 3519, Training loss 0.903069, Validation loss 15.100649\n", "Epoch 3520, Training loss 1.047408, Validation loss 8.074376\n", "Epoch 3521, Training loss 0.902974, Validation loss 15.100094\n", "Epoch 3522, Training loss 1.047271, Validation loss 8.074546\n", "Epoch 3523, Training loss 0.902880, Validation loss 15.099524\n", "Epoch 3524, Training loss 1.047135, Validation loss 8.074697\n", "Epoch 3525, Training loss 0.902787, Validation loss 15.098969\n", "Epoch 3526, Training loss 1.047000, Validation loss 8.074870\n", "Epoch 3527, Training loss 0.902693, Validation loss 15.098407\n", "Epoch 3528, Training loss 1.046865, Validation loss 8.075015\n", "Epoch 3529, Training loss 0.902597, Validation loss 15.097830\n", "Epoch 3530, Training loss 1.046726, Validation loss 8.075170\n", "Epoch 3531, Training loss 0.902505, Validation loss 15.097275\n", "Epoch 3532, Training loss 1.046594, Validation loss 8.075324\n", "Epoch 3533, Training loss 0.902413, Validation loss 15.096724\n", "Epoch 3534, Training loss 1.046459, Validation loss 8.075494\n", "Epoch 3535, Training loss 0.902314, 
Validation loss 15.096158\n", "Epoch 3536, Training loss 1.046315, Validation loss 8.075665\n", "Epoch 3537, Training loss 0.902223, Validation loss 15.095588\n", "Epoch 3538, Training loss 1.046187, Validation loss 8.075815\n", "Epoch 3539, Training loss 0.902128, Validation loss 15.095048\n", "Epoch 3540, Training loss 1.046045, Validation loss 8.075967\n", "Epoch 3541, Training loss 0.902037, Validation loss 15.094478\n", "Epoch 3542, Training loss 1.045916, Validation loss 8.076117\n", "Epoch 3543, Training loss 0.901942, Validation loss 15.093926\n", "Epoch 3544, Training loss 1.045778, Validation loss 8.076280\n", "Epoch 3545, Training loss 0.901849, Validation loss 15.093365\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 3546, Training loss 1.045645, Validation loss 8.076436\n", "Epoch 3547, Training loss 0.901758, Validation loss 15.092820\n", "Epoch 3548, Training loss 1.045510, Validation loss 8.076598\n", "Epoch 3549, Training loss 0.901663, Validation loss 15.092258\n", "Epoch 3550, Training loss 1.045374, Validation loss 8.076749\n", "Epoch 3551, Training loss 0.901572, Validation loss 15.091703\n", "Epoch 3552, Training loss 1.045243, Validation loss 8.076902\n", "Epoch 3553, Training loss 0.901477, Validation loss 15.091133\n", "Epoch 3554, Training loss 1.045106, Validation loss 8.077043\n", "Epoch 3555, Training loss 0.901386, Validation loss 15.090605\n", "Epoch 3556, Training loss 1.044972, Validation loss 8.077197\n", "Epoch 3557, Training loss 0.901296, Validation loss 15.090054\n", "Epoch 3558, Training loss 1.044843, Validation loss 8.077336\n", "Epoch 3559, Training loss 0.901202, Validation loss 15.089495\n", "Epoch 3560, Training loss 1.044702, Validation loss 8.077516\n", "Epoch 3561, Training loss 0.901108, Validation loss 15.088914\n", "Epoch 3562, Training loss 1.044573, Validation loss 8.077661\n", "Epoch 3563, Training loss 0.901016, Validation loss 15.088367\n", "Epoch 3564, Training loss 1.044437, Validation loss 8.077802\n", "Epoch 3565, Training loss 0.900924, Validation loss 15.087811\n", "Epoch 3566, Training loss 1.044307, Validation loss 8.077946\n", "Epoch 3567, Training loss 0.900834, Validation loss 15.087272\n", "Epoch 3568, Training loss 1.044173, Validation loss 8.078098\n", "Epoch 3569, Training loss 0.900742, Validation loss 15.086731\n", "Epoch 3570, Training loss 1.044040, Validation loss 8.078235\n", "Epoch 3571, Training loss 0.900651, Validation loss 15.086184\n", "Epoch 3572, Training loss 1.043909, Validation loss 8.078389\n", "Epoch 3573, Training loss 0.900560, Validation loss 15.085636\n", "Epoch 3574, Training loss 1.043775, Validation loss 8.078533\n", "Epoch 3575, Training loss 0.900466, Validation loss 15.085052\n", "Epoch 3576, Training loss 1.043642, Validation loss 8.078684\n", "Epoch 3577, Training loss 0.900372, Validation loss 15.084504\n", "Epoch 3578, Training loss 1.043505, Validation loss 8.078853\n", "Epoch 3579, Training loss 0.900284, Validation loss 15.083950\n", "Epoch 3580, Training loss 1.043378, Validation loss 8.078978\n", "Epoch 3581, Training loss 0.900191, Validation loss 15.083402\n", "Epoch 3582, Training loss 1.043242, Validation loss 8.079131\n", "Epoch 3583, Training loss 0.900102, Validation loss 15.082866\n", "Epoch 3584, Training loss 1.043117, Validation loss 8.079256\n", "Epoch 3585, Training loss 0.900011, Validation loss 15.082322\n", "Epoch 3586, Training loss 1.042981, Validation loss 8.079391\n", "Epoch 3587, Training loss 0.899920, Validation loss 15.081759\n", "Epoch 
3588, Training loss 1.042853, Validation loss 8.079548\n", "Epoch 3589, Training loss 0.899827, Validation loss 15.081205\n", "Epoch 3590, Training loss 1.042716, Validation loss 8.079706\n", "Epoch 3591, Training loss 0.899739, Validation loss 15.080661\n", "Epoch 3592, Training loss 1.042592, Validation loss 8.079834\n", "Epoch 3593, Training loss 0.899644, Validation loss 15.080103\n", "Epoch 3594, Training loss 1.042450, Validation loss 8.080001\n", "Epoch 3595, Training loss 0.899554, Validation loss 15.079540\n", "Epoch 3596, Training loss 1.042324, Validation loss 8.080130\n", "Epoch 3597, Training loss 0.899464, Validation loss 15.079008\n", "Epoch 3598, Training loss 1.042192, Validation loss 8.080282\n", "Epoch 3599, Training loss 0.899373, Validation loss 15.078445\n", "Epoch 3600, Training loss 1.042061, Validation loss 8.080426\n", "Epoch 3601, Training loss 0.899282, Validation loss 15.077898\n", "Epoch 3602, Training loss 1.041932, Validation loss 8.080561\n", "Epoch 3603, Training loss 0.899191, Validation loss 15.077344\n", "Epoch 3604, Training loss 1.041798, Validation loss 8.080703\n", "Epoch 3605, Training loss 0.899104, Validation loss 15.076811\n", "Epoch 3606, Training loss 1.041673, Validation loss 8.080835\n", "Epoch 3607, Training loss 0.899015, Validation loss 15.076279\n", "Epoch 3608, Training loss 1.041544, Validation loss 8.080960\n", "Epoch 3609, Training loss 0.898922, Validation loss 15.075708\n", "Epoch 3610, Training loss 1.041409, Validation loss 8.081100\n", "Epoch 3611, Training loss 0.898836, Validation loss 15.075183\n", "Epoch 3612, Training loss 1.041286, Validation loss 8.081229\n", "Epoch 3613, Training loss 0.898744, Validation loss 15.074651\n", "Epoch 3614, Training loss 1.041148, Validation loss 8.081390\n", "Epoch 3615, Training loss 0.898655, Validation loss 15.074096\n", "Epoch 3616, Training loss 1.041024, Validation loss 8.081518\n", "Epoch 3617, Training loss 0.898562, Validation loss 15.073548\n", "Epoch 3618, Training loss 1.040887, Validation loss 8.081677\n", "Epoch 3619, Training loss 0.898475, Validation loss 15.072990\n", "Epoch 3620, Training loss 1.040765, Validation loss 8.081782\n", "Epoch 3621, Training loss 0.898381, Validation loss 15.072439\n", "Epoch 3622, Training loss 1.040626, Validation loss 8.081944\n", "Epoch 3623, Training loss 0.898297, Validation loss 15.071896\n", "Epoch 3624, Training loss 1.040508, Validation loss 8.082063\n", "Epoch 3625, Training loss 0.898203, Validation loss 15.071352\n", "Epoch 3626, Training loss 1.040370, Validation loss 8.082211\n", "Epoch 3627, Training loss 0.898115, Validation loss 15.070797\n", "Epoch 3628, Training loss 1.040246, Validation loss 8.082340\n", "Epoch 3629, Training loss 0.898026, Validation loss 15.070250\n", "Epoch 3630, Training loss 1.040118, Validation loss 8.082470\n", "Epoch 3631, Training loss 0.897937, Validation loss 15.069717\n", "Epoch 3632, Training loss 1.039986, Validation loss 8.082609\n", "Epoch 3633, Training loss 0.897848, Validation loss 15.069162\n", "Epoch 3634, Training loss 1.039859, Validation loss 8.082754\n", "Epoch 3635, Training loss 0.897758, Validation loss 15.068619\n", "Epoch 3636, Training loss 1.039730, Validation loss 8.082876\n", "Epoch 3637, Training loss 0.897672, Validation loss 15.068097\n", "Epoch 3638, Training loss 1.039605, Validation loss 8.082997\n", "Epoch 3639, Training loss 0.897583, Validation loss 15.067554\n", "Epoch 3640, Training loss 1.039474, Validation loss 8.083132\n", "Epoch 3641, Training loss 0.897495, 
Validation loss 15.066996\n", "Epoch 3642, Training loss 1.039350, Validation loss 8.083263\n", "Epoch 3643, Training loss 0.897403, Validation loss 15.066466\n", "Epoch 3644, Training loss 1.039214, Validation loss 8.083406\n", "Epoch 3645, Training loss 0.897318, Validation loss 15.065916\n", "Epoch 3646, Training loss 1.039094, Validation loss 8.083514\n", "Epoch 3647, Training loss 0.897227, Validation loss 15.065369\n", "Epoch 3648, Training loss 1.038961, Validation loss 8.083665\n", "Epoch 3649, Training loss 0.897139, Validation loss 15.064829\n", "Epoch 3650, Training loss 1.038835, Validation loss 8.083799\n", "Epoch 3651, Training loss 0.897049, Validation loss 15.064266\n", "Epoch 3652, Training loss 1.038706, Validation loss 8.083923\n", "Epoch 3653, Training loss 0.896963, Validation loss 15.063734\n", "Epoch 3654, Training loss 1.038580, Validation loss 8.084058\n", "Epoch 3655, Training loss 0.896874, Validation loss 15.063209\n", "Epoch 3656, Training loss 1.038451, Validation loss 8.084188\n", "Epoch 3657, Training loss 0.896787, Validation loss 15.062669\n", "Epoch 3658, Training loss 1.038327, Validation loss 8.084321\n", "Epoch 3659, Training loss 0.896696, Validation loss 15.062099\n", "Epoch 3660, Training loss 1.038197, Validation loss 8.084436\n", "Epoch 3661, Training loss 0.896612, Validation loss 15.061582\n", "Epoch 3662, Training loss 1.038074, Validation loss 8.084552\n", "Epoch 3663, Training loss 0.896523, Validation loss 15.061049\n", "Epoch 3664, Training loss 1.037942, Validation loss 8.084692\n", "Epoch 3665, Training loss 0.896436, Validation loss 15.060510\n", "Epoch 3666, Training loss 1.037822, Validation loss 8.084810\n", "Epoch 3667, Training loss 0.896349, Validation loss 15.059977\n", "Epoch 3668, Training loss 1.037695, Validation loss 8.084924\n", "Epoch 3669, Training loss 0.896264, Validation loss 15.059437\n", "Epoch 3670, Training loss 1.037571, Validation loss 8.085050\n", "Epoch 3671, Training loss 0.896173, Validation loss 15.058883\n", "Epoch 3672, Training loss 1.037441, Validation loss 8.085185\n", "Epoch 3673, Training loss 0.896086, Validation loss 15.058335\n", "Epoch 3674, Training loss 1.037317, Validation loss 8.085304\n", "Epoch 3675, Training loss 0.895999, Validation loss 15.057817\n", "Epoch 3676, Training loss 1.037191, Validation loss 8.085435\n", "Epoch 3677, Training loss 0.895910, Validation loss 15.057263\n", "Epoch 3678, Training loss 1.037059, Validation loss 8.085569\n", "Epoch 3679, Training loss 0.895824, Validation loss 15.056712\n", "Epoch 3680, Training loss 1.036938, Validation loss 8.085688\n", "Epoch 3681, Training loss 0.895735, Validation loss 15.056176\n", "Epoch 3682, Training loss 1.036809, Validation loss 8.085818\n", "Epoch 3683, Training loss 0.895650, Validation loss 15.055644\n", "Epoch 3684, Training loss 1.036688, Validation loss 8.085921\n", "Epoch 3685, Training loss 0.895565, Validation loss 15.055126\n", "Epoch 3686, Training loss 1.036563, Validation loss 8.086044\n", "Epoch 3687, Training loss 0.895475, Validation loss 15.054579\n", "Epoch 3688, Training loss 1.036433, Validation loss 8.086175\n", "Epoch 3689, Training loss 0.895390, Validation loss 15.054031\n", "Epoch 3690, Training loss 1.036311, Validation loss 8.086305\n", "Epoch 3691, Training loss 0.895302, Validation loss 15.053499\n", "Epoch 3692, Training loss 1.036185, Validation loss 8.086419\n", "Epoch 3693, Training loss 0.895219, Validation loss 15.052981\n", "Epoch 3694, Training loss 1.036063, Validation loss 8.086536\n", 
"Epoch 3695, Training loss 0.895131, Validation loss 15.052449\n", "Epoch 3696, Training loss 1.035935, Validation loss 8.086652\n", "Epoch 3697, Training loss 0.895045, Validation loss 15.051906\n", "Epoch 3698, Training loss 1.035812, Validation loss 8.086773\n", "Epoch 3699, Training loss 0.894960, Validation loss 15.051384\n", "Epoch 3700, Training loss 1.035692, Validation loss 8.086894\n", "Epoch 3701, Training loss 0.894872, Validation loss 15.050826\n", "Epoch 3702, Training loss 1.035561, Validation loss 8.087014\n", "Epoch 3703, Training loss 0.894784, Validation loss 15.050282\n", "Epoch 3704, Training loss 1.035436, Validation loss 8.087133\n", "Epoch 3705, Training loss 0.894700, Validation loss 15.049762\n", "Epoch 3706, Training loss 1.035316, Validation loss 8.087242\n", "Epoch 3707, Training loss 0.894613, Validation loss 15.049218\n", "Epoch 3708, Training loss 1.035191, Validation loss 8.087361\n", "Epoch 3709, Training loss 0.894530, Validation loss 15.048700\n", "Epoch 3710, Training loss 1.035070, Validation loss 8.087481\n", "Epoch 3711, Training loss 0.894442, Validation loss 15.048161\n", "Epoch 3712, Training loss 1.034942, Validation loss 8.087613\n", "Epoch 3713, Training loss 0.894355, Validation loss 15.047606\n", "Epoch 3714, Training loss 1.034818, Validation loss 8.087719\n", "Epoch 3715, Training loss 0.894272, Validation loss 15.047092\n", "Epoch 3716, Training loss 1.034697, Validation loss 8.087830\n", "Epoch 3717, Training loss 0.894184, Validation loss 15.046545\n", "Epoch 3718, Training loss 1.034573, Validation loss 8.087952\n", "Epoch 3719, Training loss 0.894098, Validation loss 15.046001\n", "Epoch 3720, Training loss 1.034450, Validation loss 8.088072\n", "Epoch 3721, Training loss 0.894011, Validation loss 15.045465\n", "Epoch 3722, Training loss 1.034323, Validation loss 8.088191\n", "Epoch 3723, Training loss 0.893925, Validation loss 15.044922\n", "Epoch 3724, Training loss 1.034200, Validation loss 8.088316\n", "Epoch 3725, Training loss 0.893840, Validation loss 15.044383\n", "Epoch 3726, Training loss 1.034077, Validation loss 8.088431\n", "Epoch 3727, Training loss 0.893751, Validation loss 15.043843\n", "Epoch 3728, Training loss 1.033950, Validation loss 8.088544\n", "Epoch 3729, Training loss 0.893669, Validation loss 15.043311\n", "Epoch 3730, Training loss 1.033833, Validation loss 8.088657\n", "Epoch 3731, Training loss 0.893584, Validation loss 15.042793\n", "Epoch 3732, Training loss 1.033708, Validation loss 8.088766\n", "Epoch 3733, Training loss 0.893499, Validation loss 15.042246\n", "Epoch 3734, Training loss 1.033589, Validation loss 8.088867\n", "Epoch 3735, Training loss 0.893413, Validation loss 15.041718\n", "Epoch 3736, Training loss 1.033463, Validation loss 8.088995\n", "Epoch 3737, Training loss 0.893331, Validation loss 15.041203\n", "Epoch 3738, Training loss 1.033346, Validation loss 8.089098\n", "Epoch 3739, Training loss 0.893245, Validation loss 15.040671\n", "Epoch 3740, Training loss 1.033221, Validation loss 8.089208\n", "Epoch 3741, Training loss 0.893161, Validation loss 15.040135\n", "Epoch 3742, Training loss 1.033098, Validation loss 8.089322\n", "Epoch 3743, Training loss 0.893076, Validation loss 15.039592\n", "Epoch 3744, Training loss 1.032976, Validation loss 8.089447\n", "Epoch 3745, Training loss 0.892990, Validation loss 15.039045\n", "Epoch 3746, Training loss 1.032854, Validation loss 8.089550\n", "Epoch 3747, Training loss 0.892905, Validation loss 15.038535\n", "Epoch 3748, Training loss 
1.032729, Validation loss 8.089670\n", "Epoch 3749, Training loss 0.892823, Validation loss 15.038003\n", "Epoch 3750, Training loss 1.032613, Validation loss 8.089779\n", "Epoch 3751, Training loss 0.892736, Validation loss 15.037477\n", "Epoch 3752, Training loss 1.032486, Validation loss 8.089883\n", "Epoch 3753, Training loss 0.892655, Validation loss 15.036935\n", "Epoch 3754, Training loss 1.032374, Validation loss 8.089976\n", "Epoch 3755, Training loss 0.892568, Validation loss 15.036425\n", "Epoch 3756, Training loss 1.032246, Validation loss 8.090081\n", "Epoch 3757, Training loss 0.892488, Validation loss 15.035911\n", "Epoch 3758, Training loss 1.032128, Validation loss 8.090189\n", "Epoch 3759, Training loss 0.892401, Validation loss 15.035367\n", "Epoch 3760, Training loss 1.032002, Validation loss 8.090321\n", "Epoch 3761, Training loss 0.892317, Validation loss 15.034831\n", "Epoch 3762, Training loss 1.031884, Validation loss 8.090415\n", "Epoch 3763, Training loss 0.892233, Validation loss 15.034299\n", "Epoch 3764, Training loss 1.031761, Validation loss 8.090532\n", "Epoch 3765, Training loss 0.892147, Validation loss 15.033749\n", "Epoch 3766, Training loss 1.031641, Validation loss 8.090647\n", "Epoch 3767, Training loss 0.892063, Validation loss 15.033235\n", "Epoch 3768, Training loss 1.031519, Validation loss 8.090752\n", "Epoch 3769, Training loss 0.891980, Validation loss 15.032684\n", "Epoch 3770, Training loss 1.031400, Validation loss 8.090844\n", "Epoch 3771, Training loss 0.891896, Validation loss 15.032170\n", "Epoch 3772, Training loss 1.031277, Validation loss 8.090969\n", "Epoch 3773, Training loss 0.891812, Validation loss 15.031620\n", "Epoch 3774, Training loss 1.031158, Validation loss 8.091083\n", "Epoch 3775, Training loss 0.891725, Validation loss 15.031069\n", "Epoch 3776, Training loss 1.031033, Validation loss 8.091187\n", "Epoch 3777, Training loss 0.891644, Validation loss 15.030560\n", "Epoch 3778, Training loss 1.030918, Validation loss 8.091291\n", "Epoch 3779, Training loss 0.891560, Validation loss 15.030027\n", "Epoch 3780, Training loss 1.030795, Validation loss 8.091386\n", "Epoch 3781, Training loss 0.891478, Validation loss 15.029506\n", "Epoch 3782, Training loss 1.030677, Validation loss 8.091489\n", "Epoch 3783, Training loss 0.891394, Validation loss 15.028985\n", "Epoch 3784, Training loss 1.030555, Validation loss 8.091600\n", "Epoch 3785, Training loss 0.891308, Validation loss 15.028438\n", "Epoch 3786, Training loss 1.030434, Validation loss 8.091719\n", "Epoch 3787, Training loss 0.891225, Validation loss 15.027895\n", "Epoch 3788, Training loss 1.030312, Validation loss 8.091833\n", "Epoch 3789, Training loss 0.891142, Validation loss 15.027367\n", "Epoch 3790, Training loss 1.030194, Validation loss 8.091937\n", "Epoch 3791, Training loss 0.891056, Validation loss 15.026834\n", "Epoch 3792, Training loss 1.030072, Validation loss 8.092031\n", "Epoch 3793, Training loss 0.890978, Validation loss 15.026320\n", "Epoch 3794, Training loss 1.029960, Validation loss 8.092122\n", "Epoch 3795, Training loss 0.890894, Validation loss 15.025818\n", "Epoch 3796, Training loss 1.029836, Validation loss 8.092238\n", "Epoch 3797, Training loss 0.890811, Validation loss 15.025282\n", "Epoch 3798, Training loss 1.029718, Validation loss 8.092331\n", "Epoch 3799, Training loss 0.890730, Validation loss 15.024746\n", "Epoch 3800, Training loss 1.029600, Validation loss 8.092442\n", "Epoch 3801, Training loss 0.890644, Validation loss 
15.024210\n", "Epoch 3802, Training loss 1.029479, Validation loss 8.092539\n", "Epoch 3803, Training loss 0.890562, Validation loss 15.023675\n", "Epoch 3804, Training loss 1.029361, Validation loss 8.092640\n", "Epoch 3805, Training loss 0.890479, Validation loss 15.023147\n", "Epoch 3806, Training loss 1.029242, Validation loss 8.092744\n", "Epoch 3807, Training loss 0.890396, Validation loss 15.022625\n", "Epoch 3808, Training loss 1.029122, Validation loss 8.092847\n", "Epoch 3809, Training loss 0.890315, Validation loss 15.022116\n", "Epoch 3810, Training loss 1.029006, Validation loss 8.092937\n", "Epoch 3811, Training loss 0.890234, Validation loss 15.021602\n", "Epoch 3812, Training loss 1.028887, Validation loss 8.093041\n", "Epoch 3813, Training loss 0.890150, Validation loss 15.021062\n", "Epoch 3814, Training loss 1.028765, Validation loss 8.093133\n", "Epoch 3815, Training loss 0.890070, Validation loss 15.020541\n", "Epoch 3816, Training loss 1.028651, Validation loss 8.093224\n", "Epoch 3817, Training loss 0.889987, Validation loss 15.020027\n", "Epoch 3818, Training loss 1.028532, Validation loss 8.093326\n", "Epoch 3819, Training loss 0.889905, Validation loss 15.019503\n", "Epoch 3820, Training loss 1.028414, Validation loss 8.093437\n", "Epoch 3821, Training loss 0.889819, Validation loss 15.018941\n", "Epoch 3822, Training loss 1.028291, Validation loss 8.093563\n", "Epoch 3823, Training loss 0.889738, Validation loss 15.018410\n", "Epoch 3824, Training loss 1.028177, Validation loss 8.093637\n", "Epoch 3825, Training loss 0.889656, Validation loss 15.017892\n", "Epoch 3826, Training loss 1.028058, Validation loss 8.093750\n", "Epoch 3827, Training loss 0.889573, Validation loss 15.017360\n", "Epoch 3828, Training loss 1.027936, Validation loss 8.093868\n", "Epoch 3829, Training loss 0.889491, Validation loss 15.016821\n", "Epoch 3830, Training loss 1.027822, Validation loss 8.093943\n", "Epoch 3831, Training loss 0.889409, Validation loss 15.016303\n", "Epoch 3832, Training loss 1.027702, Validation loss 8.094037\n", "Epoch 3833, Training loss 0.889329, Validation loss 15.015779\n", "Epoch 3834, Training loss 1.027589, Validation loss 8.094131\n", "Epoch 3835, Training loss 0.889244, Validation loss 15.015254\n", "Epoch 3836, Training loss 1.027467, Validation loss 8.094241\n", "Epoch 3837, Training loss 0.889165, Validation loss 15.014729\n", "Epoch 3838, Training loss 1.027354, Validation loss 8.094330\n", "Epoch 3839, Training loss 0.889083, Validation loss 15.014213\n", "Epoch 3840, Training loss 1.027235, Validation loss 8.094425\n", "Epoch 3841, Training loss 0.889003, Validation loss 15.013688\n", "Epoch 3842, Training loss 1.027122, Validation loss 8.094522\n", "Epoch 3843, Training loss 0.888917, Validation loss 15.013148\n", "Epoch 3844, Training loss 1.026997, Validation loss 8.094620\n", "Epoch 3845, Training loss 0.888838, Validation loss 15.012624\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 3846, Training loss 1.026885, Validation loss 8.094727\n", "Epoch 3847, Training loss 0.888754, Validation loss 15.012088\n", "Epoch 3848, Training loss 1.026764, Validation loss 8.094826\n", "Epoch 3849, Training loss 0.888674, Validation loss 15.011567\n", "Epoch 3850, Training loss 1.026647, Validation loss 8.094914\n", "Epoch 3851, Training loss 0.888593, Validation loss 15.011035\n", "Epoch 3852, Training loss 1.026530, Validation loss 8.095017\n", "Epoch 3853, Training loss 0.888511, Validation loss 15.010510\n", "Epoch 3854, Training 
loss 1.026416, Validation loss 8.095098\n", "Epoch 3855, Training loss 0.888430, Validation loss 15.009997\n", "Epoch 3856, Training loss 1.026297, Validation loss 8.095195\n", "Epoch 3857, Training loss 0.888350, Validation loss 15.009469\n", "Epoch 3858, Training loss 1.026183, Validation loss 8.095286\n", "Epoch 3859, Training loss 0.888267, Validation loss 15.008933\n", "Epoch 3860, Training loss 1.026063, Validation loss 8.095386\n", "Epoch 3861, Training loss 0.888187, Validation loss 15.008412\n", "Epoch 3862, Training loss 1.025948, Validation loss 8.095492\n", "Epoch 3863, Training loss 0.888103, Validation loss 15.007880\n", "Epoch 3864, Training loss 1.025827, Validation loss 8.095605\n", "Epoch 3865, Training loss 0.888022, Validation loss 15.007334\n", "Epoch 3866, Training loss 1.025715, Validation loss 8.095670\n", "Epoch 3867, Training loss 0.887944, Validation loss 15.006831\n", "Epoch 3868, Training loss 1.025599, Validation loss 8.095772\n", "Epoch 3869, Training loss 0.887861, Validation loss 15.006285\n", "Epoch 3870, Training loss 1.025483, Validation loss 8.095872\n", "Epoch 3871, Training loss 0.887781, Validation loss 15.005772\n", "Epoch 3872, Training loss 1.025368, Validation loss 8.095955\n", "Epoch 3873, Training loss 0.887699, Validation loss 15.005236\n", "Epoch 3874, Training loss 1.025251, Validation loss 8.096046\n", "Epoch 3875, Training loss 0.887620, Validation loss 15.004726\n", "Epoch 3876, Training loss 1.025138, Validation loss 8.096138\n", "Epoch 3877, Training loss 0.887537, Validation loss 15.004202\n", "Epoch 3878, Training loss 1.025019, Validation loss 8.096233\n", "Epoch 3879, Training loss 0.887459, Validation loss 15.003684\n", "Epoch 3880, Training loss 1.024907, Validation loss 8.096316\n", "Epoch 3881, Training loss 0.887376, Validation loss 15.003153\n", "Epoch 3882, Training loss 1.024786, Validation loss 8.096427\n", "Epoch 3883, Training loss 0.887296, Validation loss 15.002603\n", "Epoch 3884, Training loss 1.024675, Validation loss 8.096519\n", "Epoch 3885, Training loss 0.887213, Validation loss 15.002074\n", "Epoch 3886, Training loss 1.024556, Validation loss 8.096600\n", "Epoch 3887, Training loss 0.887135, Validation loss 15.001564\n", "Epoch 3888, Training loss 1.024443, Validation loss 8.096704\n", "Epoch 3889, Training loss 0.887054, Validation loss 15.001033\n", "Epoch 3890, Training loss 1.024327, Validation loss 8.096771\n", "Epoch 3891, Training loss 0.886974, Validation loss 15.000515\n", "Epoch 3892, Training loss 1.024210, Validation loss 8.096889\n", "Epoch 3893, Training loss 0.886894, Validation loss 14.999991\n", "Epoch 3894, Training loss 1.024098, Validation loss 8.096972\n", "Epoch 3895, Training loss 0.886813, Validation loss 14.999467\n", "Epoch 3896, Training loss 1.023982, Validation loss 8.097060\n", "Epoch 3897, Training loss 0.886734, Validation loss 14.998957\n", "Epoch 3898, Training loss 1.023866, Validation loss 8.097155\n", "Epoch 3899, Training loss 0.886651, Validation loss 14.998403\n", "Epoch 3900, Training loss 1.023749, Validation loss 8.097249\n", "Epoch 3901, Training loss 0.886571, Validation loss 14.997871\n", "Epoch 3902, Training loss 1.023636, Validation loss 8.097339\n", "Epoch 3903, Training loss 0.886491, Validation loss 14.997355\n", "Epoch 3904, Training loss 1.023521, Validation loss 8.097427\n", "Epoch 3905, Training loss 0.886412, Validation loss 14.996834\n", "Epoch 3906, Training loss 1.023405, Validation loss 8.097514\n", "Epoch 3907, Training loss 0.886330, Validation loss 
14.996302\n", "Epoch 3908, Training loss 1.023289, Validation loss 8.097625\n", "Epoch 3909, Training loss 0.886251, Validation loss 14.995770\n", "Epoch 3910, Training loss 1.023178, Validation loss 8.097710\n", "Epoch 3911, Training loss 0.886172, Validation loss 14.995264\n", "Epoch 3912, Training loss 1.023064, Validation loss 8.097785\n", "Epoch 3913, Training loss 0.886092, Validation loss 14.994747\n", "Epoch 3914, Training loss 1.022951, Validation loss 8.097858\n", "Epoch 3915, Training loss 0.886014, Validation loss 14.994229\n", "Epoch 3916, Training loss 1.022835, Validation loss 8.097959\n", "Epoch 3917, Training loss 0.885932, Validation loss 14.993690\n", "Epoch 3918, Training loss 1.022721, Validation loss 8.098030\n", "Epoch 3919, Training loss 0.885855, Validation loss 14.993189\n", "Epoch 3920, Training loss 1.022609, Validation loss 8.098126\n", "Epoch 3921, Training loss 0.885773, Validation loss 14.992645\n", "Epoch 3922, Training loss 1.022490, Validation loss 8.098217\n", "Epoch 3923, Training loss 0.885696, Validation loss 14.992132\n", "Epoch 3924, Training loss 1.022381, Validation loss 8.098305\n", "Epoch 3925, Training loss 0.885614, Validation loss 14.991601\n", "Epoch 3926, Training loss 1.022262, Validation loss 8.098410\n", "Epoch 3927, Training loss 0.885533, Validation loss 14.991057\n", "Epoch 3928, Training loss 1.022150, Validation loss 8.098474\n", "Epoch 3929, Training loss 0.885459, Validation loss 14.990566\n", "Epoch 3930, Training loss 1.022042, Validation loss 8.098544\n", "Epoch 3931, Training loss 0.885378, Validation loss 14.990049\n", "Epoch 3932, Training loss 1.021926, Validation loss 8.098632\n", "Epoch 3933, Training loss 0.885301, Validation loss 14.989525\n", "Epoch 3934, Training loss 1.021814, Validation loss 8.098721\n", "Epoch 3935, Training loss 0.885219, Validation loss 14.989008\n", "Epoch 3936, Training loss 1.021694, Validation loss 8.098826\n", "Epoch 3937, Training loss 0.885142, Validation loss 14.988469\n", "Epoch 3938, Training loss 1.021587, Validation loss 8.098894\n", "Epoch 3939, Training loss 0.885060, Validation loss 14.987952\n", "Epoch 3940, Training loss 1.021469, Validation loss 8.098986\n", "Epoch 3941, Training loss 0.884981, Validation loss 14.987413\n", "Epoch 3942, Training loss 1.021357, Validation loss 8.099072\n", "Epoch 3943, Training loss 0.884902, Validation loss 14.986889\n", "Epoch 3944, Training loss 1.021244, Validation loss 8.099162\n", "Epoch 3945, Training loss 0.884822, Validation loss 14.986350\n", "Epoch 3946, Training loss 1.021131, Validation loss 8.099247\n", "Epoch 3947, Training loss 0.884744, Validation loss 14.985833\n", "Epoch 3948, Training loss 1.021019, Validation loss 8.099339\n", "Epoch 3949, Training loss 0.884665, Validation loss 14.985316\n", "Epoch 3950, Training loss 1.020907, Validation loss 8.099428\n", "Epoch 3951, Training loss 0.884585, Validation loss 14.984799\n", "Epoch 3952, Training loss 1.020793, Validation loss 8.099504\n", "Epoch 3953, Training loss 0.884507, Validation loss 14.984267\n", "Epoch 3954, Training loss 1.020679, Validation loss 8.099581\n", "Epoch 3955, Training loss 0.884429, Validation loss 14.983744\n", "Epoch 3956, Training loss 1.020569, Validation loss 8.099672\n", "Epoch 3957, Training loss 0.884349, Validation loss 14.983227\n", "Epoch 3958, Training loss 1.020453, Validation loss 8.099763\n", "Epoch 3959, Training loss 0.884272, Validation loss 14.982718\n", "Epoch 3960, Training loss 1.020346, Validation loss 8.099828\n", "Epoch 3961, 
Training loss 0.884194, Validation loss 14.982200\n", "Epoch 3962, Training loss 1.020231, Validation loss 8.099901\n", "Epoch 3963, Training loss 0.884116, Validation loss 14.981690\n", "Epoch 3964, Training loss 1.020118, Validation loss 8.099976\n", "Epoch 3965, Training loss 0.884038, Validation loss 14.981174\n", "Epoch 3966, Training loss 1.020005, Validation loss 8.100070\n", "Epoch 3967, Training loss 0.883957, Validation loss 14.980627\n", "Epoch 3968, Training loss 1.019892, Validation loss 8.100166\n", "Epoch 3969, Training loss 0.883879, Validation loss 14.980096\n", "Epoch 3970, Training loss 1.019782, Validation loss 8.100245\n", "Epoch 3971, Training loss 0.883800, Validation loss 14.979587\n", "Epoch 3972, Training loss 1.019665, Validation loss 8.100334\n", "Epoch 3973, Training loss 0.883722, Validation loss 14.979036\n", "Epoch 3974, Training loss 1.019557, Validation loss 8.100417\n", "Epoch 3975, Training loss 0.883643, Validation loss 14.978516\n", "Epoch 3976, Training loss 1.019444, Validation loss 8.100494\n", "Epoch 3977, Training loss 0.883565, Validation loss 14.977992\n", "Epoch 3978, Training loss 1.019335, Validation loss 8.100560\n", "Epoch 3979, Training loss 0.883487, Validation loss 14.977482\n", "Epoch 3980, Training loss 1.019221, Validation loss 8.100647\n", "Epoch 3981, Training loss 0.883410, Validation loss 14.976961\n", "Epoch 3982, Training loss 1.019112, Validation loss 8.100718\n", "Epoch 3983, Training loss 0.883333, Validation loss 14.976460\n", "Epoch 3984, Training loss 1.019000, Validation loss 8.100798\n", "Epoch 3985, Training loss 0.883255, Validation loss 14.975931\n", "Epoch 3986, Training loss 1.018891, Validation loss 8.100878\n", "Epoch 3987, Training loss 0.883177, Validation loss 14.975408\n", "Epoch 3988, Training loss 1.018777, Validation loss 8.100967\n", "Epoch 3989, Training loss 0.883098, Validation loss 14.974891\n", "Epoch 3990, Training loss 1.018664, Validation loss 8.101057\n", "Epoch 3991, Training loss 0.883021, Validation loss 14.974356\n", "Epoch 3992, Training loss 1.018555, Validation loss 8.101120\n", "Epoch 3993, Training loss 0.882940, Validation loss 14.973821\n", "Epoch 3994, Training loss 1.018438, Validation loss 8.101216\n", "Epoch 3995, Training loss 0.882863, Validation loss 14.973285\n", "Epoch 3996, Training loss 1.018332, Validation loss 8.101290\n", "Epoch 3997, Training loss 0.882786, Validation loss 14.972787\n", "Epoch 3998, Training loss 1.018222, Validation loss 8.101375\n", "Epoch 3999, Training loss 0.882708, Validation loss 14.972266\n", "Epoch 4000, Training loss 1.018109, Validation loss 8.101448\n", "Epoch 4001, Training loss 0.882631, Validation loss 14.971745\n", "Epoch 4002, Training loss 1.017998, Validation loss 8.101527\n", "Epoch 4003, Training loss 0.882553, Validation loss 14.971222\n", "Epoch 4004, Training loss 1.017887, Validation loss 8.101613\n", "Epoch 4005, Training loss 0.882476, Validation loss 14.970691\n", "Epoch 4006, Training loss 1.017776, Validation loss 8.101697\n", "Epoch 4007, Training loss 0.882395, Validation loss 14.970159\n", "Epoch 4008, Training loss 1.017660, Validation loss 8.101788\n", "Epoch 4009, Training loss 0.882320, Validation loss 14.969635\n", "Epoch 4010, Training loss 1.017556, Validation loss 8.101846\n", "Epoch 4011, Training loss 0.882240, Validation loss 14.969125\n", "Epoch 4012, Training loss 1.017439, Validation loss 8.101941\n", "Epoch 4013, Training loss 0.882164, Validation loss 14.968587\n", "Epoch 4014, Training loss 1.017336, 
Validation loss 8.102010\n", "Epoch 4015, Training loss 0.882085, Validation loss 14.968070\n", "Epoch 4016, Training loss 1.017219, Validation loss 8.102095\n", "Epoch 4017, Training loss 0.882010, Validation loss 14.967531\n", "Epoch 4018, Training loss 1.017115, Validation loss 8.102159\n", "Epoch 4019, Training loss 0.881930, Validation loss 14.967022\n", "Epoch 4020, Training loss 1.016996, Validation loss 8.102259\n", "Epoch 4021, Training loss 0.881850, Validation loss 14.966469\n", "Epoch 4022, Training loss 1.016886, Validation loss 8.102339\n", "Epoch 4023, Training loss 0.881774, Validation loss 14.965959\n", "Epoch 4024, Training loss 1.016779, Validation loss 8.102419\n", "Epoch 4025, Training loss 0.881698, Validation loss 14.965449\n", "Epoch 4026, Training loss 1.016669, Validation loss 8.102469\n", "Epoch 4027, Training loss 0.881622, Validation loss 14.964933\n", "Epoch 4028, Training loss 1.016560, Validation loss 8.102551\n", "Epoch 4029, Training loss 0.881545, Validation loss 14.964409\n", "Epoch 4030, Training loss 1.016451, Validation loss 8.102630\n", "Epoch 4031, Training loss 0.881467, Validation loss 14.963900\n", "Epoch 4032, Training loss 1.016338, Validation loss 8.102707\n", "Epoch 4033, Training loss 0.881390, Validation loss 14.963383\n", "Epoch 4034, Training loss 1.016227, Validation loss 8.102788\n", "Epoch 4035, Training loss 0.881313, Validation loss 14.962844\n", "Epoch 4036, Training loss 1.016117, Validation loss 8.102871\n", "Epoch 4037, Training loss 0.881235, Validation loss 14.962309\n", "Epoch 4038, Training loss 1.016008, Validation loss 8.102953\n", "Epoch 4039, Training loss 0.881158, Validation loss 14.961789\n", "Epoch 4040, Training loss 1.015898, Validation loss 8.103027\n", "Epoch 4041, Training loss 0.881082, Validation loss 14.961265\n", "Epoch 4042, Training loss 1.015792, Validation loss 8.103096\n", "Epoch 4043, Training loss 0.881003, Validation loss 14.960737\n", "Epoch 4044, Training loss 1.015678, Validation loss 8.103168\n", "Epoch 4045, Training loss 0.880928, Validation loss 14.960202\n", "Epoch 4046, Training loss 1.015572, Validation loss 8.103252\n", "Epoch 4047, Training loss 0.880849, Validation loss 14.959697\n", "Epoch 4048, Training loss 1.015457, Validation loss 8.103322\n", "Epoch 4049, Training loss 0.880775, Validation loss 14.959169\n", "Epoch 4050, Training loss 1.015355, Validation loss 8.103397\n", "Epoch 4051, Training loss 0.880695, Validation loss 14.958660\n", "Epoch 4052, Training loss 1.015237, Validation loss 8.103482\n", "Epoch 4053, Training loss 0.880622, Validation loss 14.958114\n", "Epoch 4054, Training loss 1.015138, Validation loss 8.103542\n", "Epoch 4055, Training loss 0.880542, Validation loss 14.957604\n", "Epoch 4056, Training loss 1.015019, Validation loss 8.103637\n", "Epoch 4057, Training loss 0.880466, Validation loss 14.957066\n", "Epoch 4058, Training loss 1.014914, Validation loss 8.103715\n", "Epoch 4059, Training loss 0.880389, Validation loss 14.956542\n", "Epoch 4060, Training loss 1.014803, Validation loss 8.103785\n", "Epoch 4061, Training loss 0.880311, Validation loss 14.956011\n", "Epoch 4062, Training loss 1.014692, Validation loss 8.103879\n", "Epoch 4063, Training loss 0.880234, Validation loss 14.955487\n", "Epoch 4064, Training loss 1.014582, Validation loss 8.103948\n", "Epoch 4065, Training loss 0.880158, Validation loss 14.954948\n", "Epoch 4066, Training loss 1.014476, Validation loss 8.104017\n", "Epoch 4067, Training loss 0.880081, Validation loss 14.954447\n", 
"Epoch 4068, Training loss 1.014362, Validation loss 8.104106\n", "Epoch 4069, Training loss 0.880005, Validation loss 14.953893\n", "Epoch 4070, Training loss 1.014258, Validation loss 8.104161\n", "Epoch 4071, Training loss 0.879929, Validation loss 14.953406\n", "Epoch 4072, Training loss 1.014148, Validation loss 8.104239\n", "Epoch 4073, Training loss 0.879853, Validation loss 14.952875\n", "Epoch 4074, Training loss 1.014040, Validation loss 8.104308\n", "Epoch 4075, Training loss 0.879776, Validation loss 14.952344\n", "Epoch 4076, Training loss 1.013931, Validation loss 8.104394\n", "Epoch 4077, Training loss 0.879699, Validation loss 14.951835\n", "Epoch 4078, Training loss 1.013820, Validation loss 8.104464\n", "Epoch 4079, Training loss 0.879625, Validation loss 14.951310\n", "Epoch 4080, Training loss 1.013715, Validation loss 8.104522\n", "Epoch 4081, Training loss 0.879548, Validation loss 14.950801\n", "Epoch 4082, Training loss 1.013604, Validation loss 8.104603\n", "Epoch 4083, Training loss 0.879473, Validation loss 14.950270\n", "Epoch 4084, Training loss 1.013500, Validation loss 8.104676\n", "Epoch 4085, Training loss 0.879394, Validation loss 14.949717\n", "Epoch 4086, Training loss 1.013385, Validation loss 8.104762\n", "Epoch 4087, Training loss 0.879320, Validation loss 14.949204\n", "Epoch 4088, Training loss 1.013283, Validation loss 8.104826\n", "Epoch 4089, Training loss 0.879244, Validation loss 14.948706\n", "Epoch 4090, Training loss 1.013173, Validation loss 8.104885\n", "Epoch 4091, Training loss 0.879169, Validation loss 14.948174\n", "Epoch 4092, Training loss 1.013066, Validation loss 8.104950\n", "Epoch 4093, Training loss 0.879094, Validation loss 14.947665\n", "Epoch 4094, Training loss 1.012960, Validation loss 8.105024\n", "Epoch 4095, Training loss 0.879016, Validation loss 14.947134\n", "Epoch 4096, Training loss 1.012848, Validation loss 8.105115\n", "Epoch 4097, Training loss 0.878939, Validation loss 14.946604\n", "Epoch 4098, Training loss 1.012740, Validation loss 8.105185\n", "Epoch 4099, Training loss 0.878865, Validation loss 14.946073\n", "Epoch 4100, Training loss 1.012634, Validation loss 8.105257\n", "Epoch 4101, Training loss 0.878789, Validation loss 14.945541\n", "Epoch 4102, Training loss 1.012528, Validation loss 8.105319\n", "Epoch 4103, Training loss 0.878712, Validation loss 14.945040\n", "Epoch 4104, Training loss 1.012416, Validation loss 8.105394\n", "Epoch 4105, Training loss 0.878635, Validation loss 14.944505\n", "Epoch 4106, Training loss 1.012305, Validation loss 8.105474\n", "Epoch 4107, Training loss 0.878558, Validation loss 14.943970\n", "Epoch 4108, Training loss 1.012197, Validation loss 8.105555\n", "Epoch 4109, Training loss 0.878482, Validation loss 14.943428\n", "Epoch 4110, Training loss 1.012092, Validation loss 8.105624\n", "Epoch 4111, Training loss 0.878407, Validation loss 14.942923\n", "Epoch 4112, Training loss 1.011982, Validation loss 8.105692\n", "Epoch 4113, Training loss 0.878332, Validation loss 14.942384\n", "Epoch 4114, Training loss 1.011875, Validation loss 8.105766\n", "Epoch 4115, Training loss 0.878256, Validation loss 14.941875\n", "Epoch 4116, Training loss 1.011768, Validation loss 8.105834\n", "Epoch 4117, Training loss 0.878181, Validation loss 14.941344\n", "Epoch 4118, Training loss 1.011662, Validation loss 8.105906\n", "Epoch 4119, Training loss 0.878105, Validation loss 14.940813\n", "Epoch 4120, Training loss 1.011552, Validation loss 8.105984\n", "Epoch 4121, Training loss 
0.878029, Validation loss 14.940286\n", "Epoch 4122, Training loss 1.011447, Validation loss 8.106048\n", "Epoch 4123, Training loss 0.877953, Validation loss 14.939765\n", "Epoch 4124, Training loss 1.011338, Validation loss 8.106118\n", "Epoch 4125, Training loss 0.877880, Validation loss 14.939257\n", "Epoch 4126, Training loss 1.011233, Validation loss 8.106198\n", "Epoch 4127, Training loss 0.877802, Validation loss 14.938732\n", "Epoch 4128, Training loss 1.011125, Validation loss 8.106269\n", "Epoch 4129, Training loss 0.877727, Validation loss 14.938187\n", "Epoch 4130, Training loss 1.011017, Validation loss 8.106344\n", "Epoch 4131, Training loss 0.877649, Validation loss 14.937655\n", "Epoch 4132, Training loss 1.010906, Validation loss 8.106414\n", "Epoch 4133, Training loss 0.877575, Validation loss 14.937140\n", "Epoch 4134, Training loss 1.010802, Validation loss 8.106478\n", "Epoch 4135, Training loss 0.877500, Validation loss 14.936604\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 4136, Training loss 1.010696, Validation loss 8.106537\n", "Epoch 4137, Training loss 0.877427, Validation loss 14.936107\n", "Epoch 4138, Training loss 1.010591, Validation loss 8.106612\n", "Epoch 4139, Training loss 0.877352, Validation loss 14.935591\n", "Epoch 4140, Training loss 1.010483, Validation loss 8.106688\n", "Epoch 4141, Training loss 0.877276, Validation loss 14.935060\n", "Epoch 4142, Training loss 1.010376, Validation loss 8.106757\n", "Epoch 4143, Training loss 0.877203, Validation loss 14.934525\n", "Epoch 4144, Training loss 1.010272, Validation loss 8.106817\n", "Epoch 4145, Training loss 0.877125, Validation loss 14.934016\n", "Epoch 4146, Training loss 1.010160, Validation loss 8.106903\n", "Epoch 4147, Training loss 0.877051, Validation loss 14.933474\n", "Epoch 4148, Training loss 1.010060, Validation loss 8.106964\n", "Epoch 4149, Training loss 0.876973, Validation loss 14.932942\n", "Epoch 4150, Training loss 1.009945, Validation loss 8.107046\n", "Epoch 4151, Training loss 0.876901, Validation loss 14.932426\n", "Epoch 4152, Training loss 1.009845, Validation loss 8.107092\n", "Epoch 4153, Training loss 0.876824, Validation loss 14.931903\n", "Epoch 4154, Training loss 1.009734, Validation loss 8.107179\n", "Epoch 4155, Training loss 0.876751, Validation loss 14.931395\n", "Epoch 4156, Training loss 1.009633, Validation loss 8.107222\n", "Epoch 4157, Training loss 0.876677, Validation loss 14.930878\n", "Epoch 4158, Training loss 1.009526, Validation loss 8.107292\n", "Epoch 4159, Training loss 0.876602, Validation loss 14.930350\n", "Epoch 4160, Training loss 1.009418, Validation loss 8.107360\n", "Epoch 4161, Training loss 0.876525, Validation loss 14.929809\n", "Epoch 4162, Training loss 1.009309, Validation loss 8.107443\n", "Epoch 4163, Training loss 0.876451, Validation loss 14.929277\n", "Epoch 4164, Training loss 1.009209, Validation loss 8.107491\n", "Epoch 4165, Training loss 0.876376, Validation loss 14.928780\n", "Epoch 4166, Training loss 1.009099, Validation loss 8.107574\n", "Epoch 4167, Training loss 0.876301, Validation loss 14.928238\n", "Epoch 4168, Training loss 1.008993, Validation loss 8.107637\n", "Epoch 4169, Training loss 0.876226, Validation loss 14.927722\n", "Epoch 4170, Training loss 1.008885, Validation loss 8.107710\n", "Epoch 4171, Training loss 0.876152, Validation loss 14.927183\n", "Epoch 4172, Training loss 1.008784, Validation loss 8.107767\n", "Epoch 4173, Training loss 0.876077, Validation loss 
14.926675\n", "Epoch 4174, Training loss 1.008673, Validation loss 8.107843\n", "Epoch 4175, Training loss 0.876004, Validation loss 14.926147\n", "Epoch 4176, Training loss 1.008571, Validation loss 8.107913\n", "Epoch 4177, Training loss 0.875928, Validation loss 14.925620\n", "Epoch 4178, Training loss 1.008464, Validation loss 8.107978\n", "Epoch 4179, Training loss 0.875854, Validation loss 14.925093\n", "Epoch 4180, Training loss 1.008360, Validation loss 8.108029\n", "Epoch 4181, Training loss 0.875780, Validation loss 14.924573\n", "Epoch 4182, Training loss 1.008252, Validation loss 8.108103\n", "Epoch 4183, Training loss 0.875707, Validation loss 14.924039\n", "Epoch 4184, Training loss 1.008149, Validation loss 8.108167\n", "Epoch 4185, Training loss 0.875630, Validation loss 14.923512\n", "Epoch 4186, Training loss 1.008042, Validation loss 8.108227\n", "Epoch 4187, Training loss 0.875558, Validation loss 14.922988\n", "Epoch 4188, Training loss 1.007939, Validation loss 8.108305\n", "Epoch 4189, Training loss 0.875481, Validation loss 14.922461\n", "Epoch 4190, Training loss 1.007831, Validation loss 8.108374\n", "Epoch 4191, Training loss 0.875407, Validation loss 14.921945\n", "Epoch 4192, Training loss 1.007726, Validation loss 8.108434\n", "Epoch 4193, Training loss 0.875334, Validation loss 14.921425\n", "Epoch 4194, Training loss 1.007622, Validation loss 8.108501\n", "Epoch 4195, Training loss 0.875258, Validation loss 14.920905\n", "Epoch 4196, Training loss 1.007513, Validation loss 8.108574\n", "Epoch 4197, Training loss 0.875184, Validation loss 14.920356\n", "Epoch 4198, Training loss 1.007412, Validation loss 8.108633\n", "Epoch 4199, Training loss 0.875110, Validation loss 14.919858\n", "Epoch 4200, Training loss 1.007302, Validation loss 8.108715\n", "Epoch 4201, Training loss 0.875036, Validation loss 14.919316\n", "Epoch 4202, Training loss 1.007200, Validation loss 8.108765\n", "Epoch 4203, Training loss 0.874961, Validation loss 14.918797\n", "Epoch 4204, Training loss 1.007090, Validation loss 8.108845\n", "Epoch 4205, Training loss 0.874885, Validation loss 14.918241\n", "Epoch 4206, Training loss 1.006986, Validation loss 8.108903\n", "Epoch 4207, Training loss 0.874813, Validation loss 14.917739\n", "Epoch 4208, Training loss 1.006883, Validation loss 8.108976\n", "Epoch 4209, Training loss 0.874736, Validation loss 14.917193\n", "Epoch 4210, Training loss 1.006776, Validation loss 8.109036\n", "Epoch 4211, Training loss 0.874664, Validation loss 14.916677\n", "Epoch 4212, Training loss 1.006672, Validation loss 8.109114\n", "Epoch 4213, Training loss 0.874590, Validation loss 14.916146\n", "Epoch 4214, Training loss 1.006567, Validation loss 8.109177\n", "Epoch 4215, Training loss 0.874514, Validation loss 14.915612\n", "Epoch 4216, Training loss 1.006458, Validation loss 8.109261\n", "Epoch 4217, Training loss 0.874438, Validation loss 14.915063\n", "Epoch 4218, Training loss 1.006353, Validation loss 8.109328\n", "Epoch 4219, Training loss 0.874366, Validation loss 14.914547\n", "Epoch 4220, Training loss 1.006251, Validation loss 8.109377\n", "Epoch 4221, Training loss 0.874291, Validation loss 14.914017\n", "Epoch 4222, Training loss 1.006144, Validation loss 8.109450\n", "Epoch 4223, Training loss 0.874217, Validation loss 14.913486\n", "Epoch 4224, Training loss 1.006039, Validation loss 8.109508\n", "Epoch 4225, Training loss 0.874143, Validation loss 14.912955\n", "Epoch 4226, Training loss 1.005936, Validation loss 8.109573\n", "Epoch 4227, 
Training loss 0.874069, Validation loss 14.912442\n", "Epoch 4228, Training loss 1.005831, Validation loss 8.109643\n", "Epoch 4229, Training loss 0.873997, Validation loss 14.911916\n", "Epoch 4230, Training loss 1.005728, Validation loss 8.109712\n", "Epoch 4231, Training loss 0.873921, Validation loss 14.911392\n", "Epoch 4232, Training loss 1.005620, Validation loss 8.109777\n", "Epoch 4233, Training loss 0.873848, Validation loss 14.910839\n", "Epoch 4234, Training loss 1.005520, Validation loss 8.109834\n", "Epoch 4235, Training loss 0.873775, Validation loss 14.910331\n", "Epoch 4236, Training loss 1.005415, Validation loss 8.109900\n", "Epoch 4237, Training loss 0.873702, Validation loss 14.909803\n", "Epoch 4238, Training loss 1.005312, Validation loss 8.109958\n", "Epoch 4239, Training loss 0.873628, Validation loss 14.909284\n", "Epoch 4240, Training loss 1.005206, Validation loss 8.110025\n", "Epoch 4241, Training loss 0.873552, Validation loss 14.908735\n", "Epoch 4242, Training loss 1.005099, Validation loss 8.110105\n", "Epoch 4243, Training loss 0.873479, Validation loss 14.908208\n", "Epoch 4244, Training loss 1.004997, Validation loss 8.110177\n", "Epoch 4245, Training loss 0.873403, Validation loss 14.907670\n", "Epoch 4246, Training loss 1.004891, Validation loss 8.110232\n", "Epoch 4247, Training loss 0.873332, Validation loss 14.907168\n", "Epoch 4248, Training loss 1.004790, Validation loss 8.110290\n", "Epoch 4249, Training loss 0.873259, Validation loss 14.906653\n", "Epoch 4250, Training loss 1.004683, Validation loss 8.110353\n", "Epoch 4251, Training loss 0.873185, Validation loss 14.906108\n", "Epoch 4252, Training loss 1.004580, Validation loss 8.110426\n", "Epoch 4253, Training loss 0.873110, Validation loss 14.905570\n", "Epoch 4254, Training loss 1.004476, Validation loss 8.110478\n", "Epoch 4255, Training loss 0.873036, Validation loss 14.905046\n", "Epoch 4256, Training loss 1.004368, Validation loss 8.110565\n", "Epoch 4257, Training loss 0.872962, Validation loss 14.904505\n", "Epoch 4258, Training loss 1.004264, Validation loss 8.110629\n", "Epoch 4259, Training loss 0.872889, Validation loss 14.903978\n", "Epoch 4260, Training loss 1.004163, Validation loss 8.110680\n", "Epoch 4261, Training loss 0.872817, Validation loss 14.903459\n", "Epoch 4262, Training loss 1.004062, Validation loss 8.110741\n", "Epoch 4263, Training loss 0.872744, Validation loss 14.902931\n", "Epoch 4264, Training loss 1.003959, Validation loss 8.110796\n", "Epoch 4265, Training loss 0.872670, Validation loss 14.902415\n", "Epoch 4266, Training loss 1.003852, Validation loss 8.110857\n", "Epoch 4267, Training loss 0.872598, Validation loss 14.901899\n", "Epoch 4268, Training loss 1.003750, Validation loss 8.110911\n", "Epoch 4269, Training loss 0.872525, Validation loss 14.901361\n", "Epoch 4270, Training loss 1.003649, Validation loss 8.110977\n", "Epoch 4271, Training loss 0.872450, Validation loss 14.900839\n", "Epoch 4272, Training loss 1.003538, Validation loss 8.111067\n", "Epoch 4273, Training loss 0.872376, Validation loss 14.900264\n", "Epoch 4274, Training loss 1.003438, Validation loss 8.111126\n", "Epoch 4275, Training loss 0.872301, Validation loss 14.899748\n", "Epoch 4276, Training loss 1.003328, Validation loss 8.111204\n", "Epoch 4277, Training loss 0.872230, Validation loss 14.899210\n", "Epoch 4278, Training loss 1.003233, Validation loss 8.111244\n", "Epoch 4279, Training loss 0.872156, Validation loss 14.898694\n", "Epoch 4280, Training loss 1.003126, 
Validation loss 8.111300\n", "Epoch 4281, Training loss 0.872085, Validation loss 14.898186\n", "Epoch 4282, Training loss 1.003025, Validation loss 8.111372\n", "Epoch 4283, Training loss 0.872011, Validation loss 14.897641\n", "Epoch 4284, Training loss 1.002920, Validation loss 8.111426\n", "Epoch 4285, Training loss 0.871939, Validation loss 14.897125\n", "Epoch 4286, Training loss 1.002818, Validation loss 8.111488\n", "Epoch 4287, Training loss 0.871864, Validation loss 14.896587\n", "Epoch 4288, Training loss 1.002713, Validation loss 8.111558\n", "Epoch 4289, Training loss 0.871790, Validation loss 14.896049\n", "Epoch 4290, Training loss 1.002611, Validation loss 8.111629\n", "Epoch 4291, Training loss 0.871717, Validation loss 14.895512\n", "Epoch 4292, Training loss 1.002505, Validation loss 8.111694\n", "Epoch 4293, Training loss 0.871643, Validation loss 14.894966\n", "Epoch 4294, Training loss 1.002401, Validation loss 8.111765\n", "Epoch 4295, Training loss 0.871569, Validation loss 14.894440\n", "Epoch 4296, Training loss 1.002297, Validation loss 8.111820\n", "Epoch 4297, Training loss 0.871498, Validation loss 14.893906\n", "Epoch 4298, Training loss 1.002200, Validation loss 8.111871\n", "Epoch 4299, Training loss 0.871425, Validation loss 14.893404\n", "Epoch 4300, Training loss 1.002094, Validation loss 8.111932\n", "Epoch 4301, Training loss 0.871352, Validation loss 14.892859\n", "Epoch 4302, Training loss 1.001991, Validation loss 8.112002\n", "Epoch 4303, Training loss 0.871279, Validation loss 14.892336\n", "Epoch 4304, Training loss 1.001888, Validation loss 8.112065\n", "Epoch 4305, Training loss 0.871206, Validation loss 14.891791\n", "Epoch 4306, Training loss 1.001787, Validation loss 8.112114\n", "Epoch 4307, Training loss 0.871134, Validation loss 14.891283\n", "Epoch 4308, Training loss 1.001683, Validation loss 8.112169\n", "Epoch 4309, Training loss 0.871063, Validation loss 14.890763\n", "Epoch 4310, Training loss 1.001585, Validation loss 8.112235\n", "Epoch 4311, Training loss 0.870988, Validation loss 14.890229\n", "Epoch 4312, Training loss 1.001476, Validation loss 8.112307\n", "Epoch 4313, Training loss 0.870918, Validation loss 14.889710\n", "Epoch 4314, Training loss 1.001381, Validation loss 8.112351\n", "Epoch 4315, Training loss 0.870844, Validation loss 14.889191\n", "Epoch 4316, Training loss 1.001269, Validation loss 8.112436\n", "Epoch 4317, Training loss 0.870773, Validation loss 14.888631\n", "Epoch 4318, Training loss 1.001177, Validation loss 8.112475\n", "Epoch 4319, Training loss 0.870700, Validation loss 14.888137\n", "Epoch 4320, Training loss 1.001071, Validation loss 8.112539\n", "Epoch 4321, Training loss 0.870627, Validation loss 14.887596\n", "Epoch 4322, Training loss 1.000968, Validation loss 8.112597\n", "Epoch 4323, Training loss 0.870554, Validation loss 14.887062\n", "Epoch 4324, Training loss 1.000865, Validation loss 8.112665\n", "Epoch 4325, Training loss 0.870480, Validation loss 14.886517\n", "Epoch 4326, Training loss 1.000760, Validation loss 8.112732\n", "Epoch 4327, Training loss 0.870408, Validation loss 14.885994\n", "Epoch 4328, Training loss 1.000660, Validation loss 8.112787\n", "Epoch 4329, Training loss 0.870337, Validation loss 14.885470\n", "Epoch 4330, Training loss 1.000558, Validation loss 8.112840\n", "Epoch 4331, Training loss 0.870262, Validation loss 14.884930\n", "Epoch 4332, Training loss 1.000452, Validation loss 8.112899\n", "Epoch 4333, Training loss 0.870192, Validation loss 14.884406\n", 
"Epoch 4334, Training loss 1.000354, Validation loss 8.112965\n", "Epoch 4335, Training loss 0.870116, Validation loss 14.883869\n", "Epoch 4336, Training loss 1.000247, Validation loss 8.113026\n", "Epoch 4337, Training loss 0.870046, Validation loss 14.883346\n", "Epoch 4338, Training loss 1.000150, Validation loss 8.113080\n", "Epoch 4339, Training loss 0.869974, Validation loss 14.882812\n", "Epoch 4340, Training loss 1.000048, Validation loss 8.113142\n", "Epoch 4341, Training loss 0.869902, Validation loss 14.882297\n", "Epoch 4342, Training loss 0.999946, Validation loss 8.113207\n", "Epoch 4343, Training loss 0.869828, Validation loss 14.881751\n", "Epoch 4344, Training loss 0.999842, Validation loss 8.113275\n", "Epoch 4345, Training loss 0.869757, Validation loss 14.881221\n", "Epoch 4346, Training loss 0.999740, Validation loss 8.113330\n", "Epoch 4347, Training loss 0.869682, Validation loss 14.880676\n", "Epoch 4348, Training loss 0.999636, Validation loss 8.113396\n", "Epoch 4349, Training loss 0.869611, Validation loss 14.880146\n", "Epoch 4350, Training loss 0.999536, Validation loss 8.113462\n", "Epoch 4351, Training loss 0.869538, Validation loss 14.879620\n", "Epoch 4352, Training loss 0.999433, Validation loss 8.113519\n", "Epoch 4353, Training loss 0.869466, Validation loss 14.879074\n", "Epoch 4354, Training loss 0.999330, Validation loss 8.113585\n", "Epoch 4355, Training loss 0.869391, Validation loss 14.878533\n", "Epoch 4356, Training loss 0.999227, Validation loss 8.113632\n", "Epoch 4357, Training loss 0.869322, Validation loss 14.878003\n", "Epoch 4358, Training loss 0.999130, Validation loss 8.113693\n", "Epoch 4359, Training loss 0.869248, Validation loss 14.877487\n", "Epoch 4360, Training loss 0.999024, Validation loss 8.113759\n", "Epoch 4361, Training loss 0.869175, Validation loss 14.876939\n", "Epoch 4362, Training loss 0.998922, Validation loss 8.113821\n", "Epoch 4363, Training loss 0.869105, Validation loss 14.876412\n", "Epoch 4364, Training loss 0.998824, Validation loss 8.113873\n", "Epoch 4365, Training loss 0.869032, Validation loss 14.875890\n", "Epoch 4366, Training loss 0.998722, Validation loss 8.113929\n", "Epoch 4367, Training loss 0.868960, Validation loss 14.875352\n", "Epoch 4368, Training loss 0.998619, Validation loss 8.113996\n", "Epoch 4369, Training loss 0.868887, Validation loss 14.874814\n", "Epoch 4370, Training loss 0.998517, Validation loss 8.114055\n", "Epoch 4371, Training loss 0.868815, Validation loss 14.874288\n", "Epoch 4372, Training loss 0.998415, Validation loss 8.114119\n", "Epoch 4373, Training loss 0.868743, Validation loss 14.873747\n", "Epoch 4374, Training loss 0.998313, Validation loss 8.114180\n", "Epoch 4375, Training loss 0.868672, Validation loss 14.873224\n", "Epoch 4376, Training loss 0.998214, Validation loss 8.114229\n", "Epoch 4377, Training loss 0.868600, Validation loss 14.872694\n", "Epoch 4378, Training loss 0.998113, Validation loss 8.114284\n", "Epoch 4379, Training loss 0.868530, Validation loss 14.872178\n", "Epoch 4380, Training loss 0.998013, Validation loss 8.114336\n", "Epoch 4381, Training loss 0.868456, Validation loss 14.871630\n", "Epoch 4382, Training loss 0.997908, Validation loss 8.114408\n", "Epoch 4383, Training loss 0.868384, Validation loss 14.871078\n", "Epoch 4384, Training loss 0.997809, Validation loss 8.114473\n", "Epoch 4385, Training loss 0.868311, Validation loss 14.870543\n", "Epoch 4386, Training loss 0.997704, Validation loss 8.114528\n", "Epoch 4387, Training loss 
0.868240, Validation loss 14.870007\n", "Epoch 4388, Training loss 0.997606, Validation loss 8.114586\n", "Epoch 4389, Training loss 0.868168, Validation loss 14.869491\n", "Epoch 4390, Training loss 0.997503, Validation loss 8.114653\n", "Epoch 4391, Training loss 0.868095, Validation loss 14.868932\n", "Epoch 4392, Training loss 0.997403, Validation loss 8.114698\n", "Epoch 4393, Training loss 0.868026, Validation loss 14.868423\n", "Epoch 4394, Training loss 0.997305, Validation loss 8.114764\n", "Epoch 4395, Training loss 0.867954, Validation loss 14.867901\n", "Epoch 4396, Training loss 0.997201, Validation loss 8.114821\n", "Epoch 4397, Training loss 0.867882, Validation loss 14.867356\n", "Epoch 4398, Training loss 0.997103, Validation loss 8.114877\n", "Epoch 4399, Training loss 0.867811, Validation loss 14.866840\n", "Epoch 4400, Training loss 0.997001, Validation loss 8.114933\n", "Epoch 4401, Training loss 0.867738, Validation loss 14.866300\n", "Epoch 4402, Training loss 0.996899, Validation loss 8.114994\n", "Epoch 4403, Training loss 0.867668, Validation loss 14.865766\n", "Epoch 4404, Training loss 0.996800, Validation loss 8.115050\n", "Epoch 4405, Training loss 0.867594, Validation loss 14.865225\n", "Epoch 4406, Training loss 0.996695, Validation loss 8.115128\n", "Epoch 4407, Training loss 0.867522, Validation loss 14.864673\n", "Epoch 4408, Training loss 0.996598, Validation loss 8.115165\n", "Epoch 4409, Training loss 0.867451, Validation loss 14.864161\n", "Epoch 4410, Training loss 0.996496, Validation loss 8.115212\n", "Epoch 4411, Training loss 0.867381, Validation loss 14.863616\n", "Epoch 4412, Training loss 0.996398, Validation loss 8.115284\n", "Epoch 4413, Training loss 0.867308, Validation loss 14.863093\n", "Epoch 4414, Training loss 0.996295, Validation loss 8.115336\n", "Epoch 4415, Training loss 0.867238, Validation loss 14.862549\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 4416, Training loss 0.996196, Validation loss 8.115387\n", "Epoch 4417, Training loss 0.867167, Validation loss 14.862026\n", "Epoch 4418, Training loss 0.996097, Validation loss 8.115445\n", "Epoch 4419, Training loss 0.867093, Validation loss 14.861482\n", "Epoch 4420, Training loss 0.995992, Validation loss 8.115516\n", "Epoch 4421, Training loss 0.867022, Validation loss 14.860944\n", "Epoch 4422, Training loss 0.995893, Validation loss 8.115571\n", "Epoch 4423, Training loss 0.866950, Validation loss 14.860407\n", "Epoch 4424, Training loss 0.995789, Validation loss 8.115639\n", "Epoch 4425, Training loss 0.866878, Validation loss 14.859855\n", "Epoch 4426, Training loss 0.995693, Validation loss 8.115688\n", "Epoch 4427, Training loss 0.866807, Validation loss 14.859332\n", "Epoch 4428, Training loss 0.995590, Validation loss 8.115746\n", "Epoch 4429, Training loss 0.866734, Validation loss 14.858799\n", "Epoch 4430, Training loss 0.995488, Validation loss 8.115808\n", "Epoch 4431, Training loss 0.866663, Validation loss 14.858261\n", "Epoch 4432, Training loss 0.995389, Validation loss 8.115863\n", "Epoch 4433, Training loss 0.866593, Validation loss 14.857742\n", "Epoch 4434, Training loss 0.995289, Validation loss 8.115930\n", "Epoch 4435, Training loss 0.866520, Validation loss 14.857183\n", "Epoch 4436, Training loss 0.995187, Validation loss 8.115990\n", "Epoch 4437, Training loss 0.866448, Validation loss 14.856653\n", "Epoch 4438, Training loss 0.995085, Validation loss 8.116055\n", "Epoch 4439, Training loss 0.866376, Validation loss 
14.856094\n", "Epoch 4440, Training loss 0.994986, Validation loss 8.116115\n", "Epoch 4441, Training loss 0.866304, Validation loss 14.855572\n", "Epoch 4442, Training loss 0.994886, Validation loss 8.116163\n", "Epoch 4443, Training loss 0.866234, Validation loss 14.855023\n", "Epoch 4444, Training loss 0.994787, Validation loss 8.116218\n", "Epoch 4445, Training loss 0.866162, Validation loss 14.854497\n", "Epoch 4446, Training loss 0.994683, Validation loss 8.116292\n", "Epoch 4447, Training loss 0.866092, Validation loss 14.853967\n", "Epoch 4448, Training loss 0.994587, Validation loss 8.116334\n", "Epoch 4449, Training loss 0.866018, Validation loss 14.853418\n", "Epoch 4450, Training loss 0.994484, Validation loss 8.116382\n", "Epoch 4451, Training loss 0.865952, Validation loss 14.852897\n", "Epoch 4452, Training loss 0.994392, Validation loss 8.116431\n", "Epoch 4453, Training loss 0.865880, Validation loss 14.852377\n", "Epoch 4454, Training loss 0.994288, Validation loss 8.116481\n", "Epoch 4455, Training loss 0.865811, Validation loss 14.851840\n", "Epoch 4456, Training loss 0.994191, Validation loss 8.116542\n", "Epoch 4457, Training loss 0.865740, Validation loss 14.851317\n", "Epoch 4458, Training loss 0.994091, Validation loss 8.116594\n", "Epoch 4459, Training loss 0.865667, Validation loss 14.850774\n", "Epoch 4460, Training loss 0.993990, Validation loss 8.116663\n", "Epoch 4461, Training loss 0.865596, Validation loss 14.850244\n", "Epoch 4462, Training loss 0.993889, Validation loss 8.116714\n", "Epoch 4463, Training loss 0.865524, Validation loss 14.849691\n", "Epoch 4464, Training loss 0.993788, Validation loss 8.116778\n", "Epoch 4465, Training loss 0.865453, Validation loss 14.849147\n", "Epoch 4466, Training loss 0.993688, Validation loss 8.116830\n", "Epoch 4467, Training loss 0.865383, Validation loss 14.848632\n", "Epoch 4468, Training loss 0.993592, Validation loss 8.116889\n", "Epoch 4469, Training loss 0.865309, Validation loss 14.848073\n", "Epoch 4470, Training loss 0.993487, Validation loss 8.116948\n", "Epoch 4471, Training loss 0.865238, Validation loss 14.847528\n", "Epoch 4472, Training loss 0.993387, Validation loss 8.116997\n", "Epoch 4473, Training loss 0.865169, Validation loss 14.846998\n", "Epoch 4474, Training loss 0.993291, Validation loss 8.117067\n", "Epoch 4475, Training loss 0.865096, Validation loss 14.846455\n", "Epoch 4476, Training loss 0.993190, Validation loss 8.117116\n", "Epoch 4477, Training loss 0.865026, Validation loss 14.845942\n", "Epoch 4478, Training loss 0.993089, Validation loss 8.117181\n", "Epoch 4479, Training loss 0.864957, Validation loss 14.845398\n", "Epoch 4480, Training loss 0.992994, Validation loss 8.117218\n", "Epoch 4481, Training loss 0.864885, Validation loss 14.844865\n", "Epoch 4482, Training loss 0.992891, Validation loss 8.117286\n", "Epoch 4483, Training loss 0.864815, Validation loss 14.844320\n", "Epoch 4484, Training loss 0.992793, Validation loss 8.117341\n", "Epoch 4485, Training loss 0.864742, Validation loss 14.843776\n", "Epoch 4486, Training loss 0.992692, Validation loss 8.117399\n", "Epoch 4487, Training loss 0.864674, Validation loss 14.843246\n", "Epoch 4488, Training loss 0.992596, Validation loss 8.117439\n", "Epoch 4489, Training loss 0.864602, Validation loss 14.842716\n", "Epoch 4490, Training loss 0.992491, Validation loss 8.117512\n", "Epoch 4491, Training loss 0.864531, Validation loss 14.842154\n", "Epoch 4492, Training loss 0.992395, Validation loss 8.117567\n", "Epoch 4493, 
Training loss 0.864459, Validation loss 14.841609\n", "Epoch 4494, Training loss 0.992293, Validation loss 8.117631\n", "Epoch 4495, Training loss 0.864387, Validation loss 14.841065\n", "Epoch 4496, Training loss 0.992195, Validation loss 8.117687\n", "Epoch 4497, Training loss 0.864317, Validation loss 14.840517\n", "Epoch 4498, Training loss 0.992096, Validation loss 8.117748\n", "Epoch 4499, Training loss 0.864247, Validation loss 14.839994\n", "Epoch 4500, Training loss 0.991998, Validation loss 8.117795\n", "Epoch 4501, Training loss 0.864174, Validation loss 14.839458\n", "Epoch 4502, Training loss 0.991895, Validation loss 8.117858\n", "Epoch 4503, Training loss 0.864104, Validation loss 14.838899\n", "Epoch 4504, Training loss 0.991799, Validation loss 8.117919\n", "Epoch 4505, Training loss 0.864034, Validation loss 14.838377\n", "Epoch 4506, Training loss 0.991700, Validation loss 8.117976\n", "Epoch 4507, Training loss 0.863962, Validation loss 14.837825\n", "Epoch 4508, Training loss 0.991598, Validation loss 8.118033\n", "Epoch 4509, Training loss 0.863892, Validation loss 14.837280\n", "Epoch 4510, Training loss 0.991501, Validation loss 8.118090\n", "Epoch 4511, Training loss 0.863820, Validation loss 14.836743\n", "Epoch 4512, Training loss 0.991401, Validation loss 8.118136\n", "Epoch 4513, Training loss 0.863752, Validation loss 14.836236\n", "Epoch 4514, Training loss 0.991305, Validation loss 8.118182\n", "Epoch 4515, Training loss 0.863680, Validation loss 14.835684\n", "Epoch 4516, Training loss 0.991201, Validation loss 8.118261\n", "Epoch 4517, Training loss 0.863610, Validation loss 14.835126\n", "Epoch 4518, Training loss 0.991106, Validation loss 8.118301\n", "Epoch 4519, Training loss 0.863540, Validation loss 14.834603\n", "Epoch 4520, Training loss 0.991006, Validation loss 8.118359\n", "Epoch 4521, Training loss 0.863471, Validation loss 14.834073\n", "Epoch 4522, Training loss 0.990911, Validation loss 8.118404\n", "Epoch 4523, Training loss 0.863399, Validation loss 14.833532\n", "Epoch 4524, Training loss 0.990808, Validation loss 8.118472\n", "Epoch 4525, Training loss 0.863329, Validation loss 14.832993\n", "Epoch 4526, Training loss 0.990713, Validation loss 8.118510\n", "Epoch 4527, Training loss 0.863258, Validation loss 14.832448\n", "Epoch 4528, Training loss 0.990611, Validation loss 8.118578\n", "Epoch 4529, Training loss 0.863188, Validation loss 14.831911\n", "Epoch 4530, Training loss 0.990514, Validation loss 8.118632\n", "Epoch 4531, Training loss 0.863116, Validation loss 14.831364\n", "Epoch 4532, Training loss 0.990411, Validation loss 8.118695\n", "Epoch 4533, Training loss 0.863045, Validation loss 14.830808\n", "Epoch 4534, Training loss 0.990314, Validation loss 8.118752\n", "Epoch 4535, Training loss 0.862976, Validation loss 14.830278\n", "Epoch 4536, Training loss 0.990216, Validation loss 8.118814\n", "Epoch 4537, Training loss 0.862905, Validation loss 14.829723\n", "Epoch 4538, Training loss 0.990120, Validation loss 8.118858\n", "Epoch 4539, Training loss 0.862836, Validation loss 14.829212\n", "Epoch 4540, Training loss 0.990019, Validation loss 8.118913\n", "Epoch 4541, Training loss 0.862765, Validation loss 14.828661\n", "Epoch 4542, Training loss 0.989922, Validation loss 8.118972\n", "Epoch 4543, Training loss 0.862696, Validation loss 14.828132\n", "Epoch 4544, Training loss 0.989827, Validation loss 8.119004\n", "Epoch 4545, Training loss 0.862626, Validation loss 14.827594\n", "Epoch 4546, Training loss 0.989727, 
Validation loss 8.119067\n", "Epoch 4547, Training loss 0.862555, Validation loss 14.827043\n", "Epoch 4548, Training loss 0.989627, Validation loss 8.119130\n", "Epoch 4549, Training loss 0.862485, Validation loss 14.826506\n", "Epoch 4550, Training loss 0.989531, Validation loss 8.119176\n", "Epoch 4551, Training loss 0.862415, Validation loss 14.825962\n", "Epoch 4552, Training loss 0.989432, Validation loss 8.119233\n", "Epoch 4553, Training loss 0.862342, Validation loss 14.825425\n", "Epoch 4554, Training loss 0.989328, Validation loss 8.119301\n", "Epoch 4555, Training loss 0.862273, Validation loss 14.824866\n", "Epoch 4556, Training loss 0.989237, Validation loss 8.119343\n", "Epoch 4557, Training loss 0.862204, Validation loss 14.824351\n", "Epoch 4558, Training loss 0.989138, Validation loss 8.119390\n", "Epoch 4559, Training loss 0.862132, Validation loss 14.823804\n", "Epoch 4560, Training loss 0.989035, Validation loss 8.119468\n", "Epoch 4561, Training loss 0.862062, Validation loss 14.823241\n", "Epoch 4562, Training loss 0.988941, Validation loss 8.119516\n", "Epoch 4563, Training loss 0.861992, Validation loss 14.822704\n", "Epoch 4564, Training loss 0.988841, Validation loss 8.119572\n", "Epoch 4565, Training loss 0.861920, Validation loss 14.822145\n", "Epoch 4566, Training loss 0.988740, Validation loss 8.119627\n", "Epoch 4567, Training loss 0.861852, Validation loss 14.821624\n", "Epoch 4568, Training loss 0.988645, Validation loss 8.119676\n", "Epoch 4569, Training loss 0.861781, Validation loss 14.821087\n", "Epoch 4570, Training loss 0.988545, Validation loss 8.119725\n", "Epoch 4571, Training loss 0.861714, Validation loss 14.820550\n", "Epoch 4572, Training loss 0.988451, Validation loss 8.119763\n", "Epoch 4573, Training loss 0.861646, Validation loss 14.820028\n", "Epoch 4574, Training loss 0.988356, Validation loss 8.119813\n", "Epoch 4575, Training loss 0.861575, Validation loss 14.819499\n", "Epoch 4576, Training loss 0.988256, Validation loss 8.119879\n", "Epoch 4577, Training loss 0.861504, Validation loss 14.818922\n", "Epoch 4578, Training loss 0.988157, Validation loss 8.119937\n", "Epoch 4579, Training loss 0.861433, Validation loss 14.818385\n", "Epoch 4580, Training loss 0.988056, Validation loss 8.120001\n", "Epoch 4581, Training loss 0.861361, Validation loss 14.817819\n", "Epoch 4582, Training loss 0.987959, Validation loss 8.120055\n", "Epoch 4583, Training loss 0.861291, Validation loss 14.817271\n", "Epoch 4584, Training loss 0.987861, Validation loss 8.120108\n", "Epoch 4585, Training loss 0.861223, Validation loss 14.816757\n", "Epoch 4586, Training loss 0.987764, Validation loss 8.120157\n", "Epoch 4587, Training loss 0.861151, Validation loss 14.816213\n", "Epoch 4588, Training loss 0.987663, Validation loss 8.120236\n", "Epoch 4589, Training loss 0.861082, Validation loss 14.815654\n", "Epoch 4590, Training loss 0.987568, Validation loss 8.120274\n", "Epoch 4591, Training loss 0.861013, Validation loss 14.815125\n", "Epoch 4592, Training loss 0.987470, Validation loss 8.120332\n", "Epoch 4593, Training loss 0.860941, Validation loss 14.814573\n", "Epoch 4594, Training loss 0.987370, Validation loss 8.120383\n", "Epoch 4595, Training loss 0.860873, Validation loss 14.814022\n", "Epoch 4596, Training loss 0.987275, Validation loss 8.120439\n", "Epoch 4597, Training loss 0.860803, Validation loss 14.813493\n", "Epoch 4598, Training loss 0.987179, Validation loss 8.120480\n", "Epoch 4599, Training loss 0.860734, Validation loss 14.812949\n", 
"Epoch 4600, Training loss 0.987080, Validation loss 8.120529\n", "Epoch 4601, Training loss 0.860664, Validation loss 14.812405\n", "Epoch 4602, Training loss 0.986984, Validation loss 8.120586\n", "Epoch 4603, Training loss 0.860593, Validation loss 14.811853\n", "Epoch 4604, Training loss 0.986886, Validation loss 8.120639\n", "Epoch 4605, Training loss 0.860526, Validation loss 14.811324\n", "Epoch 4606, Training loss 0.986790, Validation loss 8.120696\n", "Epoch 4607, Training loss 0.860454, Validation loss 14.810766\n", "Epoch 4608, Training loss 0.986691, Validation loss 8.120743\n", "Epoch 4609, Training loss 0.860387, Validation loss 14.810229\n", "Epoch 4610, Training loss 0.986596, Validation loss 8.120806\n", "Epoch 4611, Training loss 0.860317, Validation loss 14.809704\n", "Epoch 4612, Training loss 0.986497, Validation loss 8.120843\n", "Epoch 4613, Training loss 0.860248, Validation loss 14.809149\n", "Epoch 4614, Training loss 0.986402, Validation loss 8.120895\n", "Epoch 4615, Training loss 0.860178, Validation loss 14.808619\n", "Epoch 4616, Training loss 0.986303, Validation loss 8.120955\n", "Epoch 4617, Training loss 0.860108, Validation loss 14.808068\n", "Epoch 4618, Training loss 0.986207, Validation loss 8.121003\n", "Epoch 4619, Training loss 0.860040, Validation loss 14.807539\n", "Epoch 4620, Training loss 0.986109, Validation loss 8.121062\n", "Epoch 4621, Training loss 0.859966, Validation loss 14.806966\n", "Epoch 4622, Training loss 0.986007, Validation loss 8.121129\n", "Epoch 4623, Training loss 0.859896, Validation loss 14.806415\n", "Epoch 4624, Training loss 0.985913, Validation loss 8.121181\n", "Epoch 4625, Training loss 0.859828, Validation loss 14.805871\n", "Epoch 4626, Training loss 0.985817, Validation loss 8.121235\n", "Epoch 4627, Training loss 0.859756, Validation loss 14.805320\n", "Epoch 4628, Training loss 0.985714, Validation loss 8.121306\n", "Epoch 4629, Training loss 0.859687, Validation loss 14.804769\n", "Epoch 4630, Training loss 0.985620, Validation loss 8.121343\n", "Epoch 4631, Training loss 0.859619, Validation loss 14.804239\n", "Epoch 4632, Training loss 0.985524, Validation loss 8.121383\n", "Epoch 4633, Training loss 0.859548, Validation loss 14.803681\n", "Epoch 4634, Training loss 0.985422, Validation loss 8.121456\n", "Epoch 4635, Training loss 0.859478, Validation loss 14.803138\n", "Epoch 4636, Training loss 0.985327, Validation loss 8.121503\n", "Epoch 4637, Training loss 0.859410, Validation loss 14.802594\n", "Epoch 4638, Training loss 0.985232, Validation loss 8.121555\n", "Epoch 4639, Training loss 0.859339, Validation loss 14.802043\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 4640, Training loss 0.985132, Validation loss 8.121606\n", "Epoch 4641, Training loss 0.859270, Validation loss 14.801502\n", "Epoch 4642, Training loss 0.985036, Validation loss 8.121671\n", "Epoch 4643, Training loss 0.859201, Validation loss 14.800955\n", "Epoch 4644, Training loss 0.984940, Validation loss 8.121716\n", "Epoch 4645, Training loss 0.859131, Validation loss 14.800411\n", "Epoch 4646, Training loss 0.984841, Validation loss 8.121776\n", "Epoch 4647, Training loss 0.859061, Validation loss 14.799846\n", "Epoch 4648, Training loss 0.984746, Validation loss 8.121831\n", "Epoch 4649, Training loss 0.858991, Validation loss 14.799309\n", "Epoch 4650, Training loss 0.984649, Validation loss 8.121881\n", "Epoch 4651, Training loss 0.858923, Validation loss 14.798773\n", "Epoch 4652, Training loss 0.984553, 
Validation loss 8.121935\n", "Epoch 4653, Training loss 0.858852, Validation loss 14.798214\n", "Epoch 4654, Training loss 0.984453, Validation loss 8.121996\n", "Epoch 4655, Training loss 0.858784, Validation loss 14.797670\n", "Epoch 4656, Training loss 0.984358, Validation loss 8.122044\n", "Epoch 4657, Training loss 0.858713, Validation loss 14.797105\n", "Epoch 4658, Training loss 0.984260, Validation loss 8.122106\n", "Epoch 4659, Training loss 0.858643, Validation loss 14.796546\n", "Epoch 4660, Training loss 0.984164, Validation loss 8.122137\n", "Epoch 4661, Training loss 0.858577, Validation loss 14.796032\n", "Epoch 4662, Training loss 0.984071, Validation loss 8.122185\n", "Epoch 4663, Training loss 0.858508, Validation loss 14.795488\n", "Epoch 4664, Training loss 0.983976, Validation loss 8.122240\n", "Epoch 4665, Training loss 0.858439, Validation loss 14.794952\n", "Epoch 4666, Training loss 0.983877, Validation loss 8.122295\n", "Epoch 4667, Training loss 0.858371, Validation loss 14.794423\n", "Epoch 4668, Training loss 0.983784, Validation loss 8.122337\n", "Epoch 4669, Training loss 0.858301, Validation loss 14.793865\n", "Epoch 4670, Training loss 0.983685, Validation loss 8.122396\n", "Epoch 4671, Training loss 0.858233, Validation loss 14.793328\n", "Epoch 4672, Training loss 0.983591, Validation loss 8.122448\n", "Epoch 4673, Training loss 0.858161, Validation loss 14.792755\n", "Epoch 4674, Training loss 0.983490, Validation loss 8.122496\n", "Epoch 4675, Training loss 0.858093, Validation loss 14.792219\n", "Epoch 4676, Training loss 0.983397, Validation loss 8.122559\n", "Epoch 4677, Training loss 0.858024, Validation loss 14.791668\n", "Epoch 4678, Training loss 0.983298, Validation loss 8.122618\n", "Epoch 4679, Training loss 0.857955, Validation loss 14.791132\n", "Epoch 4680, Training loss 0.983201, Validation loss 8.122664\n", "Epoch 4681, Training loss 0.857885, Validation loss 14.790581\n", "Epoch 4682, Training loss 0.983105, Validation loss 8.122707\n", "Epoch 4683, Training loss 0.857816, Validation loss 14.790037\n", "Epoch 4684, Training loss 0.983011, Validation loss 8.122770\n", "Epoch 4685, Training loss 0.857746, Validation loss 14.789479\n", "Epoch 4686, Training loss 0.982911, Validation loss 8.122828\n", "Epoch 4687, Training loss 0.857677, Validation loss 14.788928\n", "Epoch 4688, Training loss 0.982817, Validation loss 8.122863\n", "Epoch 4689, Training loss 0.857609, Validation loss 14.788400\n", "Epoch 4690, Training loss 0.982717, Validation loss 8.122934\n", "Epoch 4691, Training loss 0.857538, Validation loss 14.787819\n", "Epoch 4692, Training loss 0.982623, Validation loss 8.122989\n", "Epoch 4693, Training loss 0.857469, Validation loss 14.787276\n", "Epoch 4694, Training loss 0.982525, Validation loss 8.123042\n", "Epoch 4695, Training loss 0.857400, Validation loss 14.786725\n", "Epoch 4696, Training loss 0.982431, Validation loss 8.123096\n", "Epoch 4697, Training loss 0.857331, Validation loss 14.786174\n", "Epoch 4698, Training loss 0.982332, Validation loss 8.123142\n", "Epoch 4699, Training loss 0.857264, Validation loss 14.785630\n", "Epoch 4700, Training loss 0.982239, Validation loss 8.123181\n", "Epoch 4701, Training loss 0.857193, Validation loss 14.785090\n", "Epoch 4702, Training loss 0.982140, Validation loss 8.123240\n", "Epoch 4703, Training loss 0.857125, Validation loss 14.784518\n", "Epoch 4704, Training loss 0.982048, Validation loss 8.123298\n", "Epoch 4705, Training loss 0.857054, Validation loss 14.783985\n", 
"Epoch 4706, Training loss 0.981946, Validation loss 8.123365\n", "Epoch 4707, Training loss 0.856985, Validation loss 14.783423\n", "Epoch 4708, Training loss 0.981852, Validation loss 8.123409\n", "Epoch 4709, Training loss 0.856917, Validation loss 14.782876\n", "Epoch 4710, Training loss 0.981755, Validation loss 8.123457\n", "Epoch 4711, Training loss 0.856847, Validation loss 14.782310\n", "Epoch 4712, Training loss 0.981659, Validation loss 8.123505\n", "Epoch 4713, Training loss 0.856779, Validation loss 14.781778\n", "Epoch 4714, Training loss 0.981563, Validation loss 8.123558\n", "Epoch 4715, Training loss 0.856709, Validation loss 14.781224\n", "Epoch 4716, Training loss 0.981466, Validation loss 8.123610\n", "Epoch 4717, Training loss 0.856642, Validation loss 14.780681\n", "Epoch 4718, Training loss 0.981373, Validation loss 8.123669\n", "Epoch 4719, Training loss 0.856573, Validation loss 14.780115\n", "Epoch 4720, Training loss 0.981279, Validation loss 8.123713\n", "Epoch 4721, Training loss 0.856504, Validation loss 14.779586\n", "Epoch 4722, Training loss 0.981181, Validation loss 8.123765\n", "Epoch 4723, Training loss 0.856437, Validation loss 14.779050\n", "Epoch 4724, Training loss 0.981088, Validation loss 8.123809\n", "Epoch 4725, Training loss 0.856366, Validation loss 14.778500\n", "Epoch 4726, Training loss 0.980989, Validation loss 8.123863\n", "Epoch 4727, Training loss 0.856301, Validation loss 14.777948\n", "Epoch 4728, Training loss 0.980899, Validation loss 8.123907\n", "Epoch 4729, Training loss 0.856229, Validation loss 14.777398\n", "Epoch 4730, Training loss 0.980797, Validation loss 8.123969\n", "Epoch 4731, Training loss 0.856161, Validation loss 14.776836\n", "Epoch 4732, Training loss 0.980704, Validation loss 8.124022\n", "Epoch 4733, Training loss 0.856093, Validation loss 14.776304\n", "Epoch 4734, Training loss 0.980608, Validation loss 8.124057\n", "Epoch 4735, Training loss 0.856025, Validation loss 14.775753\n", "Epoch 4736, Training loss 0.980513, Validation loss 8.124120\n", "Epoch 4737, Training loss 0.855955, Validation loss 14.775191\n", "Epoch 4738, Training loss 0.980415, Validation loss 8.124178\n", "Epoch 4739, Training loss 0.855887, Validation loss 14.774652\n", "Epoch 4740, Training loss 0.980323, Validation loss 8.124215\n", "Epoch 4741, Training loss 0.855819, Validation loss 14.774094\n", "Epoch 4742, Training loss 0.980228, Validation loss 8.124273\n", "Epoch 4743, Training loss 0.855750, Validation loss 14.773551\n", "Epoch 4744, Training loss 0.980130, Validation loss 8.124321\n", "Epoch 4745, Training loss 0.855681, Validation loss 14.772985\n", "Epoch 4746, Training loss 0.980036, Validation loss 8.124380\n", "Epoch 4747, Training loss 0.855611, Validation loss 14.772428\n", "Epoch 4748, Training loss 0.979937, Validation loss 8.124444\n", "Epoch 4749, Training loss 0.855541, Validation loss 14.771870\n", "Epoch 4750, Training loss 0.979842, Validation loss 8.124478\n", "Epoch 4751, Training loss 0.855474, Validation loss 14.771322\n", "Epoch 4752, Training loss 0.979748, Validation loss 8.124524\n", "Epoch 4753, Training loss 0.855407, Validation loss 14.770782\n", "Epoch 4754, Training loss 0.979654, Validation loss 8.124582\n", "Epoch 4755, Training loss 0.855339, Validation loss 14.770247\n", "Epoch 4756, Training loss 0.979558, Validation loss 8.124641\n", "Epoch 4757, Training loss 0.855269, Validation loss 14.769681\n", "Epoch 4758, Training loss 0.979462, Validation loss 8.124686\n", "Epoch 4759, Training loss 
0.855201, Validation loss 14.769135\n", "Epoch 4760, Training loss 0.979369, Validation loss 8.124721\n", "Epoch 4761, Training loss 0.855131, Validation loss 14.768580\n", "Epoch 4762, Training loss 0.979270, Validation loss 8.124787\n", "Epoch 4763, Training loss 0.855063, Validation loss 14.768026\n", "Epoch 4764, Training loss 0.979178, Validation loss 8.124832\n", "Epoch 4765, Training loss 0.854995, Validation loss 14.767468\n", "Epoch 4766, Training loss 0.979082, Validation loss 8.124890\n", "Epoch 4767, Training loss 0.854925, Validation loss 14.766911\n", "Epoch 4768, Training loss 0.978987, Validation loss 8.124934\n", "Epoch 4769, Training loss 0.854858, Validation loss 14.766385\n", "Epoch 4770, Training loss 0.978891, Validation loss 8.124998\n", "Epoch 4771, Training loss 0.854788, Validation loss 14.765823\n", "Epoch 4772, Training loss 0.978793, Validation loss 8.125049\n", "Epoch 4773, Training loss 0.854720, Validation loss 14.765259\n", "Epoch 4774, Training loss 0.978697, Validation loss 8.125108\n", "Epoch 4775, Training loss 0.854650, Validation loss 14.764697\n", "Epoch 4776, Training loss 0.978603, Validation loss 8.125153\n", "Epoch 4777, Training loss 0.854583, Validation loss 14.764147\n", "Epoch 4778, Training loss 0.978508, Validation loss 8.125206\n", "Epoch 4779, Training loss 0.854513, Validation loss 14.763593\n", "Epoch 4780, Training loss 0.978411, Validation loss 8.125259\n", "Epoch 4781, Training loss 0.854445, Validation loss 14.763039\n", "Epoch 4782, Training loss 0.978317, Validation loss 8.125307\n", "Epoch 4783, Training loss 0.854378, Validation loss 14.762495\n", "Epoch 4784, Training loss 0.978223, Validation loss 8.125365\n", "Epoch 4785, Training loss 0.854308, Validation loss 14.761937\n", "Epoch 4786, Training loss 0.978124, Validation loss 8.125420\n", "Epoch 4787, Training loss 0.854241, Validation loss 14.761379\n", "Epoch 4788, Training loss 0.978034, Validation loss 8.125462\n", "Epoch 4789, Training loss 0.854175, Validation loss 14.760840\n", "Epoch 4790, Training loss 0.977943, Validation loss 8.125504\n", "Epoch 4791, Training loss 0.854107, Validation loss 14.760293\n", "Epoch 4792, Training loss 0.977847, Validation loss 8.125543\n", "Epoch 4793, Training loss 0.854038, Validation loss 14.759743\n", "Epoch 4794, Training loss 0.977750, Validation loss 8.125607\n", "Epoch 4795, Training loss 0.853970, Validation loss 14.759178\n", "Epoch 4796, Training loss 0.977659, Validation loss 8.125648\n", "Epoch 4797, Training loss 0.853900, Validation loss 14.758627\n", "Epoch 4798, Training loss 0.977558, Validation loss 8.125716\n", "Epoch 4799, Training loss 0.853832, Validation loss 14.758070\n", "Epoch 4800, Training loss 0.977466, Validation loss 8.125751\n", "Epoch 4801, Training loss 0.853764, Validation loss 14.757519\n", "Epoch 4802, Training loss 0.977372, Validation loss 8.125818\n", "Epoch 4803, Training loss 0.853695, Validation loss 14.756954\n", "Epoch 4804, Training loss 0.977274, Validation loss 8.125870\n", "Epoch 4805, Training loss 0.853628, Validation loss 14.756396\n", "Epoch 4806, Training loss 0.977183, Validation loss 8.125912\n", "Epoch 4807, Training loss 0.853561, Validation loss 14.755868\n", "Epoch 4808, Training loss 0.977087, Validation loss 8.125956\n", "Epoch 4809, Training loss 0.853491, Validation loss 14.755303\n", "Epoch 4810, Training loss 0.976990, Validation loss 8.126019\n", "Epoch 4811, Training loss 0.853422, Validation loss 14.754745\n", "Epoch 4812, Training loss 0.976893, Validation loss 
8.126068\n", "Epoch 4813, Training loss 0.853356, Validation loss 14.754195\n", "Epoch 4814, Training loss 0.976804, Validation loss 8.126113\n", "Epoch 4815, Training loss 0.853286, Validation loss 14.753633\n", "Epoch 4816, Training loss 0.976706, Validation loss 8.126164\n", "Epoch 4817, Training loss 0.853217, Validation loss 14.753065\n", "Epoch 4818, Training loss 0.976610, Validation loss 8.126228\n", "Epoch 4819, Training loss 0.853151, Validation loss 14.752522\n", "Epoch 4820, Training loss 0.976521, Validation loss 8.126258\n", "Epoch 4821, Training loss 0.853083, Validation loss 14.751965\n", "Epoch 4822, Training loss 0.976425, Validation loss 8.126304\n", "Epoch 4823, Training loss 0.853014, Validation loss 14.751414\n", "Epoch 4824, Training loss 0.976327, Validation loss 8.126379\n", "Epoch 4825, Training loss 0.852944, Validation loss 14.750849\n", "Epoch 4826, Training loss 0.976231, Validation loss 8.126425\n", "Epoch 4827, Training loss 0.852878, Validation loss 14.750302\n", "Epoch 4828, Training loss 0.976141, Validation loss 8.126465\n", "Epoch 4829, Training loss 0.852810, Validation loss 14.749742\n", "Epoch 4830, Training loss 0.976046, Validation loss 8.126508\n", "Epoch 4831, Training loss 0.852744, Validation loss 14.749212\n", "Epoch 4832, Training loss 0.975953, Validation loss 8.126554\n", "Epoch 4833, Training loss 0.852676, Validation loss 14.748666\n", "Epoch 4834, Training loss 0.975860, Validation loss 8.126610\n", "Epoch 4835, Training loss 0.852607, Validation loss 14.748097\n", "Epoch 4836, Training loss 0.975765, Validation loss 8.126654\n", "Epoch 4837, Training loss 0.852540, Validation loss 14.747551\n", "Epoch 4838, Training loss 0.975670, Validation loss 8.126722\n", "Epoch 4839, Training loss 0.852470, Validation loss 14.746968\n", "Epoch 4840, Training loss 0.975574, Validation loss 8.126762\n", "Epoch 4841, Training loss 0.852403, Validation loss 14.746410\n", "Epoch 4842, Training loss 0.975480, Validation loss 8.126825\n", "Epoch 4843, Training loss 0.852332, Validation loss 14.745831\n", "Epoch 4844, Training loss 0.975382, Validation loss 8.126884\n", "Epoch 4845, Training loss 0.852264, Validation loss 14.745277\n", "Epoch 4846, Training loss 0.975290, Validation loss 8.126923\n", "Epoch 4847, Training loss 0.852199, Validation loss 14.744738\n", "Epoch 4848, Training loss 0.975199, Validation loss 8.126973\n", "Epoch 4849, Training loss 0.852131, Validation loss 14.744187\n", "Epoch 4850, Training loss 0.975103, Validation loss 8.127017\n", "Epoch 4851, Training loss 0.852062, Validation loss 14.743637\n", "Epoch 4852, Training loss 0.975006, Validation loss 8.127068\n", "Epoch 4853, Training loss 0.851997, Validation loss 14.743094\n", "Epoch 4854, Training loss 0.974918, Validation loss 8.127102\n", "Epoch 4855, Training loss 0.851930, Validation loss 14.742544\n", "Epoch 4856, Training loss 0.974824, Validation loss 8.127154\n", "Epoch 4857, Training loss 0.851862, Validation loss 14.741994\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 4858, Training loss 0.974730, Validation loss 8.127202\n", "Epoch 4859, Training loss 0.851793, Validation loss 14.741433\n", "Epoch 4860, Training loss 0.974634, Validation loss 8.127263\n", "Epoch 4861, Training loss 0.851727, Validation loss 14.740879\n", "Epoch 4862, Training loss 0.974544, Validation loss 8.127310\n", "Epoch 4863, Training loss 0.851656, Validation loss 14.740314\n", "Epoch 4864, Training loss 0.974445, Validation loss 8.127357\n", "Epoch 4865, Training 
loss 0.851589, Validation loss 14.739764\n", "Epoch 4866, Training loss 0.974350, Validation loss 8.127400\n", "Epoch 4867, Training loss 0.851522, Validation loss 14.739203\n", "Epoch 4868, Training loss 0.974258, Validation loss 8.127476\n", "Epoch 4869, Training loss 0.851452, Validation loss 14.738627\n", "Epoch 4870, Training loss 0.974162, Validation loss 8.127517\n", "Epoch 4871, Training loss 0.851386, Validation loss 14.738077\n", "Epoch 4872, Training loss 0.974070, Validation loss 8.127557\n", "Epoch 4873, Training loss 0.851318, Validation loss 14.737520\n", "Epoch 4874, Training loss 0.973974, Validation loss 8.127608\n", "Epoch 4875, Training loss 0.851251, Validation loss 14.736956\n", "Epoch 4876, Training loss 0.973884, Validation loss 8.127661\n", "Epoch 4877, Training loss 0.851181, Validation loss 14.736405\n", "Epoch 4878, Training loss 0.973784, Validation loss 8.127727\n", "Epoch 4879, Training loss 0.851114, Validation loss 14.735833\n", "Epoch 4880, Training loss 0.973695, Validation loss 8.127748\n", "Epoch 4881, Training loss 0.851049, Validation loss 14.735297\n", "Epoch 4882, Training loss 0.973602, Validation loss 8.127800\n", "Epoch 4883, Training loss 0.850978, Validation loss 14.734726\n", "Epoch 4884, Training loss 0.973502, Validation loss 8.127868\n", "Epoch 4885, Training loss 0.850913, Validation loss 14.734164\n", "Epoch 4886, Training loss 0.973415, Validation loss 8.127908\n", "Epoch 4887, Training loss 0.850845, Validation loss 14.733619\n", "Epoch 4888, Training loss 0.973319, Validation loss 8.127968\n", "Epoch 4889, Training loss 0.850775, Validation loss 14.733043\n", "Epoch 4890, Training loss 0.973223, Validation loss 8.128019\n", "Epoch 4891, Training loss 0.850708, Validation loss 14.732489\n", "Epoch 4892, Training loss 0.973129, Validation loss 8.128068\n", "Epoch 4893, Training loss 0.850640, Validation loss 14.731932\n", "Epoch 4894, Training loss 0.973038, Validation loss 8.128109\n", "Epoch 4895, Training loss 0.850573, Validation loss 14.731381\n", "Epoch 4896, Training loss 0.972944, Validation loss 8.128160\n", "Epoch 4897, Training loss 0.850507, Validation loss 14.730824\n", "Epoch 4898, Training loss 0.972852, Validation loss 8.128193\n", "Epoch 4899, Training loss 0.850441, Validation loss 14.730292\n", "Epoch 4900, Training loss 0.972760, Validation loss 8.128244\n", "Epoch 4901, Training loss 0.850374, Validation loss 14.729732\n", "Epoch 4902, Training loss 0.972667, Validation loss 8.128298\n", "Epoch 4903, Training loss 0.850305, Validation loss 14.729159\n", "Epoch 4904, Training loss 0.972572, Validation loss 8.128345\n", "Epoch 4905, Training loss 0.850239, Validation loss 14.728610\n", "Epoch 4906, Training loss 0.972478, Validation loss 8.128393\n", "Epoch 4907, Training loss 0.850169, Validation loss 14.728038\n", "Epoch 4908, Training loss 0.972383, Validation loss 8.128438\n", "Epoch 4909, Training loss 0.850102, Validation loss 14.727466\n", "Epoch 4910, Training loss 0.972293, Validation loss 8.128491\n", "Epoch 4911, Training loss 0.850035, Validation loss 14.726920\n", "Epoch 4912, Training loss 0.972197, Validation loss 8.128535\n", "Epoch 4913, Training loss 0.849966, Validation loss 14.726352\n", "Epoch 4914, Training loss 0.972102, Validation loss 8.128587\n", "Epoch 4915, Training loss 0.849901, Validation loss 14.725805\n", "Epoch 4916, Training loss 0.972011, Validation loss 8.128638\n", "Epoch 4917, Training loss 0.849833, Validation loss 14.725245\n", "Epoch 4918, Training loss 0.971918, Validation 
loss 8.128693\n", "Epoch 4919, Training loss 0.849764, Validation loss 14.724676\n", "Epoch 4920, Training loss 0.971822, Validation loss 8.128748\n", "Epoch 4921, Training loss 0.849698, Validation loss 14.724123\n", "Epoch 4922, Training loss 0.971730, Validation loss 8.128796\n", "Epoch 4923, Training loss 0.849629, Validation loss 14.723540\n", "Epoch 4924, Training loss 0.971633, Validation loss 8.128859\n", "Epoch 4925, Training loss 0.849562, Validation loss 14.722980\n", "Epoch 4926, Training loss 0.971544, Validation loss 8.128895\n", "Epoch 4927, Training loss 0.849495, Validation loss 14.722437\n", "Epoch 4928, Training loss 0.971450, Validation loss 8.128936\n", "Epoch 4929, Training loss 0.849429, Validation loss 14.721883\n", "Epoch 4930, Training loss 0.971358, Validation loss 8.128971\n", "Epoch 4931, Training loss 0.849363, Validation loss 14.721330\n", "Epoch 4932, Training loss 0.971264, Validation loss 8.129034\n", "Epoch 4933, Training loss 0.849293, Validation loss 14.720757\n", "Epoch 4934, Training loss 0.971168, Validation loss 8.129090\n", "Epoch 4935, Training loss 0.849226, Validation loss 14.720186\n", "Epoch 4936, Training loss 0.971077, Validation loss 8.129145\n", "Epoch 4937, Training loss 0.849157, Validation loss 14.719618\n", "Epoch 4938, Training loss 0.970983, Validation loss 8.129189\n", "Epoch 4939, Training loss 0.849091, Validation loss 14.719065\n", "Epoch 4940, Training loss 0.970890, Validation loss 8.129232\n", "Epoch 4941, Training loss 0.849024, Validation loss 14.718514\n", "Epoch 4942, Training loss 0.970797, Validation loss 8.129292\n", "Epoch 4943, Training loss 0.848957, Validation loss 14.717950\n", "Epoch 4944, Training loss 0.970706, Validation loss 8.129322\n", "Epoch 4945, Training loss 0.848891, Validation loss 14.717408\n", "Epoch 4946, Training loss 0.970613, Validation loss 8.129376\n", "Epoch 4947, Training loss 0.848820, Validation loss 14.716825\n", "Epoch 4948, Training loss 0.970513, Validation loss 8.129439\n", "Epoch 4949, Training loss 0.848754, Validation loss 14.716243\n", "Epoch 4950, Training loss 0.970427, Validation loss 8.129485\n", "Epoch 4951, Training loss 0.848685, Validation loss 14.715700\n", "Epoch 4952, Training loss 0.970328, Validation loss 8.129543\n", "Epoch 4953, Training loss 0.848621, Validation loss 14.715143\n", "Epoch 4954, Training loss 0.970243, Validation loss 8.129560\n", "Epoch 4955, Training loss 0.848554, Validation loss 14.714593\n", "Epoch 4956, Training loss 0.970146, Validation loss 8.129625\n", "Epoch 4957, Training loss 0.848487, Validation loss 14.714022\n", "Epoch 4958, Training loss 0.970053, Validation loss 8.129667\n", "Epoch 4959, Training loss 0.848420, Validation loss 14.713471\n", "Epoch 4960, Training loss 0.969961, Validation loss 8.129719\n", "Epoch 4961, Training loss 0.848352, Validation loss 14.712904\n", "Epoch 4962, Training loss 0.969868, Validation loss 8.129763\n", "Epoch 4963, Training loss 0.848287, Validation loss 14.712343\n", "Epoch 4964, Training loss 0.969777, Validation loss 8.129811\n", "Epoch 4965, Training loss 0.848217, Validation loss 14.711768\n", "Epoch 4966, Training loss 0.969679, Validation loss 8.129878\n", "Epoch 4967, Training loss 0.848150, Validation loss 14.711207\n", "Epoch 4968, Training loss 0.969588, Validation loss 8.129913\n", "Epoch 4969, Training loss 0.848084, Validation loss 14.710658\n", "Epoch 4970, Training loss 0.969496, Validation loss 8.129971\n", "Epoch 4971, Training loss 0.848017, Validation loss 14.710086\n", "Epoch 
4972, Training loss 0.969403, Validation loss 8.130004\n", "Epoch 4973, Training loss 0.847951, Validation loss 14.709533\n", "Epoch 4974, Training loss 0.969314, Validation loss 8.130053\n", "Epoch 4975, Training loss 0.847883, Validation loss 14.708972\n", "Epoch 4976, Training loss 0.969218, Validation loss 8.130108\n", "Epoch 4977, Training loss 0.847817, Validation loss 14.708408\n", "Epoch 4978, Training loss 0.969128, Validation loss 8.130157\n", "Epoch 4979, Training loss 0.847749, Validation loss 14.707854\n", "Epoch 4980, Training loss 0.969032, Validation loss 8.130206\n", "Epoch 4981, Training loss 0.847683, Validation loss 14.707294\n", "Epoch 4982, Training loss 0.968943, Validation loss 8.130245\n", "Epoch 4983, Training loss 0.847615, Validation loss 14.706730\n", "Epoch 4984, Training loss 0.968850, Validation loss 8.130295\n", "Epoch 4985, Training loss 0.847548, Validation loss 14.706173\n", "Epoch 4986, Training loss 0.968753, Validation loss 8.130355\n", "Epoch 4987, Training loss 0.847480, Validation loss 14.705586\n", "Epoch 4988, Training loss 0.968662, Validation loss 8.130404\n", "Epoch 4989, Training loss 0.847414, Validation loss 14.705026\n", "Epoch 4990, Training loss 0.968569, Validation loss 8.130451\n", "Epoch 4991, Training loss 0.847348, Validation loss 14.704466\n", "Epoch 4992, Training loss 0.968480, Validation loss 8.130482\n", "Epoch 4993, Training loss 0.847282, Validation loss 14.703916\n", "Epoch 4994, Training loss 0.968388, Validation loss 8.130531\n", "Epoch 4995, Training loss 0.847216, Validation loss 14.703362\n", "Epoch 4996, Training loss 0.968295, Validation loss 8.130578\n", "Epoch 4997, Training loss 0.847147, Validation loss 14.702787\n", "Epoch 4998, Training loss 0.968200, Validation loss 8.130628\n", "Epoch 4999, Training loss 0.847083, Validation loss 14.702230\n" ] }, { "data": { "text/plain": [ "(tensor([[16.0549],\n", " [13.6323]], grad_fn=<AddmmBackward>), tensor([[13.],\n", " [11.]]))" ] }, "execution_count": 137, "metadata": {}, "output_type": "execute_result" } ], "source": [
"import torch\n",
"import torch.nn as nn\n",
"import torch.optim as optim\n",
"from collections import OrderedDict\n",
"\n",
"model = nn.Sequential(OrderedDict([\n",
"    ('hidden_linear', nn.Linear(1, 10)),\n",
"    ('hidden_activation', nn.Tanh()),\n",
"    ('output_linear', nn.Linear(10, 1))\n",
"]))\n",
"\n",
"learning_rate = 1e-2\n",
"\n",
"optimizer = optim.SGD(model.parameters(), lr=learning_rate)\n",
"\n",
"loss_fn = nn.MSELoss()\n",
"\n",
"nepochs = 5000\n",
"\n",
"for epoch in range(nepochs):\n",
"    # forward pass\n",
"    t_p_train = model(t_un_train)\n",
"    loss_train = loss_fn(t_p_train, t_c_train)\n",
"\n",
"    # validation forward pass, with autograd disabled\n",
"    with torch.no_grad():\n",
"        t_p_val = model(t_un_val)\n",
"        loss_val = loss_fn(t_p_val, t_c_val)\n",
"\n",
"    print('Epoch %d, Training loss %f, Validation loss %f' % (epoch, float(loss_train), float(loss_val)))\n",
"\n",
"    # backward pass\n",
"    optimizer.zero_grad()\n",
"    loss_train.backward()\n",
"    optimizer.step()\n",
"\n",
"model(t_un_val), t_c_val"
] }, { "cell_type": "code", "execution_count": 138, "metadata": { "scrolled": true }, "outputs": [ { "data": { "text/plain": [ "[]" ] }, "execution_count": 138, "metadata": {}, "output_type": "execute_result" }, { "data": { "image/png":
"iVBORw0KGgoAAAANSUhEUgAAAXYAAAD8CAYAAABjAo9vAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAFchJREFUeJzt3X9w3PV95/Hn20LpyflRQXEyWOCzEjJu7AC2q+Hiw3UTYnBJ22Dcow2NW+jQkj9gjG8SB0hn7nKdyRBiQoxLmiktHORIaUJik4TJxMWFjCufw2UtU2zjeqAxMcgcKENVc4cmJ6T3/bErLBsbS95d7e5Xz8eMZnc/+vr7fX+i3Vc+fL7f/XwjM5EkFceMRhcgSaotg12SCsZgl6SCMdglqWAMdkkqGINdkgrGYJekgjHYJalgDHZJKpjTGnHQM888M+fOnduIQ0tSy9q5c+fPM3PWybZrSLDPnTuXUqnUiENLUsuKiJ9NZDunYiSpYAx2SSoYg12SCsZgl6SCMdglqWAaclWMJE03D+/qZ/2W/RwaHGJ2ZwfrVsxj5aKuuhzLYJekOtvzrT9n8+6Z9A9/AID+wSE2b3qQc595jQ/+3n+p+fGcipGkOvurZ36ZO2ZsYMmMvQAsmbGXO2Zs4K+e+eW6HM8RuyTV2SOvnsvPZ6zhrvaNPDCynNVtW7lheA0//sW5/EUdjueIXZLqbHZnBztGF/DAyHJuPG0zD4wsZ8foAmZ3dtTleAa7JNXZuhXz+I32faxu28qdr1/B6rat/Eb7PtatmFeX4zkVI0l1trLzX7is4y4+k+t45NVzebZjEXfHV/ilzguB2l8ZY7BLUr319/FLV32dv+hedmRO/cAi6O+D7mU1P5zBLkn1tnTtm9u6l9Ul1ME5dkkqHINdkgrGYJekgjHYJalgDHZJKhiDXZIKxmCXpIIx2CWpYAx2SSqYCQd7RJwTEY9HxL6I2BsRN1baPx8R/RHxZOXnY/UrV5J0MpNZUuB14NOZ2RcR7wR2RsSjld99JTNvr315kqTJmnCwZ+aLwIuV569GxD7qsSyZJKkqpzTHHhFzgUXAE5WmGyLiqYi4NyJOr1FtkqRTMOlgj4h3AN8B1mbmYeBrwPuAhZRH9F8+wb+7LiJKEVEaGBioomRJ0luZVLBHRDvlUP9GZm4CyMyXMnMkM0eBvwYuPN6/zcy7M7MnM3tmzZpVbd2SpBOYzFUxAdwD7MvMO8a1nzVusyuAPbUrT5I0WZO5KuYi4A+B3RHxZKXtc8BVEbEQSOA54FM1rVCSNCmTuSqmF4jj/OoHtStHklQtv3kqSQVjsEtSwRjsklQwBrskFYzBLkkFY7BLUsEY7JJUMAa7JBWMwS5JBWOwS1LBGOySVDAGuyQVjMEuSQVjsEtSwRjsklQwBrskFcxk7qAkSRPy8K5+1m/Zz6HBIWZ3drBuxTxWLupqdFnThsEuqaYe3tXPLZt2MzQ8AkD/4BC3bNoNYLhPEadiJNXU+i373wj1MUPDI6zfsr9BFU0/Brukmjo0ODSpdtWewS6ppmZ3dkyqXbVnsEuqqXUr5tHR3nZUW0d7G+tWzGtQRdPPhIM9Is6JiMcjYl9E7I2IGyvtZ0TEoxHxTOXx9PqVK6nZrVzUxa2rzqOrs4MAujo7uHXVeZ44nUKRmRPbMOIs4KzM7IuIdwI7gZXANcArmfnFiLgZOD0zb3qrffX09GSpVKquckmaZiJiZ2b2nGy7CY/YM/PFzOyrPH8V2Ad0AZcD91c2u59y2EuSGuSU5tgjYi6wCHgCeE9mvgjl8AfeXaviJEmTN+lgj4h3AN8B1mbm4Un8u+siohQRpYGBgckeVlKr6N0AB7Yd3XZgW7ldU2JSwR4R7ZRD/RuZuanS/FJl/n1sHv7l4/3bzLw7M3sys2fWrFnV1CypmXUthoeuORLuB7aVX3ctbmRV08pkrooJ4B5gX2beMe5X3wOurjy/Gvhu7cqT1HK6l8GV95XD/LEvlB+vvK/crikxmRH7RcAfAhdHxJOVn48BXwQuiYhngEsqryVNZ93LoOda2Pal8qOhPqUmvAhYZvYCcYJff7Q25UgqhAPboHQPLPts+bH71w33KeQ3TyXV1tic+pX3wcV/dmRa5tgTqqobg11SbfX3HT2nPjbn3t/XyKqmFddjl1RbS9e+ua17mVMxU8gRuyQVjMEuSQVjsEtSwRjsklQwBrskFYzBLkkFY7BLUsEY7JJUMAa7JBWMwS5JBWOwS1LBGOySVDAGuyQVjMEutRpvFq2TMNilVuPNonUSrscutZrxN4vuubZ86zlvFq1xHLFLrcibRestGOxSKzr2ZtHeT1TjGOxSq/Fm0TqJCQd7RNwbES9HxJ5xbZ+PiP6IeLLy87H6lCnpDd4sWicxmZOn9wF3AV8/pv0rmXl7zSqSpqPeDfS+Noeb+jo5NDjE7M4Obls8yNKZB998c2hvFq2TmPCIPTO3Aa/UsRZp2up9bQ7zt69hzuESCcw5XGL+9jX0vjan0aWpBdXicscbIuKPgBLw6cz81xrsU5pWburrZM7wGu5q38gDI8tZ3baV64fXcLCvk+2XNro6tZpqT55+DXgfsBB4EfjyiTaMiOsiohQRpYGBgSoPKxXLocEhdowu4IGR5dx42mYeGFnOjtEFHBocanRpakFVBXtmvpSZI5k5Cvw1cOFbbHt3ZvZkZs+sWbOqOaxUOLM7O1gyYy+r27Zy5+tXsLptK0tm7GV2Z0ejS1MLqmoqJiLOyswXKy+vAPa81faSju+2xYPM376R64fXsGN0AT8enc9X2zfy9OKNjS5NLWjCwR4RDwIfBs6MiBeA/wp8OCIWAgk8B3yqDjVKLenhXf2s37L/jatc1q2Yx8pFXcfddunMg/RetJGDfZ3E4BAH39XD04s3lq+KkSYpMnPKD9rT05OlUmnKjytNlYd39XPLpt0MDY+80dbR3satq847YbhLJxMROzOz52Tb+c1TqQ7Wb9nP0PAIn2r7Pktm7AVgaHiE9Vv2u8Su6s5gl+pg7GqWp/K93NW+8Y1wn3O45BK7qjuX7ZXq4DPv+CG9r53DjtEF3FC5Pv1HoxfwsbafwJXf9luiqitH7FIdXHDhR/hqZaS+Y3QBPxq9gN9t62Xg7BWGuurOEbtUB0svXUUv8Jf/80YeHzmflW3bOXj2x5nzyvbyHHuV4T6ZK240/Thil+pk6aWrOP38y1jV1suM83+POX/yP2qyxO7YFTf9g0Mk0D84xC2bdvPwrv5ala4WZ7BL9XJgG+z7Ppz/+/Ds1iMj9SqX2B274ma8N664kXAqRqqPsZth/ME3y2E+/uYYVS6xe6L1Y1xXRmMcsUv1UMebYZxo/RjXldEYg12qh6Vr3zwq7152/JtkTNK6FfPoaG87qq2jvY11K+ZVvW8Vg1MxUosZu/rFq2J0Iga71IJWLuoyyHVCTsVIUsEY7JJUMAa7JBWMwS5JBePJU6nOXNdFU81gl+ro2Dspja3rAhjuqhunYqQ6cl0XNYLBLtWR67qoEQx2qY5c10
WNYLBLdeS6LmqECQd7RNwbES9HxJ5xbWdExKMR8Uzl8fT6lCm1ppWLurh11Xl0dXYQQFdnB7euOs8Tp6qryMyJbRixDPg/wNcz84OVti8Br2TmFyPiZuD0zLzpZPvq6enJUqlURdmSNP1ExM7M7DnZdhMesWfmNuCVY5ovB+6vPL8fWDnhCiVJdVHtHPt7MvNFgMrju0+0YURcFxGliCgNDAxUeVhJ0olM2cnTzLw7M3sys2fWrFlTdVhJmnaqDfaXIuIsgMrjy9WXJEmqRrXB/j3g6srzq4HvVrk/SVKVJnO544PADmBeRLwQEdcCXwQuiYhngEsqryVJDTThRcAy86oT/OqjNapFklQDfvNUkgrGYJekgjHYJalgDHZJKhjvoKTC8BZ0UpnBrkLwFnTSEU7FqBC8BZ10hMGuQvAWdNIRBrsKwVvQSUcY7CoEb0EnHeHJUxXC2AlSr4qRDHYVyMpFXQa5hFMxKoreDXBg29FtB7aV26VpxmBXMXQthoeuORLuB7aVX3ctbmRVUkM4FaPW1ruhHN7dy+DK+8phfu5y2Pd9+INvltulacYRu1rb+JF697JyqD/1TfjA7xjqmrYcsau1dS+jd+HtLLj/kzw2cj5XtG3nhbM/zpxntx4Je2maccSulvbwrn7+9B9n8tjI+fxuWy+bRy5ixc8+Se/C24+ec5emEYNdLW39lv0sHHmKy2b8hO+MLOXDM/6JhSNPcVNfZ3nOvb+v0SVKU86pGLW0OYdL3NW+kWuHP8OO0QUsmbGXu9o3csPhNdD9WadiNC05YldLWzrzeW4YXsOO0QUA7BhdwA3Da1g68/kGVyY1jsGultb1WzfzZNv5R7U92XY+Xb91c4MqkhqvJlMxEfEc8CowAryemT212K90Mq4RI71ZLefYP5KZP6/h/qQJcY0Y6WhOxUhSwdQq2BP4+4jYGRHXHW+DiLguIkoRURoYGKjRYSVJx6pVsF+UmYuBy4DrI+JN15hl5t2Z2ZOZPbNmzarRYSVJx6pJsGfmocrjy8Bm4MJa7FeSNHlVB3tEvD0i3jn2HLgU2FPtfiVJp6YWV8W8B9gcEWP7+9vM/GEN9itJOgVVB3tm/hS4oAa1SJJqwMsdJalgDHZJKhiDXZIKxmCXpIJxPfYm9vCufhe3kjRpBnuTenhXP7ds2s3Q8AgA/YND3LJpN4DhLuktORXTpNZv2f9GqI8ZGh5h/Zb9DapIUqsw2JvUocGhSbVL0hiDvUnN7uyYVLskjTHYm9S6FfPoaG87qq2jvY11K+Y1qCJJrcKTp03KW75JOlUGexOb0C3fejdA12LoHrcE/oFt0N8HS9fWt0BJTcmpmFbXtRgeuqYc5lB+fOiacrukaclgb1a9G46E9ZgD28rt43UvgyvvK4f5Y18oP15539EjeEnTisHerCYzEu9eBj3XwrYvlR8NdWlaM9ib1WRG4ge2QekeWPbZ8uOxI31J04rB3swmMhIfG8lfeR9c/GdH/s/AcJemLYO9mU1kJN7fd/RIfmyk3983lZVKaiJe7tisxo/Eu5dB968ffzrmeJc0di9znl2axgz2ZvVWI/FjQtvlfSWNZ7A3qwmOxF3eV9KxnGNvcS7vK+lYNQn2iPjNiNgfEc9GxM212KcmxuV9JR2r6mCPiDbgq8BlwHzgqoiYX+1+NTEu7yvpWLUYsV8IPJuZP83M/wf8HXB5DfarCXB5X0nHqkWwdwHPj3v9QqXtKBFxXUSUIqI0MDBQg8MKyidIb111Hl2dHQTQ1dnBravO88SpNI3V4qqYOE5bvqkh827gboCenp43/V6nbkLL+0qaNmoxYn8BOGfc67OBQzXYryTpFNQi2H8CvD8iuiPibcAngO/VYL+SpFNQ9VRMZr4eETcAW4A24N7M3Ft1ZZKkU1KTb55m5g+AH9RiX5Kk6vjNU0kqGINdkgrGYJekgjHYJalgDHZJKhiDXZIKxmCXpIIx2CWpYAx2SSoYg12SCsZgl6SCMdglqWAMdkkqGINdkgrGYJekgjHYJalganKjjbrr3UDva3O4qa+TQ4NDzO7s4LbFgyydeRCWrm10dZLUVFpixN772hzmb1/DnMMlEphzuMT87WvofW1Oo0uTpKbTEiP2m/o6mTO8hrvaN/LAyHJWt23l+uE1HOzrZPulja5OkppLSwT7ocEh+lnAAyPLufG0zdz5+hXsGF1ADA41ujRJajotMRUzu7ODJTP2srptK3e+fgWr27ayZMZeZnd2NLo0SWo6VY3YI+LzwJ8CA5Wmz2XmD6ot6li3LR5k/vaNXD+8hh2jC/jx6Hy+2r6RpxdvrPWhJKnl1WIq5iuZeXsN9nNCS2cepPeijRzs6yQGhzj4rh6eXryxfFWMJOkoLTHHztK1LAVPlErSBNRijv2GiHgqIu6NiNNrsD9JUhVOGuwRsTUi9hzn53Lga8D7gIXAi8CX32I/10VEKSJKAwMDJ9pMklSlyMza7ChiLvBIZn7wZNv29PRkqVSqyXElabqIiJ2Z2XOy7aqaiomIs8a9vALYU83+JEnVq/bk6ZciYiGQwHPAp6quSJJUlZpNxUzqoBEDwM+O86szgZ9PcTn1UpS+FKUfYF+aUVH6AVPTl3+fmbNOtlFDgv1EIqI0kfmjVlCUvhSlH2BfmlFR+gHN1ZeWWFJAkjRxBrskFUyzBfvdjS6ghorSl6L0A+xLMypKP6CJ+tJUc+ySpOo124hdklSlhgR7RJwTEY9HxL6I2BsRN1baz4iIRyPimcpj0689ExH/LiL+V0T8U6Uv/63S3h0RT1T68s2IeFuja52oiGiLiF0R8Ujldcv1JSKei4jdEfFkRJQqbS33/gKIiM6I+HZE/HPlM7OkFfsSEfMqf4+xn8MRsbZF+/KfK5/3PRHxYCUHmuZz0qgR++vApzPzA8CHgOsjYj5wM/APmfl+4B8qr5vdL4CLM/MCymvm/GZEfAi4jfKSxu8H/hW4toE1TtaNwL5xr1u1Lx/JzIXjLkFrxfcXwJ3ADzPzV4ELKP9tWq4vmbm/8vdYCPwa8BqwmRbrS0R0AWuAnsoSKm3AJ2imz0lmNvwH+C5wCbAfOKvSdhawv9G1TbIfM4E+4D9Q/qLCaZX2JcCWRtc3wT6cTfnDdTHwCBCt2BfK34Q+85i2lnt/Ae8CDlA5H9bKfTmm/kuB7a3YF6ALeB44g/K39x8BVjTT56Thc+yVxcMWAU8A78nMFwEqj+9uXGUTV5m6eBJ4GXgU+BdgMDNfr2zyAuU3QyvYAHwWGK28/hVasy8J/H1E7IyI6yptrfj+ei/lO5T998r02N9ExNtpzb6M9wngwcrzlupLZvYDtwMHKa9q+2/ATproc9LQYI+IdwDfAdZm5uFG1lKNzBzJ8n9eng1cCHzgeJtNbVWTFxG/DbycmTvHNx9n06bvC3BRZi4GLqM81bes0QWdotOAxcDXMnMR8H9p8qmKk6nMPX8ceKjRtZyKyjmAy4FuYDbwdsrvs2M17HPSsGCPiHbKof6NzNxUaX5pbMXIyuPLjarvVGTmIPAjyucNOiNibJG1s4FDj
aprEi4CPh4RzwF/R3k6ZgMt2JfMPFR5fJnyPO6FtOb76wXghcx8ovL625SDvhX7MuYyoC8zX6q8brW+LAcOZOZAZg4Dm4D/SBN9Thp1VUwA9wD7MvOOcb/6HnB15fnVlOfem1pEzIqIzsrzDsp/9H3A48B/qmzWEn3JzFsy8+zMnEv5P5Ufy8xP0mJ9iYi3R8Q7x55Tns/dQwu+vzLzfwPPR8S8StNHgadpwb6McxVHpmGg9fpyEPhQRMysZNnY36RpPieNWt1xKfCPwG6OzOV+jvI8+7eAOZT/x7syM1+Z8gInISLOB+6nfGZ8BvCtzPzziHgv5VHvGcAuYHVm/qJxlU5ORHwY+Exm/nar9aVS7+bKy9OAv83ML0TEr9Bi7y+AytLYfwO8Dfgp8MdU3mu0Xl9mUj7x+N7M/LdKW8v9XSqXNf8+5Sv8dgF/QnlOvSk+J37zVJIKpuFXxUiSastgl6SCMdglqWAMdkkqGINdkgrGYJekgjHYJalgDHZJKpj/D2bJ/+gbLGBYAAAAAElFTkSuQmCC\n", "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "from matplotlib import pyplot as plt\n", "\n", "plt.plot(t_u.numpy(), t_c.numpy(), 'o')\n", "plt.plot(t_u.numpy(), model(0.1 * t_u).detach().numpy(), 'x')" ] }, { "cell_type": "code", "execution_count": 94, "metadata": { "scrolled": true }, "outputs": [ { "data": { "text/plain": [ "[]" ] }, "execution_count": 94, "metadata": {}, "output_type": "execute_result" }, { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXYAAAD8CAYAAABjAo9vAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAHOVJREFUeJzt3X90ldWd7/H31wA1AhpFVAgqUhVHBUEjtTI6VFuh7ajULtfYrsv11jq0nXpHex2q6B3Hzl0VpjC2dbXTltaftdWxgpTxV7Roa221EggYKTIo/igBJIoRf0QJyff+sc/x5CGBJOc8J885z/m81mIle3NyzveofPyy93P2Y+6OiIikxz5JFyAiIvFSsIuIpIyCXUQkZRTsIiIpo2AXEUkZBbuISMoo2EVEUkbBLiKSMgp2EZGUGZTEix588ME+duzYJF5aRKRsrVy58nV3H9nb4xIJ9rFjx9LQ0JDES4uIlC0ze6Uvj9NSjIhIyijYRURSRsEuIpIyCnYRkZRRsIuIpEwiV8WIiFSapY3NLKhfz+bWNkbXVDNn+nhmTq4tymsp2EVEimxpYzNzlzTR1t4BQHNrG3OXNAEUJdy1FCMiUmQL6td/GOpZbe0dLKhfX5TXU7CLiBTZ5ta2fs0XSsEuIlJko2uq+zVfKAW7iEiRzZk+nurBVZG56sFVzJk+viivp81TEZEiy26Q6qoYEZEUmTm5tmhBvjstxYiIpIyCXUQkZRTsIiID4fHH4dJLoa04lzh2pWAXESmm996DESPgrLPg5pvhjTeK/pIKdhGRYrnxRhg6FLZvD+OnnoIxY4r+sroqRkQkbi+9BOPG5caXXBK69QGiYBcRiYs7zJwJy5bl5rZsgcMOG9AytBQjIhKHRx+FffbJhfrPfhaCfoBDHdSxi4gU5t134dBDw1eA8ePh2WdhyJDESupzx25mh5vZ42a2zszWmtnlmfnrzazZzFZnfn2meOWKiJSQ+fNh2LBcqD/zDDz/fKKhDv3r2HcBV7r7KjMbDqw0s0czv/ddd18Yf3kiIiXohRfgmGNy469+FX70o+Tq2U2fg93dtwBbMt+/bWbrgIE5+EBEpBS4w6c/DfX1ubnXXoNDDkmuph7ktXlqZmOBycCfMlOXmdmzZnaLmR0YU20iIqXjoYfC5mg21O+4IwR9iYU65BHsZjYMWAxc4e47gB8BHwUmETr6f9/Dz802swYza2hpaSmgZBGRAfT222HN/DOZ7cOJE2HnTpg1K9m69qJfwW5mgwmh/gt3XwLg7q+5e4e7dwI/Bab09LPuvsjd69y9buTIkYXWLSJSfP/6r7D//tDeHsarVsGaNTB4cLJ19aLPa+xmZsDNwDp3v7HL/KjM+jvA54Dn4i1RRGSArV8Pxx2XG19+OXzve8nV00/9uSpmKjALaDKz1Zm5a4AvmNkkwIGXga/EWqGIyEDp7IRPfjKcxJjV0gIHH5xcTXnoz1UxTwLWw289GF85IiIJWbYMzj8/N77rLrjoouTqKYA+eSoile2tt6CmJjeuqwunMA4q33jUWTEiUrn++Z+job5mDaxYUdahDurYRaQS/fnPcMIJufGcOfCd7yRXT8wU7CJSOTo64Mwz4Y9/zM298QYcdFByNRWBlmJEpDIsXhyWWLKh/qtfhU+OpizUQR27iKTdm29Gw/v00+GJJ6CqKrmaikwdu4ik11VXRUP9uefgD39IdaiDgl1E0qipCcxyG6LXXhuWXbpumKaYlmJEJD06OuC006ChITf35pvRSxorgDp2EUmH//zPsDmaDfWlS0OXXmGhDurYRaTcvfFG9CyXadNg+fJwdnqFqtx3LiLl7xvfiIb6unXhAK8KDnVQsItIOWpsDJuj2aN0r78+LLt0PWq3gmkpRkTKx65dcMop8OyzYTxoUFiK2X//ZOsqMerYRaQ83HlnuHNRNtTvvz/c2Uih3o06dhGJ3dLGZhbUr2dzaxuja6qZM308MyfX5vdkLS3RG0ZPnx5uLG093R5CQB27iMRsaWMzc5c00dzahgPNrW3MXdLE0sbm/j/Z178eDfUNG+DhhxXqvVCwi0isFtSvp629IzLX1t7Bgvr1fX+ShoYQ3v/xH2H87W+HzdGjj46x0vTSUoyIxGpza1u/5iPa22HiRHj++TAeOhS2boVhw2KsMP3UsYtIrEbXVPdr/kO33gpDhuRCvb4e3nlHoZ4HBbuIxGrO9PFUD46enlg9uIo508f3/ANbt4Zll0suCeNzz4XOTjjnnCJXml59DnYzO9zMHjezdWa21swuz8wfZGaPmtmGzNcDi1euiJS6mZNrmXfBBGprqjGgtqaaeRdM6PmqmL//exg1KjfeuBGWLdPmaIHM3fv2QLNRwCh3X2Vmw4GVwEzgfwHb3X2+mV0NHOjuV+3tuerq6ryh6+lrIlJZnn4aPv7x3HjBAvinf0qunjJhZivdva63x/V589TdtwBbMt+/bWbrgFrgfGBa5mG3A78F9hrsIlKhdu4MH/t/6aUwPvBA2LQJ9tsv2bpSJq81djMbC0wG/gQcmgn9bPgfsoefmW1mDWbW0NLSkl+1IlK+Fi2Cj3wkF+rLl8P27Qr1Iuj35Y5mNgxYDFzh7jusj2th7r4IWARhKaa/rysiZWrzZqjtsr7++c+HG0lrHb1o+tWxm9lgQqj/wt2XZKZfy6y/Z9fht8VbooiUJXf47Gejof7yy3DvvQr1IuvPVTEG3Aysc/cbu/zWMuDizPcXA7+OrzwRKUu//GU4E/3BB8P4+98PQX/kkcnWVSH6sxQzFZgFNJnZ6szcNcB84B4z+zLwKnBhvCWKSNl4993uHyjasQOGD0+mngrVn6tingT29Pens+MpR0TK1sUXwx135MZ33AGzZiVXTwXTWTEiUpi1a+HEE3PjffeF997TOnqCFOwikh/3cAejzs7c3Nq1cPzxydUkgM6KEZF83HZb2BzNhvoll4SgV6iXBHXsItJ3b7/d/VZ077wTjteVkqGOXUT65u/+Lhrqd98dunSFeslRxy4ie7dmDUyalBvX1MCbbyZXj/RKwS4iPXMP6+hdrV8Pxx6bTD3S
Z1qKEZHuFi2KhvrXvhaCXqFeFtSxi0hOa2s4Srer996D6l5uayclRR27iAQzZ0ZDffHi0KUr1MuOOnaRStfQAKeemhuPGhWO2pWypWAXqVQ9bY6++CKMG5dMPRIbLcWIVKIf/CAa6ldcEYJeoZ4K6thFKsn27TBiRHTu/ffDLeskNdSxi1SKGTOiof5f/xW6dIV66qhjF0m7p5+Gj388Nx43LqylS2op2EXSqrMTqqqic6+8AkcckUw9MmC0FCOSRjfeGA31q68Oyy4K9Yqgjl0kTVpa4JBDonMffABDhiRTjyRCHbtIWkybFg31hx8OXbpCveL0OdjN7BYz22Zmz3WZu97Mms1sdebXZ4pTpojs0ZNPhvuL/u53YXzCCSHQp09Pti5JTH+WYm4DfgDcsdv8d919YWwViVSopY3NLKhfz+bWNkbXVDNn+nhmTq7d8w90dIR7jna1aRPU7uVnpCL0uWN39yeA7UWsRaRiLW1sZu6SJppb23CgubWNuUuaWNrY3PMPzJsXDfXrrgtdukJdiGfz9DIz+59AA3Clu+vWKiL9tKB+PW3tHZG5tvYOFtSvj3btW7eGQ7q6am/v3rlLRSt08/RHwEeBScAW4N/39EAzm21mDWbW0NLSUuDLiqTL5ta23uenTImG+vLloUtXqMtuCgp2d3/N3TvcvRP4KTBlL49d5O517l43cuTIQl5WJHVG1/R85vnommp4/PGwObpiRZg89dQQ6GedNYAVSjkpKNjNrOvfCT8HPLenx4rIns2ZPp7qwdFPiQ6rgj/MPTsa4Fu2wDPPDHB1Um76/Hc4M7sLmAYcbGabgH8BppnZJMCBl4GvFKFGkbLUn6tcsvPZx//fhnv48vIuF6DdcAPMnTsQZUsKmLsP+IvW1dV5Q0PDgL+uyEDJXuXSdUO0enAV8y6YsPdLGJubYcyY6NyuXd3PfJGKZGYr3b2ut8fpk6ciRbC3q1z2aOLEaKg/8URYS1eoSz8p2EWKoE9XuWQ98kjYHG1qCuMzzgiBfsYZRaxQ0kzXSYkUweiaapp7CPHI1S/t7d3Pcdm2DXTVmBRIHbtIEfR0lUv14CrmTB8fBtdcEw31hQtDl97HUF/a2MzU+Y9x1NUPMHX+Y3v+hKpUJHXsIkWw+1UuH14VM6IjLLt01dERvbF0L3bfmM0eP9D1daWyKdhFimTm5Npo0B5zDLzwQm781FNw2mn9ft4+Hz8gFUtLMSLF9sADoUvPhvo554RllzxCHfq5MSsVSR27SLF88AHsu2907vXXYcSIgp62TxuzUtHUsYsUw5VXRkP9pptCl15gqEMfNmal4qljF4nTxo3w0Y9G5/q5OdqbPW7Man1dMhTsInEZMyYcCZC1YgXU9frp77x025gV6UJLMSKFuu++sDmaDfXzzgvLLkUKdZHeqGMXydf770P1bhuWb74JNTXJ1COSoY5dJB+XXRYN9Z/8JHTpCnUpAerYRfpjwwY49tjoXGdn90+TiiRIwS7SVyNGwPbtuXFjI0ya1OuP9eeGGyJx0FKMSG/uuSd05NlQv/DCsOzSx1Cfu6SJ5tY2nNy5Ljq0S4pJHbvInrz3HgwdGp176y3Yf/8+P4XOdZEkqGMX6cmll0ZD/dZbQ5fej1AHnesiyVDHLtLVunVw/PG5cVVVuCFGnpujOtdFktDnjt3MbjGzbWb2XJe5g8zsUTPbkPl6YHHKFCkyd9hvv2ioNzWFG0kXcMWLznWRJPRnKeY2YMZuc1cDy939GGB5ZixSXu68M5zl0pbprGfNCkF/4okFP/XMybXMu2ACtTXVGFBbU828CyZofV2Kqs9LMe7+hJmN3W36fGBa5vvbgd8CV8VQl0jxvfMODB8enXv7bRg2LNaX0bkuMtAK3Tw91N23AGS+HlJ4SSIDYNasaKjfeWfo0mMOdZEkDNjmqZnNBmYDHHHEEQP1siJRTU0wcWJuPGwY7NihT45KqhTasb9mZqMAMl+37emB7r7I3evcvW5kH+/ELhIb9xDeXUN93bqw9KJQl5QpNNiXARdnvr8Y+HWBzycSv1tvjd7o4tJLQ9Afd1xyNYkUUZ+XYszsLsJG6cFmtgn4F2A+cI+ZfRl4FbiwGEWK5GXHDjjggOjcu++GyxpFUqw/V8V8YQ+/dXZMtYjE58IL4d57c+N77glzIhVAnzyVdGlshJNPzo0PPhhaWpKrRyQBCnZJB/fuN4zesAGOPjqZekQSpEPApPz9+MfRUL/sshD0CnWpUOrYpXy9+SYcdFB0rq0N9t03mXpESoQ6dilP554bDfWlS1m6ahNTv/dHjrr6AabOf0w3s5CKpY5dysuKFTBlSm58+OHw6qsf3qkoe1OL7J2KAJ3TIhVHHbuUh+wNo7uG+saN8OqrwN7vVCRSaRTsUvpuuinc8CLryivD5uhRR304pTsVieRoKUZK1xtvhOvQu3r/ffjIR7o9VHcqEslRxy6l6VOfiob6/feHLr2HUAfdqUikK3XsUlqeegpOPz03PvZYWN/7Onl2g3RB/Xo2t7YxuqaaOdPHa+NUKpKCXUpDRwcM2u0/x1dfDVe99JHuVCQSaClGkrdwYTTUr7kmLLv0I9RFJEcduyRn2zY49NDo3M6dMHhwMvWIpIQ6dknGGWdEQ/2RR0KXrlAXKZg6dhlYTzwBf/M3ufFJJ8Hq1cnVI5JCCnYZGD1tjjY3w+jRBT/10sZmXQ0j0oWWYqT4vv3taKh/61th2SWmUJ+7pInm1jac3BkxOgBMKpk6dimeLVu6h3d7e/fOvQB7OyNGXbtUKnXsUhx1ddFQf/zx0KXHGOqgM2JEeqJgl3gtXx5OYVy5MoxPOy0E+rRpRXm5PZ0FozNipJLF0j6Z2cvA20AHsMvd6+J4Xikj7e0wZEh0buvW7tepx2zO9PGRc9hBZ8SIxNmxf8LdJynUK9B110VDff780KUXOdQhHCMw74IJ1NZUY0BtTTXzLpig9XWpaNo8lfxt2tT9Y/+7dkXPTh8AOiNGJCqujt2BR8xspZnNjuk5pZQdf3w01H//+9ClD3Coi0h3cQX7VHc/Gfg08HUzO3P3B5jZbDNrMLOGlpaWmF5WBtxDD4XN0XXrwnjatBDof/3XiZYlIjmxLMW4++bM121mdh8wBXhit8csAhYB1NXVeRyvKwNo587uN7nYtg1GjkymHhHZo4I7djMbambDs98D5wDPFfq8UkKuuioa6jfeGLp0hbpISYqjYz8UuM/Mss/3S3d/OIbnlaS98gqMHRud6+iAffTxB5FSVnCwu/tG4KQYapFSMm4cvPRSbvz00/CxjyVXj4j0mVoviVq2LGyOZkN9xoyw7KJQFykbuo5dgg8+gH33jc698QYcdFAy9YhI3tSxC1xxRTTUf/jD0KUr1EXKkjr2Elb0G0i8+CIcfXR0rrMzLMWISNlSx16iin4DiVGjoqHe0BC6dIW6SNlTsJeovd1AoiCLF4fw3ro1jGfODIF+yimFPa+IlAwtxZSo2G8g0dYG++0XnWtthQMOyO/5RKRkqWMvUbHeQOKrX42G+k9/Grp0hbpIKqljL1G
x3EBi/Xo47rjonDZHRVJPwV6isle/5H1VTE0NvPVWbrxmDUycWIRKRaTUKNhLWF43kLjrLvjiF3Pjiy4KcyJSMRTsafHuuzBsWHRuxw4YPjyZekQkMdo8TYN//MdoqN9+e9gcVaiLVCR17OXsL3+BI47IjYcMgfff1+aoSIVTx16O3OELX4iG+iuvhIO8FOoiFU/BXm5+97two4u77w7jH/wgBH3XkBeRiqalmHLR1gZHHgnZG4GPGQMbNnQ/aldEKp469nLw/e+HT45mQ/33vw/r6wp1EemBOvZStvs9Ry++GG67rdvDin68r4iUFQV7KXKHz38e7rsvN9fcDKNHd3to9njf7NED2eN9AYW7SIWKZSnGzGaY2Xoze8HMro7jOSvW8uVhczQb6j/5SQj6HkIdini8r4iUrYI7djOrAn4IfArYBKwws2Xu/udCn7uivPdeCO/s+S7jxsG6deHa9L2I/XhfESl7cXTsU4AX3H2ju+8E7gbOj+F5K8fChTB0aC7Un3oq3Laul1CHmI/3FZFUiCPYa4G/dBlvysxJbzZuDB8omjMnjC+9NCy7nHZan59izvTxVA+uisz1+3hfEUmVODZPe/qoo3d7kNlsYDbAEZX+YRp3OPdceOCB3NyWLXDYYf1+qoKP9xWR1Ikj2DcBh3cZjwE27/4gd18ELAKoq6vrFvwVo74eZszIjW+5Bb70pYKeMq/jfUUkteII9hXAMWZ2FNAMXAR8ce8/UoHeeQcOOSR8ghTCnY2efRYGD062LhFJnYLX2N19F3AZUA+sA+5x97WFPm+q3HBDOEI3G+orVoQrXhTqIlIEsXxAyd0fBB6M47lSZcMGOPbY3Pgf/gF++MPk6hGRiqBPnhZDZ2dYR3/00dzctm0wcmRyNYlIxdAhYHG7/36oqsqF+s9/Hq6CUaiLyABRxx6XHTvgoIOgI/Px/pNOgoYGGKR/xCIysNSxx+H66+GAA3Kh3tgIq1cr1EUkEUqeQjz/PPzVX+XGV1wB3/1ucvWIiKBgz09nJ5x1VrhNXdbrr8OIEcnVJCKSoaWY/lq6NGyOZkP97rvD5qhCXURKhDr2vmpthQMPzI1PPTWcwlhVteefERFJgDr2vrj22mioP/ssPPOMQl1ESpI69r1ZuxZOPDE3/uY34d/+Lbl6RET6QMHek44OOOOMsNSStX17tGsXESlRWorZ3b33huvPs6G+eHHYHFWoi0iZUMeetX179MqWqVPDlS9aRxeRMqOOHcKt6bqG+tq18OSTCnURKUuVHexr1oR7ji5cGMbXXhuWXY4/Ptm6REQKUDZLMUsbm+O7r+euXeGG0StX5uZaW8N5LyIiZa4sOvaljc3MXdJEc2sbDjS3tjF3SRNLG5v7/2R33RXuXJQN9V//OnTpCnURSYmy6NgX1K+nrb0jMtfW3sGC+vV979pffz16JvonPgG/+Q3sUxb/bxMR6bOySLXNrW39mu/m8sujof788/DYYwp1EUmlski20TXV/Zr/0KpVYXP0ppvC+FvfCssu48fHXKGISOkoi2CfM3081YOjlx5WD65izvQ9BPSuXTBxIpxyShgPGRLucHTddUWuVEQkeQUFu5ldb2bNZrY68+szcRXW1czJtcy7YAK1NdUYUFtTzbwLJvS8vv7zn4fN0aamMH7gAfjgAxg+vBiliYiUnDg2T7/r7gtjeJ69mjm5du8bpdu2waGH5sYzZsCDD4alGBGRClIWSzG9+trXoqG+YQM89JBCXUQqUhzBfpmZPWtmt5jZHk/KMrPZZtZgZg0tLS0xvCywYkUI7x//OIxvuCFsjh59dDzPLyJShszd9/4As98Ah/XwW9cCTwOvAw78P2CUu1/S24vW1dV5Q0ND/6vN2rkTJkyA//7vMB42DLZsCV9FRFLKzFa6e11vj+t1jd3dP9nHF/wpcH9fHluQW26BL385N66vh3POKfrLioiUi0KvihnVZfg54LnCyunFrbfmQv2886CzU6EuIrKbQq+K+Y6ZTSIsxbwMfKXgivbmhBPgYx8L570cdVRRX0pEpFwVFOzuPiuuQvpkyhR4+ukBfUkRkXKTjssdRUTkQwp2EZGUUbCLiKSMgl1EJGUU7CIiKaNgFxFJGQW7iEjKKNhFRFKm10PAivKiZi3AKz381sGEQ8XSIC3vJS3vA/ReSlFa3gcMzHs50t1H9vagRIJ9T8ysoS8nl5WDtLyXtLwP0HspRWl5H1Ba70VLMSIiKaNgFxFJmVIL9kVJFxCjtLyXtLwP0HspRWl5H1BC76Wk1thFRKRwpdaxi4hIgUoi2M3scDN73MzWmdlaM7s86ZryYWb7mtkzZrYm8z6+lXRNhTKzKjNrNLPi3/awiMzsZTNrMrPVZlbADXeTZWY1ZnavmT2f+fPy8aRryoeZjc/8u8j+2mFmVyRdVz7M7BuZP+/PmdldZrZv4jWVwlJM5hZ7o9x9lZkNB1YCM939zwmX1i9mZsBQd3/HzAYDTwKXu3vZ3h3EzP4PUAfs7+5/m3Q9+TKzl4E6dy/ra6bN7Hbg9+7+MzMbAuzn7q1J11UIM6sCmoGPuXtPn28pWWZWS/hzfry7t5nZPcCD7n5bknWVRMfu7lvcfVXm+7eBdUBtslX1nwfvZIaDM7+S/z9nnsxsDPBZ4GdJ1yJgZvsDZwI3A7j7znIP9YyzgRfLLdS7GARUm9kgYD9gc8L1lEawd2VmY4HJwJ+SrSQ/maWL1cA24FF3L8v3kfE94JtAZ9KFxMCBR8xspZnNTrqYPI0DWoBbM8tjPzOzoUkXFYOLgLuSLiIf7t4MLAReBbYAb7n7I8lWVWLBbmbDgMXAFe6+I+l68uHuHe4+CRgDTDGzE5OuKR9m9rfANndfmXQtMZnq7icDnwa+bmZnJl1QHgYBJwM/cvfJwLvA1cmWVJjMctJ5wK+SriUfZnYgcD5wFDAaGGpm/yPZqkoo2DNr0ouBX7j7kqTrKVTmr8i/BWYkXEq+pgLnZdam7wbOMrM7ky0pf+6+OfN1G3AfMCXZivKyCdjU5W+B9xKCvpx9Gljl7q8lXUiePgm85O4t7t4OLAFOT7im0gj2zKbjzcA6d78x6XryZWYjzawm83014V/688lWlR93n+vuY9x9LOGvyo+5e+KdSD7MbGhmU57M0sU5wHPJVtV/7r4V+IuZjc9MnQ2U1QUGPfgCZboMk/EqcJqZ7ZfJsbMJe4SJGpR0ARlTgVlAU2Z9GuAad38wwZryMQq4PbPLvw9wj7uX9WWCKXEocF/4c8cg4Jfu/nCyJeXtfwO/yCxhbAS+lHA9eTOz/YBPAV9JupZ8ufufzOxeYBWwC2ikBD6BWhKXO4qISHxKYilGRETio2AXEUkZBbuISMoo2EVEUkbBLiKSMgp2EZGUUbCLiKSMgl1EJGX+P4jL2TihBZB9AAAAAElFTkSuQmCC\n", "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "from matplotlib import pyplot as plt\n", "\n", "plt.plot(t_un.numpy(), t_c.numpy(), 'o')\n", "plt.plot(t_un.numpy(), model(t_un, w, b).numpy(), 'r-')\n", "\n", "#plt.plot(t_u.numpy(), model(0.1 * t_u).detach().numpy(), 'x')" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.6.4" } }, "nbformat": 4, "nbformat_minor": 2 }