In [1]:
import numpy as np
import matplotlib.pyplot as plt
import matplotlib

from matplotlib import animation, rc
from IPython.display import HTML
import random
import pandas as pd
from math import ceil
import sys

#Graph Modules
import plotly.io as pio
import plotly.express as px
import plotly
pio.renderers.default = 'iframe_connected'
import seaborn as sns
from collections import Counter

from plotly.subplots import make_subplots
import plotly.graph_objects as go

Warning

The code here has been re-ran and graphs only may show different values due to error of display chart inside Jupyter lab This is just a newer version (ran in a later date) than the report! Thank you for your understanding!

In [2]:
%matplotlib inline
import warnings
warnings.filterwarnings('ignore')
In [3]:
#Base data
Mutations = [['SARS-CoV-2', 1], ['B.1.1.7', 1.5], ['B.1.351', 1.5]]

#https://www.cdc.gov/coronavirus/2019-ncov/cases-updates/variant-surveillance/variant-info.html - 'B.1.1.7' or United Kingdom mutant
#https://www.cdc.gov/coronavirus/2019-ncov/cases-updates/variant-surveillance/variant-info.html - 'B.1.152' or South African Mutant
In [4]:
defaultY = 200
defaultX = 400

class Person():
    #                  infected, what type of mutation, spread radius, xMovement, yMovement, boardSize for x, board size for y, borders if any
    def __init__(self, infected, mutationType, spreadRadius, xPlus, yPlus, days, boardX = defaultX, boardY = defaultY, borders = None):
        self.position = [int(random.uniform(1, boardX)), int(random.uniform(1, boardY))]
        
        self.rateX = xPlus
        self.rateY = yPlus
        
        self.infected = infected
        self.immune = False
        self.days = days
        self.mutation = mutationType
        self.spreadRadius = spreadRadius
        
        self.xPlus = random.choice([int(random.uniform(2, self.rateX)), int(random.uniform(-int(self.rateX), -2))])
        self.yPlus = random.choice([int(random.uniform(2, self.rateY)), int(random.uniform(-int(self.rateY), -2))])
        
        self.boardX = boardX
        self.boardY = boardY
        
        self.border = borders
        
    def move(self):
        self.position = [int(self.position[0] + self.xPlus), int(self.position[1] + self.yPlus)]
        
        #Hit the Corner
        if (self.position[0] + self.xPlus) < 0:
            self.xPlus = int(random.uniform(2, self.rateX))
        elif (self.position[0] + self.xPlus) >= self.boardX:
            self.xPlus = int(random.uniform(-self.rateX, -2))
            
        if (self.position[1] + self.yPlus) < 0:
            self.yPlus = int(random.uniform(2, self.rateY))
        elif (self.position[1] + self.yPlus) > self.boardY:
            self.yPlus = int(random.uniform(-self.rateY, -2))
    
class Board():
    def __init__(self, boardX, boardY, Num, perPersonSpeedX, perPersonSpeedY, radius, infectionLength, variants, borders = None):
        self.boardSizeX = boardX
        self.boardSizeY = boardY
        self.radius = radius
        self.infectionLength = infectionLength
        self.borders = borders
        self.variants = variants
        self.cumlativeMutants = variants

        self.population = []
        #Add Infected Person
        #infected, mutationType, spreadRadius, xPlus, yPlus, boardX = defaultX, boardY = defaultY, borders = None
        for variant in self.cumlativeMutants:
            self.population.append(Person(True, variant[0], self.radius*variant[1], perPersonSpeedX, perPersonSpeedY, 0, self.boardSizeX, self.boardSizeY))
        self.population = self.population + [Person(False, None, None, int(random.uniform(1, perPersonSpeedX)), int(random.uniform(1, perPersonSpeedY)), 0, self.boardSizeX, self.boardSizeY, self.borders) for i in range(Num-len(self.cumlativeMutants))]
        
        self.infectedIndexes = [i for i in range(len(self.population)) if self.population[i].infected == True]
        self.infectionCount = len(self.infectedIndexes)
        self.recoveredCount = 0
        
    def move(self):
        for i in range(len(self.population)):
            self.population[i].move()
        
        ########################################## CHECK if paitent recovered ####################################################
        #most Infection end around 2 weeks so we are going to make many people immune after 2 weeks 40% to be immune after 2 weeks
        for index in self.infectedIndexes:
            if self.population[index].days > self.infectionLength:
                per = random.uniform(0, 1)
                if per < 0.4:
                    self.population[index].days += 0.1
                else:
                    self.population[index].immune = True
                    self.recoveredCount += 1
                    self.infectedIndexes.remove(index)
            else:
                self.population[index].days += 0.1
                
        
        ################################# Updated infections's positions and their variants ######################################
        #Update all infected indexes
        #self.infectedIndexes = [index for index in range(len(self.population)) if self.population[index].infected == True and self.population[index].immune == False]
        positions = []
        infectorIndexes = []
        variant = []
        spreadRadiusList = []
        for personIndex in self.infectedIndexes:
            position = self.population[personIndex].position
            for i in range(1, ceil(self.population[personIndex].spreadRadius)):
                p = [int(position[0] + i), int(position[1])]
                if p not in positions:
                    positions.append(p)
                    infectorIndexes.append(personIndex)
                    variant.append(self.population[personIndex].mutation)
                    spreadRadiusList.append(self.population[personIndex].spreadRadius)
                    
                p = [int(position[0]), int(position[1] + i)]
                if p not in positions:
                    positions.append(p)
                    infectorIndexes.append(personIndex)
                    variant.append(self.population[personIndex].mutation)
                    spreadRadiusList.append(self.population[personIndex].spreadRadius)
                    
                p = [int(position[0] + i), int(position[1] + i)]
                if p not in positions:
                    positions.append(p)
                    infectorIndexes.append(personIndex)
                    variant.append(self.population[personIndex].mutation)
                    spreadRadiusList.append(self.population[personIndex].spreadRadius)
                    
                p = [int(position[0] - i), int(position[1])]
                if p not in positions:
                    positions.append(p)
                    infectorIndexes.append(personIndex)
                    variant.append(self.population[personIndex].mutation)
                    spreadRadiusList.append(self.population[personIndex].spreadRadius)
                    
                p = [int(position[0]), int(position[1] - i)]
                if p not in positions:
                    positions.append(p)
                    infectorIndexes.append(personIndex)
                    variant.append(self.population[personIndex].mutation)
                    spreadRadiusList.append(self.population[personIndex].spreadRadius)
                    
                p = [int(position[0] - i), int(position[1] - i)]
                if p not in positions:
                    positions.append(p)
                    infectorIndexes.append(personIndex)
                    variant.append(self.population[personIndex].mutation)
                    spreadRadiusList.append(self.population[personIndex].spreadRadius)
                    
                p = [int(position[0] + i), int(position[1] - i)]
                if p not in positions:
                    positions.append(p)
                    infectorIndexes.append(personIndex)
                    variant.append(self.population[personIndex].mutation)
                    spreadRadiusList.append(self.population[personIndex].spreadRadius)
                    
                p = [int(position[0] - i), int(position[1] + i)]
                if p not in positions:
                    positions.append(p)
                    infectorIndexes.append(personIndex)
                    variant.append(self.population[personIndex].mutation)
                    spreadRadiusList.append(self.population[personIndex].spreadRadius)
                    
        #print(len(positions), len(infectorIndexes), len(variant), len(spreadRadiusList))
        #print(Counter(variant))
        
        for personIndex in range(len(self.population)):
            if self.population[personIndex].position in positions and self.population[personIndex].infected == False and self.population[personIndex].immune == False:
                idx = positions.index(self.population[personIndex].position)
                self.population[personIndex].infected = True
                self.population[personIndex].mutation = variant[idx]
                self.population[personIndex].spreadRadius = spreadRadiusList[idx]
                self.infectedIndexes.append(personIndex)
                
    def moveWithLockDown(self, x):
        limit = len(self.population) * (1-x)
        self.population[0].move()
        for i in range(int(limit),len(self.population)):
            self.population[i].move()
        
        ########################################## CHECK if paitent recovered ####################################################
        #most Infection end around 2 weeks so we are going to make many people immune after 2 weeks 40% to be immune after 2 weeks
        for index in self.infectedIndexes:
            if self.population[index].days > self.infectionLength:
                per = random.uniform(0, 1)
                if per < 0.4:
                    self.population[index].days += 0.1
                else:
                    self.population[index].immune = True
                    self.recoveredCount += 1
                    self.infectedIndexes.remove(index)
            else:
                self.population[index].days += 0.1
                
        
        ################################# Updated infections's positions and their variants ######################################
        #Update all infected indexes
        #self.infectedIndexes = [index for index in range(len(self.population)) if self.population[index].infected == True and self.population[index].immune == False]
        positions = []
        infectorIndexes = []
        variant = []
        spreadRadiusList = []
        for personIndex in self.infectedIndexes:
            position = self.population[personIndex].position
            for i in range(1, ceil(self.population[personIndex].spreadRadius)):
                p = [int(position[0] + i), int(position[1])]
                if p not in positions:
                    positions.append(p)
                    infectorIndexes.append(personIndex)
                    variant.append(self.population[personIndex].mutation)
                    spreadRadiusList.append(self.population[personIndex].spreadRadius)
                    
                p = [int(position[0]), int(position[1] + i)]
                if p not in positions:
                    positions.append(p)
                    infectorIndexes.append(personIndex)
                    variant.append(self.population[personIndex].mutation)
                    spreadRadiusList.append(self.population[personIndex].spreadRadius)
                    
                p = [int(position[0] + i), int(position[1] + i)]
                if p not in positions:
                    positions.append(p)
                    infectorIndexes.append(personIndex)
                    variant.append(self.population[personIndex].mutation)
                    spreadRadiusList.append(self.population[personIndex].spreadRadius)
                    
                p = [int(position[0] - i), int(position[1])]
                if p not in positions:
                    positions.append(p)
                    infectorIndexes.append(personIndex)
                    variant.append(self.population[personIndex].mutation)
                    spreadRadiusList.append(self.population[personIndex].spreadRadius)
                    
                p = [int(position[0]), int(position[1] - i)]
                if p not in positions:
                    positions.append(p)
                    infectorIndexes.append(personIndex)
                    variant.append(self.population[personIndex].mutation)
                    spreadRadiusList.append(self.population[personIndex].spreadRadius)
                    
                p = [int(position[0] - i), int(position[1] - i)]
                if p not in positions:
                    positions.append(p)
                    infectorIndexes.append(personIndex)
                    variant.append(self.population[personIndex].mutation)
                    spreadRadiusList.append(self.population[personIndex].spreadRadius)
                    
                p = [int(position[0] + i), int(position[1] - i)]
                if p not in positions:
                    positions.append(p)
                    infectorIndexes.append(personIndex)
                    variant.append(self.population[personIndex].mutation)
                    spreadRadiusList.append(self.population[personIndex].spreadRadius)
                    
                p = [int(position[0] - i), int(position[1] + i)]
                if p not in positions:
                    positions.append(p)
                    infectorIndexes.append(personIndex)
                    variant.append(self.population[personIndex].mutation)
                    spreadRadiusList.append(self.population[personIndex].spreadRadius)
                    
        #print(len(positions), len(infectorIndexes), len(variant), len(spreadRadiusList))
        #print(Counter(variant))
        
        for personIndex in range(len(self.population)):
            if self.population[personIndex].position in positions and self.population[personIndex].infected == False and self.population[personIndex].immune == False:
                idx = positions.index(self.population[personIndex].position)
                self.population[personIndex].infected = True
                self.population[personIndex].mutation = variant[idx]
                self.population[personIndex].spreadRadius = spreadRadiusList[idx]
                self.infectedIndexes.append(personIndex)
                
    def checkImmunity(self):
        ########################################## CHECK if paitent recovered ####################################################
        #most Infection end around 2 weeks so we are going to make many people immune after 2 weeks 40% to be immune after 2 weeks
        for index in self.infectedIndexes:
            if self.population[index].days > self.infectionLength:
                per = random.uniform(0, 1)
                if per < 0.4:
                    self.population[index].days += 0.1
                else:
                    self.population[index].immune = True
                    self.recoveredCount += 1
                    self.infectedIndexes.remove(index)
            else:
                self.population[index].days += 0.1
    
    def radiusPositionSpread(self, position, spread):
        ans = [position]
        for i in range(1, ceil(spread)):
            ans.append([int(position[0] + i), int(position[1])])
            ans.append([int(position[0]), int(position[1] + i)])
            ans.append([int(position[0] + i), int(position[1] + i)])
                        
            ans.append([int(position[0] - i), int(position[1])])
            ans.append([int(position[0]), int(position[1] - i)])
            ans.append([int(position[0] - i), int(position[1] - i)])

            ans.append([int(position[0] + i), int(position[1] - i)])
            ans.append([int(position[0] - i), int(position[1] + i)])
        return ans
        
In [5]:
def makeData(defaultX, defaultY, populationSize, xSpeedMax, ySpeedMax, spreadRadius, infectionLength, variants):
    #boardX, boardY, Num, perPersonSpeedX, perPersonSpeedY, radius, infectionLength, variants, borders = None
    board = Board(defaultX, defaultY, populationSize, xSpeedMax, ySpeedMax, spreadRadius, infectionLength, variants)
    df = pd.DataFrame(columns=['frame', 'x', 'y', 'color', 'totalInfected'])
    
    for frame in range(maxFrames):
        board.moveWithLockDown()
        
        xValue = []
        yValue = []
        color = []
        pID = []
        personType = []
        
        for personID in range(len(board.population)):
            if board.population[personID].immune == True:
                xValue.append(board.population[personID].position[0])
                yValue.append(board.population[personID].position[1])
                color.append('green')
                pID.append(int(personID))
                personType.append('Immune')
            elif board.population[personID].infected == True:
                xValue.append(board.population[personID].position[0])
                yValue.append(board.population[personID].position[1])
                color.append('red')
                pID.append(int(personID))
                personType.append('Infecious(' + str(board.population[personID].mutation) + ')')
            else:
                xValue.append(board.population[personID].position[0])
                yValue.append(board.population[personID].position[1])
                color.append('blue')
                pID.append(int(personID))
                personType.append('Normal')
        
        tempDF_UnInfected = pd.DataFrame({
            'frame': frame,
            'id':pID,
            'x': xValue,
            'y': yValue,
            'color': color,
            'totalInfected': board.infectionCount,
            'Recovered': board.recoveredCount,
            'Type': personType
        })
        
        #Add Immune People Before hand so the category will exists
        tempDF_UnInfected = tempDF_UnInfected.append({
            'frame': frame,
            'id':int(len(pID)),
            'x': 10000,
            'y': 10000,
            'color': 'green',
            'totalInfected': board.infectionCount,
            'Recovered': board.recoveredCount,
            'Type': 'Immune'
        }, ignore_index = True)
        
        tempDF_UnInfected = tempDF_UnInfected.sort_values(['id'])
        
        
        df = pd.concat([df, tempDF_UnInfected])
    return df
In [6]:
populationSize = 200
xSpeedMax = 6
ySpeedMax = 8
spreadRadius = 5
maxFrames = 400
infectionLength = 14
variants = [['SARS-CoV-2', 1]]
defaultY = 200
defaultX = 400

try:
    #For Normal Covid
    data = pd.read_csv('data/simulation_Normal_SARS_COV_2.csv')
except Exception:
    data = makeData(defaultX, defaultY, populationSize, xSpeedMax, ySpeedMax, spreadRadius, infectionLength, variants)
    data.to_csv('data/simulation_Normal_SARS_COV_2.csv')
    
try:
    #For UK variant covid
    data = pd.read_csv('data/simulation_UK_variant_Only.csv')
except Exception:
    data = makeData(defaultX, defaultY, populationSize, xSpeedMax, ySpeedMax, spreadRadius, infectionLength, [['B.1.1.7', 1.5]])
    data.to_csv('data/simulation_UK_variant_Only.csv')
In [7]:
fig = px.scatter(pd.read_csv('data/simulation_Normal_SARS_COV_2.csv'), x = 'x', y = 'y', animation_frame='frame', animation_group = 'id', color='Type', range_x=[0,defaultX], range_y=[0, defaultY], title='Infections over 10 days for Covid-19')
fig.show()
In [8]:
fig_with_Variant = px.scatter(pd.read_csv('data/simulation_UK_variant_Only.csv'), x = 'x', y = 'y', animation_frame='frame', animation_group = 'id', color='Type', range_x=[0,defaultX], range_y=[0, defaultY], title='Infections over 10 days for UK mutation')
fig_with_Variant.show()
In [9]:
#Combination of SARS-COV-2 and B.1.1.7
populationSize = 200
xSpeedMax = 6
ySpeedMax = 8
spreadRadius = 5
maxFrames = 400
infectionLength = 14
variants = [['B.1.1.7', 1.5], ['SARS-CoV-2', 1]]
defaultY = 200
defaultX = 400

try:
    data = pd.read_csv('data/simulation_UK_variant_Combination.csv')
except Exception:
    data = makeData(defaultX, defaultY, populationSize, xSpeedMax, ySpeedMax, spreadRadius, infectionLength, variants)
    data.to_csv('data/simulation_UK_variant_Combination.csv')
In [10]:
#SARS-COV-2 X 2
populationSize = 200
xSpeedMax = 6
ySpeedMax = 8
spreadRadius = 5
maxFrames = 400
infectionLength = 14
variants = [['SARS-CoV-2', 1], ['SARS-CoV-2', 1]]
defaultY = 200
defaultX = 400

try:
    data = pd.read_csv('data/simulation_2_SARS_COV_2.csv')
except Exception:
    data = makeData(defaultX, defaultY, populationSize, xSpeedMax, ySpeedMax, spreadRadius, infectionLength, variants)
    data.to_csv('data/simulation_2_SARS_COV_2.csv')
In [11]:
fig_with_Variant_comb = px.scatter(pd.read_csv('data/simulation_UK_variant_Combination.csv'), x = 'x', y = 'y', animation_frame='frame', animation_group = 'id', color='Type', range_x=[0,defaultX], range_y=[0, defaultY], title='Infections over 10 days for Covid-19 and its UK mutation each')
fig_with_Variant_comb.show()
In [12]:
fig_with_COVID_Dual = px.scatter(pd.read_csv('data/simulation_2_SARS_COV_2.csv'), x = 'x', y = 'y', animation_frame='frame', animation_group = 'id', color='Type', range_x=[0,defaultX], range_y=[0, defaultY], title='Infections over 10 days for Covid-19 (2 of them)')
fig_with_COVID_Dual.show()
In [13]:
def makeDataset(data, infectionType, frames):
    df = pd.DataFrame(columns=['x', 'infection', 'recovered', 'susceptible'])
    infectionCount = 0
    recovered = 0
    susceptible = 0
    
    for i in range(frames):
        tempdf = data[data['frame']==i]['Type'].tolist()
        infectionCount = tempdf.count(infectionType)
        recovered = tempdf.count('Immune')
        susceptible = tempdf.count('Normal')
        if infectionType == 'all':
            infectionCount = 200 - (recovered + susceptible)
    
        df = df.append({
                'x': i,
                'infection':infectionCount,
                'recovered': recovered,
                'susceptible': susceptible
            }, ignore_index = True)
        
    return df
In [14]:
ukVaraint = makeDataset(pd.read_csv('data/simulation_UK_variant_Only.csv'), 'Infecious(B.1.1.7)', 400)
nonVariant = makeDataset(pd.read_csv('data/simulation_Normal_SARS_COV_2.csv'), 'Infecious(SARS-CoV-2)', 400)

compare_fig = make_subplots(rows=1, cols=2, subplot_titles=("UK Variant and Normal Variant", "Normal Covid-19"))

compare_fig.add_trace(
    go.Scatter(x=ukVaraint['x'].tolist(), y=ukVaraint['infection'].tolist(), fill='tozeroy', name='UK Variant B.1.1.7'),
    row=1, col=1
)

compare_fig.add_trace(
    go.Scatter(x=nonVariant['x'].tolist(), y=nonVariant['infection'].tolist(), fill='tonexty', name='Normal SARS-CoV-2'),
    row=1, col=2
)

compare_fig.add_hrect(y0=0, y1=50, line_width=0, fillcolor="green", opacity=0.3)


compare_fig.update_layout(height=600, width=2000, title_text="UK variant vs Normal SARS-Cov-2")
compare_fig.show()
In [15]:
ukVaraint = makeDataset(pd.read_csv('data/simulation_UK_variant_Combination.csv'), 'Infecious(B.1.1.7)', 400)
normalVariant = makeDataset(pd.read_csv('data/simulation_UK_variant_Combination.csv'), 'Infecious(SARS-CoV-2)', 400)
totalInfections = makeDataset(pd.read_csv('data/simulation_UK_variant_Combination.csv'), 'all', 400)
nonVariant = makeDataset(pd.read_csv('data/simulation_2_SARS_COV_2.csv'), 'Infecious(SARS-CoV-2)', 400)

compare_fig = make_subplots(rows=1, cols=2, subplot_titles=("UK Variant and Normal Variant", "Normal Covid-19"))

compare_fig.add_trace(
    go.Scatter(x=totalInfections['x'].tolist(), y=totalInfections['infection'].tolist(), fill='tozeroy', name='Total'),
    row=1, col=1
)

compare_fig.add_trace(
    go.Scatter(x=ukVaraint['x'].tolist(), y=ukVaraint['infection'].tolist(), fill='tozeroy', name='UK Variant B.1.1.7 and Normal SARS-COV-2'),
    row=1, col=1
)

compare_fig.add_trace(
    go.Scatter(x=normalVariant['x'].tolist(), y=normalVariant['infection'].tolist(), fill='tonexty', name='Normal SARS-CoV-2'),
    row=1, col=1
)

compare_fig.add_trace(
    go.Scatter(x=nonVariant['x'].tolist(), y=nonVariant['infection'].tolist(), fill='tonexty', name='Normal SARS-CoV-2 X 2'),
    row=1, col=2
)

compare_fig.add_hrect(y0=0, y1=50, line_width=0, fillcolor="green", opacity=0.3)


compare_fig.update_layout(height=600, width=2000, title_text="UK variant vs Normal SARS-Cov-2")
compare_fig.show()
In [16]:
def makeData_with_curfew(defaultX, defaultY, populationSize, xSpeedMax, ySpeedMax, spreadRadius, infectionLength, variants):
    #boardX, boardY, Num, perPersonSpeedX, perPersonSpeedY, radius, infectionLength, variants, borders = None
    board = Board(defaultX, defaultY, populationSize, xSpeedMax, ySpeedMax, spreadRadius, infectionLength, variants)
    df = pd.DataFrame(columns=['frame', 'x', 'y', 'color', 'totalInfected'])
    
    for frame in range(maxFrames):
        if int(str(frame)[-1]) % 7 != 0 and int(str(frame)[-1]) % 8 != 0 and int(str(frame)[-1]) % 9 != 0 and int(str(frame)[-1]) % 10 != 0:
            board.move()
        else:
            board.checkImmunity()
        
        xValue = []
        yValue = []
        color = []
        pID = []
        personType = []
        
        for personID in range(len(board.population)):
            if board.population[personID].immune == True:
                xValue.append(board.population[personID].position[0])
                yValue.append(board.population[personID].position[1])
                color.append('green')
                pID.append(int(personID))
                personType.append('Immune')
            elif board.population[personID].infected == True:
                xValue.append(board.population[personID].position[0])
                yValue.append(board.population[personID].position[1])
                color.append('red')
                pID.append(int(personID))
                personType.append('Infecious(' + str(board.population[personID].mutation) + ')')
            else:
                xValue.append(board.population[personID].position[0])
                yValue.append(board.population[personID].position[1])
                color.append('blue')
                pID.append(int(personID))
                personType.append('Normal')
        
        tempDF_UnInfected = pd.DataFrame({
            'frame': frame,
            'id':pID,
            'x': xValue,
            'y': yValue,
            'color': color,
            'totalInfected': board.infectionCount,
            'Recovered': board.recoveredCount,
            'Type': personType
        })
        
        #Add Immune People Before hand so the category will exists
        tempDF_UnInfected = tempDF_UnInfected.append({
            'frame': frame,
            'id':int(len(pID)),
            'x': 10000,
            'y': 10000,
            'color': 'green',
            'totalInfected': board.infectionCount,
            'Recovered': board.recoveredCount,
            'Type': 'Immune'
        }, ignore_index = True)
        
        tempDF_UnInfected = tempDF_UnInfected.sort_values(['id'])
        
        
        df = pd.concat([df, tempDF_UnInfected])
    return df

populationSize = 200
xSpeedMax = 6
ySpeedMax = 8
spreadRadius = 5
maxFrames = 400
infectionLength = 14
variants = [['SARS-CoV-2', 1]]
defaultY = 200
defaultX = 400

try:
    curfewData_9 = pr.read_csv('Data/covid_With_Curfew_9.csv')
except Exception:
    curfewData_9 = makeData_with_curfew(defaultX, defaultY, populationSize, xSpeedMax, ySpeedMax, spreadRadius, infectionLength, variants)
    curfewData_9.to_csv('Data/covid_With_Curfew_9.csv')
In [17]:
fig_with_Curfew = px.scatter(curfewData_9, x = 'x', y = 'y', animation_frame='frame', animation_group = 'id', color='Type', range_x=[0,defaultX], range_y=[0, defaultY], title='Infections over 10 days for Covid-19 and its UK mutation')
fig_with_Curfew.show()
In [18]:
withCurfew = makeDataset(curfewData_9, 'Infecious(SARS-CoV-2)', 400)
noCurfew = makeDataset(pd.read_csv('data/simulation_Normal_SARS_COV_2.csv'), 'Infecious(SARS-CoV-2)', 400)

compare_fig = make_subplots(rows=1, cols=2, subplot_titles=("Curfew for 30% of the day", "Normal Covid-19"))

compare_fig.add_trace(
    go.Scatter(x=withCurfew['x'].tolist(), y=withCurfew['infection'].tolist(), fill='tozeroy', name='Curfew from 30% of the day'),
    row=1, col=1
)

compare_fig.add_trace(
    go.Scatter(x=noCurfew['x'].tolist(), y=noCurfew['infection'].tolist(), fill='tonexty', name='Normal SARS-CoV-2'),
    row=1, col=2
)

compare_fig.add_hrect(y0=0, y1=50, line_width=0, fillcolor="green", opacity=0.3)


compare_fig.update_layout(height=600, width=2000, title_text="Curfew vs No-Curfew")
compare_fig.show()

Lock Down and movement for only essential workers

Around 3.1 million people are considered ESSENTIAL workers and we will only allow them to commute 3.1/25 = 15.5 %

https://www.theaustralian.com.au/world/coronavirus-the-essential-workers-wholl-keep-australia-ticking-along/news-story/cb7992b6ae722afd0838f17e61338e01

In [19]:
def makeData_with_LockDown(defaultX, defaultY, populationSize, xSpeedMax, ySpeedMax, spreadRadius, infectionLength, variants):
    #boardX, boardY, Num, perPersonSpeedX, perPersonSpeedY, radius, infectionLength, variants, borders = None
    board = Board(defaultX, defaultY, populationSize, xSpeedMax, ySpeedMax, spreadRadius, infectionLength, variants)
    df = pd.DataFrame(columns=['frame', 'x', 'y', 'color', 'totalInfected'])
    
    for frame in range(maxFrames):
        board.moveWithLockDown(0.15)
        
        xValue = []
        yValue = []
        color = []
        pID = []
        personType = []
        
        for personID in range(len(board.population)):
            if board.population[personID].immune == True:
                xValue.append(board.population[personID].position[0])
                yValue.append(board.population[personID].position[1])
                color.append('green')
                pID.append(int(personID))
                personType.append('Immune')
            elif board.population[personID].infected == True:
                xValue.append(board.population[personID].position[0])
                yValue.append(board.population[personID].position[1])
                color.append('red')
                pID.append(int(personID))
                personType.append('Infecious(' + str(board.population[personID].mutation) + ')')
            else:
                xValue.append(board.population[personID].position[0])
                yValue.append(board.population[personID].position[1])
                color.append('blue')
                pID.append(int(personID))
                personType.append('Normal')
        
        tempDF_UnInfected = pd.DataFrame({
            'frame': frame,
            'id':pID,
            'x': xValue,
            'y': yValue,
            'color': color,
            'totalInfected': board.infectionCount,
            'Recovered': board.recoveredCount,
            'Type': personType
        })
        
        #Add Immune People Before hand so the category will exists
        tempDF_UnInfected = tempDF_UnInfected.append({
            'frame': frame,
            'id':int(len(pID)),
            'x': 10000,
            'y': 10000,
            'color': 'green',
            'totalInfected': board.infectionCount,
            'Recovered': board.recoveredCount,
            'Type': 'Immune'
        }, ignore_index = True)
        
        tempDF_UnInfected = tempDF_UnInfected.sort_values(['id'])
        
        
        df = pd.concat([df, tempDF_UnInfected])
    return df

#2 SARS-COV-2 X 2
populationSize = 200
xSpeedMax = 6
ySpeedMax = 8
spreadRadius = 5
maxFrames = 400
infectionLength = 14
variants = [['SARS-CoV-2', 1]]
defaultY = 200
defaultX = 400

essentialWorkers = makeData_with_LockDown(defaultX, defaultY, populationSize, xSpeedMax, ySpeedMax, spreadRadius, infectionLength, variants)
In [20]:
fig_with_Essential = px.scatter(essentialWorkers, x = 'x', y = 'y', animation_frame='frame', animation_group = 'id', color='Type', range_x=[0,defaultX], range_y=[0, defaultY], title='Lockdown and only Essential workers can travel')
fig_with_Essential.show()
In [21]:
essential = makeDataset(essentialWorkers, 'Infecious(SARS-CoV-2)', 400)
nonessential = makeDataset(pd.read_csv('data/simulation_Normal_SARS_COV_2.csv'), 'Infecious(SARS-CoV-2)', 400)

compare_fig = make_subplots(rows=1, cols=2, subplot_titles=("Lockdown", "Normal Covid-19"))

compare_fig.add_trace(
    go.Scatter(x=essential['x'].tolist(), y=essential['infection'].tolist(), fill='tozeroy', name='Curfew from 30% of the day'),
    row=1, col=1
)

compare_fig.add_trace(
    go.Scatter(x=nonessential['x'].tolist(), y=nonessential['infection'].tolist(), fill='tonexty', name='Normal SARS-CoV-2'),
    row=1, col=2
)

compare_fig.add_hrect(y0=0, y1=50, line_width=0, fillcolor="green", opacity=0.3)


compare_fig.update_layout(height=600, width=2000, title_text="Lockdown vs No-Lockdown")
compare_fig.show()

Predicting Covid-19 numbers

In [4]:
import tensorflow as tf
import numpy as np
import pandas as pd
import plotly.io as pio
import plotly.express as px
import plotly
import sys
pio.renderers.default = 'iframe_connected'
print(tf.__version__)
2.0.0
In [5]:
import plotly.graph_objects as go

fig = go.Figure()
fig.add_trace(go.Bar(
    y=['Data'],
    x=[80],
    name='Train Data',
    orientation='h',
))
fig.add_trace(go.Bar(
    y=['Data'],
    x=[10],
    name='Test Data',
    orientation='h',
))
fig.add_trace(go.Bar(
    y=['Data'],
    x=[10],
    name='DevTest Data',
    orientation='h',
))

fig.update_layout(barmode='stack', height=300, width=1500, title_text="Data Division")
fig.show()
In [6]:
dataset = pd.read_csv('Data/owid-covid-data.csv')
dataset.columns
Out[6]:
Index(['iso_code', 'continent', 'location', 'date', 'total_cases', 'new_cases',
       'new_cases_smoothed', 'total_deaths', 'new_deaths',
       'new_deaths_smoothed', 'total_cases_per_million',
       'new_cases_per_million', 'new_cases_smoothed_per_million',
       'total_deaths_per_million', 'new_deaths_per_million',
       'new_deaths_smoothed_per_million', 'reproduction_rate', 'icu_patients',
       'icu_patients_per_million', 'hosp_patients',
       'hosp_patients_per_million', 'weekly_icu_admissions',
       'weekly_icu_admissions_per_million', 'weekly_hosp_admissions',
       'weekly_hosp_admissions_per_million', 'new_tests', 'total_tests',
       'total_tests_per_thousand', 'new_tests_per_thousand',
       'new_tests_smoothed', 'new_tests_smoothed_per_thousand',
       'positive_rate', 'tests_per_case', 'tests_units', 'total_vaccinations',
       'people_vaccinated', 'people_fully_vaccinated', 'new_vaccinations',
       'new_vaccinations_smoothed', 'total_vaccinations_per_hundred',
       'people_vaccinated_per_hundred', 'people_fully_vaccinated_per_hundred',
       'new_vaccinations_smoothed_per_million', 'stringency_index',
       'population', 'population_density', 'median_age', 'aged_65_older',
       'aged_70_older', 'gdp_per_capita', 'extreme_poverty',
       'cardiovasc_death_rate', 'diabetes_prevalence', 'female_smokers',
       'male_smokers', 'handwashing_facilities', 'hospital_beds_per_thousand',
       'life_expectancy', 'human_development_index'],
      dtype='object')
In [7]:
dataset = dataset[['location', 'date', 'new_cases_smoothed', 'new_deaths_smoothed', 'new_tests', 'positive_rate', 'total_vaccinations', 'population_density']]
mobility = pd.read_csv('Data/changes-visitors-covid.csv')
mobility['location'] = mobility['Entity']
mobility['date'] = mobility['Day']
df = pd.merge(mobility, dataset, how = 'left', on=['date', 'location'])
df = df.drop(['Entity', 'Code', 'Day'], axis = 1)
df = df.fillna(0)
df.columns
Out[7]:
Index(['retail_and_recreation', 'grocery_and_pharmacy', 'residential',
       'transit_stations', 'parks', 'workplaces', 'location', 'date',
       'new_cases_smoothed', 'new_deaths_smoothed', 'new_tests',
       'positive_rate', 'total_vaccinations', 'population_density'],
      dtype='object')
In [10]:
mobility[mobility['location'] == 'Japan'].tail()
Out[10]:
Entity Code Day retail_and_recreation grocery_and_pharmacy residential transit_stations parks workplaces location date
26362 Japan JPN 2021-05-06 -6.857 4.857 12.857 -34.000 14.571 -40.429 Japan 2021-05-06
26363 Japan JPN 2021-05-07 -8.714 3.000 12.857 -34.286 12.143 -39.429 Japan 2021-05-07
26364 Japan JPN 2021-05-08 -8.857 3.143 12.857 -34.714 13.429 -38.143 Japan 2021-05-08
26365 Japan JPN 2021-05-09 -9.143 3.429 13.000 -35.571 14.429 -37.429 Japan 2021-05-09
26366 Japan JPN 2021-05-10 -13.143 2.571 11.143 -33.571 4.429 -29.286 Japan 2021-05-10
In [11]:
df[df['location'] == 'Japan'].tail()
Out[11]:
retail_and_recreation grocery_and_pharmacy residential transit_stations parks workplaces location date new_cases_smoothed new_deaths_smoothed new_tests positive_rate total_vaccinations population_density
26362 -6.857 4.857 12.857 -34.000 14.571 -40.429 Japan 2021-05-06 4813.286 58.000 89586.0 0.085 4197463.0 347.778
26363 -8.714 3.000 12.857 -34.286 12.143 -39.429 Japan 2021-05-07 5007.857 73.286 109758.0 0.079 0.0 347.778
26364 -8.857 3.143 12.857 -34.714 13.429 -38.143 Japan 2021-05-08 5187.857 73.571 71001.0 0.082 0.0 347.778
26365 -9.143 3.429 13.000 -35.571 14.429 -37.429 Japan 2021-05-09 5285.143 74.000 41338.0 0.082 4436325.0 347.778
26366 -13.143 2.571 11.143 -33.571 4.429 -29.286 Japan 2021-05-10 5339.714 77.143 130677.0 0.070 4734029.0 347.778
In [12]:
country = 'Japan'
data = df[df['location'] == country]
data.sort_values(by='date', ascending = False, inplace=True)
data = data.drop(['location', 'date'], axis = 1)
print(data.head())

xData = np.array([data.iloc[0]])
yData = np.array([data.iloc[0]['new_cases_smoothed']])

for index in range(1, data.shape[0]):
    xData = np.vstack([xData, np.array([data.iloc[index]])])
    yData = np.vstack([yData, np.array([data.iloc[index]['new_cases_smoothed']])])

trainTestSplit = int(0.8 * df[df['location'] == country].shape[0])
devTestSplit = int(0.9 * df[df['location'] == country].shape[0])

xTrain = xData[:trainTestSplit]
yTrain = yData[:trainTestSplit]

xTest = xData[trainTestSplit:devTestSplit]
yTest = yData[trainTestSplit:devTestSplit]

xDevTest = xData[devTestSplit:]
yDevTest = yData[devTestSplit:]
       retail_and_recreation  grocery_and_pharmacy  residential  \
26366                -13.143                 2.571       11.143   
26365                 -9.143                 3.429       13.000   
26364                 -8.857                 3.143       12.857   
26363                 -8.714                 3.000       12.857   
26362                 -6.857                 4.857       12.857   

       transit_stations   parks  workplaces  new_cases_smoothed  \
26366           -33.571   4.429     -29.286            5339.714   
26365           -35.571  14.429     -37.429            5285.143   
26364           -34.714  13.429     -38.143            5187.857   
26363           -34.286  12.143     -39.429            5007.857   
26362           -34.000  14.571     -40.429            4813.286   

       new_deaths_smoothed  new_tests  positive_rate  total_vaccinations  \
26366               77.143   130677.0          0.070           4734029.0   
26365               74.000    41338.0          0.082           4436325.0   
26364               73.571    71001.0          0.082                 0.0   
26363               73.286   109758.0          0.079                 0.0   
26362               58.000    89586.0          0.085           4197463.0   

       population_density  
26366             347.778  
26365             347.778  
26364             347.778  
26363             347.778  
26362             347.778  
In [13]:
# Variable we need to take into consider to taking are: Population, cumulative_positivity_rate before 14days ago, total_tests_per_thousand 14 days, total test 14 days ago, daily comfirmed test for 28 days from 14 days ago
# We will be predicting the next 14 days using past 28 days data
BatchSize = 16
SequenceLength = 7
features = xData.shape[1]

np.set_printoptions(suppress=True)
In [22]:
#BatchSize, SequenceLength, features, country, population, 0, 320
def datagen(xVals, yVals, batchSize, sequenceLength, numFeatures):
    while True:
        x_shape = (batchSize, sequenceLength, numFeatures)
        x_batch = np.zeros(shape=x_shape)

        y_shape = (batchSize, sequenceLength, 1)
        y_batch = np.zeros(shape=y_shape)
        
        for i in range(batchSize):
            index = np.random.randint(sequenceLength, len(xVals)-sequenceLength)
            
            x_batch[i] = xVals[index-sequenceLength:index]
            y_batch[i] = yVals[index:index+sequenceLength]
        
        yield (x_batch, y_batch)
        
def dataGenerator_Train(batchSize, sequenceLength, numFeatures):
    np.set_printoptions(threshold=sys.maxsize)
    while True:
        x_shape = (batchSize, sequenceLength, numFeatures)
        x_batch = np.zeros(shape=x_shape)

        y_shape = (batchSize, sequenceLength, 1)
        y_batch = np.zeros(shape=y_shape)
        
        for i in range(batchSize):
            index = np.random.randint(sequenceLength, len(xTrain)-sequenceLength)
            
            x_batch[i] = xTrain[index-sequenceLength:index]
            y_batch[i] = yTrain[index:index+sequenceLength]
        
        yield (x_batch, y_batch)

def dataGenerator_Test(batchSize, sequenceLength, numFeatures):
    while True:
        x_shape = (batchSize, sequenceLength, numFeatures)
        x_batch = np.zeros(shape=x_shape)

        y_shape = (batchSize, sequenceLength, 1)
        y_batch = np.zeros(shape=y_shape)

        for i in range(batchSize):
            index = np.random.randint(sequenceLength, len(xTest)-sequenceLength)

            x_batch[i] = xTest[index-sequenceLength:index]
            y_batch[i] = yTest[index:index+sequenceLength]
            
        yield (x_batch, y_batch)
        
def dataGenerator_DevTest(batchSize, sequenceLength, numFeatures):
    while True:
        x_shape = (batchSize, sequenceLength, numFeatures)
        x_batch = np.zeros(shape=x_shape)

        y_shape = (batchSize, sequenceLength, 1)
        y_batch = np.zeros(shape=y_shape)

        for i in range(batchSize):
            index = np.random.randint(sequenceLength, len(xDevTest)-sequenceLength)

            x_batch[i] = xDevTest[index-sequenceLength:index]
            y_batch[i] = yDevTest[index:index+sequenceLength]
            
        yield (x_batch, y_batch)
In [15]:
model = tf.keras.models.Sequential([
    tf.keras.layers.Dense(32, input_shape=(None, features,), activation='relu'),
    tf.keras.layers.Dense(64, input_shape=(None, features,), activation='relu'),
    tf.keras.layers.Dense(128, input_shape=(None, features,), activation='relu'),
    tf.keras.layers.Dense(64, input_shape=(None, features,), activation='relu'),
    tf.keras.layers.Dense(32, input_shape=(None, features,), activation='relu'),
    #tf.compat.v1.keras.layers.CuDNNGRU(units=512, return_sequences=True, input_shape=(None, features,)),
    tf.keras.layers.Dense(1, activation='linear')
])

model.compile(optimizer=tf.keras.optimizers.Adam(), loss='mse', metrics=['mae'])
model.summary()
Model: "sequential"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense (Dense)                (None, None, 32)          416       
_________________________________________________________________
dense_1 (Dense)              (None, None, 64)          2112      
_________________________________________________________________
dense_2 (Dense)              (None, None, 128)         8320      
_________________________________________________________________
dense_3 (Dense)              (None, None, 64)          8256      
_________________________________________________________________
dense_4 (Dense)              (None, None, 32)          2080      
_________________________________________________________________
dense_5 (Dense)              (None, None, 1)           33        
=================================================================
Total params: 21,217
Trainable params: 21,217
Non-trainable params: 0
_________________________________________________________________
In [17]:
country = 'Japan'
history = model.fit_generator(dataGenerator_Train(BatchSize, SequenceLength, features), epochs = 1000, steps_per_epoch=200, 
              validation_data=dataGenerator_Test(BatchSize, SequenceLength, features), validation_steps=50, verbose=2)
model.save('firstModel.h5')
Epoch 1/1000
200/200 - 5s - loss: 297558.9507 - mae: 327.1920 - val_loss: 19967.7207 - val_mae: 125.3273
Epoch 2/1000
200/200 - 4s - loss: 341514.6053 - mae: 347.2926 - val_loss: 25703.3978 - val_mae: 145.0009
Epoch 3/1000
200/200 - 4s - loss: 469508.4074 - mae: 354.9184 - val_loss: 18678.6188 - val_mae: 115.3548
Epoch 4/1000
200/200 - 5s - loss: 1416348.3791 - mae: 453.2336 - val_loss: 23735.5089 - val_mae: 135.5982
Epoch 5/1000
200/200 - 5s - loss: 310412.5594 - mae: 332.0715 - val_loss: 20834.3776 - val_mae: 130.6212
Epoch 6/1000
200/200 - 4s - loss: 349030.4546 - mae: 355.5790 - val_loss: 17506.2378 - val_mae: 118.6201
Epoch 7/1000
200/200 - 5s - loss: 259350.7819 - mae: 303.9453 - val_loss: 20470.7733 - val_mae: 129.1080
Epoch 8/1000
200/200 - 5s - loss: 390736.4226 - mae: 375.2139 - val_loss: 19942.5283 - val_mae: 123.5323
Epoch 9/1000
200/200 - 5s - loss: 424629.8671 - mae: 346.6435 - val_loss: 21903.5305 - val_mae: 135.4180
Epoch 10/1000
200/200 - 5s - loss: 271692.0908 - mae: 312.5629 - val_loss: 25426.8887 - val_mae: 148.3088
Epoch 11/1000
200/200 - 5s - loss: 266047.3683 - mae: 305.8067 - val_loss: 19541.6175 - val_mae: 126.5498
Epoch 12/1000
200/200 - 5s - loss: 260720.8194 - mae: 301.4422 - val_loss: 29381.3307 - val_mae: 159.0518
Epoch 13/1000
200/200 - 5s - loss: 263504.8735 - mae: 310.5371 - val_loss: 21698.5032 - val_mae: 134.7003
Epoch 14/1000
200/200 - 4s - loss: 292465.1901 - mae: 325.8081 - val_loss: 18168.5186 - val_mae: 121.0108
Epoch 15/1000
200/200 - 4s - loss: 269519.6079 - mae: 312.4954 - val_loss: 18936.1690 - val_mae: 128.7044
Epoch 16/1000
200/200 - 4s - loss: 250462.7175 - mae: 296.9693 - val_loss: 17906.8038 - val_mae: 119.9380
Epoch 17/1000
200/200 - 4s - loss: 283647.3576 - mae: 316.6698 - val_loss: 19354.4680 - val_mae: 130.6016
Epoch 18/1000
200/200 - 4s - loss: 284832.0041 - mae: 318.9521 - val_loss: 17675.9879 - val_mae: 118.9782
Epoch 19/1000
200/200 - 4s - loss: 271413.6572 - mae: 308.8612 - val_loss: 17389.4754 - val_mae: 120.5618
Epoch 20/1000
200/200 - 4s - loss: 246769.3385 - mae: 295.4188 - val_loss: 22896.2761 - val_mae: 141.9184
Epoch 21/1000
200/200 - 5s - loss: 295821.7499 - mae: 333.1615 - val_loss: 16951.0653 - val_mae: 120.9536
Epoch 22/1000
200/200 - 4s - loss: 251974.0130 - mae: 300.5835 - val_loss: 15517.0947 - val_mae: 110.9055
Epoch 23/1000
200/200 - 5s - loss: 248477.5345 - mae: 296.9034 - val_loss: 18501.5080 - val_mae: 123.8791
Epoch 24/1000
200/200 - 5s - loss: 235011.4403 - mae: 289.0147 - val_loss: 13260.3125 - val_mae: 94.6937
Epoch 25/1000
200/200 - 5s - loss: 258106.4319 - mae: 304.3425 - val_loss: 20225.6664 - val_mae: 133.4507
Epoch 26/1000
200/200 - 4s - loss: 258059.8444 - mae: 301.5922 - val_loss: 23686.6230 - val_mae: 141.1946
Epoch 27/1000
200/200 - 4s - loss: 228203.3397 - mae: 284.3024 - val_loss: 17443.6058 - val_mae: 123.0104
Epoch 28/1000
200/200 - 5s - loss: 246692.5491 - mae: 298.8354 - val_loss: 12301.7473 - val_mae: 85.0997
Epoch 29/1000
200/200 - 5s - loss: 222882.3587 - mae: 277.4102 - val_loss: 11558.8249 - val_mae: 87.3018
Epoch 30/1000
200/200 - 5s - loss: 239355.3721 - mae: 285.8022 - val_loss: 12231.4849 - val_mae: 98.3868
Epoch 31/1000
200/200 - 5s - loss: 225774.3340 - mae: 280.9618 - val_loss: 12235.8650 - val_mae: 84.2375
Epoch 32/1000
200/200 - 5s - loss: 247561.7548 - mae: 296.3135 - val_loss: 12544.2411 - val_mae: 81.2513
Epoch 33/1000
200/200 - 5s - loss: 280640.3989 - mae: 317.4503 - val_loss: 12803.1563 - val_mae: 81.4385
Epoch 34/1000
200/200 - 5s - loss: 232079.1800 - mae: 281.5979 - val_loss: 10537.4643 - val_mae: 74.5469
Epoch 35/1000
200/200 - 4s - loss: 238128.8798 - mae: 287.4297 - val_loss: 9515.5293 - val_mae: 72.6718
Epoch 36/1000
200/200 - 4s - loss: 237726.1385 - mae: 286.9218 - val_loss: 12062.6306 - val_mae: 80.9639
Epoch 37/1000
200/200 - 5s - loss: 221887.7692 - mae: 272.9520 - val_loss: 12513.6361 - val_mae: 78.7172
Epoch 38/1000
200/200 - 5s - loss: 227220.2143 - mae: 279.6310 - val_loss: 11509.0561 - val_mae: 75.9965
Epoch 39/1000
200/200 - 5s - loss: 260099.0475 - mae: 301.4562 - val_loss: 15270.7426 - val_mae: 89.1693
Epoch 40/1000
200/200 - 4s - loss: 219151.3504 - mae: 270.6333 - val_loss: 11824.8386 - val_mae: 81.7036
Epoch 41/1000
200/200 - 4s - loss: 222550.1258 - mae: 271.9841 - val_loss: 10779.1301 - val_mae: 78.9982
Epoch 42/1000
200/200 - 4s - loss: 224875.0406 - mae: 279.1915 - val_loss: 11030.8076 - val_mae: 74.6465
Epoch 43/1000
200/200 - 4s - loss: 225058.7415 - mae: 278.9116 - val_loss: 12876.0633 - val_mae: 82.7925
Epoch 44/1000
200/200 - 4s - loss: 228008.4668 - mae: 279.8374 - val_loss: 11966.7190 - val_mae: 79.6417
Epoch 45/1000
200/200 - 4s - loss: 214080.5819 - mae: 268.7685 - val_loss: 12513.8103 - val_mae: 86.5882
Epoch 46/1000
200/200 - 4s - loss: 214719.0170 - mae: 270.3361 - val_loss: 11892.5335 - val_mae: 87.1170
Epoch 47/1000
200/200 - 4s - loss: 184124.8101 - mae: 247.1603 - val_loss: 12521.4183 - val_mae: 86.6739
Epoch 48/1000
200/200 - 4s - loss: 213188.4463 - mae: 270.9875 - val_loss: 13526.2100 - val_mae: 90.0294
Epoch 49/1000
200/200 - 5s - loss: 195625.9035 - mae: 259.5692 - val_loss: 13337.0926 - val_mae: 101.4559
Epoch 50/1000
200/200 - 4s - loss: 221079.6863 - mae: 274.6320 - val_loss: 11077.4421 - val_mae: 81.1805
Epoch 51/1000
200/200 - 4s - loss: 249824.4042 - mae: 296.3745 - val_loss: 14055.9745 - val_mae: 91.6278
Epoch 52/1000
200/200 - 4s - loss: 197427.3377 - mae: 263.8223 - val_loss: 13013.9378 - val_mae: 90.8550
Epoch 53/1000
200/200 - 4s - loss: 217319.5645 - mae: 282.0485 - val_loss: 11003.0287 - val_mae: 84.4016
Epoch 54/1000
200/200 - 4s - loss: 210152.4369 - mae: 270.4469 - val_loss: 10904.4435 - val_mae: 81.4576
Epoch 55/1000
200/200 - 4s - loss: 200537.8468 - mae: 265.7679 - val_loss: 10079.4107 - val_mae: 75.3455
Epoch 56/1000
200/200 - 4s - loss: 174094.1390 - mae: 248.2051 - val_loss: 10757.6168 - val_mae: 79.8130
Epoch 57/1000
200/200 - 4s - loss: 178827.9046 - mae: 248.2535 - val_loss: 15130.9846 - val_mae: 109.5473
Epoch 58/1000
200/200 - 4s - loss: 195903.6737 - mae: 264.5081 - val_loss: 10171.5874 - val_mae: 76.4759
Epoch 59/1000
200/200 - 4s - loss: 182572.0381 - mae: 252.9109 - val_loss: 9456.1624 - val_mae: 71.3751
Epoch 60/1000
200/200 - 4s - loss: 190480.1706 - mae: 258.0484 - val_loss: 7611.1330 - val_mae: 64.1006
Epoch 61/1000
200/200 - 4s - loss: 184336.5409 - mae: 255.1129 - val_loss: 13702.1410 - val_mae: 84.6505
Epoch 62/1000
200/200 - 4s - loss: 169457.3965 - mae: 245.0236 - val_loss: 12880.5238 - val_mae: 89.1526
Epoch 63/1000
200/200 - 4s - loss: 171665.4883 - mae: 247.7337 - val_loss: 10789.2996 - val_mae: 72.9756
Epoch 64/1000
200/200 - 4s - loss: 174661.2872 - mae: 251.7585 - val_loss: 20401.7300 - val_mae: 113.7402
Epoch 65/1000
200/200 - 5s - loss: 128028.1963 - mae: 211.7700 - val_loss: 21727.6256 - val_mae: 119.8988
Epoch 66/1000
200/200 - 4s - loss: 145414.5292 - mae: 225.5115 - val_loss: 19141.0609 - val_mae: 108.3885
Epoch 67/1000
200/200 - 4s - loss: 153555.0521 - mae: 233.6849 - val_loss: 16751.3476 - val_mae: 96.4907
Epoch 68/1000
200/200 - 4s - loss: 146387.6550 - mae: 225.5312 - val_loss: 12679.7377 - val_mae: 79.5796
Epoch 69/1000
200/200 - 4s - loss: 179557.9528 - mae: 245.9375 - val_loss: 13102.2549 - val_mae: 85.5717
Epoch 70/1000
200/200 - 5s - loss: 164312.5796 - mae: 243.8397 - val_loss: 12642.7406 - val_mae: 90.9106
Epoch 71/1000
200/200 - 5s - loss: 163551.1867 - mae: 242.3981 - val_loss: 15581.5064 - val_mae: 92.5021
Epoch 72/1000
200/200 - 4s - loss: 130668.2141 - mae: 215.0279 - val_loss: 17826.6529 - val_mae: 106.9564
Epoch 73/1000
200/200 - 4s - loss: 146297.4568 - mae: 225.9536 - val_loss: 12968.9932 - val_mae: 83.3819
Epoch 74/1000
200/200 - 4s - loss: 154892.7242 - mae: 231.7317 - val_loss: 21318.3558 - val_mae: 124.9668
Epoch 75/1000
200/200 - 4s - loss: 139591.5887 - mae: 222.7659 - val_loss: 18118.3670 - val_mae: 103.5108
Epoch 76/1000
200/200 - 5s - loss: 124856.4791 - mae: 202.1545 - val_loss: 13906.1463 - val_mae: 91.8437
Epoch 77/1000
200/200 - 4s - loss: 130891.1822 - mae: 210.1764 - val_loss: 10222.3427 - val_mae: 76.1689
Epoch 78/1000
200/200 - 5s - loss: 111192.5534 - mae: 193.6143 - val_loss: 17763.2912 - val_mae: 107.7943
Epoch 79/1000
200/200 - 5s - loss: 116475.4904 - mae: 200.6434 - val_loss: 23184.7866 - val_mae: 126.3743
Epoch 80/1000
200/200 - 5s - loss: 151646.5707 - mae: 222.6356 - val_loss: 13586.2889 - val_mae: 93.2359
Epoch 81/1000
200/200 - 5s - loss: 118427.4090 - mae: 199.0390 - val_loss: 19533.8477 - val_mae: 115.1420
Epoch 82/1000
200/200 - 4s - loss: 129338.5636 - mae: 206.0432 - val_loss: 15713.9654 - val_mae: 98.3975
Epoch 83/1000
200/200 - 4s - loss: 108259.8729 - mae: 189.8087 - val_loss: 34441.4739 - val_mae: 160.0366
Epoch 84/1000
200/200 - 5s - loss: 123326.4655 - mae: 199.5950 - val_loss: 25888.5394 - val_mae: 132.5213
Epoch 85/1000
200/200 - 4s - loss: 95282.9476 - mae: 173.9222 - val_loss: 24165.1677 - val_mae: 125.0829
Epoch 86/1000
200/200 - 5s - loss: 133126.4656 - mae: 208.7252 - val_loss: 32472.5418 - val_mae: 148.9889
Epoch 87/1000
200/200 - 5s - loss: 86336.2279 - mae: 166.6086 - val_loss: 17721.8530 - val_mae: 107.0884
Epoch 88/1000
200/200 - 5s - loss: 105040.9205 - mae: 182.4182 - val_loss: 27018.7482 - val_mae: 135.3563
Epoch 89/1000
200/200 - 5s - loss: 109449.0145 - mae: 183.6367 - val_loss: 28040.6442 - val_mae: 135.4533
Epoch 90/1000
200/200 - 5s - loss: 101846.9517 - mae: 180.9288 - val_loss: 27346.0944 - val_mae: 140.0652
Epoch 91/1000
200/200 - 5s - loss: 109433.7404 - mae: 186.5804 - val_loss: 45559.3871 - val_mae: 174.4635
Epoch 92/1000
200/200 - 5s - loss: 88583.3945 - mae: 162.9641 - val_loss: 24279.8468 - val_mae: 128.6601
Epoch 93/1000
200/200 - 5s - loss: 91284.8105 - mae: 169.5771 - val_loss: 34694.6456 - val_mae: 152.8071
Epoch 94/1000
200/200 - 5s - loss: 86162.0320 - mae: 163.4421 - val_loss: 40392.0859 - val_mae: 172.3972
Epoch 95/1000
200/200 - 5s - loss: 97407.6747 - mae: 172.2357 - val_loss: 36801.8106 - val_mae: 164.2892
Epoch 96/1000
200/200 - 5s - loss: 78327.5118 - mae: 158.8874 - val_loss: 33873.5287 - val_mae: 152.6337
Epoch 97/1000
200/200 - 5s - loss: 93518.3331 - mae: 170.1551 - val_loss: 72151.4689 - val_mae: 225.8266
Epoch 98/1000
200/200 - 5s - loss: 96324.7305 - mae: 179.8632 - val_loss: 56695.2256 - val_mae: 198.3091
Epoch 99/1000
200/200 - 5s - loss: 100129.9617 - mae: 173.1749 - val_loss: 60260.7321 - val_mae: 203.0446
Epoch 100/1000
200/200 - 5s - loss: 99887.0602 - mae: 176.8975 - val_loss: 39581.0006 - val_mae: 166.7460
Epoch 101/1000
200/200 - 5s - loss: 79675.1569 - mae: 152.8127 - val_loss: 55955.2043 - val_mae: 195.7195
Epoch 102/1000
200/200 - 5s - loss: 77162.4375 - mae: 151.3895 - val_loss: 50141.8375 - val_mae: 179.9607
Epoch 103/1000
200/200 - 5s - loss: 84717.7020 - mae: 162.0424 - val_loss: 64573.7258 - val_mae: 206.8745
Epoch 104/1000
200/200 - 5s - loss: 85304.3548 - mae: 157.5341 - val_loss: 38619.1239 - val_mae: 160.1833
Epoch 105/1000
200/200 - 5s - loss: 75531.5873 - mae: 149.6182 - val_loss: 45736.6913 - val_mae: 169.3386
Epoch 106/1000
200/200 - 5s - loss: 69223.1230 - mae: 145.7743 - val_loss: 73292.1952 - val_mae: 217.4244
Epoch 107/1000
200/200 - 5s - loss: 85236.7057 - mae: 160.9083 - val_loss: 58799.7716 - val_mae: 191.9691
Epoch 108/1000
200/200 - 5s - loss: 78031.2469 - mae: 150.0401 - val_loss: 47058.0419 - val_mae: 175.0575
Epoch 109/1000
200/200 - 5s - loss: 101153.4528 - mae: 176.0868 - val_loss: 73789.7464 - val_mae: 224.3783
Epoch 110/1000
200/200 - 5s - loss: 98842.1317 - mae: 169.8109 - val_loss: 39371.3907 - val_mae: 161.0605
Epoch 111/1000
200/200 - 5s - loss: 67708.8450 - mae: 140.5504 - val_loss: 53502.2859 - val_mae: 181.5161
Epoch 112/1000
200/200 - 5s - loss: 75386.9668 - mae: 149.4087 - val_loss: 44743.6152 - val_mae: 168.5779
Epoch 113/1000
200/200 - 5s - loss: 89751.5105 - mae: 154.6767 - val_loss: 104457.9931 - val_mae: 260.7484
Epoch 114/1000
200/200 - 5s - loss: 77985.6010 - mae: 154.2370 - val_loss: 46026.4502 - val_mae: 175.3930
Epoch 115/1000
200/200 - 5s - loss: 92467.4290 - mae: 165.5109 - val_loss: 57071.6113 - val_mae: 189.2490
Epoch 116/1000
200/200 - 5s - loss: 65944.6570 - mae: 140.9275 - val_loss: 51495.5333 - val_mae: 184.7342
Epoch 117/1000
200/200 - 5s - loss: 78821.4650 - mae: 152.3661 - val_loss: 43156.0452 - val_mae: 166.9943
Epoch 118/1000
200/200 - 5s - loss: 55320.7549 - mae: 125.3395 - val_loss: 51802.8652 - val_mae: 184.1510
Epoch 119/1000
200/200 - 5s - loss: 72299.4191 - mae: 145.9130 - val_loss: 42635.5905 - val_mae: 171.5102
Epoch 120/1000
200/200 - 5s - loss: 63622.1136 - mae: 136.5315 - val_loss: 50518.4046 - val_mae: 178.3023
Epoch 121/1000
200/200 - 5s - loss: 66484.5451 - mae: 136.9728 - val_loss: 112506.1995 - val_mae: 259.6035
Epoch 122/1000
200/200 - 5s - loss: 63394.8490 - mae: 135.3191 - val_loss: 55884.4105 - val_mae: 190.4211
Epoch 123/1000
200/200 - 5s - loss: 94274.7632 - mae: 169.3065 - val_loss: 84860.9836 - val_mae: 234.6544
Epoch 124/1000
200/200 - 5s - loss: 76906.3606 - mae: 148.7227 - val_loss: 70139.3995 - val_mae: 216.8551
Epoch 125/1000
200/200 - 5s - loss: 61578.7225 - mae: 133.9276 - val_loss: 41523.4022 - val_mae: 170.1236
Epoch 126/1000
200/200 - 5s - loss: 50960.6726 - mae: 121.4233 - val_loss: 51537.3568 - val_mae: 179.4726
Epoch 127/1000
200/200 - 5s - loss: 51908.6801 - mae: 123.5350 - val_loss: 55599.3303 - val_mae: 190.1151
Epoch 128/1000
200/200 - 5s - loss: 47837.9459 - mae: 112.4564 - val_loss: 54618.2924 - val_mae: 185.2003
Epoch 129/1000
200/200 - 6s - loss: 37999.9924 - mae: 100.9192 - val_loss: 37572.0903 - val_mae: 157.0198
Epoch 130/1000
200/200 - 6s - loss: 89268.5366 - mae: 156.0341 - val_loss: 35676.4218 - val_mae: 153.3373
Epoch 131/1000
200/200 - 6s - loss: 64606.6988 - mae: 132.6827 - val_loss: 76852.2583 - val_mae: 217.4518
Epoch 132/1000
200/200 - 6s - loss: 51729.6374 - mae: 122.2740 - val_loss: 54369.8877 - val_mae: 181.9311
Epoch 133/1000
200/200 - 6s - loss: 62381.4926 - mae: 135.8479 - val_loss: 33704.1511 - val_mae: 151.4472
Epoch 134/1000
200/200 - 6s - loss: 58447.8142 - mae: 129.7138 - val_loss: 41189.3255 - val_mae: 157.5584
Epoch 135/1000
200/200 - 6s - loss: 49673.8535 - mae: 112.6025 - val_loss: 56577.8480 - val_mae: 187.7459
Epoch 136/1000
200/200 - 6s - loss: 62094.3127 - mae: 127.3803 - val_loss: 32133.3121 - val_mae: 144.6401
Epoch 137/1000
200/200 - 6s - loss: 63797.7841 - mae: 133.2876 - val_loss: 34099.0907 - val_mae: 156.4100
Epoch 138/1000
200/200 - 6s - loss: 40359.3726 - mae: 105.0749 - val_loss: 39445.5221 - val_mae: 158.1442
Epoch 139/1000
200/200 - 6s - loss: 49500.7147 - mae: 113.6839 - val_loss: 49743.1909 - val_mae: 178.8076
Epoch 140/1000
200/200 - 6s - loss: 65910.3881 - mae: 132.5507 - val_loss: 34339.7370 - val_mae: 148.9499
Epoch 141/1000
200/200 - 6s - loss: 51527.8952 - mae: 120.8491 - val_loss: 50949.1691 - val_mae: 183.1766
Epoch 142/1000
200/200 - 6s - loss: 59276.8057 - mae: 130.0125 - val_loss: 36323.5181 - val_mae: 145.9638
Epoch 143/1000
200/200 - 5s - loss: 43746.6042 - mae: 109.7320 - val_loss: 60706.6815 - val_mae: 192.3359
Epoch 144/1000
200/200 - 6s - loss: 53059.0558 - mae: 114.8553 - val_loss: 78228.6702 - val_mae: 216.8197
Epoch 145/1000
200/200 - 6s - loss: 52657.5920 - mae: 120.6551 - val_loss: 88961.9412 - val_mae: 238.4525
Epoch 146/1000
200/200 - 6s - loss: 65321.6068 - mae: 124.5467 - val_loss: 81116.3678 - val_mae: 223.4227
Epoch 147/1000
200/200 - 6s - loss: 100468.8072 - mae: 180.7186 - val_loss: 28207.5950 - val_mae: 143.4814
Epoch 148/1000
200/200 - 6s - loss: 64887.1022 - mae: 138.7551 - val_loss: 45882.2303 - val_mae: 174.0827
Epoch 149/1000
200/200 - 6s - loss: 50284.4963 - mae: 119.1863 - val_loss: 53688.0344 - val_mae: 182.4985
Epoch 150/1000
200/200 - 6s - loss: 80315.3475 - mae: 153.2660 - val_loss: 42592.8220 - val_mae: 165.7377
Epoch 151/1000
200/200 - 6s - loss: 54289.2500 - mae: 119.1254 - val_loss: 44550.3413 - val_mae: 166.3676
Epoch 152/1000
200/200 - 5s - loss: 50656.6159 - mae: 114.2945 - val_loss: 38143.5387 - val_mae: 158.0609
Epoch 153/1000
200/200 - 6s - loss: 58692.0566 - mae: 126.4913 - val_loss: 32715.8250 - val_mae: 147.3798
Epoch 154/1000
200/200 - 6s - loss: 65683.5649 - mae: 133.4694 - val_loss: 52935.6398 - val_mae: 177.0074
Epoch 155/1000
200/200 - 6s - loss: 65017.5633 - mae: 132.9797 - val_loss: 60919.9633 - val_mae: 186.0532
Epoch 156/1000
200/200 - 6s - loss: 47082.1647 - mae: 112.0821 - val_loss: 42497.6241 - val_mae: 162.4888
Epoch 157/1000
200/200 - 6s - loss: 41157.3748 - mae: 103.7933 - val_loss: 49536.0077 - val_mae: 170.1675
Epoch 158/1000
200/200 - 6s - loss: 46156.1836 - mae: 115.9528 - val_loss: 54365.8225 - val_mae: 182.3892
Epoch 159/1000
200/200 - 6s - loss: 62773.9029 - mae: 130.2236 - val_loss: 58801.9034 - val_mae: 186.7483
Epoch 160/1000
200/200 - 6s - loss: 78129.6032 - mae: 148.9915 - val_loss: 31259.6286 - val_mae: 147.4799
Epoch 161/1000
200/200 - 5s - loss: 51939.0340 - mae: 116.9263 - val_loss: 33277.5970 - val_mae: 152.2869
Epoch 162/1000
200/200 - 6s - loss: 54023.5419 - mae: 119.5287 - val_loss: 25892.3050 - val_mae: 133.5890
Epoch 163/1000
200/200 - 6s - loss: 48544.9878 - mae: 117.9353 - val_loss: 49013.2478 - val_mae: 176.3331
Epoch 164/1000
200/200 - 6s - loss: 39000.8683 - mae: 100.6102 - val_loss: 34531.3559 - val_mae: 149.0771
Epoch 165/1000
200/200 - 6s - loss: 38062.8703 - mae: 98.5599 - val_loss: 36790.6159 - val_mae: 156.2888
Epoch 166/1000
200/200 - 6s - loss: 31097.2145 - mae: 87.1767 - val_loss: 40300.2598 - val_mae: 160.4959
Epoch 167/1000
200/200 - 6s - loss: 32822.0364 - mae: 92.3261 - val_loss: 48065.0521 - val_mae: 173.4302
Epoch 168/1000
200/200 - 6s - loss: 47052.1769 - mae: 112.7586 - val_loss: 40478.0508 - val_mae: 158.2069
Epoch 169/1000
200/200 - 6s - loss: 28538.7330 - mae: 83.6628 - val_loss: 32930.2887 - val_mae: 144.4558
Epoch 170/1000
200/200 - 6s - loss: 44348.4178 - mae: 109.9821 - val_loss: 33016.5614 - val_mae: 143.4807
Epoch 171/1000
200/200 - 6s - loss: 41959.1903 - mae: 104.8506 - val_loss: 39566.4316 - val_mae: 162.6849
Epoch 172/1000
200/200 - 5s - loss: 58823.7879 - mae: 127.1522 - val_loss: 27396.8729 - val_mae: 134.2979
Epoch 173/1000
200/200 - 6s - loss: 41761.4056 - mae: 104.3124 - val_loss: 48027.8221 - val_mae: 171.5773
Epoch 174/1000
200/200 - 6s - loss: 34322.7135 - mae: 91.5795 - val_loss: 34503.4193 - val_mae: 152.1762
Epoch 175/1000
200/200 - 6s - loss: 31794.1909 - mae: 89.9559 - val_loss: 44384.0856 - val_mae: 160.1860
Epoch 176/1000
200/200 - 6s - loss: 39500.2032 - mae: 102.9981 - val_loss: 30784.0870 - val_mae: 144.8846
Epoch 177/1000
200/200 - 6s - loss: 43980.2243 - mae: 106.0072 - val_loss: 36135.5169 - val_mae: 151.5226
Epoch 178/1000
200/200 - 6s - loss: 44581.0931 - mae: 105.8369 - val_loss: 48030.9384 - val_mae: 174.4843
Epoch 179/1000
200/200 - 6s - loss: 37352.1559 - mae: 100.0588 - val_loss: 38450.2900 - val_mae: 152.6018
Epoch 180/1000
200/200 - 6s - loss: 43261.6297 - mae: 105.7315 - val_loss: 29318.2758 - val_mae: 141.4414
Epoch 181/1000
200/200 - 6s - loss: 74589.9950 - mae: 143.2327 - val_loss: 26399.4418 - val_mae: 129.8617
Epoch 182/1000
200/200 - 6s - loss: 32206.7682 - mae: 91.6546 - val_loss: 21039.1865 - val_mae: 115.2992
Epoch 183/1000
200/200 - 6s - loss: 43958.8662 - mae: 106.0580 - val_loss: 30045.4561 - val_mae: 136.9402
Epoch 184/1000
200/200 - 6s - loss: 29859.1017 - mae: 83.0872 - val_loss: 24072.1920 - val_mae: 130.8298
Epoch 185/1000
200/200 - 5s - loss: 21588.1068 - mae: 72.9217 - val_loss: 31172.6836 - val_mae: 133.9369
Epoch 186/1000
200/200 - 5s - loss: 44414.0264 - mae: 105.2457 - val_loss: 22565.3614 - val_mae: 115.7832
Epoch 187/1000
200/200 - 5s - loss: 21118.0256 - mae: 72.2259 - val_loss: 28654.6291 - val_mae: 129.1812
Epoch 188/1000
200/200 - 5s - loss: 59146.8761 - mae: 119.7341 - val_loss: 30897.7575 - val_mae: 141.7038
Epoch 189/1000
200/200 - 5s - loss: 28605.2706 - mae: 84.9061 - val_loss: 31416.2354 - val_mae: 135.2019
Epoch 190/1000
200/200 - 5s - loss: 37737.4330 - mae: 97.4701 - val_loss: 55958.3562 - val_mae: 179.4845
Epoch 191/1000
200/200 - 5s - loss: 41858.6264 - mae: 100.5862 - val_loss: 25965.1486 - val_mae: 129.8038
Epoch 192/1000
200/200 - 6s - loss: 37149.8444 - mae: 96.1406 - val_loss: 27436.9812 - val_mae: 134.3150
Epoch 193/1000
200/200 - 5s - loss: 59027.2836 - mae: 120.8383 - val_loss: 38254.2186 - val_mae: 146.4823
Epoch 194/1000
200/200 - 5s - loss: 27339.8043 - mae: 84.0022 - val_loss: 30515.8241 - val_mae: 134.8343
Epoch 195/1000
200/200 - 5s - loss: 34680.8563 - mae: 93.3295 - val_loss: 45151.2370 - val_mae: 153.9337
Epoch 196/1000
200/200 - 5s - loss: 21218.5526 - mae: 72.2775 - val_loss: 33661.2636 - val_mae: 133.5724
Epoch 197/1000
200/200 - 5s - loss: 78686.3051 - mae: 147.9177 - val_loss: 39252.2907 - val_mae: 154.8481
Epoch 198/1000
200/200 - 5s - loss: 31609.2575 - mae: 96.8761 - val_loss: 42311.6865 - val_mae: 164.7129
Epoch 199/1000
200/200 - 5s - loss: 30228.9879 - mae: 93.7945 - val_loss: 35406.3604 - val_mae: 151.9692
Epoch 200/1000
200/200 - 5s - loss: 46325.6723 - mae: 114.8147 - val_loss: 27986.6295 - val_mae: 128.2113
Epoch 201/1000
200/200 - 5s - loss: 38617.9385 - mae: 104.9402 - val_loss: 15208.2980 - val_mae: 96.0877
Epoch 202/1000
200/200 - 5s - loss: 46507.1214 - mae: 116.9514 - val_loss: 35835.2680 - val_mae: 140.3513
Epoch 203/1000
200/200 - 5s - loss: 40882.7133 - mae: 105.4363 - val_loss: 26160.4973 - val_mae: 127.9659
Epoch 204/1000
200/200 - 5s - loss: 43348.8098 - mae: 107.1486 - val_loss: 31677.9071 - val_mae: 133.4089
Epoch 205/1000
200/200 - 5s - loss: 50247.1436 - mae: 111.5824 - val_loss: 42754.2795 - val_mae: 149.2998
Epoch 206/1000
200/200 - 5s - loss: 48996.3713 - mae: 114.3719 - val_loss: 41590.6394 - val_mae: 152.6345
Epoch 207/1000
200/200 - 5s - loss: 92020.0132 - mae: 158.0645 - val_loss: 48684.8867 - val_mae: 154.1817
Epoch 208/1000
200/200 - 5s - loss: 53685.1820 - mae: 127.1718 - val_loss: 36581.8169 - val_mae: 145.3575
Epoch 209/1000
200/200 - 5s - loss: 46622.7094 - mae: 116.0090 - val_loss: 46858.5257 - val_mae: 151.1032
Epoch 210/1000
200/200 - 5s - loss: 44448.1024 - mae: 109.3648 - val_loss: 49440.1195 - val_mae: 162.6092
Epoch 211/1000
200/200 - 5s - loss: 41313.1536 - mae: 104.6847 - val_loss: 36925.3400 - val_mae: 144.2580
Epoch 212/1000
200/200 - 5s - loss: 37690.4027 - mae: 102.3339 - val_loss: 33237.8692 - val_mae: 132.1831
Epoch 213/1000
200/200 - 5s - loss: 34795.7099 - mae: 96.7508 - val_loss: 46862.6767 - val_mae: 152.5682
Epoch 214/1000
200/200 - 5s - loss: 39033.3513 - mae: 102.4810 - val_loss: 30306.6082 - val_mae: 133.4464
Epoch 215/1000
200/200 - 5s - loss: 42525.9970 - mae: 101.3792 - val_loss: 29980.1257 - val_mae: 128.9068
Epoch 216/1000
200/200 - 5s - loss: 25899.2081 - mae: 82.9671 - val_loss: 42796.1504 - val_mae: 151.7445
Epoch 217/1000
200/200 - 5s - loss: 27511.3689 - mae: 82.7177 - val_loss: 45930.9074 - val_mae: 159.0186
Epoch 218/1000
200/200 - 5s - loss: 34865.8703 - mae: 95.4276 - val_loss: 49354.7541 - val_mae: 162.0834
Epoch 219/1000
200/200 - 5s - loss: 32492.5521 - mae: 87.3499 - val_loss: 42926.4171 - val_mae: 153.0408
Epoch 220/1000
200/200 - 5s - loss: 17670.6640 - mae: 66.3714 - val_loss: 41503.0025 - val_mae: 147.7998
Epoch 221/1000
200/200 - 5s - loss: 28256.7586 - mae: 86.3447 - val_loss: 53692.1616 - val_mae: 166.6319
Epoch 222/1000
200/200 - 5s - loss: 30776.4126 - mae: 87.2844 - val_loss: 31877.6612 - val_mae: 136.2053
Epoch 223/1000
200/200 - 5s - loss: 48871.7055 - mae: 110.6295 - val_loss: 39058.4182 - val_mae: 168.6630
Epoch 224/1000
200/200 - 5s - loss: 37083.0578 - mae: 96.3756 - val_loss: 56728.0771 - val_mae: 166.3280
Epoch 225/1000
200/200 - 5s - loss: 56111.5395 - mae: 119.0860 - val_loss: 52785.2895 - val_mae: 164.1785
Epoch 226/1000
200/200 - 5s - loss: 41281.8105 - mae: 99.1041 - val_loss: 51033.7468 - val_mae: 163.6002
Epoch 227/1000
200/200 - 5s - loss: 46399.4958 - mae: 107.4563 - val_loss: 66715.3046 - val_mae: 178.9797
Epoch 228/1000
200/200 - 5s - loss: 21322.4415 - mae: 72.6327 - val_loss: 58130.9257 - val_mae: 166.9359
Epoch 229/1000
200/200 - 5s - loss: 24363.5645 - mae: 75.5827 - val_loss: 32856.1098 - val_mae: 150.7270
Epoch 230/1000
200/200 - 5s - loss: 27714.5632 - mae: 85.6739 - val_loss: 57664.0107 - val_mae: 164.6734
Epoch 231/1000
200/200 - 6s - loss: 15612.9537 - mae: 63.4520 - val_loss: 46563.7762 - val_mae: 147.2466
Epoch 232/1000
200/200 - 6s - loss: 12099.3656 - mae: 55.3523 - val_loss: 68814.5602 - val_mae: 179.0722
Epoch 233/1000
200/200 - 6s - loss: 49833.5974 - mae: 104.8005 - val_loss: 79207.1125 - val_mae: 188.8982
Epoch 234/1000
200/200 - 6s - loss: 43764.1146 - mae: 107.6403 - val_loss: 46900.8682 - val_mae: 157.8055
Epoch 235/1000
200/200 - 5s - loss: 20433.4619 - mae: 73.5311 - val_loss: 40158.5408 - val_mae: 152.1881
Epoch 236/1000
200/200 - 5s - loss: 38487.0877 - mae: 95.2410 - val_loss: 36144.8067 - val_mae: 147.9748
Epoch 237/1000
200/200 - 5s - loss: 61718.7529 - mae: 122.9823 - val_loss: 51994.6957 - val_mae: 175.3289
Epoch 238/1000
200/200 - 5s - loss: 45616.1099 - mae: 106.6514 - val_loss: 28216.0151 - val_mae: 131.8247
Epoch 239/1000
200/200 - 5s - loss: 33504.6004 - mae: 94.3795 - val_loss: 57473.2317 - val_mae: 165.8754
Epoch 240/1000
200/200 - 5s - loss: 24706.7844 - mae: 74.6310 - val_loss: 77507.8149 - val_mae: 194.0500
Epoch 241/1000
200/200 - 5s - loss: 27864.3554 - mae: 82.2967 - val_loss: 52126.3730 - val_mae: 166.9151
Epoch 242/1000
200/200 - 5s - loss: 16431.4145 - mae: 63.7426 - val_loss: 50327.8606 - val_mae: 159.7024
Epoch 243/1000
200/200 - 5s - loss: 37903.5624 - mae: 90.2365 - val_loss: 37501.7047 - val_mae: 151.5846
Epoch 244/1000
200/200 - 5s - loss: 23022.5622 - mae: 74.6283 - val_loss: 65437.1811 - val_mae: 184.1918
Epoch 245/1000
200/200 - 6s - loss: 27382.5516 - mae: 78.7581 - val_loss: 66121.1795 - val_mae: 183.6845
Epoch 246/1000
200/200 - 5s - loss: 21010.2263 - mae: 69.5320 - val_loss: 57623.8611 - val_mae: 172.6754
Epoch 247/1000
200/200 - 5s - loss: 23609.1045 - mae: 76.2787 - val_loss: 72743.6634 - val_mae: 189.5097
Epoch 248/1000
200/200 - 5s - loss: 31700.5665 - mae: 89.9194 - val_loss: 68968.9523 - val_mae: 191.9373
Epoch 249/1000
200/200 - 5s - loss: 34465.1733 - mae: 90.9870 - val_loss: 68780.7537 - val_mae: 189.3254
Epoch 250/1000
200/200 - 5s - loss: 32086.5119 - mae: 84.3918 - val_loss: 69292.1848 - val_mae: 188.9474
Epoch 251/1000
200/200 - 6s - loss: 23582.6909 - mae: 73.0355 - val_loss: 67838.2823 - val_mae: 191.0736
Epoch 252/1000
200/200 - 6s - loss: 87799.3785 - mae: 152.8179 - val_loss: 93819.4200 - val_mae: 219.3416
Epoch 253/1000
200/200 - 5s - loss: 31560.0508 - mae: 88.8678 - val_loss: 69050.5156 - val_mae: 196.6209
Epoch 254/1000
200/200 - 6s - loss: 41277.1713 - mae: 95.7513 - val_loss: 88657.9477 - val_mae: 212.2429
Epoch 255/1000
200/200 - 6s - loss: 23478.0088 - mae: 75.9869 - val_loss: 60685.2070 - val_mae: 186.7768
Epoch 256/1000
200/200 - 6s - loss: 42629.9764 - mae: 99.0527 - val_loss: 60430.5915 - val_mae: 186.1024
Epoch 257/1000
200/200 - 5s - loss: 14969.5572 - mae: 63.0631 - val_loss: 61241.3181 - val_mae: 190.1353
Epoch 258/1000
200/200 - 5s - loss: 20140.6789 - mae: 64.8649 - val_loss: 73401.9393 - val_mae: 204.6337
Epoch 259/1000
200/200 - 6s - loss: 35609.2031 - mae: 92.2107 - val_loss: 64935.8973 - val_mae: 197.7347
Epoch 260/1000
200/200 - 5s - loss: 32200.3128 - mae: 83.9124 - val_loss: 69675.1395 - val_mae: 198.0125
Epoch 261/1000
200/200 - 5s - loss: 25139.0700 - mae: 73.2776 - val_loss: 72808.2832 - val_mae: 206.2714
Epoch 262/1000
200/200 - 6s - loss: 22021.5935 - mae: 73.2449 - val_loss: 58827.5602 - val_mae: 187.6619
Epoch 263/1000
200/200 - 5s - loss: 10514.6581 - mae: 51.6471 - val_loss: 67570.0291 - val_mae: 198.2039
Epoch 264/1000
200/200 - 6s - loss: 27199.2385 - mae: 74.1113 - val_loss: 73427.6166 - val_mae: 194.0723
Epoch 265/1000
200/200 - 6s - loss: 47929.2641 - mae: 110.0759 - val_loss: 80640.6771 - val_mae: 207.2504
Epoch 266/1000
200/200 - 6s - loss: 30752.4357 - mae: 85.8076 - val_loss: 63305.0615 - val_mae: 195.8274
Epoch 267/1000
200/200 - 5s - loss: 30314.4031 - mae: 80.5657 - val_loss: 57829.8441 - val_mae: 185.0587
Epoch 268/1000
200/200 - 5s - loss: 14087.9840 - mae: 59.6166 - val_loss: 71945.6796 - val_mae: 200.9382
Epoch 269/1000
200/200 - 5s - loss: 16481.2022 - mae: 58.2318 - val_loss: 48420.2461 - val_mae: 163.7564
Epoch 270/1000
200/200 - 5s - loss: 23974.8774 - mae: 78.3625 - val_loss: 63441.2774 - val_mae: 180.7921
Epoch 271/1000
200/200 - 5s - loss: 31439.2947 - mae: 85.1865 - val_loss: 44215.3636 - val_mae: 157.8684
Epoch 272/1000
200/200 - 5s - loss: 16303.9078 - mae: 64.3338 - val_loss: 65833.8241 - val_mae: 191.3702
Epoch 273/1000
200/200 - 5s - loss: 7641.3400 - mae: 44.6216 - val_loss: 66026.8586 - val_mae: 192.8484
Epoch 274/1000
200/200 - 5s - loss: 14190.0649 - mae: 52.2106 - val_loss: 84360.0636 - val_mae: 206.5013
Epoch 275/1000
200/200 - 5s - loss: 22181.1033 - mae: 72.2161 - val_loss: 65913.2550 - val_mae: 195.2415
Epoch 276/1000
200/200 - 5s - loss: 15416.9173 - mae: 59.3909 - val_loss: 52035.0959 - val_mae: 176.5712
Epoch 277/1000
200/200 - 5s - loss: 4461.1589 - mae: 33.8463 - val_loss: 49380.9024 - val_mae: 176.9160
Epoch 278/1000
200/200 - 5s - loss: 39993.7822 - mae: 97.8670 - val_loss: 71558.4827 - val_mae: 195.4941
Epoch 279/1000
200/200 - 5s - loss: 29044.3890 - mae: 85.5748 - val_loss: 53773.1517 - val_mae: 167.8629
Epoch 280/1000
200/200 - 5s - loss: 11179.1226 - mae: 55.8164 - val_loss: 49921.5573 - val_mae: 166.0010
Epoch 281/1000
200/200 - 5s - loss: 8072.5474 - mae: 45.5236 - val_loss: 53926.3873 - val_mae: 166.0580
Epoch 282/1000
200/200 - 5s - loss: 4335.2498 - mae: 31.8514 - val_loss: 56761.4302 - val_mae: 167.7299
Epoch 283/1000
200/200 - 5s - loss: 25139.4313 - mae: 76.2013 - val_loss: 53437.5715 - val_mae: 168.5419
Epoch 284/1000
200/200 - 5s - loss: 27065.0084 - mae: 79.3818 - val_loss: 54844.8798 - val_mae: 185.6355
Epoch 285/1000
200/200 - 5s - loss: 12123.1519 - mae: 56.6261 - val_loss: 49881.0992 - val_mae: 168.0109
Epoch 286/1000
200/200 - 5s - loss: 5733.9271 - mae: 38.7806 - val_loss: 57304.6405 - val_mae: 173.0717
Epoch 287/1000
200/200 - 5s - loss: 3740.3991 - mae: 28.9748 - val_loss: 57646.0090 - val_mae: 176.8329
Epoch 288/1000
200/200 - 5s - loss: 5914.3472 - mae: 35.6920 - val_loss: 56589.4743 - val_mae: 174.7272
Epoch 289/1000
200/200 - 5s - loss: 5229.8844 - mae: 33.6133 - val_loss: 53651.4966 - val_mae: 170.4221
Epoch 290/1000
200/200 - 5s - loss: 4407.8421 - mae: 30.1465 - val_loss: 56324.8862 - val_mae: 172.0656
Epoch 291/1000
200/200 - 5s - loss: 40728.7897 - mae: 89.7313 - val_loss: 63634.8522 - val_mae: 184.3691
Epoch 292/1000
200/200 - 5s - loss: 49274.3264 - mae: 109.6131 - val_loss: 37926.3416 - val_mae: 154.7200
Epoch 293/1000
200/200 - 5s - loss: 39991.5363 - mae: 97.5625 - val_loss: 38606.5026 - val_mae: 151.3131
Epoch 294/1000
200/200 - 5s - loss: 27635.9536 - mae: 77.7498 - val_loss: 73351.8898 - val_mae: 194.2299
Epoch 295/1000
200/200 - 5s - loss: 21196.2955 - mae: 67.5570 - val_loss: 48085.8298 - val_mae: 153.6676
Epoch 296/1000
200/200 - 5s - loss: 39510.9424 - mae: 92.6284 - val_loss: 32620.4646 - val_mae: 140.0467
Epoch 297/1000
200/200 - 5s - loss: 34439.2388 - mae: 91.6220 - val_loss: 53659.3020 - val_mae: 173.0294
Epoch 298/1000
200/200 - 5s - loss: 14858.7493 - mae: 62.1890 - val_loss: 52728.2396 - val_mae: 163.0095
Epoch 299/1000
200/200 - 5s - loss: 7542.7164 - mae: 42.9187 - val_loss: 54494.9122 - val_mae: 163.9563
Epoch 300/1000
200/200 - 5s - loss: 6968.6536 - mae: 39.1097 - val_loss: 58388.2151 - val_mae: 176.7832
Epoch 301/1000
200/200 - 5s - loss: 49088.7586 - mae: 105.3299 - val_loss: 48763.6312 - val_mae: 166.0457
Epoch 302/1000
200/200 - 5s - loss: 26139.0321 - mae: 79.9314 - val_loss: 56075.7422 - val_mae: 175.7781
Epoch 303/1000
200/200 - 6s - loss: 7443.4825 - mae: 45.5524 - val_loss: 53452.3090 - val_mae: 167.4594
Epoch 304/1000
200/200 - 6s - loss: 5970.5633 - mae: 39.4788 - val_loss: 50480.5556 - val_mae: 156.8645
Epoch 305/1000
200/200 - 5s - loss: 39843.3828 - mae: 93.2333 - val_loss: 45012.9146 - val_mae: 158.4639
Epoch 306/1000
200/200 - 5s - loss: 17965.9480 - mae: 54.0976 - val_loss: 80005.6106 - val_mae: 194.1017
Epoch 307/1000
200/200 - 5s - loss: 37507.8495 - mae: 88.3673 - val_loss: 64153.6073 - val_mae: 185.3870
Epoch 308/1000
200/200 - 5s - loss: 6291.4250 - mae: 41.4436 - val_loss: 60960.6882 - val_mae: 182.5862
Epoch 309/1000
200/200 - 5s - loss: 4109.3392 - mae: 32.4541 - val_loss: 67305.4355 - val_mae: 183.8466
Epoch 310/1000
200/200 - 5s - loss: 4942.8585 - mae: 34.8978 - val_loss: 65224.5263 - val_mae: 189.7479
Epoch 311/1000
200/200 - 5s - loss: 37243.9786 - mae: 85.0375 - val_loss: 35235.6844 - val_mae: 139.8160
Epoch 312/1000
200/200 - 5s - loss: 12583.3584 - mae: 58.5750 - val_loss: 50804.8719 - val_mae: 163.4823
Epoch 313/1000
200/200 - 5s - loss: 22242.7157 - mae: 67.8894 - val_loss: 55420.1932 - val_mae: 162.1066
Epoch 314/1000
200/200 - 5s - loss: 15888.6031 - mae: 60.6345 - val_loss: 43671.3777 - val_mae: 149.9778
Epoch 315/1000
200/200 - 5s - loss: 4323.5314 - mae: 33.0582 - val_loss: 44107.0034 - val_mae: 152.0596
Epoch 316/1000
200/200 - 5s - loss: 4025.2365 - mae: 32.0428 - val_loss: 40626.2237 - val_mae: 141.6192
Epoch 317/1000
200/200 - 5s - loss: 4843.0397 - mae: 36.5199 - val_loss: 39507.7159 - val_mae: 145.7854
Epoch 318/1000
200/200 - 5s - loss: 20211.2989 - mae: 65.7144 - val_loss: 41868.1091 - val_mae: 146.7384
Epoch 319/1000
200/200 - 5s - loss: 7323.8735 - mae: 39.4352 - val_loss: 43095.0131 - val_mae: 155.0044
Epoch 320/1000
200/200 - 5s - loss: 70320.1123 - mae: 132.7238 - val_loss: 44173.3429 - val_mae: 154.8703
Epoch 321/1000
200/200 - 5s - loss: 27415.0158 - mae: 86.4900 - val_loss: 91731.9894 - val_mae: 214.7931
Epoch 322/1000
200/200 - 5s - loss: 50891.6091 - mae: 111.8989 - val_loss: 42781.3103 - val_mae: 158.4841
Epoch 323/1000
200/200 - 5s - loss: 48246.7627 - mae: 103.7779 - val_loss: 38404.8863 - val_mae: 148.9056
Epoch 324/1000
200/200 - 5s - loss: 54176.9114 - mae: 112.4961 - val_loss: 49737.9663 - val_mae: 162.3455
Epoch 325/1000
200/200 - 5s - loss: 59921.3204 - mae: 118.7127 - val_loss: 29978.8101 - val_mae: 130.2062
Epoch 326/1000
200/200 - 5s - loss: 23272.9941 - mae: 74.3270 - val_loss: 41279.9792 - val_mae: 144.7666
Epoch 327/1000
200/200 - 5s - loss: 21988.4599 - mae: 71.0530 - val_loss: 37655.1373 - val_mae: 141.7216
Epoch 328/1000
200/200 - 5s - loss: 9955.3530 - mae: 46.6866 - val_loss: 45639.6109 - val_mae: 154.7854
Epoch 329/1000
200/200 - 5s - loss: 19064.1520 - mae: 66.1078 - val_loss: 42372.9338 - val_mae: 148.1870
Epoch 330/1000
200/200 - 5s - loss: 15076.2256 - mae: 59.1044 - val_loss: 40143.4954 - val_mae: 145.2471
Epoch 331/1000
200/200 - 5s - loss: 27202.3911 - mae: 78.9415 - val_loss: 50249.9084 - val_mae: 163.5584
Epoch 332/1000
200/200 - 5s - loss: 16767.5184 - mae: 63.0404 - val_loss: 42574.5167 - val_mae: 149.5853
Epoch 333/1000
200/200 - 5s - loss: 15010.4211 - mae: 58.9944 - val_loss: 32026.9102 - val_mae: 132.3943
Epoch 334/1000
200/200 - 5s - loss: 24293.0087 - mae: 71.8172 - val_loss: 32714.4050 - val_mae: 136.1682
Epoch 335/1000
200/200 - 5s - loss: 15974.1149 - mae: 58.5723 - val_loss: 40037.6057 - val_mae: 145.2724
Epoch 336/1000
200/200 - 5s - loss: 45881.4373 - mae: 107.1506 - val_loss: 25883.8511 - val_mae: 112.3930
Epoch 337/1000
200/200 - 5s - loss: 13490.6996 - mae: 55.5036 - val_loss: 22314.4720 - val_mae: 101.4339
Epoch 338/1000
200/200 - 5s - loss: 9572.6055 - mae: 49.6673 - val_loss: 21627.6324 - val_mae: 106.2333
Epoch 339/1000
200/200 - 5s - loss: 14365.2044 - mae: 57.6272 - val_loss: 23671.1067 - val_mae: 112.5663
Epoch 340/1000
200/200 - 5s - loss: 31786.0707 - mae: 73.9479 - val_loss: 32935.6099 - val_mae: 133.6021
Epoch 341/1000
200/200 - 5s - loss: 14029.1340 - mae: 56.5700 - val_loss: 22031.0734 - val_mae: 102.3986
Epoch 342/1000
200/200 - 5s - loss: 15775.9817 - mae: 64.0447 - val_loss: 38642.2797 - val_mae: 141.1254
Epoch 343/1000
200/200 - 5s - loss: 8341.6322 - mae: 46.8509 - val_loss: 25696.6923 - val_mae: 113.5763
Epoch 344/1000
200/200 - 5s - loss: 21252.4557 - mae: 66.2370 - val_loss: 25580.9085 - val_mae: 106.4846
Epoch 345/1000
200/200 - 5s - loss: 5630.8561 - mae: 39.1882 - val_loss: 32814.5069 - val_mae: 127.6766
Epoch 346/1000
200/200 - 5s - loss: 4529.3791 - mae: 34.8145 - val_loss: 34263.5736 - val_mae: 128.7630
Epoch 347/1000
200/200 - 5s - loss: 12154.9835 - mae: 53.0079 - val_loss: 21513.5719 - val_mae: 99.2207
Epoch 348/1000
200/200 - 5s - loss: 27020.6337 - mae: 80.3456 - val_loss: 26427.3435 - val_mae: 123.4265
Epoch 349/1000
200/200 - 5s - loss: 10684.6272 - mae: 51.5272 - val_loss: 29989.1399 - val_mae: 120.9874
Epoch 350/1000
200/200 - 5s - loss: 35601.5664 - mae: 88.3334 - val_loss: 23620.5298 - val_mae: 112.6884
Epoch 351/1000
200/200 - 5s - loss: 21228.3466 - mae: 69.3283 - val_loss: 33112.7142 - val_mae: 126.9250
Epoch 352/1000
200/200 - 5s - loss: 34871.1472 - mae: 94.1204 - val_loss: 23294.6089 - val_mae: 107.1991
Epoch 353/1000
200/200 - 5s - loss: 6771.2427 - mae: 44.2506 - val_loss: 27054.3618 - val_mae: 109.5719
Epoch 354/1000
200/200 - 5s - loss: 9004.4453 - mae: 50.2182 - val_loss: 29690.8884 - val_mae: 113.8245
Epoch 355/1000
200/200 - 5s - loss: 2986.2798 - mae: 29.7085 - val_loss: 32307.8695 - val_mae: 121.1246
Epoch 356/1000
200/200 - 5s - loss: 3340.5937 - mae: 32.3206 - val_loss: 35702.7257 - val_mae: 127.7217
Epoch 357/1000
200/200 - 5s - loss: 2845.1742 - mae: 27.8720 - val_loss: 28813.5013 - val_mae: 113.1625
Epoch 358/1000
200/200 - 5s - loss: 2945.0153 - mae: 26.4198 - val_loss: 31475.2895 - val_mae: 120.8052
Epoch 359/1000
200/200 - 5s - loss: 3416.7747 - mae: 29.0902 - val_loss: 32123.2203 - val_mae: 121.4602
Epoch 360/1000
200/200 - 5s - loss: 2954.1972 - mae: 27.5874 - val_loss: 33125.8163 - val_mae: 123.9482
Epoch 361/1000
200/200 - 5s - loss: 3909.9454 - mae: 28.8626 - val_loss: 33341.8076 - val_mae: 124.3103
Epoch 362/1000
200/200 - 5s - loss: 3574.2954 - mae: 29.2404 - val_loss: 32188.5105 - val_mae: 122.7465
Epoch 363/1000
200/200 - 5s - loss: 2765.7108 - mae: 25.0802 - val_loss: 31951.9997 - val_mae: 120.6537
Epoch 364/1000
200/200 - 5s - loss: 36822.0798 - mae: 75.9993 - val_loss: 24969.7537 - val_mae: 123.0653
Epoch 365/1000
200/200 - 5s - loss: 39013.6023 - mae: 106.0610 - val_loss: 20854.2788 - val_mae: 109.2950
Epoch 366/1000
200/200 - 5s - loss: 59607.8031 - mae: 122.7401 - val_loss: 24196.4437 - val_mae: 109.7366
Epoch 367/1000
200/200 - 5s - loss: 21484.1866 - mae: 72.0968 - val_loss: 23762.5770 - val_mae: 104.7764
Epoch 368/1000
200/200 - 5s - loss: 24107.6339 - mae: 73.4123 - val_loss: 17442.3890 - val_mae: 94.2667
Epoch 369/1000
200/200 - 5s - loss: 8865.7555 - mae: 48.0447 - val_loss: 20683.3051 - val_mae: 106.7891
Epoch 370/1000
200/200 - 5s - loss: 3355.2087 - mae: 31.1047 - val_loss: 24237.9704 - val_mae: 111.9698
Epoch 371/1000
200/200 - 5s - loss: 2617.3921 - mae: 26.0345 - val_loss: 23008.9216 - val_mae: 105.1383
Epoch 372/1000
200/200 - 5s - loss: 6876.0254 - mae: 41.2606 - val_loss: 22998.9236 - val_mae: 103.7979
Epoch 373/1000
200/200 - 5s - loss: 2552.7445 - mae: 25.2875 - val_loss: 23816.2882 - val_mae: 107.1020
Epoch 374/1000
200/200 - 5s - loss: 3039.4317 - mae: 25.5726 - val_loss: 22652.4393 - val_mae: 104.4654
Epoch 375/1000
200/200 - 5s - loss: 3881.3389 - mae: 29.8354 - val_loss: 25753.5715 - val_mae: 106.5560
Epoch 376/1000
200/200 - 5s - loss: 1941.6880 - mae: 21.5467 - val_loss: 23329.4744 - val_mae: 103.3990
Epoch 377/1000
200/200 - 5s - loss: 1767.2010 - mae: 20.3922 - val_loss: 24416.3747 - val_mae: 105.4634
Epoch 378/1000
200/200 - 5s - loss: 2418.3655 - mae: 23.3730 - val_loss: 24185.2324 - val_mae: 106.7365
Epoch 379/1000
200/200 - 5s - loss: 1998.5269 - mae: 20.7068 - val_loss: 25508.7360 - val_mae: 107.2726
Epoch 380/1000
200/200 - 5s - loss: 2641.4953 - mae: 24.9003 - val_loss: 23832.2158 - val_mae: 103.0688
Epoch 381/1000
200/200 - 5s - loss: 2697.9842 - mae: 27.8019 - val_loss: 26285.0811 - val_mae: 107.5478
Epoch 382/1000
200/200 - 5s - loss: 19633.7518 - mae: 51.8448 - val_loss: 22279.3804 - val_mae: 124.3423
Epoch 383/1000
200/200 - 5s - loss: 107323.4780 - mae: 176.8932 - val_loss: 10350.4487 - val_mae: 77.6647
Epoch 384/1000
200/200 - 5s - loss: 59150.1884 - mae: 131.2589 - val_loss: 19579.6372 - val_mae: 106.0039
Epoch 385/1000
200/200 - 5s - loss: 29076.7864 - mae: 92.8402 - val_loss: 19919.8364 - val_mae: 97.4001
Epoch 386/1000
200/200 - 5s - loss: 33733.1622 - mae: 97.6496 - val_loss: 18694.8239 - val_mae: 104.5662
Epoch 387/1000
200/200 - 5s - loss: 58476.5793 - mae: 123.1865 - val_loss: 18049.7182 - val_mae: 100.2097
Epoch 388/1000
200/200 - 5s - loss: 49614.9017 - mae: 118.5079 - val_loss: 14321.8654 - val_mae: 98.6055
Epoch 389/1000
200/200 - 5s - loss: 27243.9690 - mae: 92.1910 - val_loss: 18657.7918 - val_mae: 104.3143
Epoch 390/1000
200/200 - 5s - loss: 12391.3824 - mae: 65.9467 - val_loss: 18844.9949 - val_mae: 106.8325
Epoch 391/1000
200/200 - 5s - loss: 24757.0316 - mae: 80.9127 - val_loss: 19682.3705 - val_mae: 108.5401
Epoch 392/1000
200/200 - 5s - loss: 7504.9664 - mae: 52.4563 - val_loss: 20583.4675 - val_mae: 114.2237
Epoch 393/1000
200/200 - 5s - loss: 8284.7662 - mae: 52.6630 - val_loss: 20676.2755 - val_mae: 119.6745
Epoch 394/1000
200/200 - 5s - loss: 6973.3341 - mae: 46.7242 - val_loss: 22687.4681 - val_mae: 123.9379
Epoch 395/1000
200/200 - 5s - loss: 18801.7193 - mae: 71.1319 - val_loss: 18453.0594 - val_mae: 113.4572
Epoch 396/1000
200/200 - 5s - loss: 27997.8582 - mae: 86.5486 - val_loss: 16482.1384 - val_mae: 105.8349
Epoch 397/1000
200/200 - 5s - loss: 13280.2337 - mae: 58.3637 - val_loss: 17989.0199 - val_mae: 111.7855
Epoch 398/1000
200/200 - 5s - loss: 4928.8625 - mae: 39.5663 - val_loss: 17830.3119 - val_mae: 112.3593
Epoch 399/1000
200/200 - 5s - loss: 4671.3486 - mae: 36.5277 - val_loss: 18358.1236 - val_mae: 113.2937
Epoch 400/1000
200/200 - 5s - loss: 4114.5519 - mae: 36.3108 - val_loss: 19066.1528 - val_mae: 115.4420
Epoch 401/1000
200/200 - 5s - loss: 6426.6765 - mae: 39.7750 - val_loss: 19426.9402 - val_mae: 116.3992
Epoch 402/1000
200/200 - 5s - loss: 3724.1888 - mae: 35.7282 - val_loss: 18359.9050 - val_mae: 112.8233
Epoch 403/1000
200/200 - 5s - loss: 5487.1198 - mae: 40.3979 - val_loss: 17221.6526 - val_mae: 111.4521
Epoch 404/1000
200/200 - 5s - loss: 30034.5816 - mae: 71.2324 - val_loss: 20387.3837 - val_mae: 115.1350
Epoch 405/1000
200/200 - 5s - loss: 51330.2082 - mae: 117.2584 - val_loss: 21818.8073 - val_mae: 120.8466
Epoch 406/1000
200/200 - 5s - loss: 22017.4832 - mae: 75.5503 - val_loss: 20519.7831 - val_mae: 117.1985
Epoch 407/1000
200/200 - 5s - loss: 40057.2296 - mae: 98.5204 - val_loss: 18375.1653 - val_mae: 105.2417
Epoch 408/1000
200/200 - 5s - loss: 11475.8682 - mae: 56.0603 - val_loss: 21776.1264 - val_mae: 118.8094
Epoch 409/1000
200/200 - 5s - loss: 20852.4346 - mae: 73.8567 - val_loss: 16771.8610 - val_mae: 98.5826
Epoch 410/1000
200/200 - 5s - loss: 13051.1019 - mae: 58.0407 - val_loss: 18586.2552 - val_mae: 106.2147
Epoch 411/1000
200/200 - 5s - loss: 21863.7577 - mae: 71.7942 - val_loss: 19208.9506 - val_mae: 104.8906
Epoch 412/1000
200/200 - 5s - loss: 18419.6689 - mae: 72.1838 - val_loss: 19433.2009 - val_mae: 108.1545
Epoch 413/1000
200/200 - 5s - loss: 3558.3808 - mae: 33.9296 - val_loss: 18711.3736 - val_mae: 108.7634
Epoch 414/1000
200/200 - 5s - loss: 3453.8567 - mae: 29.8345 - val_loss: 18945.2546 - val_mae: 108.9602
Epoch 415/1000
200/200 - 5s - loss: 2510.6887 - mae: 26.9844 - val_loss: 20323.3396 - val_mae: 111.4188
Epoch 416/1000
200/200 - 5s - loss: 2654.1395 - mae: 26.6942 - val_loss: 18894.4066 - val_mae: 108.1749
Epoch 417/1000
200/200 - 5s - loss: 2704.2750 - mae: 26.3803 - val_loss: 18445.2989 - val_mae: 105.3119
Epoch 418/1000
200/200 - 5s - loss: 2458.1737 - mae: 26.3166 - val_loss: 22156.8913 - val_mae: 116.4817
Epoch 419/1000
200/200 - 5s - loss: 2540.4997 - mae: 25.9514 - val_loss: 19982.7349 - val_mae: 111.3590
Epoch 420/1000
200/200 - 5s - loss: 44209.6123 - mae: 98.5092 - val_loss: 17749.9662 - val_mae: 102.8542
Epoch 421/1000
200/200 - 5s - loss: 18915.7984 - mae: 72.8945 - val_loss: 22649.4419 - val_mae: 117.7871
Epoch 422/1000
200/200 - 5s - loss: 9445.0949 - mae: 51.7734 - val_loss: 15595.3939 - val_mae: 103.8625
Epoch 423/1000
200/200 - 5s - loss: 3514.7838 - mae: 33.5004 - val_loss: 16353.5216 - val_mae: 106.2871
Epoch 424/1000
200/200 - 5s - loss: 3177.6439 - mae: 30.6274 - val_loss: 15817.3393 - val_mae: 106.1232
Epoch 425/1000
200/200 - 5s - loss: 2957.7232 - mae: 29.5748 - val_loss: 15307.7211 - val_mae: 103.0030
Epoch 426/1000
200/200 - 5s - loss: 3179.0800 - mae: 28.1854 - val_loss: 16711.5756 - val_mae: 108.6390
Epoch 427/1000
200/200 - 5s - loss: 2253.2448 - mae: 24.7650 - val_loss: 17534.2852 - val_mae: 107.9176
Epoch 428/1000
200/200 - 5s - loss: 42799.0500 - mae: 89.6705 - val_loss: 22627.1945 - val_mae: 107.7597
Epoch 429/1000
200/200 - 5s - loss: 68167.2958 - mae: 151.3532 - val_loss: 33267.7905 - val_mae: 145.9573
Epoch 430/1000
200/200 - 5s - loss: 43888.8149 - mae: 112.9676 - val_loss: 34955.2360 - val_mae: 147.0943
Epoch 431/1000
200/200 - 5s - loss: 37534.0517 - mae: 102.5336 - val_loss: 34735.4746 - val_mae: 144.2116
Epoch 432/1000
200/200 - 5s - loss: 27299.6407 - mae: 87.5100 - val_loss: 39027.0304 - val_mae: 144.8732
Epoch 433/1000
200/200 - 5s - loss: 27007.7398 - mae: 83.2674 - val_loss: 27560.9284 - val_mae: 121.8260
Epoch 434/1000
200/200 - 5s - loss: 20553.2649 - mae: 74.5252 - val_loss: 33748.6803 - val_mae: 130.0587
Epoch 435/1000
200/200 - 5s - loss: 21416.5720 - mae: 76.5098 - val_loss: 27225.3854 - val_mae: 117.9221
Epoch 436/1000
200/200 - 5s - loss: 22517.1867 - mae: 79.0259 - val_loss: 29146.5250 - val_mae: 140.1168
Epoch 437/1000
200/200 - 5s - loss: 41281.2735 - mae: 101.3340 - val_loss: 34376.3743 - val_mae: 134.5208
Epoch 438/1000
200/200 - 5s - loss: 16473.9045 - mae: 67.6629 - val_loss: 23737.6815 - val_mae: 114.2698
Epoch 439/1000
200/200 - 5s - loss: 16092.5688 - mae: 64.6441 - val_loss: 30231.8743 - val_mae: 123.8018
Epoch 440/1000
200/200 - 5s - loss: 23566.3409 - mae: 81.0465 - val_loss: 19733.1586 - val_mae: 103.0706
Epoch 441/1000
200/200 - 5s - loss: 16579.5394 - mae: 70.9277 - val_loss: 27996.6697 - val_mae: 124.0665
Epoch 442/1000
200/200 - 5s - loss: 13844.2901 - mae: 61.4028 - val_loss: 21047.3852 - val_mae: 102.5015
Epoch 443/1000
200/200 - 5s - loss: 27757.4819 - mae: 90.2303 - val_loss: 17672.3764 - val_mae: 103.2891
Epoch 444/1000
200/200 - 5s - loss: 16119.8334 - mae: 68.1009 - val_loss: 18639.0917 - val_mae: 95.3858
Epoch 445/1000
200/200 - 5s - loss: 23233.0918 - mae: 83.9847 - val_loss: 23056.7733 - val_mae: 132.3241
Epoch 446/1000
200/200 - 5s - loss: 9177.6028 - mae: 52.3964 - val_loss: 22298.9117 - val_mae: 116.0199
Epoch 447/1000
200/200 - 5s - loss: 18285.8278 - mae: 68.1194 - val_loss: 17562.8591 - val_mae: 109.1937
Epoch 448/1000
200/200 - 5s - loss: 14783.5845 - mae: 67.7171 - val_loss: 19584.2204 - val_mae: 116.3136
Epoch 449/1000
200/200 - 5s - loss: 22660.0073 - mae: 72.8864 - val_loss: 29582.1922 - val_mae: 137.1726
Epoch 450/1000
200/200 - 5s - loss: 20801.3390 - mae: 76.0215 - val_loss: 26668.7163 - val_mae: 137.4393
Epoch 451/1000
200/200 - 5s - loss: 13114.2847 - mae: 62.3579 - val_loss: 23937.6041 - val_mae: 132.7429
Epoch 452/1000
200/200 - 5s - loss: 17308.1228 - mae: 66.6854 - val_loss: 30593.4473 - val_mae: 141.7578
Epoch 453/1000
200/200 - 5s - loss: 12153.5476 - mae: 62.7181 - val_loss: 19357.0362 - val_mae: 112.5890
Epoch 454/1000
200/200 - 5s - loss: 11946.7086 - mae: 53.7651 - val_loss: 36965.2460 - val_mae: 167.5436
Epoch 455/1000
200/200 - 5s - loss: 20988.2054 - mae: 72.5279 - val_loss: 23620.5552 - val_mae: 122.1671
Epoch 456/1000
200/200 - 5s - loss: 4005.7216 - mae: 38.4876 - val_loss: 24843.8022 - val_mae: 130.5474
Epoch 457/1000
200/200 - 5s - loss: 28561.0316 - mae: 77.2000 - val_loss: 21281.3664 - val_mae: 120.5752
Epoch 458/1000
200/200 - 5s - loss: 17364.5700 - mae: 75.1747 - val_loss: 24566.7686 - val_mae: 130.7864
Epoch 459/1000
200/200 - 5s - loss: 8979.5893 - mae: 52.7282 - val_loss: 22955.0620 - val_mae: 123.6549
Epoch 460/1000
200/200 - 5s - loss: 34846.2617 - mae: 96.7348 - val_loss: 28447.2807 - val_mae: 143.5793
Epoch 461/1000
200/200 - 5s - loss: 16172.2222 - mae: 70.9936 - val_loss: 30483.4025 - val_mae: 152.8319
Epoch 462/1000
200/200 - 5s - loss: 10163.2317 - mae: 54.3214 - val_loss: 32126.4027 - val_mae: 152.3953
Epoch 463/1000
200/200 - 5s - loss: 13439.3472 - mae: 57.5792 - val_loss: 30156.6518 - val_mae: 148.4457
Epoch 464/1000
200/200 - 5s - loss: 17661.8632 - mae: 62.0844 - val_loss: 36619.5496 - val_mae: 164.6902
Epoch 465/1000
200/200 - 5s - loss: 37111.4576 - mae: 97.6163 - val_loss: 35099.2539 - val_mae: 157.5080
Epoch 466/1000
200/200 - 5s - loss: 20744.7521 - mae: 74.8694 - val_loss: 31168.7574 - val_mae: 147.8585
Epoch 467/1000
200/200 - 5s - loss: 5103.7605 - mae: 40.0085 - val_loss: 29389.5996 - val_mae: 142.9089
Epoch 468/1000
200/200 - 5s - loss: 19589.1997 - mae: 67.3117 - val_loss: 37770.4788 - val_mae: 143.3011
Epoch 469/1000
200/200 - 5s - loss: 19830.6977 - mae: 76.4596 - val_loss: 25163.1796 - val_mae: 133.4490
Epoch 470/1000
200/200 - 5s - loss: 15603.9419 - mae: 67.2511 - val_loss: 23704.8605 - val_mae: 125.7954
Epoch 471/1000
200/200 - 5s - loss: 49097.0796 - mae: 111.9025 - val_loss: 25362.7168 - val_mae: 135.2210
Epoch 472/1000
200/200 - 5s - loss: 26923.2919 - mae: 82.2547 - val_loss: 25795.2832 - val_mae: 130.9445
Epoch 473/1000
200/200 - 5s - loss: 25212.4257 - mae: 79.0043 - val_loss: 29069.8785 - val_mae: 137.8130
Epoch 474/1000
200/200 - 5s - loss: 15663.4488 - mae: 61.7524 - val_loss: 27962.9566 - val_mae: 136.0296
Epoch 475/1000
200/200 - 5s - loss: 14983.9383 - mae: 63.7260 - val_loss: 27980.4669 - val_mae: 133.4123
Epoch 476/1000
200/200 - 5s - loss: 19099.2300 - mae: 68.3274 - val_loss: 29469.4320 - val_mae: 137.6196
Epoch 477/1000
200/200 - 5s - loss: 24778.5432 - mae: 79.4911 - val_loss: 26865.1454 - val_mae: 127.2382
Epoch 478/1000
200/200 - 5s - loss: 12400.8181 - mae: 56.2171 - val_loss: 26701.5185 - val_mae: 131.6482
Epoch 479/1000
200/200 - 5s - loss: 9197.7631 - mae: 50.1460 - val_loss: 28244.7980 - val_mae: 135.2951
Epoch 480/1000
200/200 - 5s - loss: 15092.4645 - mae: 63.2246 - val_loss: 26073.6112 - val_mae: 124.8708
Epoch 481/1000
200/200 - 5s - loss: 11392.5088 - mae: 60.3389 - val_loss: 29889.7274 - val_mae: 138.8761
Epoch 482/1000
200/200 - 5s - loss: 25755.1011 - mae: 79.1540 - val_loss: 26872.2993 - val_mae: 134.1209
Epoch 483/1000
200/200 - 5s - loss: 21386.3744 - mae: 69.2332 - val_loss: 23221.7887 - val_mae: 121.2641
Epoch 484/1000
200/200 - 5s - loss: 8133.5111 - mae: 46.8360 - val_loss: 23040.9389 - val_mae: 123.5748
Epoch 485/1000
200/200 - 5s - loss: 52736.5548 - mae: 108.7004 - val_loss: 22929.7036 - val_mae: 115.3547
Epoch 486/1000
200/200 - 5s - loss: 12699.9247 - mae: 66.2049 - val_loss: 22971.8523 - val_mae: 114.7631
Epoch 487/1000
200/200 - 5s - loss: 7819.6858 - mae: 49.6226 - val_loss: 24252.7252 - val_mae: 118.2621
Epoch 488/1000
200/200 - 5s - loss: 8290.1836 - mae: 50.9050 - val_loss: 23820.6307 - val_mae: 117.2663
Epoch 489/1000
200/200 - 5s - loss: 5239.6651 - mae: 42.0886 - val_loss: 23919.0358 - val_mae: 121.4966
Epoch 490/1000
200/200 - 5s - loss: 6113.0186 - mae: 43.7232 - val_loss: 24751.0147 - val_mae: 128.2321
Epoch 491/1000
200/200 - 5s - loss: 22943.7742 - mae: 77.5083 - val_loss: 23669.1082 - val_mae: 121.3460
Epoch 492/1000
200/200 - 5s - loss: 11239.7449 - mae: 49.0129 - val_loss: 39860.1930 - val_mae: 151.7160
Epoch 493/1000
200/200 - 5s - loss: 26559.7581 - mae: 77.3743 - val_loss: 26391.9085 - val_mae: 124.6488
Epoch 494/1000
200/200 - 5s - loss: 7733.7240 - mae: 47.7686 - val_loss: 26329.2601 - val_mae: 127.1842
Epoch 495/1000
200/200 - 5s - loss: 6065.5441 - mae: 40.5960 - val_loss: 20024.7407 - val_mae: 114.6621
Epoch 496/1000
200/200 - 5s - loss: 28086.0798 - mae: 83.3466 - val_loss: 29465.3575 - val_mae: 139.5493
Epoch 497/1000
200/200 - 5s - loss: 27393.9091 - mae: 85.6408 - val_loss: 26580.6215 - val_mae: 129.4206
Epoch 498/1000
200/200 - 5s - loss: 4457.9221 - mae: 38.8194 - val_loss: 27903.7666 - val_mae: 130.5087
Epoch 499/1000
200/200 - 5s - loss: 15107.0883 - mae: 56.7916 - val_loss: 28697.9464 - val_mae: 139.2904
Epoch 500/1000
200/200 - 5s - loss: 2907.5336 - mae: 31.3699 - val_loss: 28715.5385 - val_mae: 138.4146
Epoch 501/1000
200/200 - 5s - loss: 2977.9111 - mae: 31.6828 - val_loss: 29665.7848 - val_mae: 139.5987
Epoch 502/1000
200/200 - 5s - loss: 3590.0512 - mae: 34.7133 - val_loss: 30380.6154 - val_mae: 140.0577
Epoch 503/1000
200/200 - 5s - loss: 2360.6966 - mae: 29.1173 - val_loss: 28881.4770 - val_mae: 139.1787
Epoch 504/1000
200/200 - 5s - loss: 2082.2067 - mae: 27.1448 - val_loss: 28401.6836 - val_mae: 138.2816
Epoch 505/1000
200/200 - 5s - loss: 1898.9858 - mae: 26.5689 - val_loss: 26847.9857 - val_mae: 134.8400
Epoch 506/1000
200/200 - 5s - loss: 3692.2517 - mae: 35.0265 - val_loss: 28776.7822 - val_mae: 139.2917
Epoch 507/1000
200/200 - 5s - loss: 13507.3445 - mae: 59.0371 - val_loss: 30948.6575 - val_mae: 137.4675
Epoch 508/1000
200/200 - 5s - loss: 3121.8470 - mae: 31.5561 - val_loss: 30756.3831 - val_mae: 140.0867
Epoch 509/1000
200/200 - 5s - loss: 3729.9672 - mae: 31.4503 - val_loss: 28663.7007 - val_mae: 136.9393
Epoch 510/1000
200/200 - 5s - loss: 2127.7045 - mae: 26.9609 - val_loss: 29358.2189 - val_mae: 138.9250
Epoch 511/1000
200/200 - 5s - loss: 32117.0759 - mae: 81.3888 - val_loss: 34505.8729 - val_mae: 157.5848
Epoch 512/1000
200/200 - 5s - loss: 20339.4655 - mae: 76.2534 - val_loss: 33388.6676 - val_mae: 139.1564
Epoch 513/1000
200/200 - 5s - loss: 30120.6618 - mae: 90.9228 - val_loss: 28011.1124 - val_mae: 133.5523
Epoch 514/1000
200/200 - 5s - loss: 19452.2099 - mae: 73.9555 - val_loss: 32093.0957 - val_mae: 138.5131
Epoch 515/1000
200/200 - 5s - loss: 11809.3838 - mae: 54.7455 - val_loss: 26986.8789 - val_mae: 129.1586
Epoch 516/1000
200/200 - 5s - loss: 18681.8307 - mae: 65.7356 - val_loss: 28872.9239 - val_mae: 132.2393
Epoch 517/1000
200/200 - 5s - loss: 3811.0257 - mae: 34.7917 - val_loss: 25661.3792 - val_mae: 123.3970
Epoch 518/1000
200/200 - 5s - loss: 2964.5143 - mae: 30.2750 - val_loss: 28290.3595 - val_mae: 130.6688
Epoch 519/1000
200/200 - 5s - loss: 3564.9967 - mae: 32.8860 - val_loss: 28026.7668 - val_mae: 131.1287
Epoch 520/1000
200/200 - 5s - loss: 2767.7253 - mae: 29.0749 - val_loss: 28503.6098 - val_mae: 134.8470
Epoch 521/1000
200/200 - 5s - loss: 2290.1972 - mae: 25.9905 - val_loss: 27925.4561 - val_mae: 133.5345
Epoch 522/1000
200/200 - 5s - loss: 2195.9703 - mae: 26.0207 - val_loss: 30609.8703 - val_mae: 140.2396
Epoch 523/1000
200/200 - 5s - loss: 1984.7322 - mae: 24.4853 - val_loss: 28745.3101 - val_mae: 135.4697
Epoch 524/1000
200/200 - 5s - loss: 2411.9034 - mae: 27.1703 - val_loss: 31788.6840 - val_mae: 145.3195
Epoch 525/1000
200/200 - 5s - loss: 2101.4458 - mae: 26.2023 - val_loss: 30106.3859 - val_mae: 140.5339
Epoch 526/1000
200/200 - 5s - loss: 2064.0920 - mae: 25.7104 - val_loss: 29871.9979 - val_mae: 141.0179
Epoch 527/1000
200/200 - 5s - loss: 1536.7937 - mae: 23.2552 - val_loss: 31355.5329 - val_mae: 148.3971
Epoch 528/1000
200/200 - 5s - loss: 44549.2514 - mae: 87.6334 - val_loss: 17603.6760 - val_mae: 108.2755
Epoch 529/1000
200/200 - 5s - loss: 73541.8983 - mae: 151.2975 - val_loss: 33596.3891 - val_mae: 150.0410
Epoch 530/1000
200/200 - 5s - loss: 28159.3289 - mae: 92.1719 - val_loss: 21745.6694 - val_mae: 128.4294
Epoch 531/1000
200/200 - 5s - loss: 21562.0549 - mae: 76.6266 - val_loss: 28372.1186 - val_mae: 141.6025
Epoch 532/1000
200/200 - 5s - loss: 19632.6350 - mae: 77.7934 - val_loss: 29985.2816 - val_mae: 144.6940
Epoch 533/1000
200/200 - 5s - loss: 6858.7168 - mae: 48.6947 - val_loss: 31191.5915 - val_mae: 146.5848
Epoch 534/1000
200/200 - 5s - loss: 17858.0579 - mae: 65.9913 - val_loss: 33695.3194 - val_mae: 155.0171
Epoch 535/1000
200/200 - 6s - loss: 12081.7979 - mae: 54.4587 - val_loss: 28618.3622 - val_mae: 136.2619
Epoch 536/1000
200/200 - 6s - loss: 17396.8221 - mae: 68.3318 - val_loss: 37982.3289 - val_mae: 162.3284
Epoch 537/1000
200/200 - 6s - loss: 5271.5340 - mae: 40.0059 - val_loss: 38152.3241 - val_mae: 161.8570
Epoch 538/1000
200/200 - 6s - loss: 3894.5455 - mae: 35.1259 - val_loss: 40239.0810 - val_mae: 164.6452
Epoch 539/1000
200/200 - 6s - loss: 3061.3478 - mae: 31.7544 - val_loss: 42748.3096 - val_mae: 168.4571
Epoch 540/1000
200/200 - 6s - loss: 3729.3408 - mae: 35.0858 - val_loss: 42878.3080 - val_mae: 170.0530
Epoch 541/1000
200/200 - 6s - loss: 4779.9458 - mae: 39.1906 - val_loss: 42659.0630 - val_mae: 168.9644
Epoch 542/1000
200/200 - 6s - loss: 14932.6211 - mae: 45.3913 - val_loss: 57705.3070 - val_mae: 188.2668
Epoch 543/1000
200/200 - 6s - loss: 40369.4781 - mae: 101.9157 - val_loss: 32364.4884 - val_mae: 147.5661
Epoch 544/1000
200/200 - 6s - loss: 6196.2428 - mae: 42.9719 - val_loss: 31911.3388 - val_mae: 144.0289
Epoch 545/1000
200/200 - 6s - loss: 3092.7910 - mae: 32.9236 - val_loss: 32212.7901 - val_mae: 144.5308
Epoch 546/1000
200/200 - 6s - loss: 2527.8415 - mae: 29.9289 - val_loss: 30477.8497 - val_mae: 138.7241
Epoch 547/1000
200/200 - 6s - loss: 3913.6923 - mae: 33.2853 - val_loss: 31137.5050 - val_mae: 140.3599
Epoch 548/1000
200/200 - 6s - loss: 3144.4357 - mae: 31.3629 - val_loss: 30885.6479 - val_mae: 141.6737
Epoch 549/1000
200/200 - 6s - loss: 2504.3381 - mae: 28.3531 - val_loss: 31797.7683 - val_mae: 141.7980
Epoch 550/1000
200/200 - 6s - loss: 7332.4327 - mae: 41.9207 - val_loss: 38550.1575 - val_mae: 152.9979
Epoch 551/1000
200/200 - 6s - loss: 48293.7279 - mae: 122.9729 - val_loss: 38336.6701 - val_mae: 147.0185
Epoch 552/1000
200/200 - 6s - loss: 8269.9409 - mae: 47.5433 - val_loss: 31749.4668 - val_mae: 138.5024
Epoch 553/1000
200/200 - 6s - loss: 2889.5066 - mae: 30.8971 - val_loss: 31953.9714 - val_mae: 140.1259
Epoch 554/1000
200/200 - 6s - loss: 2482.2847 - mae: 29.1611 - val_loss: 35695.6432 - val_mae: 147.2957
Epoch 555/1000
200/200 - 6s - loss: 2654.8836 - mae: 30.5243 - val_loss: 32683.9752 - val_mae: 139.6006
Epoch 556/1000
200/200 - 6s - loss: 2590.2840 - mae: 28.8683 - val_loss: 34544.5807 - val_mae: 142.6956
Epoch 557/1000
200/200 - 6s - loss: 1653.7569 - mae: 23.3344 - val_loss: 34279.7767 - val_mae: 144.3993
Epoch 558/1000
200/200 - 6s - loss: 1530.5867 - mae: 22.1565 - val_loss: 34766.2632 - val_mae: 147.3972
Epoch 559/1000
200/200 - 6s - loss: 1713.2747 - mae: 23.2627 - val_loss: 33983.5771 - val_mae: 143.0633
Epoch 560/1000
200/200 - 5s - loss: 1688.9239 - mae: 23.2575 - val_loss: 33951.9539 - val_mae: 146.4505
Epoch 561/1000
200/200 - 5s - loss: 2180.3090 - mae: 26.5989 - val_loss: 34946.4123 - val_mae: 146.9033
Epoch 562/1000
200/200 - 5s - loss: 2870.5628 - mae: 29.6100 - val_loss: 38079.6005 - val_mae: 153.1254
Epoch 563/1000
200/200 - 5s - loss: 1878.4789 - mae: 26.1765 - val_loss: 32638.3724 - val_mae: 144.3531
Epoch 564/1000
200/200 - 5s - loss: 39183.9724 - mae: 101.7896 - val_loss: 34371.1364 - val_mae: 148.5856
Epoch 565/1000
200/200 - 5s - loss: 36382.7994 - mae: 96.6552 - val_loss: 28537.7562 - val_mae: 126.1801
Epoch 566/1000
200/200 - 5s - loss: 5704.9745 - mae: 45.8157 - val_loss: 30308.5496 - val_mae: 132.7166
Epoch 567/1000
200/200 - 5s - loss: 2894.1458 - mae: 33.1014 - val_loss: 33715.8991 - val_mae: 141.6198
Epoch 568/1000
200/200 - 5s - loss: 2541.1985 - mae: 30.9217 - val_loss: 31382.8765 - val_mae: 140.7829
Epoch 569/1000
200/200 - 5s - loss: 25907.5753 - mae: 83.0686 - val_loss: 34286.6126 - val_mae: 155.8164
Epoch 570/1000
200/200 - 5s - loss: 3509.4055 - mae: 36.0390 - val_loss: 35803.2430 - val_mae: 151.0521
Epoch 571/1000
200/200 - 5s - loss: 1807.4701 - mae: 25.7474 - val_loss: 34144.8168 - val_mae: 151.1177
Epoch 572/1000
200/200 - 5s - loss: 1784.9060 - mae: 26.0108 - val_loss: 34601.6963 - val_mae: 150.9771
Epoch 573/1000
200/200 - 5s - loss: 55469.3073 - mae: 115.6008 - val_loss: 30717.2435 - val_mae: 136.2113
Epoch 574/1000
200/200 - 5s - loss: 20753.8798 - mae: 68.5932 - val_loss: 31048.8739 - val_mae: 145.6252
Epoch 575/1000
200/200 - 5s - loss: 17869.2023 - mae: 66.7556 - val_loss: 25225.4854 - val_mae: 125.9747
Epoch 576/1000
200/200 - 5s - loss: 19270.4951 - mae: 68.1056 - val_loss: 27261.5667 - val_mae: 137.6763
Epoch 577/1000
200/200 - 5s - loss: 19627.1317 - mae: 70.4150 - val_loss: 19113.2811 - val_mae: 109.0144
Epoch 578/1000
200/200 - 6s - loss: 18678.9012 - mae: 69.8492 - val_loss: 23417.4921 - val_mae: 124.3404
Epoch 579/1000
200/200 - 6s - loss: 2566.9315 - mae: 31.2034 - val_loss: 26689.8851 - val_mae: 129.4380
Epoch 580/1000
200/200 - 6s - loss: 1752.3387 - mae: 26.6647 - val_loss: 26559.0479 - val_mae: 129.2331
Epoch 581/1000
200/200 - 6s - loss: 1318.2167 - mae: 22.9810 - val_loss: 28477.5846 - val_mae: 134.4601
Epoch 582/1000
200/200 - 6s - loss: 1291.5551 - mae: 22.6823 - val_loss: 26638.0801 - val_mae: 130.6361
Epoch 583/1000
200/200 - 6s - loss: 1272.4604 - mae: 21.5196 - val_loss: 26022.6420 - val_mae: 129.6935
Epoch 584/1000
200/200 - 6s - loss: 1379.4212 - mae: 21.0232 - val_loss: 27077.1271 - val_mae: 131.1552
Epoch 585/1000
200/200 - 6s - loss: 1717.7257 - mae: 24.1112 - val_loss: 29817.0713 - val_mae: 139.8526
Epoch 586/1000
200/200 - 6s - loss: 1879.7802 - mae: 24.0874 - val_loss: 29845.8678 - val_mae: 137.4332
Epoch 587/1000
200/200 - 6s - loss: 1360.4699 - mae: 21.6418 - val_loss: 36639.2973 - val_mae: 155.9266
Epoch 588/1000
200/200 - 6s - loss: 984.4960 - mae: 18.5584 - val_loss: 32926.0613 - val_mae: 148.5345
Epoch 589/1000
200/200 - 6s - loss: 1226.0109 - mae: 21.7433 - val_loss: 31015.8542 - val_mae: 146.9978
Epoch 590/1000
200/200 - 6s - loss: 70871.5838 - mae: 123.3808 - val_loss: 39233.6861 - val_mae: 150.3947
Epoch 591/1000
200/200 - 6s - loss: 51451.0943 - mae: 122.2151 - val_loss: 40547.9155 - val_mae: 150.6565
Epoch 592/1000
200/200 - 6s - loss: 21457.0354 - mae: 78.6500 - val_loss: 35391.6801 - val_mae: 143.0080
Epoch 593/1000
200/200 - 6s - loss: 21404.8590 - mae: 76.6487 - val_loss: 40908.9038 - val_mae: 164.8596
Epoch 594/1000
200/200 - 6s - loss: 26724.5303 - mae: 85.1054 - val_loss: 42966.3826 - val_mae: 169.2337
Epoch 595/1000
200/200 - 6s - loss: 17162.0239 - mae: 67.8200 - val_loss: 43286.5748 - val_mae: 165.3064
Epoch 596/1000
200/200 - 6s - loss: 14080.6872 - mae: 61.9832 - val_loss: 34794.6668 - val_mae: 149.3433
Epoch 597/1000
200/200 - 6s - loss: 6671.8347 - mae: 46.1157 - val_loss: 34932.7257 - val_mae: 148.5180
Epoch 598/1000
200/200 - 6s - loss: 3779.6034 - mae: 35.4209 - val_loss: 36300.3649 - val_mae: 151.4193
Epoch 599/1000
200/200 - 6s - loss: 4039.5386 - mae: 37.8062 - val_loss: 34949.1205 - val_mae: 149.3580
Epoch 600/1000
200/200 - 6s - loss: 2617.7055 - mae: 30.3430 - val_loss: 35999.2815 - val_mae: 151.3329
Epoch 601/1000
200/200 - 6s - loss: 5473.0139 - mae: 41.2791 - val_loss: 35430.1121 - val_mae: 151.8726
Epoch 602/1000
200/200 - 6s - loss: 4414.1085 - mae: 39.0690 - val_loss: 29922.4821 - val_mae: 140.3699
Epoch 603/1000
200/200 - 6s - loss: 28218.9470 - mae: 88.6380 - val_loss: 31768.8908 - val_mae: 141.8798
Epoch 604/1000
200/200 - 6s - loss: 2712.3141 - mae: 31.9427 - val_loss: 28470.8992 - val_mae: 134.7927
Epoch 605/1000
200/200 - 6s - loss: 2694.4735 - mae: 32.1476 - val_loss: 30231.1427 - val_mae: 140.4949
Epoch 606/1000
200/200 - 6s - loss: 3059.1530 - mae: 33.7997 - val_loss: 28187.6707 - val_mae: 134.6213
Epoch 607/1000
200/200 - 6s - loss: 2162.0525 - mae: 28.4167 - val_loss: 29840.6143 - val_mae: 140.0446
Epoch 608/1000
200/200 - 6s - loss: 2022.0416 - mae: 27.1376 - val_loss: 31435.4885 - val_mae: 143.1520
Epoch 609/1000
200/200 - 6s - loss: 20111.8445 - mae: 55.9164 - val_loss: 32081.4808 - val_mae: 138.6381
Epoch 610/1000
200/200 - 6s - loss: 43704.4988 - mae: 119.0066 - val_loss: 49137.5137 - val_mae: 177.1057
Epoch 611/1000
200/200 - 6s - loss: 29654.3999 - mae: 85.0128 - val_loss: 38242.8707 - val_mae: 159.8459
Epoch 612/1000
200/200 - 6s - loss: 3801.6727 - mae: 38.9225 - val_loss: 37763.5586 - val_mae: 163.5624
Epoch 613/1000
200/200 - 6s - loss: 2283.5878 - mae: 29.5796 - val_loss: 40397.4341 - val_mae: 167.1354
Epoch 614/1000
200/200 - 6s - loss: 2435.6932 - mae: 29.9533 - val_loss: 36701.9790 - val_mae: 160.6602
Epoch 615/1000
200/200 - 6s - loss: 2164.6536 - mae: 29.1563 - val_loss: 33201.5549 - val_mae: 151.7371
Epoch 616/1000
200/200 - 6s - loss: 2499.4737 - mae: 29.0986 - val_loss: 36506.7341 - val_mae: 158.2118
Epoch 617/1000
200/200 - 6s - loss: 1734.9075 - mae: 25.4601 - val_loss: 36275.5464 - val_mae: 156.7254
Epoch 618/1000
200/200 - 6s - loss: 23047.9003 - mae: 57.6326 - val_loss: 35435.9216 - val_mae: 165.1085
Epoch 619/1000
200/200 - 6s - loss: 63155.6098 - mae: 140.1846 - val_loss: 36647.1129 - val_mae: 156.7718
Epoch 620/1000
200/200 - 6s - loss: 18281.1773 - mae: 75.8300 - val_loss: 41999.1204 - val_mae: 171.5922
Epoch 621/1000
200/200 - 6s - loss: 13277.5079 - mae: 63.3214 - val_loss: 47749.9664 - val_mae: 176.2352
Epoch 622/1000
200/200 - 5s - loss: 11953.3961 - mae: 60.3936 - val_loss: 48438.0300 - val_mae: 173.6883
Epoch 623/1000
200/200 - 6s - loss: 9776.5419 - mae: 54.6730 - val_loss: 44170.1014 - val_mae: 163.8968
Epoch 624/1000
200/200 - 6s - loss: 5668.7297 - mae: 44.2041 - val_loss: 45175.7900 - val_mae: 166.3139
Epoch 625/1000
200/200 - 6s - loss: 14624.0587 - mae: 60.6118 - val_loss: 44072.3281 - val_mae: 164.8596
Epoch 626/1000
200/200 - 6s - loss: 4634.4761 - mae: 40.7351 - val_loss: 40583.9382 - val_mae: 157.9032
Epoch 627/1000
200/200 - 6s - loss: 3260.2607 - mae: 34.6624 - val_loss: 41807.3145 - val_mae: 160.4175
Epoch 628/1000
200/200 - 6s - loss: 27018.3035 - mae: 84.2758 - val_loss: 40595.6837 - val_mae: 171.6911
Epoch 629/1000
200/200 - 6s - loss: 4960.1485 - mae: 42.2990 - val_loss: 43128.9304 - val_mae: 173.2320
Epoch 630/1000
200/200 - 6s - loss: 2811.5627 - mae: 32.0471 - val_loss: 42374.5055 - val_mae: 170.7085
Epoch 631/1000
200/200 - 6s - loss: 2382.6532 - mae: 29.6380 - val_loss: 41796.7447 - val_mae: 170.6962
Epoch 632/1000
200/200 - 6s - loss: 8591.3314 - mae: 46.1367 - val_loss: 26803.0911 - val_mae: 115.5921
Epoch 633/1000
200/200 - 8s - loss: 6098.2249 - mae: 43.0526 - val_loss: 31665.4474 - val_mae: 143.4333
Epoch 634/1000
200/200 - 6s - loss: 1994.9243 - mae: 27.0279 - val_loss: 35031.2294 - val_mae: 152.0678
Epoch 635/1000
200/200 - 6s - loss: 3097.8484 - mae: 29.9757 - val_loss: 42074.6169 - val_mae: 172.5438
Epoch 636/1000
200/200 - 6s - loss: 2455.4054 - mae: 29.4310 - val_loss: 38195.0391 - val_mae: 165.0075
Epoch 637/1000
200/200 - 6s - loss: 1754.1780 - mae: 25.2739 - val_loss: 41010.4334 - val_mae: 172.3627
Epoch 638/1000
200/200 - 6s - loss: 1678.6992 - mae: 24.3816 - val_loss: 39743.8272 - val_mae: 170.0708
Epoch 639/1000
200/200 - 6s - loss: 22956.1853 - mae: 70.6530 - val_loss: 22377.2937 - val_mae: 111.0200
Epoch 640/1000
200/200 - 6s - loss: 17307.5266 - mae: 69.8927 - val_loss: 32059.4621 - val_mae: 137.6201
Epoch 641/1000
200/200 - 6s - loss: 6056.2231 - mae: 45.3650 - val_loss: 28967.6598 - val_mae: 134.7022
Epoch 642/1000
200/200 - 6s - loss: 2709.5136 - mae: 29.7033 - val_loss: 31860.4230 - val_mae: 143.1549
Epoch 643/1000
200/200 - 6s - loss: 2055.7854 - mae: 26.5306 - val_loss: 31299.9966 - val_mae: 143.9662
Epoch 644/1000
200/200 - 6s - loss: 1952.9612 - mae: 26.1953 - val_loss: 30972.9122 - val_mae: 141.5163
Epoch 645/1000
200/200 - 5s - loss: 2603.2702 - mae: 29.3025 - val_loss: 33484.7846 - val_mae: 147.6163
Epoch 646/1000
200/200 - 6s - loss: 1672.5439 - mae: 24.1128 - val_loss: 31377.4083 - val_mae: 143.7539
Epoch 647/1000
200/200 - 5s - loss: 4706.7135 - mae: 30.6351 - val_loss: 30009.7005 - val_mae: 134.7458
Epoch 648/1000
200/200 - 5s - loss: 44026.8366 - mae: 105.9778 - val_loss: 22841.4321 - val_mae: 116.2567
Epoch 649/1000
200/200 - 6s - loss: 4604.5166 - mae: 37.2783 - val_loss: 24289.9338 - val_mae: 121.6153
Epoch 650/1000
200/200 - 5s - loss: 1570.1026 - mae: 23.5361 - val_loss: 24207.1261 - val_mae: 124.3251
Epoch 651/1000
200/200 - 5s - loss: 1558.7863 - mae: 22.1650 - val_loss: 24419.4845 - val_mae: 125.7779
Epoch 652/1000
200/200 - 5s - loss: 1219.7487 - mae: 20.4514 - val_loss: 26214.2209 - val_mae: 130.9019
Epoch 653/1000
200/200 - 5s - loss: 1269.9182 - mae: 21.0013 - val_loss: 27265.0122 - val_mae: 133.8965
Epoch 654/1000
200/200 - 5s - loss: 1191.2304 - mae: 20.4102 - val_loss: 27539.0501 - val_mae: 133.6155
Epoch 655/1000
200/200 - 5s - loss: 1199.8639 - mae: 19.8719 - val_loss: 25617.4076 - val_mae: 130.1463
Epoch 656/1000
200/200 - 5s - loss: 1703.9575 - mae: 24.3331 - val_loss: 26620.7761 - val_mae: 130.6499
Epoch 657/1000
200/200 - 5s - loss: 31563.9894 - mae: 73.6559 - val_loss: 54124.2999 - val_mae: 197.4873
Epoch 658/1000
200/200 - 5s - loss: 18616.2167 - mae: 72.4174 - val_loss: 31261.1915 - val_mae: 141.1556
Epoch 659/1000
200/200 - 5s - loss: 11441.7044 - mae: 51.4465 - val_loss: 45796.7515 - val_mae: 166.3041
Epoch 660/1000
200/200 - 5s - loss: 2234.2970 - mae: 27.8604 - val_loss: 47381.7339 - val_mae: 170.4038
Epoch 661/1000
200/200 - 5s - loss: 1157.9759 - mae: 20.2380 - val_loss: 47119.0216 - val_mae: 169.4406
Epoch 662/1000
200/200 - 5s - loss: 1178.2709 - mae: 19.0450 - val_loss: 47041.5800 - val_mae: 170.0654
Epoch 663/1000
200/200 - 5s - loss: 1688.1038 - mae: 22.3724 - val_loss: 49242.6223 - val_mae: 174.9084
Epoch 664/1000
200/200 - 5s - loss: 3056.6234 - mae: 27.6549 - val_loss: 47978.4830 - val_mae: 172.8764
Epoch 665/1000
200/200 - 5s - loss: 1283.0815 - mae: 20.7838 - val_loss: 48630.3652 - val_mae: 175.3038
Epoch 666/1000
200/200 - 5s - loss: 2567.0667 - mae: 27.0732 - val_loss: 48113.3545 - val_mae: 177.7767
Epoch 667/1000
200/200 - 5s - loss: 47895.4354 - mae: 105.1153 - val_loss: 61206.7988 - val_mae: 188.3365
Epoch 668/1000
200/200 - 5s - loss: 8930.7373 - mae: 56.4981 - val_loss: 46911.4177 - val_mae: 176.8602
Epoch 669/1000
200/200 - 5s - loss: 3316.0418 - mae: 33.8009 - val_loss: 44200.5375 - val_mae: 172.8324
Epoch 670/1000
200/200 - 5s - loss: 2364.5237 - mae: 28.0042 - val_loss: 44119.1398 - val_mae: 172.8172
Epoch 671/1000
200/200 - 5s - loss: 2925.9340 - mae: 29.3180 - val_loss: 46292.5248 - val_mae: 177.8627
Epoch 672/1000
200/200 - 5s - loss: 2162.7178 - mae: 25.0339 - val_loss: 46684.8815 - val_mae: 179.2410
Epoch 673/1000
200/200 - 5s - loss: 2185.8162 - mae: 25.0024 - val_loss: 47457.9742 - val_mae: 180.2366
Epoch 674/1000
200/200 - 5s - loss: 2551.7467 - mae: 27.3028 - val_loss: 49253.8244 - val_mae: 184.4342
Epoch 675/1000
200/200 - 5s - loss: 2230.1592 - mae: 26.0687 - val_loss: 48024.6741 - val_mae: 180.8171
Epoch 676/1000
200/200 - 5s - loss: 2716.1452 - mae: 28.9413 - val_loss: 47146.6095 - val_mae: 179.7066
Epoch 677/1000
200/200 - 5s - loss: 2764.9797 - mae: 28.3367 - val_loss: 47271.9809 - val_mae: 180.0489
Epoch 678/1000
200/200 - 5s - loss: 91257.9907 - mae: 155.5396 - val_loss: 51528.5396 - val_mae: 180.6929
Epoch 679/1000
200/200 - 5s - loss: 38516.0722 - mae: 108.9738 - val_loss: 95087.6847 - val_mae: 245.5572
Epoch 680/1000
200/200 - 5s - loss: 23386.5165 - mae: 84.6591 - val_loss: 88141.4545 - val_mae: 233.4674
Epoch 681/1000
200/200 - 5s - loss: 22288.3511 - mae: 83.8600 - val_loss: 67863.0448 - val_mae: 204.5654
Epoch 682/1000
200/200 - 5s - loss: 15183.0145 - mae: 68.9475 - val_loss: 67537.3975 - val_mae: 221.6337
Epoch 683/1000
200/200 - 5s - loss: 9125.7384 - mae: 57.0959 - val_loss: 67590.6841 - val_mae: 222.2123
Epoch 684/1000
200/200 - 5s - loss: 12194.8499 - mae: 64.2178 - val_loss: 57997.5870 - val_mae: 204.4897
Epoch 685/1000
200/200 - 5s - loss: 20472.0865 - mae: 75.2199 - val_loss: 65205.3922 - val_mae: 213.3932
Epoch 686/1000
200/200 - 5s - loss: 6396.7916 - mae: 48.3013 - val_loss: 73141.8374 - val_mae: 227.6682
Epoch 687/1000
200/200 - 5s - loss: 4508.9728 - mae: 42.2993 - val_loss: 66833.0762 - val_mae: 212.9752
Epoch 688/1000
200/200 - 5s - loss: 4221.9006 - mae: 41.1670 - val_loss: 67500.8752 - val_mae: 209.5086
Epoch 689/1000
200/200 - 5s - loss: 4005.6037 - mae: 38.8400 - val_loss: 66995.3081 - val_mae: 211.4325
Epoch 690/1000
200/200 - 5s - loss: 23051.3593 - mae: 76.0033 - val_loss: 53355.0839 - val_mae: 189.0371
Epoch 691/1000
200/200 - 5s - loss: 8562.4603 - mae: 54.2216 - val_loss: 71302.5555 - val_mae: 227.7313
Epoch 692/1000
200/200 - 5s - loss: 3307.7388 - mae: 35.0390 - val_loss: 72608.9440 - val_mae: 228.9850
Epoch 693/1000
200/200 - 5s - loss: 3278.7991 - mae: 35.6880 - val_loss: 64371.3648 - val_mae: 216.7296
Epoch 694/1000
200/200 - 5s - loss: 3424.8899 - mae: 35.0467 - val_loss: 70269.8605 - val_mae: 225.8685
Epoch 695/1000
200/200 - 5s - loss: 14018.2316 - mae: 55.7423 - val_loss: 75092.6535 - val_mae: 225.9861
Epoch 696/1000
200/200 - 5s - loss: 17726.4998 - mae: 69.4664 - val_loss: 64324.8648 - val_mae: 209.5261
Epoch 697/1000
200/200 - 5s - loss: 8351.4068 - mae: 49.7201 - val_loss: 56695.2741 - val_mae: 198.6230
Epoch 698/1000
200/200 - 5s - loss: 2802.6555 - mae: 32.3403 - val_loss: 57866.5171 - val_mae: 199.8808
Epoch 699/1000
200/200 - 5s - loss: 3019.8963 - mae: 32.6789 - val_loss: 59232.4603 - val_mae: 201.8088
Epoch 700/1000
200/200 - 5s - loss: 2729.2462 - mae: 32.3868 - val_loss: 66370.6590 - val_mae: 211.9891
Epoch 701/1000
200/200 - 5s - loss: 3245.8593 - mae: 33.7324 - val_loss: 64801.2324 - val_mae: 208.8179
Epoch 702/1000
200/200 - 5s - loss: 75121.0543 - mae: 148.2632 - val_loss: 68573.4665 - val_mae: 223.5534
Epoch 703/1000
200/200 - 5s - loss: 35387.5135 - mae: 99.9452 - val_loss: 55444.3756 - val_mae: 210.5912
Epoch 704/1000
200/200 - 6s - loss: 30683.3313 - mae: 90.9509 - val_loss: 55482.5868 - val_mae: 209.4248
Epoch 705/1000
200/200 - 6s - loss: 27272.9777 - mae: 85.0048 - val_loss: 61287.8254 - val_mae: 218.9295
Epoch 706/1000
200/200 - 5s - loss: 25172.3428 - mae: 80.6503 - val_loss: 73753.9226 - val_mae: 237.8765
Epoch 707/1000
200/200 - 5s - loss: 32186.7095 - mae: 95.6442 - val_loss: 76124.1774 - val_mae: 239.8820
Epoch 708/1000
200/200 - 5s - loss: 17779.9880 - mae: 66.6925 - val_loss: 67907.5801 - val_mae: 224.3802
Epoch 709/1000
200/200 - 5s - loss: 21008.9403 - mae: 74.3962 - val_loss: 71606.6469 - val_mae: 229.1142
Epoch 710/1000
200/200 - 6s - loss: 19362.6221 - mae: 74.7814 - val_loss: 82746.2312 - val_mae: 247.6717
Epoch 711/1000
200/200 - 5s - loss: 18482.4161 - mae: 70.4121 - val_loss: 68009.4327 - val_mae: 223.4368
Epoch 712/1000
200/200 - 5s - loss: 16287.3960 - mae: 64.8713 - val_loss: 79855.0976 - val_mae: 245.1716
Epoch 713/1000
200/200 - 6s - loss: 14200.1050 - mae: 61.1036 - val_loss: 74362.2466 - val_mae: 239.8439
Epoch 714/1000
200/200 - 5s - loss: 10961.7204 - mae: 54.9007 - val_loss: 56966.1937 - val_mae: 209.6248
Epoch 715/1000
200/200 - 6s - loss: 11393.4016 - mae: 56.7913 - val_loss: 41991.0652 - val_mae: 182.9178
Epoch 716/1000
200/200 - 5s - loss: 17066.7523 - mae: 71.1980 - val_loss: 47051.3366 - val_mae: 189.4328
Epoch 717/1000
200/200 - 5s - loss: 17529.3606 - mae: 72.1040 - val_loss: 55068.0502 - val_mae: 204.4425
Epoch 718/1000
200/200 - 5s - loss: 11615.0743 - mae: 58.4422 - val_loss: 66383.0436 - val_mae: 229.0278
Epoch 719/1000
200/200 - 5s - loss: 11127.7551 - mae: 56.8094 - val_loss: 59438.4373 - val_mae: 218.7000
Epoch 720/1000
200/200 - 6s - loss: 9409.6393 - mae: 54.5977 - val_loss: 54672.2719 - val_mae: 208.9375
Epoch 721/1000
200/200 - 6s - loss: 9729.1279 - mae: 53.4268 - val_loss: 55709.3351 - val_mae: 209.3547
Epoch 722/1000
200/200 - 6s - loss: 6412.9023 - mae: 45.4127 - val_loss: 58379.3705 - val_mae: 216.4567
Epoch 723/1000
200/200 - 6s - loss: 5433.3280 - mae: 43.1965 - val_loss: 56360.7912 - val_mae: 212.0184
Epoch 724/1000
200/200 - 6s - loss: 7151.5241 - mae: 47.5527 - val_loss: 55464.4774 - val_mae: 210.9553
Epoch 725/1000
200/200 - 5s - loss: 4639.9171 - mae: 40.1466 - val_loss: 52556.6808 - val_mae: 203.1997
Epoch 726/1000
200/200 - 5s - loss: 10919.2474 - mae: 57.4912 - val_loss: 52515.2371 - val_mae: 191.0030
Epoch 727/1000
200/200 - 5s - loss: 17043.9197 - mae: 69.7083 - val_loss: 54028.4245 - val_mae: 205.2924
Epoch 728/1000
200/200 - 5s - loss: 4807.5524 - mae: 39.2619 - val_loss: 48773.8901 - val_mae: 197.4620
Epoch 729/1000
200/200 - 5s - loss: 29870.6691 - mae: 89.0890 - val_loss: 58698.7905 - val_mae: 216.1344
Epoch 730/1000
200/200 - 5s - loss: 3508.0561 - mae: 35.3788 - val_loss: 63111.8227 - val_mae: 223.3842
Epoch 731/1000
200/200 - 5s - loss: 2625.7222 - mae: 30.1444 - val_loss: 58444.1818 - val_mae: 212.7367
Epoch 732/1000
200/200 - 5s - loss: 3192.0262 - mae: 32.9727 - val_loss: 61360.1945 - val_mae: 219.0898
Epoch 733/1000
200/200 - 6s - loss: 4584.7893 - mae: 37.6685 - val_loss: 62103.7306 - val_mae: 221.1903
Epoch 734/1000
200/200 - 5s - loss: 6520.7283 - mae: 44.1954 - val_loss: 66412.1271 - val_mae: 229.2436
Epoch 735/1000
200/200 - 5s - loss: 2312.6180 - mae: 28.6484 - val_loss: 65715.8370 - val_mae: 229.6837
Epoch 736/1000
200/200 - 5s - loss: 3943.7994 - mae: 37.2565 - val_loss: 55449.6990 - val_mae: 210.7441
Epoch 737/1000
200/200 - 5s - loss: 18465.1928 - mae: 72.0978 - val_loss: 48540.2315 - val_mae: 187.9180
Epoch 738/1000
200/200 - 6s - loss: 14842.5634 - mae: 69.8005 - val_loss: 45934.8831 - val_mae: 188.3114
Epoch 739/1000
200/200 - 5s - loss: 2805.4890 - mae: 32.1146 - val_loss: 44331.8280 - val_mae: 188.1669
Epoch 740/1000
200/200 - 5s - loss: 6452.2832 - mae: 36.4390 - val_loss: 44977.9688 - val_mae: 186.4863
Epoch 741/1000
200/200 - 5s - loss: 4901.2387 - mae: 36.0534 - val_loss: 46507.4487 - val_mae: 188.2741
Epoch 742/1000
200/200 - 5s - loss: 2686.5417 - mae: 30.6027 - val_loss: 48326.1204 - val_mae: 197.7398
Epoch 743/1000
200/200 - 5s - loss: 6429.6208 - mae: 45.2375 - val_loss: 44587.5978 - val_mae: 190.0048
Epoch 744/1000
200/200 - 5s - loss: 2471.3357 - mae: 29.4678 - val_loss: 37057.0557 - val_mae: 169.7523
Epoch 745/1000
200/200 - 5s - loss: 1693.0198 - mae: 24.3544 - val_loss: 44020.4935 - val_mae: 184.8006
Epoch 746/1000
200/200 - 5s - loss: 12720.4037 - mae: 55.1884 - val_loss: 45996.6459 - val_mae: 177.0861
Epoch 747/1000
200/200 - 5s - loss: 7153.2523 - mae: 47.2072 - val_loss: 26602.4479 - val_mae: 139.7689
Epoch 748/1000
200/200 - 5s - loss: 1778.2202 - mae: 25.9346 - val_loss: 26205.3411 - val_mae: 135.2782
Epoch 749/1000
200/200 - 5s - loss: 1485.2881 - mae: 23.3527 - val_loss: 26998.0774 - val_mae: 139.8957
Epoch 750/1000
200/200 - 5s - loss: 9698.6084 - mae: 46.9290 - val_loss: 37418.1429 - val_mae: 167.1145
Epoch 751/1000
200/200 - 5s - loss: 5641.4691 - mae: 41.4527 - val_loss: 27356.8859 - val_mae: 144.7486
Epoch 752/1000
200/200 - 5s - loss: 1966.0289 - mae: 25.9449 - val_loss: 24755.5275 - val_mae: 137.8897
Epoch 753/1000
200/200 - 5s - loss: 2665.4817 - mae: 30.9242 - val_loss: 31514.9243 - val_mae: 153.3170
Epoch 754/1000
200/200 - 5s - loss: 7207.2282 - mae: 37.4317 - val_loss: 27586.1392 - val_mae: 146.1114
Epoch 755/1000
200/200 - 5s - loss: 3029.7581 - mae: 31.7084 - val_loss: 33762.1561 - val_mae: 161.3053
Epoch 756/1000
200/200 - 5s - loss: 2796.6176 - mae: 30.2488 - val_loss: 27886.5033 - val_mae: 147.5796
Epoch 757/1000
200/200 - 5s - loss: 30294.2450 - mae: 81.9266 - val_loss: 45341.6390 - val_mae: 187.2034
Epoch 758/1000
200/200 - 5s - loss: 2794.7613 - mae: 31.6441 - val_loss: 42508.6504 - val_mae: 174.4499
Epoch 759/1000
200/200 - 5s - loss: 2372.3601 - mae: 29.0546 - val_loss: 45141.5031 - val_mae: 183.0089
Epoch 760/1000
200/200 - 5s - loss: 1491.5514 - mae: 22.6529 - val_loss: 45174.0068 - val_mae: 184.4098
Epoch 761/1000
200/200 - 5s - loss: 2501.9677 - mae: 29.8724 - val_loss: 46527.7965 - val_mae: 191.1367
Epoch 762/1000
200/200 - 5s - loss: 1638.5182 - mae: 24.8824 - val_loss: 49662.1925 - val_mae: 198.3248
Epoch 763/1000
200/200 - 5s - loss: 1432.2251 - mae: 22.2818 - val_loss: 51617.7206 - val_mae: 202.1114
Epoch 764/1000
200/200 - 5s - loss: 1685.2141 - mae: 24.3301 - val_loss: 50333.1323 - val_mae: 199.2482
Epoch 765/1000
200/200 - 5s - loss: 1272.4944 - mae: 21.3772 - val_loss: 51811.4761 - val_mae: 200.9635
Epoch 766/1000
200/200 - 5s - loss: 1242.7328 - mae: 21.1572 - val_loss: 50572.6653 - val_mae: 196.7404
Epoch 767/1000
200/200 - 5s - loss: 11003.2354 - mae: 36.0917 - val_loss: 75315.1754 - val_mae: 244.7169
Epoch 768/1000
200/200 - 5s - loss: 69582.8709 - mae: 145.9264 - val_loss: 91552.5575 - val_mae: 253.4140
Epoch 769/1000
200/200 - 6s - loss: 26954.7748 - mae: 84.7568 - val_loss: 89625.9128 - val_mae: 254.9024
Epoch 770/1000
200/200 - 5s - loss: 19726.9262 - mae: 75.8991 - val_loss: 102793.1639 - val_mae: 270.4897
Epoch 771/1000
200/200 - 5s - loss: 14094.8531 - mae: 63.7789 - val_loss: 99081.5811 - val_mae: 271.9773
Epoch 772/1000
200/200 - 5s - loss: 8096.1873 - mae: 49.4780 - val_loss: 87849.7222 - val_mae: 253.9107
Epoch 773/1000
200/200 - 5s - loss: 5642.1174 - mae: 42.7043 - val_loss: 89740.3997 - val_mae: 255.8100
Epoch 774/1000
200/200 - 5s - loss: 7289.9672 - mae: 46.6109 - val_loss: 82843.2812 - val_mae: 245.9754
Epoch 775/1000
200/200 - 6s - loss: 6028.1046 - mae: 44.7262 - val_loss: 79382.5111 - val_mae: 242.6966
Epoch 776/1000
200/200 - 5s - loss: 5491.8278 - mae: 40.9878 - val_loss: 78384.5377 - val_mae: 241.0405
Epoch 777/1000
200/200 - 5s - loss: 4246.0376 - mae: 37.1940 - val_loss: 80456.4358 - val_mae: 245.6927
Epoch 778/1000
200/200 - 5s - loss: 21630.5469 - mae: 59.7419 - val_loss: 39673.3033 - val_mae: 166.4334
Epoch 779/1000
200/200 - 5s - loss: 35913.0943 - mae: 97.7165 - val_loss: 29632.2584 - val_mae: 145.0044
Epoch 780/1000
200/200 - 5s - loss: 21375.8145 - mae: 75.0747 - val_loss: 33524.9232 - val_mae: 159.0769
Epoch 781/1000
200/200 - 5s - loss: 18107.4016 - mae: 69.8288 - val_loss: 39289.1427 - val_mae: 174.6031
Epoch 782/1000
200/200 - 5s - loss: 7782.5110 - mae: 47.8715 - val_loss: 39253.9314 - val_mae: 174.0389
Epoch 783/1000
200/200 - 5s - loss: 8814.4822 - mae: 50.0984 - val_loss: 41822.2415 - val_mae: 177.5765
Epoch 784/1000
200/200 - 5s - loss: 11075.6488 - mae: 58.0705 - val_loss: 52062.4850 - val_mae: 196.3584
Epoch 785/1000
200/200 - 5s - loss: 11905.5713 - mae: 59.1963 - val_loss: 39524.1580 - val_mae: 176.0364
Epoch 786/1000
200/200 - 5s - loss: 5538.5229 - mae: 40.4181 - val_loss: 42727.5292 - val_mae: 182.4323
Epoch 787/1000
200/200 - 5s - loss: 7872.8169 - mae: 47.8913 - val_loss: 39620.3666 - val_mae: 178.2469
Epoch 788/1000
200/200 - 5s - loss: 8493.7277 - mae: 47.4891 - val_loss: 39100.2028 - val_mae: 172.0412
Epoch 789/1000
200/200 - 5s - loss: 5420.5697 - mae: 41.2205 - val_loss: 37817.8697 - val_mae: 174.2991
Epoch 790/1000
200/200 - 5s - loss: 10239.1863 - mae: 47.2193 - val_loss: 62293.1607 - val_mae: 205.5742
Epoch 791/1000
200/200 - 5s - loss: 7915.7910 - mae: 47.4552 - val_loss: 33672.3074 - val_mae: 165.1976
Epoch 792/1000
200/200 - 6s - loss: 4813.7405 - mae: 37.4210 - val_loss: 34814.7235 - val_mae: 169.0984
Epoch 793/1000
200/200 - 5s - loss: 3818.6955 - mae: 34.3721 - val_loss: 33827.9475 - val_mae: 165.1547
Epoch 794/1000
200/200 - 5s - loss: 9830.1327 - mae: 54.5725 - val_loss: 33528.7114 - val_mae: 164.1275
Epoch 795/1000
200/200 - 5s - loss: 4533.3407 - mae: 38.1345 - val_loss: 35485.9582 - val_mae: 165.9470
Epoch 796/1000
200/200 - 5s - loss: 7328.5242 - mae: 45.9072 - val_loss: 29788.8176 - val_mae: 148.3195
Epoch 797/1000
200/200 - 7s - loss: 3725.1282 - mae: 34.6472 - val_loss: 33441.6957 - val_mae: 157.8105
Epoch 798/1000
200/200 - 6s - loss: 9393.3694 - mae: 52.5414 - val_loss: 27993.8484 - val_mae: 146.9309
Epoch 799/1000
200/200 - 5s - loss: 38425.2302 - mae: 100.2048 - val_loss: 32388.8248 - val_mae: 153.5488
Epoch 800/1000
200/200 - 5s - loss: 11880.3973 - mae: 59.0299 - val_loss: 31443.9568 - val_mae: 141.7567
Epoch 801/1000
200/200 - 5s - loss: 4151.8237 - mae: 37.1888 - val_loss: 31648.4358 - val_mae: 141.0189
Epoch 802/1000
200/200 - 5s - loss: 4101.7026 - mae: 36.9909 - val_loss: 32709.4952 - val_mae: 142.8880
Epoch 803/1000
200/200 - 5s - loss: 3990.4426 - mae: 37.1139 - val_loss: 31829.2927 - val_mae: 142.0843
Epoch 804/1000
200/200 - 5s - loss: 5957.0471 - mae: 41.5821 - val_loss: 35923.5696 - val_mae: 152.4187
Epoch 805/1000
200/200 - 5s - loss: 15573.5021 - mae: 66.9175 - val_loss: 45582.5360 - val_mae: 171.3065
Epoch 806/1000
200/200 - 5s - loss: 2943.8408 - mae: 32.3453 - val_loss: 43822.3605 - val_mae: 167.2435
Epoch 807/1000
200/200 - 5s - loss: 2925.3475 - mae: 32.1142 - val_loss: 40861.8902 - val_mae: 162.7142
Epoch 808/1000
200/200 - 5s - loss: 40614.3701 - mae: 94.5323 - val_loss: 39223.1356 - val_mae: 168.0314
Epoch 809/1000
200/200 - 5s - loss: 6557.9580 - mae: 46.2786 - val_loss: 35650.8016 - val_mae: 164.4547
Epoch 810/1000
200/200 - 5s - loss: 2981.0900 - mae: 31.2849 - val_loss: 31886.9785 - val_mae: 155.2417
Epoch 811/1000
200/200 - 5s - loss: 2316.8084 - mae: 28.9618 - val_loss: 34497.8035 - val_mae: 160.1841
Epoch 812/1000
200/200 - 5s - loss: 8008.9936 - mae: 47.0010 - val_loss: 35515.8539 - val_mae: 161.3022
Epoch 813/1000
200/200 - 6s - loss: 2617.0964 - mae: 30.2150 - val_loss: 37640.1161 - val_mae: 167.5129
Epoch 814/1000
200/200 - 6s - loss: 3554.7740 - mae: 33.4469 - val_loss: 37214.4616 - val_mae: 165.8319
Epoch 815/1000
200/200 - 5s - loss: 13666.2719 - mae: 52.6795 - val_loss: 53833.4684 - val_mae: 193.7148
Epoch 816/1000
200/200 - 5s - loss: 6895.2363 - mae: 45.5430 - val_loss: 41151.9675 - val_mae: 176.3007
Epoch 817/1000
200/200 - 5s - loss: 2920.6043 - mae: 32.4045 - val_loss: 40428.5304 - val_mae: 177.0752
Epoch 818/1000
200/200 - 5s - loss: 1912.5447 - mae: 26.0518 - val_loss: 42143.6048 - val_mae: 179.4229
Epoch 819/1000
200/200 - 6s - loss: 2987.4854 - mae: 31.7382 - val_loss: 38211.6406 - val_mae: 170.5282
Epoch 820/1000
200/200 - 5s - loss: 14943.2482 - mae: 68.0275 - val_loss: 34872.5543 - val_mae: 153.4049
Epoch 821/1000
200/200 - 6s - loss: 51310.6560 - mae: 121.6965 - val_loss: 69354.3974 - val_mae: 221.2629
Epoch 822/1000
200/200 - 6s - loss: 18149.5170 - mae: 71.9406 - val_loss: 77138.2319 - val_mae: 221.4546
Epoch 823/1000
200/200 - 5s - loss: 19058.2518 - mae: 69.9471 - val_loss: 73111.0317 - val_mae: 225.4687
Epoch 824/1000
200/200 - 5s - loss: 27885.8356 - mae: 81.5505 - val_loss: 57522.6466 - val_mae: 205.3432
Epoch 825/1000
200/200 - 5s - loss: 17437.8552 - mae: 64.7465 - val_loss: 54921.3842 - val_mae: 194.4798
Epoch 826/1000
200/200 - 5s - loss: 11068.3994 - mae: 53.2010 - val_loss: 54497.5186 - val_mae: 195.1194
Epoch 827/1000
200/200 - 5s - loss: 28967.2898 - mae: 82.1509 - val_loss: 43600.5762 - val_mae: 176.6448
Epoch 828/1000
200/200 - 5s - loss: 9799.7267 - mae: 51.6965 - val_loss: 49155.0259 - val_mae: 190.4111
Epoch 829/1000
200/200 - 5s - loss: 7444.2502 - mae: 45.5947 - val_loss: 47105.3464 - val_mae: 185.6185
Epoch 830/1000
200/200 - 5s - loss: 12360.6640 - mae: 57.5851 - val_loss: 66777.8044 - val_mae: 205.7908
Epoch 831/1000
200/200 - 5s - loss: 44327.7125 - mae: 92.2480 - val_loss: 33764.3636 - val_mae: 162.1794
Epoch 832/1000
200/200 - 5s - loss: 18473.8767 - mae: 60.2510 - val_loss: 41277.6057 - val_mae: 174.7666
Epoch 833/1000
200/200 - 5s - loss: 21167.7977 - mae: 64.1931 - val_loss: 46463.4688 - val_mae: 184.8905
Epoch 834/1000
200/200 - 5s - loss: 17669.2164 - mae: 57.5374 - val_loss: 43671.8633 - val_mae: 184.0552
Epoch 835/1000
200/200 - 5s - loss: 25358.2839 - mae: 77.4326 - val_loss: 44372.4541 - val_mae: 184.2285
Epoch 836/1000
200/200 - 5s - loss: 24408.0024 - mae: 73.7284 - val_loss: 49688.3887 - val_mae: 188.9832
Epoch 837/1000
200/200 - 5s - loss: 26295.7641 - mae: 77.8673 - val_loss: 36921.5807 - val_mae: 168.3065
Epoch 838/1000
200/200 - 5s - loss: 15483.7153 - mae: 57.7223 - val_loss: 29048.0445 - val_mae: 147.6113
Epoch 839/1000
200/200 - 5s - loss: 15334.1951 - mae: 58.2777 - val_loss: 46197.3302 - val_mae: 187.8900
Epoch 840/1000
200/200 - 5s - loss: 20844.4244 - mae: 65.8824 - val_loss: 38177.1394 - val_mae: 175.9989
Epoch 841/1000
200/200 - 6s - loss: 16576.7996 - mae: 58.9989 - val_loss: 48660.2132 - val_mae: 192.2726
Epoch 842/1000
200/200 - 6s - loss: 13454.0200 - mae: 52.8715 - val_loss: 45798.6178 - val_mae: 185.7401
Epoch 843/1000
200/200 - 5s - loss: 21211.2833 - mae: 70.6863 - val_loss: 39839.1876 - val_mae: 170.2175
Epoch 844/1000
200/200 - 6s - loss: 13195.7229 - mae: 52.1193 - val_loss: 34230.0677 - val_mae: 163.1905
Epoch 845/1000
200/200 - 6s - loss: 13516.3752 - mae: 53.2918 - val_loss: 40350.3458 - val_mae: 172.2812
Epoch 846/1000
200/200 - 6s - loss: 11361.9997 - mae: 50.5876 - val_loss: 40844.2898 - val_mae: 177.9577
Epoch 847/1000
200/200 - 5s - loss: 13288.3667 - mae: 56.2060 - val_loss: 41350.8300 - val_mae: 169.8728
Epoch 848/1000
200/200 - 5s - loss: 16266.0337 - mae: 60.7057 - val_loss: 48211.9087 - val_mae: 187.2238
Epoch 849/1000
200/200 - 6s - loss: 24971.1319 - mae: 74.0727 - val_loss: 25191.7221 - val_mae: 130.2796
Epoch 850/1000
200/200 - 6s - loss: 28709.5755 - mae: 76.5809 - val_loss: 28822.9845 - val_mae: 136.8804
Epoch 851/1000
200/200 - 5s - loss: 12235.4635 - mae: 52.0959 - val_loss: 34376.2433 - val_mae: 143.5624
Epoch 852/1000
200/200 - 5s - loss: 10478.3472 - mae: 48.3828 - val_loss: 34827.6720 - val_mae: 139.4867
Epoch 853/1000
200/200 - 5s - loss: 28645.2437 - mae: 79.0441 - val_loss: 37157.4423 - val_mae: 153.6894
Epoch 854/1000
200/200 - 5s - loss: 9905.4515 - mae: 47.0339 - val_loss: 39371.6150 - val_mae: 154.3960
Epoch 855/1000
200/200 - 5s - loss: 23777.8407 - mae: 71.4382 - val_loss: 40202.9025 - val_mae: 149.6264
Epoch 856/1000
200/200 - 5s - loss: 13265.7665 - mae: 52.8052 - val_loss: 39630.6391 - val_mae: 155.1603
Epoch 857/1000
200/200 - 6s - loss: 9718.8338 - mae: 46.6564 - val_loss: 34765.4510 - val_mae: 150.0958
Epoch 858/1000
200/200 - 6s - loss: 11071.5537 - mae: 50.2854 - val_loss: 32857.7937 - val_mae: 148.0860
Epoch 859/1000
200/200 - 6s - loss: 16229.1874 - mae: 59.6400 - val_loss: 37436.2340 - val_mae: 156.4964
Epoch 860/1000
200/200 - 5s - loss: 10938.1312 - mae: 52.4074 - val_loss: 38143.9949 - val_mae: 156.9563
Epoch 861/1000
200/200 - 5s - loss: 19042.2252 - mae: 64.8862 - val_loss: 44042.2640 - val_mae: 167.5070
Epoch 862/1000
200/200 - 5s - loss: 17853.7710 - mae: 65.4707 - val_loss: 51869.3760 - val_mae: 175.6256
Epoch 863/1000
200/200 - 5s - loss: 17726.7901 - mae: 60.1416 - val_loss: 31647.2066 - val_mae: 147.0993
Epoch 864/1000
200/200 - 5s - loss: 7437.3333 - mae: 43.6710 - val_loss: 39094.9478 - val_mae: 151.1078
Epoch 865/1000
200/200 - 5s - loss: 9454.1282 - mae: 48.3563 - val_loss: 33738.5017 - val_mae: 145.0812
Epoch 866/1000
200/200 - 5s - loss: 15358.1611 - mae: 60.4990 - val_loss: 35917.8495 - val_mae: 146.8560
Epoch 867/1000
200/200 - 5s - loss: 9132.5065 - mae: 46.0512 - val_loss: 40132.0695 - val_mae: 156.0101
Epoch 868/1000
200/200 - 5s - loss: 4540.7840 - mae: 35.0659 - val_loss: 37303.8900 - val_mae: 149.4353
Epoch 869/1000
200/200 - 5s - loss: 15962.9023 - mae: 56.2375 - val_loss: 26773.0988 - val_mae: 137.7047
Epoch 870/1000
200/200 - 5s - loss: 55489.0211 - mae: 115.8952 - val_loss: 38826.4805 - val_mae: 145.0875
Epoch 871/1000
200/200 - 5s - loss: 21054.1335 - mae: 70.3996 - val_loss: 31243.6676 - val_mae: 125.5390
Epoch 872/1000
200/200 - 5s - loss: 15675.1469 - mae: 60.7839 - val_loss: 32465.9044 - val_mae: 126.5174
Epoch 873/1000
200/200 - 5s - loss: 15172.3627 - mae: 59.4512 - val_loss: 37538.0826 - val_mae: 138.8680
Epoch 874/1000
200/200 - 5s - loss: 29444.4556 - mae: 74.8731 - val_loss: 29647.3341 - val_mae: 137.6967
Epoch 875/1000
200/200 - 5s - loss: 14035.0214 - mae: 58.2451 - val_loss: 36520.8679 - val_mae: 145.6161
Epoch 876/1000
200/200 - 5s - loss: 17509.8111 - mae: 61.9472 - val_loss: 35031.0214 - val_mae: 136.6007
Epoch 877/1000
200/200 - 5s - loss: 16330.9392 - mae: 59.6898 - val_loss: 32730.3966 - val_mae: 133.2532
Epoch 878/1000
200/200 - 5s - loss: 8578.5775 - mae: 44.2231 - val_loss: 35692.0039 - val_mae: 139.6823
Epoch 879/1000
200/200 - 5s - loss: 30977.9287 - mae: 85.0626 - val_loss: 31010.0300 - val_mae: 139.6866
Epoch 880/1000
200/200 - 5s - loss: 10121.3587 - mae: 49.9998 - val_loss: 28482.1106 - val_mae: 131.0053
Epoch 881/1000
200/200 - 5s - loss: 9400.2273 - mae: 48.7554 - val_loss: 30587.5359 - val_mae: 133.6443
Epoch 882/1000
200/200 - 5s - loss: 10163.1023 - mae: 48.1596 - val_loss: 28530.1221 - val_mae: 126.2653
Epoch 883/1000
200/200 - 5s - loss: 8869.5797 - mae: 47.3573 - val_loss: 29808.7939 - val_mae: 135.2893
Epoch 884/1000
200/200 - 5s - loss: 7051.0763 - mae: 43.3117 - val_loss: 26980.5712 - val_mae: 125.1703
Epoch 885/1000
200/200 - 5s - loss: 9457.0304 - mae: 48.9187 - val_loss: 26612.3965 - val_mae: 123.8044
Epoch 886/1000
200/200 - 5s - loss: 9858.7319 - mae: 49.4838 - val_loss: 26278.5870 - val_mae: 130.7144
Epoch 887/1000
200/200 - 5s - loss: 12195.5734 - mae: 55.1106 - val_loss: 29532.0329 - val_mae: 140.9931
Epoch 888/1000
200/200 - 5s - loss: 11497.1378 - mae: 49.3262 - val_loss: 26187.6910 - val_mae: 124.7096
Epoch 889/1000
200/200 - 5s - loss: 7653.9435 - mae: 44.8472 - val_loss: 30235.1820 - val_mae: 133.0714
Epoch 890/1000
200/200 - 5s - loss: 19099.0842 - mae: 68.0114 - val_loss: 24877.0075 - val_mae: 123.0589
Epoch 891/1000
200/200 - 5s - loss: 9386.9850 - mae: 46.1511 - val_loss: 32302.7742 - val_mae: 140.2072
Epoch 892/1000
200/200 - 6s - loss: 5363.4356 - mae: 38.9296 - val_loss: 31742.6135 - val_mae: 140.4592
Epoch 893/1000
200/200 - 6s - loss: 7423.9440 - mae: 44.3401 - val_loss: 33090.3726 - val_mae: 147.4442
Epoch 894/1000
200/200 - 5s - loss: 25291.5633 - mae: 74.5937 - val_loss: 26900.5506 - val_mae: 132.9621
Epoch 895/1000
200/200 - 5s - loss: 17203.9168 - mae: 60.2936 - val_loss: 32540.1211 - val_mae: 146.0459
Epoch 896/1000
200/200 - 5s - loss: 17481.4814 - mae: 60.8984 - val_loss: 34985.0802 - val_mae: 150.2919
Epoch 897/1000
200/200 - 5s - loss: 12477.3399 - mae: 50.5793 - val_loss: 34496.5484 - val_mae: 141.6035
Epoch 898/1000
200/200 - 6s - loss: 14123.3507 - mae: 56.3983 - val_loss: 39106.5898 - val_mae: 157.6015
Epoch 899/1000
200/200 - 6s - loss: 20122.9625 - mae: 64.0863 - val_loss: 35832.4548 - val_mae: 150.3002
Epoch 900/1000
200/200 - 6s - loss: 22583.0987 - mae: 72.5932 - val_loss: 35112.1695 - val_mae: 152.6562
Epoch 901/1000
200/200 - 5s - loss: 10909.0849 - mae: 49.8041 - val_loss: 39320.5369 - val_mae: 155.2354
Epoch 902/1000
200/200 - 5s - loss: 10788.0347 - mae: 49.1295 - val_loss: 38615.5615 - val_mae: 160.1746
Epoch 903/1000
200/200 - 5s - loss: 9417.9242 - mae: 45.2050 - val_loss: 35253.7207 - val_mae: 152.4762
Epoch 904/1000
200/200 - 5s - loss: 10912.6006 - mae: 50.3715 - val_loss: 34077.4076 - val_mae: 151.0265
Epoch 905/1000
200/200 - 5s - loss: 10901.1985 - mae: 49.1922 - val_loss: 38311.0791 - val_mae: 157.2813
Epoch 906/1000
200/200 - 5s - loss: 7654.3757 - mae: 42.0975 - val_loss: 34349.1236 - val_mae: 156.8205
Epoch 907/1000
200/200 - 5s - loss: 22710.3158 - mae: 66.7510 - val_loss: 52688.6234 - val_mae: 183.6258
Epoch 908/1000
200/200 - 5s - loss: 23499.3125 - mae: 73.3930 - val_loss: 34108.6495 - val_mae: 160.1502
Epoch 909/1000
200/200 - 5s - loss: 24550.4237 - mae: 81.5189 - val_loss: 24799.6007 - val_mae: 125.2342
Epoch 910/1000
200/200 - 5s - loss: 10011.6265 - mae: 49.0982 - val_loss: 26508.7845 - val_mae: 134.3714
Epoch 911/1000
200/200 - 5s - loss: 7447.9715 - mae: 41.2596 - val_loss: 29325.5919 - val_mae: 142.5711
Epoch 912/1000
200/200 - 5s - loss: 7898.7529 - mae: 41.9549 - val_loss: 27503.8656 - val_mae: 137.0742
Epoch 913/1000
200/200 - 5s - loss: 6772.2710 - mae: 39.7458 - val_loss: 26633.4617 - val_mae: 132.7175
Epoch 914/1000
200/200 - 5s - loss: 7533.6150 - mae: 45.2351 - val_loss: 27774.7505 - val_mae: 138.6615
Epoch 915/1000
200/200 - 5s - loss: 5970.5830 - mae: 39.7164 - val_loss: 25010.3400 - val_mae: 130.2077
Epoch 916/1000
200/200 - 5s - loss: 5117.3331 - mae: 35.2736 - val_loss: 23383.2085 - val_mae: 120.5601
Epoch 917/1000
200/200 - 5s - loss: 3891.5109 - mae: 32.8684 - val_loss: 25803.2709 - val_mae: 128.6482
Epoch 918/1000
200/200 - 5s - loss: 11252.8921 - mae: 50.3054 - val_loss: 22420.2943 - val_mae: 122.5462
Epoch 919/1000
200/200 - 5s - loss: 17521.2803 - mae: 63.7642 - val_loss: 23419.6812 - val_mae: 128.6338
Epoch 920/1000
200/200 - 5s - loss: 14233.2376 - mae: 56.3149 - val_loss: 29030.3360 - val_mae: 139.9789
Epoch 921/1000
200/200 - 5s - loss: 4553.3036 - mae: 35.5751 - val_loss: 24485.7643 - val_mae: 124.3424
Epoch 922/1000
200/200 - 5s - loss: 9083.2962 - mae: 48.7242 - val_loss: 23877.5579 - val_mae: 118.2408
Epoch 923/1000
200/200 - 5s - loss: 7011.8572 - mae: 41.1065 - val_loss: 27625.7673 - val_mae: 131.8496
Epoch 924/1000
200/200 - 5s - loss: 17514.3378 - mae: 66.0230 - val_loss: 24897.1839 - val_mae: 123.4889
Epoch 925/1000
200/200 - 5s - loss: 12151.8722 - mae: 53.9273 - val_loss: 26152.7821 - val_mae: 130.6688
Epoch 926/1000
200/200 - 5s - loss: 3810.6234 - mae: 32.2389 - val_loss: 28176.8185 - val_mae: 135.0696
Epoch 927/1000
200/200 - 5s - loss: 9581.4931 - mae: 51.3471 - val_loss: 27239.6036 - val_mae: 131.2641
Epoch 928/1000
200/200 - 5s - loss: 6908.2265 - mae: 40.9939 - val_loss: 26849.6181 - val_mae: 132.5011
Epoch 929/1000
200/200 - 5s - loss: 2931.6971 - mae: 31.0794 - val_loss: 25043.1398 - val_mae: 126.4420
Epoch 930/1000
200/200 - 5s - loss: 37912.3979 - mae: 90.7753 - val_loss: 28505.3232 - val_mae: 130.1229
Epoch 931/1000
200/200 - 5s - loss: 13770.6301 - mae: 54.7808 - val_loss: 32536.0707 - val_mae: 140.6441
Epoch 932/1000
200/200 - 5s - loss: 12064.3788 - mae: 53.6822 - val_loss: 30665.2095 - val_mae: 137.5345
Epoch 933/1000
200/200 - 5s - loss: 4788.7024 - mae: 34.1641 - val_loss: 27071.3322 - val_mae: 130.0728
Epoch 934/1000
200/200 - 5s - loss: 7435.2305 - mae: 43.6046 - val_loss: 27758.5792 - val_mae: 136.5371
Epoch 935/1000
200/200 - 5s - loss: 6692.0977 - mae: 42.7843 - val_loss: 29979.4980 - val_mae: 142.1919
Epoch 936/1000
200/200 - 5s - loss: 11862.8680 - mae: 53.0524 - val_loss: 34726.8596 - val_mae: 146.6014
Epoch 937/1000
200/200 - 5s - loss: 9934.9108 - mae: 49.8897 - val_loss: 30549.6389 - val_mae: 131.7353
Epoch 938/1000
200/200 - 5s - loss: 2106.2101 - mae: 26.4306 - val_loss: 29074.1891 - val_mae: 130.2196
Epoch 939/1000
200/200 - 5s - loss: 2270.5354 - mae: 25.5855 - val_loss: 28242.5057 - val_mae: 127.1592
Epoch 940/1000
200/200 - 5s - loss: 1440.7665 - mae: 21.6739 - val_loss: 29277.9584 - val_mae: 131.8902
Epoch 941/1000
200/200 - 5s - loss: 18059.7685 - mae: 47.9785 - val_loss: 25388.9132 - val_mae: 133.5685
Epoch 942/1000
200/200 - 5s - loss: 8621.2925 - mae: 44.8809 - val_loss: 33097.3197 - val_mae: 138.4894
Epoch 943/1000
200/200 - 5s - loss: 4289.5483 - mae: 31.1892 - val_loss: 33717.8812 - val_mae: 139.3470
Epoch 944/1000
200/200 - 5s - loss: 4327.4371 - mae: 34.8634 - val_loss: 34112.9477 - val_mae: 138.0903
Epoch 945/1000
200/200 - 5s - loss: 918.0516 - mae: 17.5492 - val_loss: 32475.4604 - val_mae: 135.2385
Epoch 946/1000
200/200 - 5s - loss: 882.1988 - mae: 16.5513 - val_loss: 34765.2300 - val_mae: 142.0174
Epoch 947/1000
200/200 - 5s - loss: 3397.3323 - mae: 30.6369 - val_loss: 35017.5134 - val_mae: 140.3629
Epoch 948/1000
200/200 - 5s - loss: 55116.8543 - mae: 115.8121 - val_loss: 36460.8749 - val_mae: 153.9228
Epoch 949/1000
200/200 - 6s - loss: 35505.9969 - mae: 89.6121 - val_loss: 38346.6813 - val_mae: 151.3527
Epoch 950/1000
200/200 - 5s - loss: 4186.3109 - mae: 35.4583 - val_loss: 38764.0305 - val_mae: 151.0175
Epoch 951/1000
200/200 - 5s - loss: 1723.7459 - mae: 24.0219 - val_loss: 40681.3368 - val_mae: 156.1888
Epoch 952/1000
200/200 - 5s - loss: 1595.7903 - mae: 22.1816 - val_loss: 40656.9695 - val_mae: 155.9981
Epoch 953/1000
200/200 - 5s - loss: 5459.4291 - mae: 36.4404 - val_loss: 40543.9821 - val_mae: 153.4098
Epoch 954/1000
200/200 - 5s - loss: 1534.5979 - mae: 20.4163 - val_loss: 39904.8988 - val_mae: 150.8103
Epoch 955/1000
200/200 - 5s - loss: 1214.9359 - mae: 19.7357 - val_loss: 42018.1085 - val_mae: 152.1956
Epoch 956/1000
200/200 - 5s - loss: 1715.6343 - mae: 21.4187 - val_loss: 42506.2176 - val_mae: 152.2344
Epoch 957/1000
200/200 - 5s - loss: 1785.2312 - mae: 20.4746 - val_loss: 42811.5201 - val_mae: 151.5846
Epoch 958/1000
200/200 - 5s - loss: 1774.5386 - mae: 20.3888 - val_loss: 46918.2728 - val_mae: 161.5703
Epoch 959/1000
200/200 - 5s - loss: 1142.9817 - mae: 18.2658 - val_loss: 45897.0926 - val_mae: 158.1211
Epoch 960/1000
200/200 - 5s - loss: 8787.6316 - mae: 40.4389 - val_loss: 69610.9296 - val_mae: 213.5356
Epoch 961/1000
200/200 - 5s - loss: 27714.4099 - mae: 85.4003 - val_loss: 33533.0089 - val_mae: 145.7448
Epoch 962/1000
200/200 - 5s - loss: 16745.5569 - mae: 61.7938 - val_loss: 31917.6380 - val_mae: 140.4275
Epoch 963/1000
200/200 - 5s - loss: 12218.2578 - mae: 50.6880 - val_loss: 34253.2654 - val_mae: 143.0125
Epoch 964/1000
200/200 - 5s - loss: 25887.5327 - mae: 78.2253 - val_loss: 31278.9427 - val_mae: 136.4424
Epoch 965/1000
200/200 - 5s - loss: 8665.5590 - mae: 46.2088 - val_loss: 34763.0154 - val_mae: 149.0019
Epoch 966/1000
200/200 - 5s - loss: 11744.9277 - mae: 49.9328 - val_loss: 34509.1627 - val_mae: 147.8059
Epoch 967/1000
200/200 - 5s - loss: 11625.4116 - mae: 49.4815 - val_loss: 32878.6410 - val_mae: 142.3851
Epoch 968/1000
200/200 - 5s - loss: 9526.7843 - mae: 46.5843 - val_loss: 29171.4487 - val_mae: 135.5851
Epoch 969/1000
200/200 - 5s - loss: 13672.0318 - mae: 55.7687 - val_loss: 38820.2150 - val_mae: 162.6993
Epoch 970/1000
200/200 - 5s - loss: 14893.8529 - mae: 53.5727 - val_loss: 28087.4614 - val_mae: 132.2291
Epoch 971/1000
200/200 - 5s - loss: 11924.5886 - mae: 54.1710 - val_loss: 36564.7833 - val_mae: 155.2218
Epoch 972/1000
200/200 - 5s - loss: 11776.0225 - mae: 50.6658 - val_loss: 39038.4171 - val_mae: 166.7022
Epoch 973/1000
200/200 - 5s - loss: 18172.2345 - mae: 64.0846 - val_loss: 33471.1973 - val_mae: 152.7680
Epoch 974/1000
200/200 - 5s - loss: 7175.4052 - mae: 44.0422 - val_loss: 43242.6650 - val_mae: 171.8160
Epoch 975/1000
200/200 - 5s - loss: 10085.7038 - mae: 49.0481 - val_loss: 34826.8757 - val_mae: 152.7413
Epoch 976/1000
200/200 - 5s - loss: 7434.1317 - mae: 42.6046 - val_loss: 39091.8214 - val_mae: 162.6183
Epoch 977/1000
200/200 - 5s - loss: 18658.6593 - mae: 67.5919 - val_loss: 27959.4631 - val_mae: 133.6311
Epoch 978/1000
200/200 - 5s - loss: 6784.1798 - mae: 41.0215 - val_loss: 29562.8477 - val_mae: 139.9064
Epoch 979/1000
200/200 - 5s - loss: 21834.2282 - mae: 59.9415 - val_loss: 39664.7441 - val_mae: 170.8058
Epoch 980/1000
200/200 - 6s - loss: 16525.9762 - mae: 62.2731 - val_loss: 31630.2536 - val_mae: 152.5211
Epoch 981/1000
200/200 - 6s - loss: 15391.3183 - mae: 57.6229 - val_loss: 31496.9255 - val_mae: 148.8994
Epoch 982/1000
200/200 - 5s - loss: 11952.4137 - mae: 50.3944 - val_loss: 28527.6193 - val_mae: 137.2698
Epoch 983/1000
200/200 - 5s - loss: 11937.4805 - mae: 47.7758 - val_loss: 26715.2962 - val_mae: 133.4292
Epoch 984/1000
200/200 - 5s - loss: 15763.2742 - mae: 63.2429 - val_loss: 30328.0688 - val_mae: 141.4334
Epoch 985/1000
200/200 - 5s - loss: 11331.6162 - mae: 47.1737 - val_loss: 29453.8077 - val_mae: 139.4608
Epoch 986/1000
200/200 - 5s - loss: 14320.1101 - mae: 57.8605 - val_loss: 31945.4046 - val_mae: 146.5063
Epoch 987/1000
200/200 - 5s - loss: 12561.1990 - mae: 53.4312 - val_loss: 31995.7050 - val_mae: 148.5589
Epoch 988/1000
200/200 - 5s - loss: 10788.1030 - mae: 48.1398 - val_loss: 34045.9916 - val_mae: 154.7587
Epoch 989/1000
200/200 - 5s - loss: 19508.0594 - mae: 69.4695 - val_loss: 35414.0857 - val_mae: 151.0663
Epoch 990/1000
200/200 - 5s - loss: 12017.6402 - mae: 51.0220 - val_loss: 36889.3302 - val_mae: 158.5365
Epoch 991/1000
200/200 - 5s - loss: 20007.2075 - mae: 65.8860 - val_loss: 36789.7980 - val_mae: 152.1356
Epoch 992/1000
200/200 - 5s - loss: 11009.9182 - mae: 49.1116 - val_loss: 35557.7420 - val_mae: 152.5765
Epoch 993/1000
200/200 - 5s - loss: 16262.7443 - mae: 62.2864 - val_loss: 34355.5023 - val_mae: 148.8097
Epoch 994/1000
200/200 - 5s - loss: 9873.3204 - mae: 45.1176 - val_loss: 30428.6650 - val_mae: 142.8104
Epoch 995/1000
200/200 - 5s - loss: 12048.4846 - mae: 54.2781 - val_loss: 33108.9229 - val_mae: 149.2615
Epoch 996/1000
200/200 - 5s - loss: 6979.3725 - mae: 37.9319 - val_loss: 33725.4140 - val_mae: 150.8119
Epoch 997/1000
200/200 - 5s - loss: 13651.1898 - mae: 57.3505 - val_loss: 35540.0877 - val_mae: 155.7748
Epoch 998/1000
200/200 - 5s - loss: 17507.8056 - mae: 64.4217 - val_loss: 34576.9181 - val_mae: 153.0232
Epoch 999/1000
200/200 - 5s - loss: 9253.9100 - mae: 45.5649 - val_loss: 36437.6950 - val_mae: 154.8501
Epoch 1000/1000
200/200 - 5s - loss: 11769.3182 - mae: 53.6574 - val_loss: 33373.5372 - val_mae: 147.9762
In [21]:
model = tf.keras.models.load_model('Model/firstModel.h5')
model.evaluate(dataGenerator_DevTest(BatchSize, SequenceLength, features, population), steps = 100)
100/100 [==============================] - 1s 8ms/step - loss: 2104.2078 - mae: 38.3150
Out[21]:
[2104.2077783203126, 38.314995]
In [18]:
plt.figure(figsize=(18, 5))
plt.plot(history.history['mae'])
plt.plot(history.history['val_mae'])
plt.legend(['train', 'test'], loc='upper left')
plt.show()
In [22]:
import matplotlib.pyplot as plt
for i in range(1):
    tempBatchSize = 1
    x_shape = (tempBatchSize, SequenceLength, features)
    x_batch = np.zeros(shape=x_shape, dtype=np.float16)

    y_shape = (tempBatchSize, SequenceLength, 1)
    y_batch = np.zeros(shape=y_shape, dtype=np.float16)

    for i in range(tempBatchSize):
        index = np.random.randint(xDevTest.shape[0] - SequenceLength)

        x_batch[i] = xDevTest[index:index+SequenceLength]
        y_batch[i] = yDevTest[index:index+SequenceLength]
    
    ans = model.predict(x_batch)
    
    ans = ans.flatten().tolist()
    y_batch = y_batch.flatten().tolist()
    
    fig = plt.figure(figsize=(13,5))
    ax = fig.add_subplot(111)
    
    print(y_batch)
    print(ans)
    ax.plot(list(range(len(y_batch))), y_batch, label = 'true')
    ax.plot(list(range(len(ans))), ans, label = 'predicted')
    ax.grid(axis='both')
   
    ax.legend()    
[148.25, 122.4375, 114.6875, 95.5625, 72.5625, 64.0, 55.15625]
[85.65009307861328, 121.42975616455078, 38.233985900878906, 53.78330993652344, 40.04692840576172, 68.74250793457031, 42.0467414855957]

For United Kingdom

In [25]:
def trainModel(country):
    BatchSize = 16
    SequenceLength = 7
    dataset = pd.read_csv('Data/owid-covid-data.csv')
    country = 'Japan'
    data = df[df['location'] == country]
    data.sort_values(by='date', ascending = False, inplace=True)
    data = data.drop(['location', 'date'], axis = 1)

    xData = np.array([data.iloc[0]])
    yData = np.array([data.iloc[0]['new_cases_smoothed']])

    for index in range(1, data.shape[0]):
        xData = np.vstack([xData, np.array([data.iloc[index]])])
        yData = np.vstack([yData, np.array([data.iloc[index]['new_cases_smoothed']])])

    trainTestSplit = int(0.8 * df[df['location'] == country].shape[0])
    devTestSplit = int(0.9 * df[df['location'] == country].shape[0])

    xTrain = xData[:trainTestSplit]
    yTrain = yData[:trainTestSplit]

    xTest = xData[trainTestSplit:devTestSplit]
    yTest = yData[trainTestSplit:devTestSplit]

    xDevTest = xData[devTestSplit:]
    yDevTest = yData[devTestSplit:]
    
    features = xData.shape[1]
    
    model = tf.keras.models.Sequential([
        tf.keras.layers.Dense(32, input_shape=(None, features,), activation='relu'),
        tf.keras.layers.Dense(64, input_shape=(None, features,), activation='relu'),
        tf.keras.layers.Dense(128, input_shape=(None, features,), activation='relu'),
        tf.keras.layers.Dense(64, input_shape=(None, features,), activation='relu'),
        tf.keras.layers.Dense(32, input_shape=(None, features,), activation='relu'),
        #tf.compat.v1.keras.layers.CuDNNGRU(units=512, return_sequences=True, input_shape=(None, features,)),
        tf.keras.layers.Dense(1, activation='linear')
    ])

    model.compile(optimizer=tf.keras.optimizers.Adam(), loss='mse', metrics=['mae'])
    
    country = 'Japan'
    population = 125
    history = model.fit_generator(datagen(xTrain, yTrain, BatchSize, SequenceLength, features), epochs = 1000, steps_per_epoch=200, 
                  validation_data=datagen(xTest, yTest, BatchSize, SequenceLength, features), validation_steps=50, verbose=0)
    
    return history, model, xDevTest, yDevTest
In [26]:
ukHistory, ukModel, ukxDevTest, ukyDevTest = trainModel('United Kingdom')
ukModel.save('Model/ukModel.h5')
In [32]:
ukModel.evaluate(datagen(ukxDevTest, ukyDevTest, 16, 7, 12), steps = 100)
100/100 [==============================] - 1s 6ms/step - loss: 3068.2620 - mae: 42.0461
Out[32]:
[3068.261951904297, 42.046093]
In [34]:
plt.figure(figsize=(18, 5))
plt.plot(ukHistory.history['mae'])
plt.plot(ukHistory.history['val_mae'])
plt.legend(['train', 'test'], loc='upper left')
plt.show()
In [63]:
import matplotlib.pyplot as plt
SequenceLength = 7
features = 12
tempBatchSize = 1
for i in range(1):
    x_shape = (1, SequenceLength, features)
    x_batch = np.zeros(shape=x_shape, dtype=np.float16)

    y_shape = (1, SequenceLength, 1)
    y_batch = np.zeros(shape=y_shape, dtype=np.float16)

    for i in range(tempBatchSize):
        index = np.random.randint(ukxDevTest.shape[0] - SequenceLength)

        x_batch[i] = ukxDevTest[index:index+SequenceLength]
        y_batch[i] = ukyDevTest[index:index+SequenceLength]
    
    ans = ukModel.predict(x_batch)
    
    ans = ans.flatten().tolist()
    y_batch = y_batch.flatten().tolist()
    
    fig = plt.figure(figsize=(13,5))
    ax = fig.add_subplot(111)
    
    print(y_batch)
    print(ans)
    ax.plot(list(range(len(y_batch))), y_batch, label = 'true')
    ax.plot(list(range(len(ans))), ans, label = 'predicted')A
    ax.grid(axis='both')
   
    ax.legend()    
[44.71875, 47.28125, 43.84375, 41.0, 35.5625, 34.28125, 31.578125]
[39.00185012817383, 48.56122589111328, 28.508695602416992, 36.077247619628906, 39.56138229370117, 38.748573303222656, 42.22467803955078]
In [ ]: