Mirror of https://github.com/Indian1999/csutortok_17.git, synced 2025-05-21 12:23:56 +02:00
Created using Colab
This commit is contained in:
parent ae4d61bc74
commit 2b0809e4a2
1 changed file with 810 additions and 0 deletions
810
LogiGPT.ipynb
Normal file
|
@@ -0,0 +1,810 @@
|
||||||
|
{
|
||||||
|
"nbformat": 4,
|
||||||
|
"nbformat_minor": 0,
|
||||||
|
"metadata": {
|
||||||
|
"colab": {
|
||||||
|
"provenance": [],
|
||||||
|
"authorship_tag": "ABX9TyOVkTRKPgoBgJk8N+v+0Erz",
|
||||||
|
"include_colab_link": true
|
||||||
|
},
|
||||||
|
"kernelspec": {
|
||||||
|
"name": "python3",
|
||||||
|
"display_name": "Python 3"
|
||||||
|
},
|
||||||
|
"language_info": {
|
||||||
|
"name": "python"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"cells": [
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"metadata": {
|
||||||
|
"id": "view-in-github",
|
||||||
|
"colab_type": "text"
|
||||||
|
},
|
||||||
|
"source": [
|
||||||
|
"<a href=\"https://colab.research.google.com/github/Indian1999/csutortok_17/blob/main/LogiGPT.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"source": [
|
||||||
|
"https://lsc.io/lesson11ai2\n",
|
||||||
|
"\n",
|
||||||
|
"---\n",
|
||||||
|
"\n"
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"id": "YX_SCiZRhrp5"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"metadata": {
|
||||||
|
"colab": {
|
||||||
|
"base_uri": "https://localhost:8080/"
|
||||||
|
},
|
||||||
|
"id": "k7yuEViuhJse",
|
||||||
|
"outputId": "6506afa2-b7b2-4791-9143-701a64fe676c"
|
||||||
|
},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"output_type": "stream",
|
||||||
|
"name": "stderr",
|
||||||
|
"text": [
|
||||||
|
"[nltk_data] Downloading package punkt to /root/nltk_data...\n",
|
||||||
|
"[nltk_data] Package punkt is already up-to-date!\n",
|
||||||
|
"[nltk_data] Downloading package punkt_tab to /root/nltk_data...\n",
|
||||||
|
"[nltk_data] Unzipping tokenizers/punkt_tab.zip.\n",
|
||||||
|
"[nltk_data] Downloading package wordnet to /root/nltk_data...\n",
|
||||||
|
"[nltk_data] Package wordnet is already up-to-date!\n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"output_type": "execute_result",
|
||||||
|
"data": {
|
||||||
|
"text/plain": [
|
||||||
|
"True"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"metadata": {},
|
||||||
|
"execution_count": 4
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"import json\n",
|
||||||
|
"import string\n",
|
||||||
|
"import random\n",
|
||||||
|
"import nltk\n",
|
||||||
|
"import numpy as np\n",
|
||||||
|
"from nltk import WordNetLemmatizer\n",
|
||||||
|
"from keras.models import Sequential\n",
|
||||||
|
"from keras.layers import Dense, Dropout\n",
|
||||||
|
"nltk.download(\"punkt\") # punctuation (tokenizer data)\n",
|
||||||
|
"nltk.download(\"punkt_tab\")\n",
|
||||||
|
"nltk.download(\"wordnet\") # for lemmatization"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"source": [
|
||||||
|
"data_file = open(\"intents.json\").read()\n",
|
||||||
|
"data = json.loads(data_file)\n",
|
||||||
|
"print(data)"
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"colab": {
|
||||||
|
"base_uri": "https://localhost:8080/"
|
||||||
|
},
|
||||||
|
"id": "5EC-zz1fkT0U",
|
||||||
|
"outputId": "df15b3d3-8f98-4f2b-cfda-07838cc8e65b"
|
||||||
|
},
|
||||||
|
"execution_count": null,
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"output_type": "stream",
|
||||||
|
"name": "stdout",
|
||||||
|
"text": [
|
||||||
|
"{'intents': [{'tag': 'hello', 'patterns': ['Hello', 'Hi there', 'Good morning', \"What's up\"], 'responses': ['Hey!', 'Hello', 'Hi!', 'Good morning!'], 'context': ''}, {'tag': 'noanswer', 'patterns': [], 'responses': [\"Sorry, can't understand you\", 'Please give me more info', 'Not sure I understand'], 'context': ['']}, {'tag': 'job', 'patterns': ['What is your job', 'What is your work'], 'responses': ['My job is to make you feel like everything is okay.', 'I work to serve you as well as possible'], 'context': ''}, {'tag': 'age', 'patterns': ['What is your age', 'How old are you', 'When were you born'], 'responses': ['I was born in 2021'], 'context': ''}, {'tag': 'feeling', 'patterns': ['How are you today', 'How are you'], 'responses': ['I am feeling good, you?', 'Very good and you?', \"Actually, I'm okay and you?\"], 'context': ''}, {'tag': 'good', 'patterns': ['I am good too', 'I feel fine', 'Good !', 'Fine', 'I am good', 'I am great', 'great'], 'responses': ['That is perfect!', \"So, everything's okay!\"], 'context': 'feeling'}, {'tag': 'bad', 'patterns': ['I am feeling bad', 'No I am sad', 'No'], 'responses': ['I hope you will feel better !'], 'context': 'feeling'}, {'tag': 'actions', 'patterns': ['What can you do', 'What can I ask you', 'Can you help me'], 'responses': ['I can do a lot of things but here are some of my skills, you can ask me: the capital of a country, its currency and its area. A random number. To calculate a math operation.'], 'context': ''}, {'tag': 'women', 'patterns': ['Are you a girl', 'You are a women'], 'responses': ['Sure, I am a women'], 'context': ''}, {'tag': 'men', 'patterns': ['Are you a men', 'Are you a boy'], 'responses': ['No, I am a women'], 'context': ''}, {'tag': 'thanks', 'patterns': ['Thank you', 'Thank you very much', 'thanks'], 'responses': ['I only do my job️', 'No problem!'], 'context': ''}, {'tag': 'goodbye', 'patterns': ['Goodbye', 'Good afternoon', 'Bye'], 'responses': ['Goodbye!', 'See you soon!'], 'context': ''}, {'tag': 'city', 'patterns': ['Where do you live'], 'responses': ['I live in a server located in the US!'], 'context': ''}, {'tag': 'action', 'patterns': ['What are you doing'], 'responses': [\"Actually, I'm chatting with somebody\"], 'context': ''}, {'tag': 'wait', 'patterns': ['Can you wait 2 minutes', 'Please wait', 'Wait 2 secs please'], 'responses': ['Sure! I wait.'], 'context': ''}, {'tag': 'still there', 'patterns': ['Are you still there?', 'Are you here?'], 'responses': ['Of course! Always at your service.'], 'context': ''}]}\n"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
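The cell above expects an `intents.json` file next to the notebook; the file itself is not part of this commit. A minimal sketch of its structure, reconstructed from the printed output (only two of the sixteen tags are shown, written out so the loading cell can run standalone):

```python
import json

# Minimal stand-in for intents.json, reconstructed from the printed output
# above; the real file contains sixteen tags.
intents = {
    "intents": [
        {
            "tag": "hello",
            "patterns": ["Hello", "Hi there", "Good morning", "What's up"],
            "responses": ["Hey!", "Hello", "Hi!", "Good morning!"],
            "context": ""
        },
        {
            "tag": "goodbye",
            "patterns": ["Goodbye", "Good afternoon", "Bye"],
            "responses": ["Goodbye!", "See you soon!"],
            "context": ""
        }
    ]
}

# Writing it out lets the loading cell run even without the original file.
with open("intents.json", "w") as f:
    json.dump(intents, f, indent=2)
```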
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"source": [
|
||||||
|
"words = []\n",
|
||||||
|
"classes = []\n",
|
||||||
|
"data_x = []\n",
|
||||||
|
"data_y = []\n",
|
||||||
|
"for intent in data[\"intents\"]:\n",
|
||||||
|
" for pattern in intent[\"patterns\"]:\n",
|
||||||
|
" tokens = nltk.word_tokenize(pattern)\n",
|
||||||
|
" words.extend(tokens)\n",
|
||||||
|
" data_x.append(pattern)\n",
|
||||||
|
" data_y.append(intent[\"tag\"])\n",
|
||||||
|
" if intent[\"tag\"] not in classes:\n",
|
||||||
|
" classes.append(intent[\"tag\"])"
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"id": "LzUejWy-kyFG"
|
||||||
|
},
|
||||||
|
"execution_count": null,
|
||||||
|
"outputs": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"source": [
|
||||||
|
"print(words)\n",
|
||||||
|
"print(classes)\n",
|
||||||
|
"print(data_x)\n",
|
||||||
|
"print(data_y)"
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"colab": {
|
||||||
|
"base_uri": "https://localhost:8080/"
|
||||||
|
},
|
||||||
|
"id": "fL1tkVBvm5yL",
|
||||||
|
"outputId": "697a1b39-6a3a-43de-e3aa-54783d4722a8"
|
||||||
|
},
|
||||||
|
"execution_count": null,
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"output_type": "stream",
|
||||||
|
"name": "stdout",
|
||||||
|
"text": [
|
||||||
|
"['Hello', 'Hi', 'there', 'Good', 'morning', 'What', \"'s\", 'up', 'What', 'is', 'your', 'job', 'What', 'is', 'your', 'work', 'What', 'is', 'your', 'age', 'How', 'old', 'are', 'you', 'When', 'were', 'you', 'born', 'How', 'are', 'you', 'today', 'How', 'are', 'you', 'I', 'am', 'good', 'too', 'I', 'feel', 'fine', 'Good', '!', 'Fine', 'I', 'am', 'good', 'I', 'am', 'great', 'great', 'I', 'am', 'feeling', 'bad', 'No', 'I', 'am', 'sad', 'No', 'What', 'can', 'you', 'do', 'What', 'can', 'I', 'ask', 'you', 'Can', 'you', 'help', 'me', 'Are', 'you', 'a', 'girl', 'You', 'are', 'a', 'women', 'Are', 'you', 'a', 'men', 'Are', 'you', 'a', 'boy', 'Thank', 'you', 'Thank', 'you', 'very', 'much', 'thanks', 'Goodbye', 'Good', 'afternoon', 'Bye', 'Where', 'do', 'you', 'live', 'What', 'are', 'you', 'doing', 'Can', 'you', 'wait', '2', 'minutes', 'Please', 'wait', 'Wait', '2', 'secs', 'please', 'Are', 'you', 'still', 'there', '?', 'Are', 'you', 'here', '?']\n",
|
||||||
|
"['hello', 'noanswer', 'job', 'age', 'feeling', 'good', 'bad', 'actions', 'women', 'men', 'thanks', 'goodbye', 'city', 'action', 'wait', 'still there']\n",
|
||||||
|
"['Hello', 'Hi there', 'Good morning', \"What's up\", 'What is your job', 'What is your work', 'What is your age', 'How old are you', 'When were you born', 'How are you today', 'How are you', 'I am good too', 'I feel fine', 'Good !', 'Fine', 'I am good', 'I am great', 'great', 'I am feeling bad', 'No I am sad', 'No', 'What can you do', 'What can I ask you', 'Can you help me', 'Are you a girl', 'You are a women', 'Are you a men', 'Are you a boy', 'Thank you', 'Thank you very much', 'thanks', 'Goodbye', 'Good afternoon', 'Bye', 'Where do you live', 'What are you doing', 'Can you wait 2 minutes', 'Please wait', 'Wait 2 secs please', 'Are you still there?', 'Are you here?']\n",
|
||||||
|
"['hello', 'hello', 'hello', 'hello', 'job', 'job', 'age', 'age', 'age', 'feeling', 'feeling', 'good', 'good', 'good', 'good', 'good', 'good', 'good', 'bad', 'bad', 'bad', 'actions', 'actions', 'actions', 'women', 'women', 'men', 'men', 'thanks', 'thanks', 'thanks', 'goodbye', 'goodbye', 'goodbye', 'city', 'action', 'wait', 'wait', 'wait', 'still there', 'still there']\n"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"source": [
|
||||||
|
"lemmatizer = WordNetLemmatizer() # lemmatization (reduce words to their base form)\n",
|
||||||
|
"words = [lemmatizer.lemmatize(word.lower()) for word in words if word not in string.punctuation]\n",
|
||||||
|
"words = sorted(set(words))\n",
|
||||||
|
"classes = sorted(set(classes))"
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"id": "Rc_lLoZ5oMOU"
|
||||||
|
},
|
||||||
|
"execution_count": null,
|
||||||
|
"outputs": []
|
||||||
|
},
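The effect of the lemmatizer is visible in the vocabulary printed in the next cell: plural forms collapse to their dictionary form. A tiny stand-alone check, assuming the `wordnet` data downloaded in the first cell:

```python
from nltk import WordNetLemmatizer

lem = WordNetLemmatizer()
# Matches the vocabulary printed below:
# "minutes" -> "minute", "secs" -> "sec", "women" -> "woman"
print(lem.lemmatize("minutes"), lem.lemmatize("secs"), lem.lemmatize("women"))
```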
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"source": [
|
||||||
|
"print(words)\n",
|
||||||
|
"print(classes)"
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"colab": {
|
||||||
|
"base_uri": "https://localhost:8080/"
|
||||||
|
},
|
||||||
|
"id": "sSuoJbHNrLyQ",
|
||||||
|
"outputId": "51b0f10b-2b6a-4170-9a5d-e972c7f67807"
|
||||||
|
},
|
||||||
|
"execution_count": null,
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"output_type": "stream",
|
||||||
|
"name": "stdout",
|
||||||
|
"text": [
|
||||||
|
"[\"'s\", '2', 'a', 'afternoon', 'age', 'am', 'are', 'ask', 'bad', 'born', 'boy', 'bye', 'can', 'do', 'doing', 'feel', 'feeling', 'fine', 'girl', 'good', 'goodbye', 'great', 'hello', 'help', 'here', 'hi', 'how', 'i', 'is', 'job', 'live', 'me', 'men', 'minute', 'morning', 'much', 'no', 'old', 'please', 'sad', 'sec', 'still', 'thank', 'thanks', 'there', 'today', 'too', 'up', 'very', 'wait', 'were', 'what', 'when', 'where', 'woman', 'work', 'you', 'your']\n",
|
||||||
|
"['action', 'actions', 'age', 'bad', 'city', 'feeling', 'good', 'goodbye', 'hello', 'job', 'men', 'noanswer', 'still there', 'thanks', 'wait', 'women']\n"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"source": [
|
||||||
|
"training = []\n",
|
||||||
|
"out_empty = [0 for i in range(len(classes))]\n",
|
||||||
|
"\n",
|
||||||
|
"for i in range(len(data_x)):\n",
|
||||||
|
" bow = [] # bag of words\n",
|
||||||
|
" text = lemmatizer.lemmatize(data_x[i].lower())\n",
|
||||||
|
" for word in words:\n",
|
||||||
|
" if word in text:\n",
|
||||||
|
" bow.append(1)\n",
|
||||||
|
" else:\n",
|
||||||
|
" bow.append(0)\n",
|
||||||
|
" output_row = list(out_empty)\n",
|
||||||
|
" output_row[classes.index(data_y[i])] = 1\n",
|
||||||
|
" training.append([bow, output_row])\n",
|
||||||
|
"\n",
|
||||||
|
"random.shuffle(training)\n",
|
||||||
|
"training = np.array(training, dtype=object)\n",
|
||||||
|
"train_x = np.array(list(training[:, 0]))\n",
|
||||||
|
"train_y = np.array(list(training[:, 1]))\n",
|
||||||
|
"print(train_x)\n",
|
||||||
|
"print(train_x.shape)"
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"colab": {
|
||||||
|
"base_uri": "https://localhost:8080/"
|
||||||
|
},
|
||||||
|
"id": "Y0ICWXRBrgEL",
|
||||||
|
"outputId": "03d80c22-7af8-4e0f-ce43-699ed0c43ef3"
|
||||||
|
},
|
||||||
|
"execution_count": null,
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"output_type": "stream",
|
||||||
|
"name": "stdout",
|
||||||
|
"text": [
|
||||||
|
"[[0 0 1 ... 0 0 0]\n",
|
||||||
|
" [0 0 0 ... 0 0 0]\n",
|
||||||
|
" [0 0 1 ... 0 1 1]\n",
|
||||||
|
" ...\n",
|
||||||
|
" [0 0 1 ... 0 1 0]\n",
|
||||||
|
" [0 0 1 ... 0 0 0]\n",
|
||||||
|
" [0 0 1 ... 0 1 0]]\n",
|
||||||
|
"(41, 58)\n"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
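Note that the loop above lemmatizes each whole pattern string and then uses a substring test (`word in text`) to fill the bag-of-words vector, so short vocabulary entries such as `'a'` can match inside longer words. A token-level encoding is the more common formulation; a sketch assuming the `words` list and `lemmatizer` defined above (the `bag_of_words` helper name is hypothetical):

```python
import nltk

def bag_of_words(sentence, vocabulary, lemmatizer):
    # Tokenize and lemmatize first, then mark whole-token matches only.
    tokens = [lemmatizer.lemmatize(t.lower()) for t in nltk.word_tokenize(sentence)]
    return [1 if w in tokens else 0 for w in vocabulary]

# Example: vec = bag_of_words("How old are you", words, lemmatizer)
```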
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"source": [
|
||||||
|
"model = Sequential()\n",
|
||||||
|
"model.add(Dense(128, input_shape=(train_x.shape[1],) , activation=\"relu\"))\n",
|
||||||
|
"model.add(Dropout(0.5))\n",
|
||||||
|
"model.add(Dense(64, activation=\"relu\"))\n",
|
||||||
|
"model.add(Dropout(0.5))\n",
|
||||||
|
"model.add(Dense(len(train_y[0]), activation=\"softmax\"))\n",
|
||||||
|
"model.summary()"
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"colab": {
|
||||||
|
"base_uri": "https://localhost:8080/",
|
||||||
|
"height": 358
|
||||||
|
},
|
||||||
|
"collapsed": true,
|
||||||
|
"id": "A1gvMd7yvV1O",
|
||||||
|
"outputId": "5d4e5d22-3eeb-4d8b-dfde-b9cd4240cd6b"
|
||||||
|
},
|
||||||
|
"execution_count": null,
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"output_type": "stream",
|
||||||
|
"name": "stderr",
|
||||||
|
"text": [
|
||||||
|
"/usr/local/lib/python3.11/dist-packages/keras/src/layers/core/dense.py:87: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead.\n",
|
||||||
|
" super().__init__(activity_regularizer=activity_regularizer, **kwargs)\n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"output_type": "display_data",
|
||||||
|
"data": {
|
||||||
|
"text/plain": [
|
||||||
|
"\u001b[1mModel: \"sequential\"\u001b[0m\n"
|
||||||
|
],
|
||||||
|
"text/html": [
|
||||||
|
"<pre style=\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\"><span style=\"font-weight: bold\">Model: \"sequential\"</span>\n",
|
||||||
|
"</pre>\n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"metadata": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"output_type": "display_data",
|
||||||
|
"data": {
|
||||||
|
"text/plain": [
|
||||||
|
"┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓\n",
|
||||||
|
"┃\u001b[1m \u001b[0m\u001b[1mLayer (type) \u001b[0m\u001b[1m \u001b[0m┃\u001b[1m \u001b[0m\u001b[1mOutput Shape \u001b[0m\u001b[1m \u001b[0m┃\u001b[1m \u001b[0m\u001b[1m Param #\u001b[0m\u001b[1m \u001b[0m┃\n",
|
||||||
|
"┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩\n",
|
||||||
|
"│ dense (\u001b[38;5;33mDense\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m128\u001b[0m) │ \u001b[38;5;34m7,552\u001b[0m │\n",
|
||||||
|
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
|
||||||
|
"│ dropout (\u001b[38;5;33mDropout\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m128\u001b[0m) │ \u001b[38;5;34m0\u001b[0m │\n",
|
||||||
|
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
|
||||||
|
"│ dense_1 (\u001b[38;5;33mDense\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m64\u001b[0m) │ \u001b[38;5;34m8,256\u001b[0m │\n",
|
||||||
|
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
|
||||||
|
"│ dropout_1 (\u001b[38;5;33mDropout\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m64\u001b[0m) │ \u001b[38;5;34m0\u001b[0m │\n",
|
||||||
|
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
|
||||||
|
"│ dense_2 (\u001b[38;5;33mDense\u001b[0m) │ (\u001b[38;5;45mNone\u001b[0m, \u001b[38;5;34m16\u001b[0m) │ \u001b[38;5;34m1,040\u001b[0m │\n",
|
||||||
|
"└─────────────────────────────────┴────────────────────────┴───────────────┘\n"
|
||||||
|
],
|
||||||
|
"text/html": [
|
||||||
|
"<pre style=\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\">┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓\n",
|
||||||
|
"┃<span style=\"font-weight: bold\"> Layer (type) </span>┃<span style=\"font-weight: bold\"> Output Shape </span>┃<span style=\"font-weight: bold\"> Param # </span>┃\n",
|
||||||
|
"┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩\n",
|
||||||
|
"│ dense (<span style=\"color: #0087ff; text-decoration-color: #0087ff\">Dense</span>) │ (<span style=\"color: #00d7ff; text-decoration-color: #00d7ff\">None</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">128</span>) │ <span style=\"color: #00af00; text-decoration-color: #00af00\">7,552</span> │\n",
|
||||||
|
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
|
||||||
|
"│ dropout (<span style=\"color: #0087ff; text-decoration-color: #0087ff\">Dropout</span>) │ (<span style=\"color: #00d7ff; text-decoration-color: #00d7ff\">None</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">128</span>) │ <span style=\"color: #00af00; text-decoration-color: #00af00\">0</span> │\n",
|
||||||
|
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
|
||||||
|
"│ dense_1 (<span style=\"color: #0087ff; text-decoration-color: #0087ff\">Dense</span>) │ (<span style=\"color: #00d7ff; text-decoration-color: #00d7ff\">None</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">64</span>) │ <span style=\"color: #00af00; text-decoration-color: #00af00\">8,256</span> │\n",
|
||||||
|
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
|
||||||
|
"│ dropout_1 (<span style=\"color: #0087ff; text-decoration-color: #0087ff\">Dropout</span>) │ (<span style=\"color: #00d7ff; text-decoration-color: #00d7ff\">None</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">64</span>) │ <span style=\"color: #00af00; text-decoration-color: #00af00\">0</span> │\n",
|
||||||
|
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
|
||||||
|
"│ dense_2 (<span style=\"color: #0087ff; text-decoration-color: #0087ff\">Dense</span>) │ (<span style=\"color: #00d7ff; text-decoration-color: #00d7ff\">None</span>, <span style=\"color: #00af00; text-decoration-color: #00af00\">16</span>) │ <span style=\"color: #00af00; text-decoration-color: #00af00\">1,040</span> │\n",
|
||||||
|
"└─────────────────────────────────┴────────────────────────┴───────────────┘\n",
|
||||||
|
"</pre>\n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"metadata": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"output_type": "display_data",
|
||||||
|
"data": {
|
||||||
|
"text/plain": [
|
||||||
|
"\u001b[1m Total params: \u001b[0m\u001b[38;5;34m16,848\u001b[0m (65.81 KB)\n"
|
||||||
|
],
|
||||||
|
"text/html": [
|
||||||
|
"<pre style=\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\"><span style=\"font-weight: bold\"> Total params: </span><span style=\"color: #00af00; text-decoration-color: #00af00\">16,848</span> (65.81 KB)\n",
|
||||||
|
"</pre>\n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"metadata": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"output_type": "display_data",
|
||||||
|
"data": {
|
||||||
|
"text/plain": [
|
||||||
|
"\u001b[1m Trainable params: \u001b[0m\u001b[38;5;34m16,848\u001b[0m (65.81 KB)\n"
|
||||||
|
],
|
||||||
|
"text/html": [
|
||||||
|
"<pre style=\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\"><span style=\"font-weight: bold\"> Trainable params: </span><span style=\"color: #00af00; text-decoration-color: #00af00\">16,848</span> (65.81 KB)\n",
|
||||||
|
"</pre>\n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"metadata": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"output_type": "display_data",
|
||||||
|
"data": {
|
||||||
|
"text/plain": [
|
||||||
|
"\u001b[1m Non-trainable params: \u001b[0m\u001b[38;5;34m0\u001b[0m (0.00 B)\n"
|
||||||
|
],
|
||||||
|
"text/html": [
|
||||||
|
"<pre style=\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\"><span style=\"font-weight: bold\"> Non-trainable params: </span><span style=\"color: #00af00; text-decoration-color: #00af00\">0</span> (0.00 B)\n",
|
||||||
|
"</pre>\n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"metadata": {}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
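The UserWarning in the output above suggests starting the Sequential stack with an explicit `Input(shape)` layer instead of passing `input_shape` to the first Dense layer. A sketch of the equivalent definition, assuming `train_x` and `train_y` from the previous cell:

```python
from keras.models import Sequential
from keras.layers import Input, Dense, Dropout

# Same architecture as above, but with an explicit Input layer,
# which avoids the input_shape deprecation warning.
model = Sequential([
    Input(shape=(train_x.shape[1],)),
    Dense(128, activation="relu"),
    Dropout(0.5),
    Dense(64, activation="relu"),
    Dropout(0.5),
    Dense(len(train_y[0]), activation="softmax"),
])
model.summary()
```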
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"source": [
|
||||||
|
"model.compile(loss=\"categorical_crossentropy\", optimizer=\"adam\", metrics=[\"accuracy\"])\n",
|
||||||
|
"model.fit(train_x, train_y, epochs=200, batch_size=8)"
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"colab": {
|
||||||
|
"base_uri": "https://localhost:8080/"
|
||||||
|
},
|
||||||
|
"collapsed": true,
|
||||||
|
"id": "dCnlvhonxCjT",
|
||||||
|
"outputId": "3039992b-b384-46c4-f96f-b03c21effc36"
|
||||||
|
},
|
||||||
|
"execution_count": null,
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"output_type": "stream",
|
||||||
|
"name": "stdout",
|
||||||
|
"text": [
|
||||||
|
"Epoch 1/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m2s\u001b[0m 13ms/step - accuracy: 0.9013 - loss: 0.4223\n",
|
||||||
|
"Epoch 2/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 11ms/step - accuracy: 0.8161 - loss: 0.4099\n",
|
||||||
|
"Epoch 3/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 11ms/step - accuracy: 0.8714 - loss: 0.5151\n",
|
||||||
|
"Epoch 4/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 11ms/step - accuracy: 0.7759 - loss: 0.5987\n",
|
||||||
|
"Epoch 5/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9639 - loss: 0.3097 \n",
|
||||||
|
"Epoch 6/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9074 - loss: 0.4008 \n",
|
||||||
|
"Epoch 7/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9282 - loss: 0.3338 \n",
|
||||||
|
"Epoch 8/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9208 - loss: 0.3499 \n",
|
||||||
|
"Epoch 9/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9342 - loss: 0.4256 \n",
|
||||||
|
"Epoch 10/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9401 - loss: 0.3033 \n",
|
||||||
|
"Epoch 11/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.7775 - loss: 0.5135 \n",
|
||||||
|
"Epoch 12/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9790 - loss: 0.2590 \n",
|
||||||
|
"Epoch 13/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.8940 - loss: 0.4016 \n",
|
||||||
|
"Epoch 14/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.8848 - loss: 0.3445 \n",
|
||||||
|
"Epoch 15/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9895 - loss: 0.2447 \n",
|
||||||
|
"Epoch 16/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9930 - loss: 0.2349 \n",
|
||||||
|
"Epoch 17/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.8698 - loss: 0.4566 \n",
|
||||||
|
"Epoch 18/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9492 - loss: 0.2545 \n",
|
||||||
|
"Epoch 19/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 9ms/step - accuracy: 0.9492 - loss: 0.2572 \n",
|
||||||
|
"Epoch 20/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9224 - loss: 0.3773 \n",
|
||||||
|
"Epoch 21/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9103 - loss: 0.2816 \n",
|
||||||
|
"Epoch 22/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 9ms/step - accuracy: 0.9013 - loss: 0.3079 \n",
|
||||||
|
"Epoch 23/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9342 - loss: 0.3012 \n",
|
||||||
|
"Epoch 24/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9631 - loss: 0.2451 \n",
|
||||||
|
"Epoch 25/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9685 - loss: 0.2765 \n",
|
||||||
|
"Epoch 26/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.9615 - loss: 0.2600 \n",
|
||||||
|
"Epoch 27/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9297 - loss: 0.2480 \n",
|
||||||
|
"Epoch 28/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9072 - loss: 0.2842 \n",
|
||||||
|
"Epoch 29/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9701 - loss: 0.1948 \n",
|
||||||
|
"Epoch 30/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9175 - loss: 0.3409 \n",
|
||||||
|
"Epoch 31/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.8953 - loss: 0.2918 \n",
|
||||||
|
"Epoch 32/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9033 - loss: 0.3333 \n",
|
||||||
|
"Epoch 33/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.9685 - loss: 0.2170 \n",
|
||||||
|
"Epoch 34/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 9ms/step - accuracy: 0.9701 - loss: 0.2254 \n",
|
||||||
|
"Epoch 35/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9523 - loss: 0.2524 \n",
|
||||||
|
"Epoch 36/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9045 - loss: 0.2883 \n",
|
||||||
|
"Epoch 37/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9492 - loss: 0.2043 \n",
|
||||||
|
"Epoch 38/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9162 - loss: 0.2981 \n",
|
||||||
|
"Epoch 39/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9580 - loss: 0.2533 \n",
|
||||||
|
"Epoch 40/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.8660 - loss: 0.4178 \n",
|
||||||
|
"Epoch 41/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9373 - loss: 0.2581 \n",
|
||||||
|
"Epoch 42/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9631 - loss: 0.1760 \n",
|
||||||
|
"Epoch 43/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9640 - loss: 0.2130 \n",
|
||||||
|
"Epoch 44/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9596 - loss: 0.2476 \n",
|
||||||
|
"Epoch 45/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9373 - loss: 0.2417 \n",
|
||||||
|
"Epoch 46/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9685 - loss: 0.1881 \n",
|
||||||
|
"Epoch 47/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9790 - loss: 0.1160 \n",
|
||||||
|
"Epoch 48/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9417 - loss: 0.2561 \n",
|
||||||
|
"Epoch 49/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9701 - loss: 0.1849 \n",
|
||||||
|
"Epoch 50/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.9850 - loss: 0.1423 \n",
|
||||||
|
"Epoch 51/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9313 - loss: 0.1501 \n",
|
||||||
|
"Epoch 52/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9446 - loss: 0.2218 \n",
|
||||||
|
"Epoch 53/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9312 - loss: 0.2492 \n",
|
||||||
|
"Epoch 54/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9243 - loss: 0.2779 \n",
|
||||||
|
"Epoch 55/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9895 - loss: 0.1437 \n",
|
||||||
|
"Epoch 56/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9895 - loss: 0.2171 \n",
|
||||||
|
"Epoch 57/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9850 - loss: 0.1210 \n",
|
||||||
|
"Epoch 58/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9087 - loss: 0.2534 \n",
|
||||||
|
"Epoch 59/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 1.0000 - loss: 0.1798 \n",
|
||||||
|
"Epoch 60/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 10ms/step - accuracy: 0.9596 - loss: 0.1523\n",
|
||||||
|
"Epoch 61/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9639 - loss: 0.2272 \n",
|
||||||
|
"Epoch 62/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9446 - loss: 0.1766 \n",
|
||||||
|
"Epoch 63/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 9ms/step - accuracy: 0.9074 - loss: 0.2203 \n",
|
||||||
|
"Epoch 64/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.9297 - loss: 0.2744 \n",
|
||||||
|
"Epoch 65/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.8746 - loss: 0.2632 \n",
|
||||||
|
"Epoch 66/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.9417 - loss: 0.2531 \n",
|
||||||
|
"Epoch 67/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9386 - loss: 0.1950 \n",
|
||||||
|
"Epoch 68/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 1.0000 - loss: 0.1555 \n",
|
||||||
|
"Epoch 69/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9373 - loss: 0.2658 \n",
|
||||||
|
"Epoch 70/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9895 - loss: 0.1057 \n",
|
||||||
|
"Epoch 71/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9596 - loss: 0.2432 \n",
|
||||||
|
"Epoch 72/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 9ms/step - accuracy: 0.9492 - loss: 0.1991 \n",
|
||||||
|
"Epoch 73/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9701 - loss: 0.1648 \n",
|
||||||
|
"Epoch 74/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9431 - loss: 0.1931 \n",
|
||||||
|
"Epoch 75/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 1.0000 - loss: 0.1094 \n",
|
||||||
|
"Epoch 76/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9535 - loss: 0.1275 \n",
|
||||||
|
"Epoch 77/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.9850 - loss: 0.1526 \n",
|
||||||
|
"Epoch 78/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9074 - loss: 0.2633 \n",
|
||||||
|
"Epoch 79/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.8925 - loss: 0.1819 \n",
|
||||||
|
"Epoch 80/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9895 - loss: 0.1493 \n",
|
||||||
|
"Epoch 81/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 12ms/step - accuracy: 0.9103 - loss: 0.1915\n",
|
||||||
|
"Epoch 82/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9551 - loss: 0.1727 \n",
|
||||||
|
"Epoch 83/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9523 - loss: 0.1556 \n",
|
||||||
|
"Epoch 84/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9224 - loss: 0.1504 \n",
|
||||||
|
"Epoch 85/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9523 - loss: 0.1148 \n",
|
||||||
|
"Epoch 86/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9700 - loss: 0.1400 \n",
|
||||||
|
"Epoch 87/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9789 - loss: 0.1155 \n",
|
||||||
|
"Epoch 88/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.9535 - loss: 0.1198 \n",
|
||||||
|
"Epoch 89/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.9417 - loss: 0.2047 \n",
|
||||||
|
"Epoch 90/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 1.0000 - loss: 0.2030 \n",
|
||||||
|
"Epoch 91/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.9282 - loss: 0.2489 \n",
|
||||||
|
"Epoch 92/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9401 - loss: 0.1646 \n",
|
||||||
|
"Epoch 93/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9193 - loss: 0.1815 \n",
|
||||||
|
"Epoch 94/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.9745 - loss: 0.1877 \n",
|
||||||
|
"Epoch 95/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.9850 - loss: 0.1302 \n",
|
||||||
|
"Epoch 96/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9523 - loss: 0.1590 \n",
|
||||||
|
"Epoch 97/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9523 - loss: 0.1287 \n",
|
||||||
|
"Epoch 98/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9790 - loss: 0.1094 \n",
|
||||||
|
"Epoch 99/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9224 - loss: 0.2367 \n",
|
||||||
|
"Epoch 100/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9701 - loss: 0.1228 \n",
|
||||||
|
"Epoch 101/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.8895 - loss: 0.2684 \n",
|
||||||
|
"Epoch 102/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9701 - loss: 0.1204 \n",
|
||||||
|
"Epoch 103/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9072 - loss: 0.1525 \n",
|
||||||
|
"Epoch 104/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9224 - loss: 0.2232 \n",
|
||||||
|
"Epoch 105/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9373 - loss: 0.2256 \n",
|
||||||
|
"Epoch 106/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9745 - loss: 0.1671 \n",
|
||||||
|
"Epoch 107/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.9208 - loss: 0.2582 \n",
|
||||||
|
"Epoch 108/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 10ms/step - accuracy: 0.9895 - loss: 0.0647\n",
|
||||||
|
"Epoch 109/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.8940 - loss: 0.1454 \n",
|
||||||
|
"Epoch 110/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9313 - loss: 0.2220 \n",
|
||||||
|
"Epoch 111/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9745 - loss: 0.1289 \n",
|
||||||
|
"Epoch 112/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9745 - loss: 0.1201 \n",
|
||||||
|
"Epoch 113/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9685 - loss: 0.1030 \n",
|
||||||
|
"Epoch 114/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.9640 - loss: 0.1457 \n",
|
||||||
|
"Epoch 115/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9313 - loss: 0.1231 \n",
|
||||||
|
"Epoch 116/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9685 - loss: 0.1676 \n",
|
||||||
|
"Epoch 117/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.9523 - loss: 0.1195 \n",
|
||||||
|
"Epoch 118/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9895 - loss: 0.1199 \n",
|
||||||
|
"Epoch 119/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9014 - loss: 0.2301 \n",
|
||||||
|
"Epoch 120/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9745 - loss: 0.0670 \n",
|
||||||
|
"Epoch 121/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 10ms/step - accuracy: 0.9481 - loss: 0.2368\n",
|
||||||
|
"Epoch 122/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 13ms/step - accuracy: 1.0000 - loss: 0.0898\n",
|
||||||
|
"Epoch 123/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 10ms/step - accuracy: 0.9224 - loss: 0.1486\n",
|
||||||
|
"Epoch 124/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 10ms/step - accuracy: 0.9640 - loss: 0.1131\n",
|
||||||
|
"Epoch 125/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 9ms/step - accuracy: 0.9701 - loss: 0.0941 \n",
|
||||||
|
"Epoch 126/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 10ms/step - accuracy: 0.9640 - loss: 0.1493\n",
|
||||||
|
"Epoch 127/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 10ms/step - accuracy: 0.9701 - loss: 0.0978\n",
|
||||||
|
"Epoch 128/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 11ms/step - accuracy: 0.9850 - loss: 0.1015\n",
|
||||||
|
"Epoch 129/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 13ms/step - accuracy: 0.9701 - loss: 0.1244\n",
|
||||||
|
"Epoch 130/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 10ms/step - accuracy: 0.9523 - loss: 0.1167\n",
|
||||||
|
"Epoch 131/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 10ms/step - accuracy: 0.9163 - loss: 0.1924\n",
|
||||||
|
"Epoch 132/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 10ms/step - accuracy: 0.9745 - loss: 0.1403\n",
|
||||||
|
"Epoch 133/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 14ms/step - accuracy: 0.9790 - loss: 0.2090\n",
|
||||||
|
"Epoch 134/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 12ms/step - accuracy: 0.9551 - loss: 0.1243\n",
|
||||||
|
"Epoch 135/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 12ms/step - accuracy: 0.9790 - loss: 0.1692\n",
|
||||||
|
"Epoch 136/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 11ms/step - accuracy: 0.9523 - loss: 0.0777\n",
|
||||||
|
"Epoch 137/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 11ms/step - accuracy: 0.9535 - loss: 0.1032\n",
|
||||||
|
"Epoch 138/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 13ms/step - accuracy: 0.9208 - loss: 0.1792\n",
|
||||||
|
"Epoch 139/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 11ms/step - accuracy: 0.9282 - loss: 0.1370\n",
|
||||||
|
"Epoch 140/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 12ms/step - accuracy: 0.9386 - loss: 0.2386\n",
|
||||||
|
"Epoch 141/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9251 - loss: 0.1644 \n",
|
||||||
|
"Epoch 142/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9850 - loss: 0.0597 \n",
|
||||||
|
"Epoch 143/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9789 - loss: 0.0765 \n",
|
||||||
|
"Epoch 144/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9402 - loss: 0.1142 \n",
|
||||||
|
"Epoch 145/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9222 - loss: 0.1510 \n",
|
||||||
|
"Epoch 146/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.9453 - loss: 0.1391 \n",
|
||||||
|
"Epoch 147/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9342 - loss: 0.1510 \n",
|
||||||
|
"Epoch 148/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9551 - loss: 0.0926 \n",
|
||||||
|
"Epoch 149/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 10ms/step - accuracy: 0.9790 - loss: 0.1400\n",
|
||||||
|
"Epoch 150/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9701 - loss: 0.1039 \n",
|
||||||
|
"Epoch 151/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9492 - loss: 0.1314 \n",
|
||||||
|
"Epoch 152/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 1.0000 - loss: 0.0626 \n",
|
||||||
|
"Epoch 153/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9640 - loss: 0.0998 \n",
|
||||||
|
"Epoch 154/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9639 - loss: 0.0837 \n",
|
||||||
|
"Epoch 155/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9895 - loss: 0.1271 \n",
|
||||||
|
"Epoch 156/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9790 - loss: 0.0614 \n",
|
||||||
|
"Epoch 157/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9850 - loss: 0.1278 \n",
|
||||||
|
"Epoch 158/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9523 - loss: 0.1224 \n",
|
||||||
|
"Epoch 159/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9313 - loss: 0.1453 \n",
|
||||||
|
"Epoch 160/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9790 - loss: 0.1094 \n",
|
||||||
|
"Epoch 161/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9790 - loss: 0.1189 \n",
|
||||||
|
"Epoch 162/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 11ms/step - accuracy: 0.9701 - loss: 0.1343\n",
|
||||||
|
"Epoch 163/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9825 - loss: 0.0505 \n",
|
||||||
|
"Epoch 164/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9523 - loss: 0.1277 \n",
|
||||||
|
"Epoch 165/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 1.0000 - loss: 0.0546 \n",
|
||||||
|
"Epoch 166/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9045 - loss: 0.1184 \n",
|
||||||
|
"Epoch 167/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9313 - loss: 0.1264 \n",
|
||||||
|
"Epoch 168/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9402 - loss: 0.1445 \n",
|
||||||
|
"Epoch 169/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9417 - loss: 0.1636 \n",
|
||||||
|
"Epoch 170/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9850 - loss: 0.0549 \n",
|
||||||
|
"Epoch 171/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.8836 - loss: 0.1352 \n",
|
||||||
|
"Epoch 172/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9631 - loss: 0.0924 \n",
|
||||||
|
"Epoch 173/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.9701 - loss: 0.0775 \n",
|
||||||
|
"Epoch 174/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.9701 - loss: 0.1038 \n",
|
||||||
|
"Epoch 175/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9596 - loss: 0.1202 \n",
|
||||||
|
"Epoch 176/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.9312 - loss: 0.1461 \n",
|
||||||
|
"Epoch 177/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9373 - loss: 0.1289 \n",
|
||||||
|
"Epoch 178/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.9701 - loss: 0.1199 \n",
|
||||||
|
"Epoch 179/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9745 - loss: 0.0810 \n",
|
||||||
|
"Epoch 180/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.9895 - loss: 0.0964 \n",
|
||||||
|
"Epoch 181/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.9721 - loss: 0.0676 \n",
|
||||||
|
"Epoch 182/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9685 - loss: 0.1341 \n",
|
||||||
|
"Epoch 183/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.9417 - loss: 0.0891 \n",
|
||||||
|
"Epoch 184/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 10ms/step - accuracy: 0.9523 - loss: 0.0683\n",
|
||||||
|
"Epoch 185/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9118 - loss: 0.1749 \n",
|
||||||
|
"Epoch 186/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9701 - loss: 0.1020 \n",
|
||||||
|
"Epoch 187/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9224 - loss: 0.1953 \n",
|
||||||
|
"Epoch 188/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 10ms/step - accuracy: 0.9850 - loss: 0.0927\n",
|
||||||
|
"Epoch 189/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.9895 - loss: 0.0880 \n",
|
||||||
|
"Epoch 190/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9895 - loss: 0.0503 \n",
|
||||||
|
"Epoch 191/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.9895 - loss: 0.0538 \n",
|
||||||
|
"Epoch 192/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9596 - loss: 0.1371 \n",
|
||||||
|
"Epoch 193/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9523 - loss: 0.0898 \n",
|
||||||
|
"Epoch 194/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9850 - loss: 0.1354 \n",
|
||||||
|
"Epoch 195/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 10ms/step - accuracy: 0.9790 - loss: 0.0632\n",
|
||||||
|
"Epoch 196/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 9ms/step - accuracy: 0.9850 - loss: 0.0994 \n",
|
||||||
|
"Epoch 197/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9385 - loss: 0.1100 \n",
|
||||||
|
"Epoch 198/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9402 - loss: 0.1098 \n",
|
||||||
|
"Epoch 199/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 7ms/step - accuracy: 0.9340 - loss: 0.1569 \n",
|
||||||
|
"Epoch 200/200\n",
|
||||||
|
"\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 8ms/step - accuracy: 0.9535 - loss: 0.1525 \n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"output_type": "execute_result",
|
||||||
|
"data": {
|
||||||
|
"text/plain": [
|
||||||
|
"<keras.src.callbacks.history.History at 0x79c8cdfb5110>"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"metadata": {},
|
||||||
|
"execution_count": 15
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
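The notebook stops after training; the model is never queried. One possible next step, sketched under the assumption that `model`, `words`, `classes`, `data`, and `lemmatizer` from the cells above are still in scope (the `encode` and `respond` helper names are hypothetical):

```python
import random
import numpy as np

def encode(sentence, vocabulary, lemmatizer):
    # Same substring-style encoding as the training cell, for consistency.
    text = lemmatizer.lemmatize(sentence.lower())
    return np.array([[1 if w in text else 0 for w in vocabulary]])

def respond(sentence):
    probs = model.predict(encode(sentence, words, lemmatizer), verbose=0)[0]
    tag = classes[int(np.argmax(probs))]
    for intent in data["intents"]:
        if intent["tag"] == tag:
            return random.choice(intent["responses"])
    return "Sorry, can't understand you"

# print(respond("How old are you"))
```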
|