Como crear un chatbot con keras para Telegram – Telegram Group

Como crear un chatbot con keras para Telegram

Primeros Pasos

$ python -m venv venv/
$ source venv/bin/activate
(venv) $ pip install python-telegram-bot
(venv) $ pip install tensorflow
(venv) $ pip install nltk
.
├── app.py
├── telebot
│ ├── credentials.py
│ | .
│ | Aqui puedes agregar los archivos de entrenamiento
│ | .
│ └──
└── venv
# telebot/credentials.py — bot credentials.
# Fixes: "bor_token" typo, and ':' (a bare annotation, which never binds the
# name at runtime) replaced with '=' so `bot_username` actually exists.
bot_token = "El token que te dio BotFather"
bot_username = "El username que le pusiste"
# train.py — setup: NLTK resources, imports, and the intents file.
import json
import pickle
import random

import nltk
import numpy as np
from nltk.stem import WordNetLemmatizer
from tensorflow.keras.layers import Activation, Dense, Dropout
from tensorflow.keras.models import Sequential
from tensorflow.keras.optimizers import SGD

# Tokenizer model and WordNet corpus needed by word_tokenize / the lemmatizer.
nltk.download('punkt')
nltk.download('wordnet')

lemmatizer = WordNetLemmatizer()

words = []                  # vocabulary: every lemmatized token from the patterns
classes = []                # intent tags
documents = []              # (tokenized pattern, tag) pairs
ignore_words = ['?', '!']   # punctuation excluded from the vocabulary

# Load the intents; `with` closes the file handle (the original leaked it).
with open('q&a.json', encoding='utf-8') as data_file:
    intents = json.load(data_file)
{"intents":[
{"tag": "saludos",
"patterns":["Hola", "hola", "que onda", "como estas","Como Estas", "alguien ahi?", "buenos dias", "buenas"],
"responses": ["Hola, como estas?", "Hay dias en lo que simplemente existo", "Hola, como te encuentras hoy?"],
"context": [""]
},
{"tag": "despedida",
"patterns":["Adios", "adios", "nos vemos", "buena platica, bye", "bye", "hasta luego", "bai", "buenas noches"],
"responses": ["nos vemos!", "Que te vaya bien!", "Hasta la vista"],
"context": [""]
},
{"tag": "noanswer",
"patterns":[],
"responses": ["Lo siento, pero no entiendo tu pregunta", "Necesito más informacion", "No te estoy entendiendo"],
"context": [""]
},
{"tag": "frase william",
"patterns":["que piensas de lo bueno y lo malo?"],
"responses": ["No existe nada bueno ni malo; es el pensamiento humano el que lo hace parecer asi\\. \n \\-William Shakespeare"],
"context":[""]
},
{"tag":"biografia mileto",
"patterns":["quien era tales de mileto?"],
"responses": ["Tales de Mileto fue de los primeros pensadores en tener registros, ya que se cree que vivio entre 624\\-546 a\\.C\\. en Mileto, en la que hoy conocemos como Turquia\\. Aunque no se tenga ninguno de sus escritos, si es que existen, se le considera un gran pilar debido a sus menciones por parte de Aristoteles y Diógenes\\."],
"context":[""]
},
{"tag":"frase mileto",
"patterns":["de que esta compuesto la vida?"],
"responses": ["Mmm debe ser algo esencial para la vida, algo a partir de lo que pueda formarse todo, algo que se transforma\\. Todo es agua\\. \n \\-Tales de Mileto"],
"context":[""]
}
]
}
# Build the vocabulary and class list from the loaded intents.
for intent in intents['intents']:
    for pattern in intent['patterns']:
        # Tokenize each training phrase.
        w = nltk.word_tokenize(pattern)
        words.extend(w)
        # Keep the (tokenized pattern, tag) pair for the training matrix.
        documents.append((w, intent['tag']))
        # Register each tag once.
        if intent['tag'] not in classes:
            classes.append(intent['tag'])

# Lemmatize, lowercase and deduplicate; drop the ignored punctuation.
words = [lemmatizer.lemmatize(w.lower()) for w in words if w not in ignore_words]
words = sorted(set(words))
classes = sorted(set(classes))

# Sanity-check the preprocessed data.
print(len(documents), "documents")
print(len(classes), "classes", classes)
print(len(words), "unicas lemmatized words", words)

# Persist vocabulary and classes for the bot to load at inference time.
# `with` closes the handles (the original left them open).
with open('words.pkl', 'wb') as f:
    pickle.dump(words, f)
with open('classes.pkl', 'wb') as f:
    pickle.dump(classes, f)
# Build the bag-of-words training matrix.
training = []
output_empty = [0] * len(classes)   # one-hot template for the output layer

for doc in documents:
    # Lemmatize/lowercase the pattern words so they match the vocabulary.
    pattern_words = [lemmatizer.lemmatize(word.lower()) for word in doc[0]]
    # Binary bag-of-words vector over the full vocabulary.
    bag = [1 if w in pattern_words else 0 for w in words]
    # One-hot target row: 1 at the index of this document's tag.
    output_row = list(output_empty)
    output_row[classes.index(doc[1])] = 1
    training.append([bag, output_row])

# Shuffle so batches are not grouped by intent.
random.shuffle(training)
# Split inputs/targets with comprehensions instead of np.array(training)[:, i]:
# the bag and output_row lengths differ, so np.array(training) would build a
# ragged object array (an error on modern NumPy).
train_x = [item[0] for item in training]
train_y = [item[1] for item in training]
print("training data ceated")
# Three-layer feed-forward classifier: bag-of-words in, intent probabilities out.
model = Sequential()
model.add(Dense(128, input_shape=(len(train_x[0]),), activation='relu'))
model.add(Dropout(0.5))   # regularization — the training set is tiny
model.add(Dense(64, activation='relu'))
model.add(Dropout(0.5))
# Softmax over the intent classes (stray trailing comma inside len() removed).
model.add(Dense(len(train_y[0]), activation='softmax'))
print(model.summary())

# SGD with Nesterov momentum; `learning_rate` replaces the deprecated `lr` kwarg.
sgd = SGD(learning_rate=0.01, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])

# Train and save. The original passed `hist` as Model.save's second positional
# argument (`overwrite`) — truthy, so behavior is unchanged by dropping it.
hist = model.fit(np.array(train_x), np.array(train_y), epochs=200, batch_size=5, verbose=1)
model.save('chatbot_model.h5')
print("model created")

Configuración del Bot

# app.py — Telegram bot: load the trained model and its artifacts.
import json
import logging
import pickle
import random

import nltk
import numpy as np
import telegram
from nltk.stem import WordNetLemmatizer
from telegram.ext import CommandHandler, Filters, MessageHandler, Updater
from tensorflow.keras.models import load_model

# Fix: the credentials module defines `bot_username`, not `bot_user_bot`.
from telebot.credentials import bot_token, bot_username

lemmatizer = WordNetLemmatizer()

# Trained model plus the vocabulary/classes pickled by the training script.
model = load_model('chatbot_model.h5')
with open('q&a.json', encoding='utf-8') as f:
    intents = json.load(f)
with open('words.pkl', 'rb') as f:
    words = pickle.load(f)
with open('classes.pkl', 'rb') as f:
    classes = pickle.load(f)

# Logging configuration.
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
                    level=logging.INFO)
logger = logging.getLogger()

# Bot token from credentials.
TOKEN = bot_token
def clean_up_sentence(sentence):
    """Tokenize *sentence* and lemmatize each word to lowercase."""
    sentence_words = nltk.word_tokenize(sentence)
    sentence_words = [lemmatizer.lemmatize(word.lower()) for word in sentence_words]
    return sentence_words


def bow(sentence, words, show_details=True):
    """Return a binary bag-of-words numpy vector for *sentence* over *words*."""
    sentence_words = clean_up_sentence(sentence)
    bag = [0] * len(words)
    for s in sentence_words:
        for i, w in enumerate(words):
            if w == s:
                bag[i] = 1
                if show_details:
                    print(f"found in bag: {w}")
    return np.array(bag)
def predict_class(sentence, model):
    """Predict intent tags for *sentence*, sorted by descending probability.

    Returns a list of {"intent": tag, "probability": str} dicts; predictions
    below ERROR_THRESHOLD are discarded, so the list may be empty.
    """
    p = bow(sentence, words, show_details=False)
    res = model.predict(np.array([p]))[0]
    ERROR_THRESHOLD = 0.25
    results = [[i, r] for i, r in enumerate(res) if r > ERROR_THRESHOLD]
    results.sort(key=lambda x: x[1], reverse=True)
    return_list = []
    for r in results:
        return_list.append({"intent": classes[r[0]], "probability": str(r[1])})
    return return_list


def getResponse(ints, intents_json):
    """Pick a random canned response for the top-ranked intent.

    Falls back to the 'noanswer' intent when *ints* is empty (no prediction
    cleared the confidence threshold) — the original raised IndexError there.
    """
    tag = ints[0]['intent'] if ints else 'noanswer'
    result = None
    for i in intents_json['intents']:
        if i['tag'] == tag:
            result = random.choice(i['responses'])
            break
    return result


def chatbot_response(msg):
    """End-to-end: classify *msg* and return a response string."""
    ints = predict_class(msg, model)
    res = getResponse(ints, intents)
    return res
def echo(update, context):
    """Message handler: reply to a user's text message with the chatbot answer."""
    user_id = update.effective_user['id']
    logger.info(f"El usuario {user_id}, a enviado un mensaje de texto.")
    text = update.message.text
    # Only answer non-empty messages.
    if text != '':
        res = chatbot_response(text)
        context.bot.send_message(
            chat_id=user_id,
            parse_mode="MarkdownV2",
            text=f"{res}")
# Wire the updater to the bot. Fix: the original passed `my_bot.token`, but
# `my_bot` is never defined — the token loaded from credentials is `TOKEN`.
updater = Updater(TOKEN, use_context=True)
# Dispatcher and handlers: every text message goes to `echo`.
dp = updater.dispatcher
dp.add_handler(MessageHandler(Filters.text, echo))
updater.start_polling()
print("Bot cargado")
updater.idle()  # stop the bot with Ctrl+C
$ python app.py

GitHub – GerardoLeyvaConde/FilosoraptorBot: Chatbot de Telegram que sirve como enciplopedia de…

You can't perform that action at this time. You signed in with another tab or window. You signed out in another tab or…

github.com

Ten articles before and after

Telegram HTTP API With Python — Sending Messages Programatically – Telegram Group

How to keep content in Telegram channels ‘members-only’ – Telegram Group

5 Reasons Why You Should Change WhatsApp for Telegram – Telegram Group

Unmarshal Telegram Genie Bot. Unmarshal is proud to release the… – Telegram Group

HODLing in Bear Market (DAO). “When the going gets tough , the though… – Telegram Group

Come triangolare utenti Telegram attraverso l’uso di strumenti automatici – Telegram Group

Unboxing Cryptocurrencies — Getting Started – Telegram Group

Telegram — not the app 🙂 , the yesteryear’s service that was used in India – Telegram Group

data-rh=”true”>Scrypt-Adaptive-Nfactor – Everscale – Telegram Group

How much to save for bitcoin?. Everyone has thought about purchasing a… – Telegram Group