diff --git a/.gitignore b/.gitignore
index 948a4f4..7cfbc19 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,3 @@
 config.ini
+venv/
 __pycache__
diff --git a/functions.py b/functions.py
index 6778156..ba30a40 100644
--- a/functions.py
+++ b/functions.py
@@ -1,4 +1,4 @@
-import scraper_functions as sf
+import ollama
 
 def processmsg(msg, rcpt):
     if "youtube.com/watch" in msg:
@@ -8,4 +8,12 @@ def processmsg(msg, rcpt):
 
 def command(msg, rcpt):
     if msg.startswith("!help"):
-        return "chatbot commands: \n" + "!help Show this help page"
+        response = "chatbot commands: \n"
+        response += "!help Show this help page"
+        response += "!ai [message] Ask llama2"
+        return response
+    elif msg.startswith("!ai"):
+        client = ollama.Client(host='https://ollama.krov.dmz.rs')
+        response = client.chat(model='llama2-uncensored:latest', messages=[{'role':'user','content':f'{msg[4:]}'}])
+        return(response['message']['content'])
+
diff --git a/requirements.txt b/requirements.txt
index f8acc82..e1e7e30 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,3 +1,2 @@
 slixmpp
-requests
-beautifulsoup4
+ollama
diff --git a/scraper_functions.py b/scraper_functions.py
deleted file mode 100644
index a6f3891..0000000
--- a/scraper_functions.py
+++ /dev/null
@@ -1,9 +0,0 @@
-import requests
-from bs4 import BeautifulSoup
-
-def query_external_website(base_url, query):
-    page = requests.get(base_url + query)
-    soup = BeautifulSoup(page.content, "html.parser")
-    title = soup.select(".mw-page-title-main")[0]
-    content = soup.find(id="bodyContent").select("p")[2].text
-    return "\nTITLE: " + title.text + "\n\n" + "CONTENT:" + "\n" + content + "\n\n" + "FULL LINK:\n" + base_url + query
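
For reference, a minimal sketch of how the new "!ai" branch in command() could be exercised outside the XMPP bot. This assumes functions.py as patched above, the ollama package installed, and the Ollama server at https://ollama.krov.dmz.rs being reachable; the sender address and prompt text are made-up placeholders, not part of the patch.

    # Sketch: call the patched command() handler directly.
    # Requires the ollama package and network access to the host
    # configured in functions.py (https://ollama.krov.dmz.rs).
    import functions

    # "!help" now lists the new "!ai" command in its response.
    print(functions.command("!help", "user@example.org"))

    # "!ai <message>" strips the "!ai " prefix (msg[4:]) and sends the
    # remainder to the llama2-uncensored model, returning its reply text.
    print(functions.command("!ai What is XMPP?", "user@example.org"))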