Compare commits

dc1e0d42af...task_manag (30 commits)

Commits (SHA1):
97d613df58, 1e56a84a4c, c011383f0e, 544f8052e9, 161abdf32e, a256bc277d,
d4d14806db, 6df3c82a7e, 41e38ef80f, 0813460e8b, 4ca01a868d, 1795a87c4b,
9704474c29, 9c63ada6e5, 5b4ae05582, 9c76ea38ce, 79f9c3ec02, 834934fccd,
dd371e3326, 252a7fbdba, 70b2c9e322, 4bb860b818, bc2e7422f5, e89201cb4b,
c2286ae0a6, a8b9850be5, 6b3a3853c3, 08f958ff1e, 21357a9b71, ab863f03a3
.gitignore (vendored, 1 change)

@@ -1,2 +1,3 @@
config.ini
venv/
__pycache__
@@ -7,3 +7,6 @@ sudo apt install python3-slixmpp

## Install dependencies with pip
pip install -r requirements.txt

## Setup
Create `config.ini` based on the `config.ini.example`, with your credentials
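The setup step expects a `config.ini` next to the bot, but `config.ini.example` itself is not part of this compare, so the section and key names below are only assumptions about what a slixmpp bot typically needs. A minimal sketch of loading such a file with Python's `configparser`:

```python
# Hypothetical layout: the real config.ini.example is not shown in this diff,
# so the [xmpp] section and the jid/password/room keys are assumed names.
import configparser

config = configparser.ConfigParser()
config.read("config.ini")

jid = config["xmpp"]["jid"]            # bot account, e.g. bot@example.org
password = config["xmpp"]["password"]  # account password
room = config["xmpp"]["room"]          # MUC room the bot should join
```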
functions.py (32 changes)
@@ -1,5 +1,29 @@
 import ollama
+import scraper_functions as sf

 def processmsg(msg, rcpt):
-    if msg.startswith("!"):
-        return ""
-    elif "youtube.com/watch" in msg:
-        return msg.replace("youtube.com", "iv.datura.network")
+    if "youtube.com/watch" in msg:
+        return msg.replace("youtube.com", "iv.datura.network")
+    elif msg.startswith("!"):
+        return command(msg, "")
+    elif "good bot" in msg:
+        return "^_^"
+
+def command(msg, rcpt):
+    if msg.startswith("!help"):
+        response = "chatbot commands:" + "\n"
+        response += "!help Show this help page" + "\n"
+        response += "!ai [message] Ask llama2" + "\n"
+        response += "!wiki [message] Ask wiki"
+        return response
+    elif msg.startswith("!ai"):
+        client = ollama.Client(host='https://ollama.krov.dmz.rs')
+        response = client.chat(model='llama2-uncensored:latest', messages=[{'role':'user','content':f'{msg[4:]}'}])
+        return(response['message']['content'])
+    elif msg.startswith("!wiki"):
+        cmd, query = msg.split(" ", 1)
+        return sf.query_external_website("https://en.wikipedia.org", "/wiki/" + query)
+    elif msg.startswith("!tasks"):
+        content = sf.getDmzTasks()
+        return content
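With this change, `processmsg` rewrites YouTube links to the `iv.datura.network` Invidious instance, answers "good bot", and forwards any message starting with `!` to `command`. A minimal sketch of the dispatch, assuming the new `functions.py` is on the import path; the expected outputs in the comments follow from the string handling above, not from captured bot logs:

```python
import functions

# YouTube links are rewritten to the Invidious mirror
print(functions.processmsg("see https://youtube.com/watch?v=abc123", ""))
# -> "see https://iv.datura.network/watch?v=abc123"

# Messages starting with "!" are routed to command(); "!help" needs no network
print(functions.processmsg("!help", ""))

# "!wiki <term>" and "!ai <prompt>" additionally need network access to
# en.wikipedia.org and https://ollama.krov.dmz.rs respectively.
```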
@@ -1 +1,4 @@
 slixmpp
+ollama
+requests
+beautifulsoup4
scraper_functions.py (new file, 33 additions)
@@ -0,0 +1,33 @@
+import requests
+from bs4 import BeautifulSoup
+from urllib.parse import quote
+
+def query_external_website(base_url, query):
+    try:
+        page = requests.get(base_url + quote(query))
+        soup = BeautifulSoup(page.content, "html.parser")
+        title = soup.find(id="firstHeading").text
+        mainContentElement = soup.find(id="mw-content-text")
+        if "This page is a redirect" in mainContentElement.text:
+            redirectLink = mainContentElement.find(class_="redirectMsg").find_all("a")[0]["href"]
+            return query_external_website(base_url, redirectLink)
+        content = next((paragraph for paragraph in mainContentElement.select("p") if not paragraph.has_attr("class")), None)
+        if content == None:
+            raise Exception("Can't parse search result :(")
+        return "\nTITLE:\n" + title + "\n\nCONTENT:\n" + content.text + "\n\nFULL LINK:\n" + base_url + quote(query)
+    except Exception as e:
+        return e
+
+def getDmzTasks():
+    try:
+        page = requests.get("https://todo.dmz.rs/")
+        soup = BeautifulSoup(page.content, "html.parser")
+        tasks = soup.find_all(class_="task")
+        result = "\nActive tasks:\n"
+        for task in tasks:
+            taskIndex = task.select("div")[0].text
+            taskTitle = task.select("div")[1].text
+            result += taskIndex + " " + taskTitle + "\n"
+        return result
+    except Exception as e:
+        return e
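Both helpers return a string on success and return (rather than raise) the caught exception on failure, so the caller always gets something printable to send back to the chat. A minimal usage sketch, assuming network access to the two sites; exact output depends on the live pages:

```python
import scraper_functions as sf

# First non-styled paragraph of a Wikipedia article; MediaWiki redirect pages
# are followed by recursing on the redirect target's href.
print(sf.query_external_website("https://en.wikipedia.org", "/wiki/XMPP"))

# Active tasks scraped from https://todo.dmz.rs/ (each .task element's first
# two <div>s are treated as index and title).
print(sf.getDmzTasks())
```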