Compare commits
17 Commits
task_manag...master
72c474e97f
d52b4ac5e5
690239bb86
b416e04a9a
f2b288fa30
145114320c
4485d15740
b4b4d9797d
305fc1405a
ea6ba7464c
328329eb9a
63044c545f
5382b876e2
3d80517a6f
d08e8199f8
adb4a25d25
dab4e41de0
frontends.py (new file)

@@ -0,0 +1,12 @@
+FRONTENDS = {
+    "www.youtube.com/watch": ("iv.datura.network/watch", "inv.tux.pizza/watch"),
+    "youtube.com/watch": ("iv.datura.network/watch", "inv.tux.pizza/watch"),
+    "medium.com": ("scribe.rip", "sc.vern.cc", "m.opnxng.com"),
+    "stackoverflow.com": ("code.whatever.social", "ao.vern.cc", "overflow.smnz.de"),
+    "instagram.com": ("bibliogram.1d4.us", "bibliogram.froth.zone", "ig.opnxng.com", "proxigram.lunar.icu"),
+    "genius.com": ("dm.vern.cc", "dumb.lunar.icu", "dumb.esmailelbob.xyz"),
+    "reddit.com": ("eu.safereddit.com", "l.opnxng.com", "libreddit.bus-hit.me"),
+    "www.imdb.com": ("libremdb.iket.me", "ld.vern.cc", "binge.whatever.social"),
+    "twitter.com": ("n.opnxng.com", "nitter.1d4.us", "nitter.adminforge.de"),
+    "wikipedia.com": ("wiki.adminforge.de", "wiki.froth.zone", "wikiless.esmailelbob.xyz")
+}
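Each FRONTENDS key is a big-tech host (or host/path prefix) and each value is a tuple of privacy-respecting mirrors. Note that the more specific "www.youtube.com/watch" key is listed before "youtube.com/watch", so it wins under Python's insertion-ordered dict iteration. A minimal sketch of how such a table can be consumed (the rewrite_link helper is illustrative, not part of this repo):

import random
from frontends import FRONTENDS

def rewrite_link(msg):
    # Scan the table in insertion order and swap the first matching
    # host for a randomly chosen mirror; None means nothing matched.
    for site, mirrors in FRONTENDS.items():
        if site in msg:
            return msg.replace(site, random.choice(mirrors))
    return None

# rewrite_link("https://medium.com/@someone/post") might yield
# "https://scribe.rip/@someone/post" (the mirror is picked at random)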
functions.py

@@ -1,20 +1,25 @@
 import ollama
 import scraper_functions as sf
+import random
+from frontends import FRONTENDS

 def processmsg(msg, rcpt):
-    if "youtube.com/watch" in msg:
-        return msg.replace("youtube.com", "iv.datura.network")
-    elif msg.startswith("!"):
+    if msg.startswith("!"):
         return command(msg, "")
     elif "good bot" in msg:
         return "^_^"
+    for big_tech_site in FRONTENDS:
+        if big_tech_site in msg:
+            return "libre link: " + msg.replace(big_tech_site, random.choice(FRONTENDS[big_tech_site]))

 def command(msg, rcpt):
     if msg.startswith("!help"):
         response = "chatbot commands:" + "\n"
         response += "!help Show this help page" + "\n"
         response += "!ai [message] Ask llama2" + "\n"
-        response += "!wiki [message] Ask wiki"
+        response += "!wiki [message] Ask wiki\n"
+        response += "!tasks Show active tasks from the taskmanager\n"
+        response += "!vreme [city] | !prognoza [city] | !weather [city] Show weather for [city]\n"
         return response
     elif msg.startswith("!ai"):
         client = ollama.Client(host='https://ollama.krov.dmz.rs')
@@ -24,6 +29,12 @@ def command(msg, rcpt):
         cmd, query = msg.split(" ", 1)
         return sf.query_external_website("https://en.wikipedia.org", "/wiki/" + query)
     elif msg.startswith("!tasks"):
-        content = sf.getDmzTasks()
+        content = sf.getDmzTasks("https://todo.dmz.rs/")
         return content
+    elif msg.startswith("!vreme") or msg.startswith("!prognoza") or msg.startswith("!weather"):
+        commandsplit = msg.split(" ", 1)
+        if len(commandsplit) == 1:
+            return sf.get_weather("Beograd")
+        else:
+            query = commandsplit[1]
+            return sf.get_weather(query)
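With these changes, processmsg() dispatches "!"-prefixed text to command(), answers "good bot", and rewrites any message containing a FRONTENDS key; everything else falls through and returns None. A quick interactive check, assuming the bot's dependencies (ollama, requests, bs4) are installed; rcpt is unused by these paths, and the chosen mirror varies because of random.choice:

from functions import processmsg

print(processmsg("!help", ""))       # the command list built above
print(processmsg("good bot", ""))    # "^_^"
print(processmsg("https://twitter.com/some_user", ""))
# e.g. "libre link: https://nitter.1d4.us/some_user"
print(processmsg("!vreme", ""))      # weather for the default city, Beograd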
scraper_functions.py

@@ -2,10 +2,14 @@ import requests
 from bs4 import BeautifulSoup
 from urllib.parse import quote

-def query_external_website(base_url, query):
-    try:
-        page = requests.get(base_url + quote(query))
-        soup = BeautifulSoup(page.content, "html.parser")
+def getSoup(base_url, query=""):
+    page = requests.get(base_url + quote(query))
+    soup = BeautifulSoup(page.content, "html.parser")
+    return soup
+
+def query_external_website(base_url, query):
+    try:
+        soup = getSoup(base_url, query)
         title = soup.find(id="firstHeading").text
         mainContentElement = soup.find(id="mw-content-text")
         if "This page is a redirect" in mainContentElement.text:
@@ -18,16 +22,36 @@ def query_external_website(base_url, query):
     except Exception as e:
         return e

-def getDmzTasks():
+def getDmzTasks(url):
     try:
-        page = requests.get("https://todo.dmz.rs/")
-        soup = BeautifulSoup(page.content, "html.parser")
+        soup = getSoup(url)
         tasks = soup.find_all(class_="task")
         result = "\nActive tasks:\n"
         for task in tasks:
             taskIndex = task.select("div")[0].text
             taskTitle = task.select("div")[1].text
-            result += taskIndex + " " + taskTitle + "\n"
+            result += taskIndex + " " + taskTitle
+            taskSoup = getSoup(url + task.find("a")["href"][1:])
+            description = taskSoup.find("main").select("section")[0].find("p").text
+            result += "\n\tDescription:\n" + "\t\t" + description + "\n"
+            result += "\tAssigned users:\n" + "\t\t"
+            assignedUsers = taskSoup.find_all(class_="user-info-wrap")
+            if len(assignedUsers) == 0:
+                result += "None! Be the first :)\n"
+                result += "\tLink: " + url + task.find("a")["href"][1:] + "\n\n"
+                continue
+            usersList = ""
+            for user in assignedUsers:
+                usersList += user.find("div").text.split(": ")[1] + ", "
+            result += usersList[:-2] + "\n"
+            result += "\tLink: " + url + task.find("a")["href"][1:] + "\n\n"
         return result
     except Exception as e:
         return e
+
+def get_weather(city: str) -> str:
+    url = f"https://wttr.in/{city}?format=4"
+    if not city.replace(" ", "").isalpha():
+        return "no such city"
+    resp = requests.get(url)
+    return resp.content.decode("utf-8").strip()
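get_weather() leans on wttr.in's one-line format=4 output, and getDmzTasks() follows each task's relative href (the [1:] strips the leading "/" so it appends cleanly to a url that ends in "/"). A usage sketch; the weather text depends on the live wttr.in service and is illustrative only:

from scraper_functions import get_weather, getDmzTasks

print(get_weather("Beograd"))
# e.g. "Beograd: ⛅️ +7°C ↓19km/h" (format=4 is a one-line summary)
print(get_weather("x123"))   # "no such city": digits fail the isalpha() check
print(getDmzTasks("https://todo.dmz.rs/"))
# tab-indented task list with descriptions, assignees, and links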