Compare commits

..

No commits in common. "master" and "task_manager" have entirely different histories.

3 changed files with 12 additions and 59 deletions

View File

@@ -1,12 +0,0 @@
# Maps big-tech site URL fragments to tuples of privacy-respecting frontend
# hosts. Per the bot code shown later in this diff, a matching fragment in a
# message is replaced with one of these hosts (chosen via random.choice) to
# produce a "libre link".
FRONTENDS = {
"www.youtube.com/watch": ("iv.datura.network/watch", "inv.tux.pizza/watch"),
"youtube.com/watch": ("iv.datura.network/watch", "inv.tux.pizza/watch"),
"medium.com" : ("scribe.rip", "sc.vern.cc", "m.opnxng.com"),
"stackoverflow.com": ("code.whatever.social", "ao.vern.cc", "overflow.smnz.de"),
"instagram.com": ("bibliogram.1d4.us", "bibliogram.froth.zone", "ig.opnxng.com", "proxigram.lunar.icu"),
"genius.com": ("dm.vern.cc", "dumb.lunar.icu", "dumb.esmailelbob.xyz"),
"reddit.com":("eu.safereddit.com", "l.opnxng.com", "libreddit.bus-hit.me"),
"www.imdb.com": ("libremdb.iket.me", "ld.vern.cc", "binge.whatever.social"),
"twitter.com": ("n.opnxng.com", "nitter.1d4.us", "nitter.adminforge.de"),
"wikipedia.com": ("wiki.adminforge.de", "wiki.froth.zone", "wikiless.esmailelbob.xyz")
}

View File

@@ -1,25 +1,20 @@
import ollama import ollama
import scraper_functions as sf import scraper_functions as sf
import random
from frontends import FRONTENDS
def processmsg(msg, rcpt): def processmsg(msg, rcpt):
if msg.startswith("!"): if "youtube.com/watch" in msg:
return msg.replace("youtube.com", "iv.datura.network")
elif msg.startswith("!"):
return command(msg, "") return command(msg, "")
elif "good bot" in msg: elif "good bot" in msg:
return "^_^" return "^_^"
for big_tech_site in FRONTENDS:
if big_tech_site in msg:
return "libre link: " + msg.replace(big_tech_site, random.choice(FRONTENDS[big_tech_site]))
def command(msg, rcpt): def command(msg, rcpt):
if msg.startswith("!help"): if msg.startswith("!help"):
response = "chatbot commands:" + "\n" response = "chatbot commands:" + "\n"
response += "!help Show this help page" + "\n" response += "!help Show this help page" + "\n"
response += "!ai [message] Ask llama2" + "\n" response += "!ai [message] Ask llama2" + "\n"
response += "!wiki [message] Ask wiki\n" response += "!wiki [message] Ask wiki"
response += "!tasks Show active tasks from the taskmanager\n"
response += "!vreme [city] | !prognoza [city] | !weather [city] Show weather for [city]\n"
return response return response
elif msg.startswith("!ai"): elif msg.startswith("!ai"):
client = ollama.Client(host='https://ollama.krov.dmz.rs') client = ollama.Client(host='https://ollama.krov.dmz.rs')
@@ -29,12 +24,6 @@ def command(msg, rcpt):
cmd, query = msg.split(" ", 1) cmd, query = msg.split(" ", 1)
return sf.query_external_website("https://en.wikipedia.org", "/wiki/" + query) return sf.query_external_website("https://en.wikipedia.org", "/wiki/" + query)
elif msg.startswith("!tasks"): elif msg.startswith("!tasks"):
content = sf.getDmzTasks("https://todo.dmz.rs/") content = sf.getDmzTasks()
return content return content
elif msg.startswith("!vreme") or msg.startswith("!prognoza") or msg.startswith("!weather"):
commandsplit = msg.split(" ", 1)
if len(commandsplit) == 1:
return sf.get_weather("Beograd")
else:
query = commandsplit[1]
return sf.get_weather(query)

View File

@@ -2,14 +2,10 @@ import requests
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from urllib.parse import quote from urllib.parse import quote
def getSoup(base_url, query = ""):
page = requests.get(base_url + quote(query))
soup = BeautifulSoup(page.content, "html.parser")
return soup
def query_external_website(base_url, query): def query_external_website(base_url, query):
try: try:
soup = getSoup(base_url, query) page = requests.get(base_url + quote(query))
soup = BeautifulSoup(page.content, "html.parser")
title = soup.find(id="firstHeading").text title = soup.find(id="firstHeading").text
mainContentElement = soup.find(id="mw-content-text") mainContentElement = soup.find(id="mw-content-text")
if "This page is a redirect" in mainContentElement.text: if "This page is a redirect" in mainContentElement.text:
@@ -22,36 +18,16 @@ def query_external_website(base_url, query):
except Exception as e: except Exception as e:
return e return e
def getDmzTasks(url): def getDmzTasks():
try: try:
soup = getSoup(url) page = requests.get("https://todo.dmz.rs/")
soup = BeautifulSoup(page.content, "html.parser")
tasks = soup.find_all(class_="task") tasks = soup.find_all(class_="task")
result = "\nActive tasks:\n" result = "\nActive tasks:\n"
for task in tasks: for task in tasks:
taskIndex = task.select("div")[0].text taskIndex = task.select("div")[0].text
taskTitle = task.select("div")[1].text taskTitle = task.select("div")[1].text
result += taskIndex + " " + taskTitle result += taskIndex + " " + taskTitle + "\n"
taskSoup = getSoup(url + task.find("a")["href"][1:])
description = taskSoup.find("main").select("section")[0].find("p").text
result += "\n\tDescription:\n" + "\t\t" + description + "\n"
result += "\tAssigned users:\n" + "\t\t"
assignedUsers = taskSoup.find_all(class_="user-info-wrap")
if len(assignedUsers) == 0:
result += "None! Be the first :)\n"
result += "\tLink: " + url + task.find("a")["href"][1:] + "\n\n"
continue
usersList = ""
for user in assignedUsers:
usersList += user.find("div").text.split(": ")[1] + ", "
result += usersList[:-2] + "\n"
result += "\tLink: " + url + task.find("a")["href"][1:] + "\n\n"
return result return result
except Exception as e: except Exception as e:
return e return e
def get_weather(city:str) -> str:
url = f"https://wttr.in/{city}?format=4"
if not city.replace(" ","").isalpha():
return "no such city"
resp = requests.get(url)
return resp.content.decode("utf-8").strip()