Compare commits

...

31 Commits

Author SHA1 Message Date
72c474e97f
add wind speed for !vreme 2024-03-04 14:44:23 +01:00
d52b4ac5e5
fix !vreme for spaces in cities 2024-03-04 11:45:57 +01:00
690239bb86
fix typo in !vreme 2024-03-04 11:32:37 +01:00
b416e04a9a
fix !vreme issue with wrong number of arguments 2024-03-04 11:28:32 +01:00
f2b288fa30
fix youtube link change 2024-02-18 15:44:12 +01:00
145114320c
add www to youtube in frontends 2024-02-09 12:52:20 +01:00
4485d15740 Merge pull request 'Frontendi dodatni' (#3) from dev into master
Reviewed-on: #3
2024-02-09 11:16:50 +00:00
b4b4d9797d Merge pull request 'Dodato jos frontova' (#2) from svitvojimilioni/chatbot:libre-rewrite into dev
Reviewed-on: #2
2024-02-09 11:16:03 +00:00
305fc1405a Dodao frontende za vise servisa
- frontendi su u frontends.py fajlu
- ima vise instanci da ne bi stalno slao na jednu, da nema centralizacije ;)
2024-02-07 20:04:59 -05:00
ea6ba7464c Dodao frontende za vise servisa
- frontendi su u frontends.py fajlu
- ima vise instanci da ne bi stalno slao na jednu, da nema centralizacije ;)
2024-02-07 19:57:36 -05:00
328329eb9a Dorada za vreme funckiju
- samo slova smeju u ime grada
- dodao i englesku verziju `!weather`
2024-02-07 17:47:36 -05:00
63044c545f Dodao funkciju za vreme
Upotreba: !vreme <ime_grada>
- koristi wttr.in api
- vraca liniju jednu, lakse je nego da se parsira json
- reaguje i na prognoza
2024-02-07 17:21:54 -05:00
t3xhno
5382b876e2 Added \!tasks to \!help 2024-02-06 22:18:44 +01:00
t3xhno
3d80517a6f Add link to tasks that have users assigned 2024-02-06 22:08:43 +01:00
t3xhno
d08e8199f8 Added more info for dmz tasks 2024-02-06 22:06:06 +01:00
t3xhno
adb4a25d25 Formatting 2024-02-06 21:27:55 +01:00
t3xhno
dab4e41de0 Added link to tasks 2024-02-06 21:24:04 +01:00
t3xhno
97d613df58 Added tasks integration 2024-02-06 21:17:49 +01:00
t3xhno
1e56a84a4c Added required positional argument placeholder 2024-02-06 21:05:02 +01:00
c011383f0e
fix wiki commands 2024-02-06 20:51:31 +01:00
544f8052e9
fix wiki placing it into command functions 2024-02-06 20:48:08 +01:00
t3xhno
161abdf32e Better wiki redirect 2024-02-06 11:39:10 +01:00
a256bc277d Better heading selector 2024-02-06 03:46:17 +01:00
d4d14806db Raise exception on None content 2024-02-06 03:37:26 +01:00
6df3c82a7e Get first available paragraph from query 2024-02-06 03:34:43 +01:00
41e38ef80f Correct url link from multiword wiki query 2024-02-06 02:58:45 +01:00
0813460e8b Merge branch 'master' of ssh://gitea.dmz.rs:2222/Decentrala/chatbot 2024-02-06 02:49:46 +01:00
4ca01a868d Multiword wiki query added 2024-02-06 02:49:40 +01:00
1795a87c4b
add wiki to help 2024-02-06 02:47:01 +01:00
9704474c29 Merge branch 'master' of ssh://gitea.dmz.rs:2222/Decentrala/chatbot 2024-02-06 02:38:46 +01:00
5b4ae05582 Merge pull request 'scraper_functions' (#1) from scraper_functions into master
Reviewed-on: #1
2024-02-06 01:24:57 +00:00
3 changed files with 88 additions and 13 deletions

12
frontends.py Normal file
View File

@ -0,0 +1,12 @@
# Map of mainstream ("big tech") URL prefixes to tuples of privacy-friendly
# frontend instances. processmsg() rewrites a matching link by replacing the
# key with a randomly chosen instance — several instances are listed so the
# load is spread and no single instance is a central point of failure.
# NOTE(review): the key "wikipedia.com" may be intended as "wikipedia.org" —
# shared Wikipedia links normally use .org, so .org links won't match; verify.
FRONTENDS = {
    "www.youtube.com/watch": ("iv.datura.network/watch", "inv.tux.pizza/watch"),
    "youtube.com/watch": ("iv.datura.network/watch", "inv.tux.pizza/watch"),
    "medium.com" : ("scribe.rip", "sc.vern.cc", "m.opnxng.com"),
    "stackoverflow.com": ("code.whatever.social", "ao.vern.cc", "overflow.smnz.de"),
    "instagram.com": ("bibliogram.1d4.us", "bibliogram.froth.zone", "ig.opnxng.com", "proxigram.lunar.icu"),
    "genius.com": ("dm.vern.cc", "dumb.lunar.icu", "dumb.esmailelbob.xyz"),
    "reddit.com":("eu.safereddit.com", "l.opnxng.com", "libreddit.bus-hit.me"),
    "www.imdb.com": ("libremdb.iket.me", "ld.vern.cc", "binge.whatever.social"),
    "twitter.com": ("n.opnxng.com", "nitter.1d4.us", "nitter.adminforge.de"),
    "wikipedia.com": ("wiki.adminforge.de", "wiki.froth.zone", "wikiless.esmailelbob.xyz")
}

View File

@ -1,22 +1,40 @@
import ollama
import scraper_functions as sf
import random
from frontends import FRONTENDS
def processmsg(msg, rcpt):
    """Route one incoming chat message and return the reply text (or None).

    Messages starting with "!" are delegated to command(); "good bot" gets a
    smiley; otherwise the message is scanned for known big-tech links and the
    first match is rewritten to a libre frontend. Returns None when the bot
    has nothing to say.
    """
    # NOTE(review): the pre-diff branches (hard-coded youtube replace and the
    # old "!wiki" handling) were superseded by command() and the FRONTENDS
    # table; this body is the reconstructed post-diff version.
    if msg.startswith("!"):
        return command(msg, "")
    elif "good bot" in msg:
        return "^_^"
    # Rewrite the first matching big-tech domain with a randomly chosen
    # frontend instance (random choice spreads load across instances).
    for big_tech_site in FRONTENDS:
        if big_tech_site in msg:
            return "libre link: " + msg.replace(big_tech_site, random.choice(FRONTENDS[big_tech_site]))
def command(msg, rcpt):
    """Execute a "!" chat command and return the reply string.

    Supported commands: !help, !ai, !wiki, !tasks, and the weather trio
    !vreme / !prognoza / !weather. `rcpt` is currently unused but kept for
    interface compatibility with processmsg().
    """
    if msg.startswith("!help"):
        response = "chatbot commands:" + "\n"
        response += "!help Show this help page" + "\n"
        response += "!ai [message] Ask llama2" + "\n"
        response += "!wiki [message] Ask wiki\n"
        response += "!tasks Show active tasks from the taskmanager\n"
        response += "!vreme [city] | !prognoza [city] | !weather [city] Show weather for [city]\n"
        return response
    elif msg.startswith("!ai"):
        # Ask the local ollama service; msg[4:] strips the "!ai " prefix.
        client = ollama.Client(host='https://ollama.krov.dmz.rs')
        response = client.chat(model='llama2-uncensored:latest', messages=[{'role':'user','content':f'{msg[4:]}'}])
        return(response['message']['content'])
    elif msg.startswith("!wiki"):
        # Guard against a bare "!wiki" (no query) — same wrong-argument-count
        # defect that was fixed for !vreme; unpacking would raise ValueError.
        commandsplit = msg.split(" ", 1)
        if len(commandsplit) == 1:
            return "Usage: !wiki [query]"
        return sf.query_external_website("https://en.wikipedia.org", "/wiki/" + commandsplit[1])
    elif msg.startswith("!tasks"):
        content = sf.getDmzTasks("https://todo.dmz.rs/")
        return content
    elif msg.startswith("!vreme") or msg.startswith("!prognoza") or msg.startswith("!weather"):
        commandsplit = msg.split(" ", 1)
        if len(commandsplit) == 1:
            # No city given — default to Beograd.
            return sf.get_weather("Beograd")
        else:
            query = commandsplit[1]
            return sf.get_weather(query)

View File

@ -1,12 +1,57 @@
import requests
from bs4 import BeautifulSoup
from urllib.parse import quote
def getSoup(base_url, query=""):
    """GET base_url + URL-encoded query and return the parsed HTML tree."""
    response = requests.get(base_url + quote(query))
    return BeautifulSoup(response.content, "html.parser")
def query_external_website(base_url, query):
    """Fetch a MediaWiki-style page and return a TITLE/CONTENT/LINK summary.

    Follows a single redirect page recursively, takes the first class-less
    <p> as the article lead, and formats everything into one string. On any
    failure the caught exception object itself is returned (callers
    stringify it when sending to chat) — kept for backward compatibility.
    """
    # NOTE(review): the pre-diff body (span/class_="mw-page-title-main",
    # hard-coded paragraph index) was removed when reconstructing this hunk.
    try:
        soup = getSoup(base_url, query)
        title = soup.find(id="firstHeading").text
        mainContentElement = soup.find(id="mw-content-text")
        # MediaWiki redirect pages: follow the first target link once.
        if "This page is a redirect" in mainContentElement.text:
            redirectLink = mainContentElement.find(class_="redirectMsg").find_all("a")[0]["href"]
            return query_external_website(base_url, redirectLink)
        # The first paragraph without a class attribute is the lead text.
        content = next((paragraph for paragraph in mainContentElement.select("p") if not paragraph.has_attr("class")), None)
        if content is None:
            raise Exception("Can't parse search result :(")
        return "\nTITLE:\n" + title + "\n\nCONTENT:\n" + content.text + "\n\nFULL LINK:\n" + base_url + quote(query)
    except Exception as e:
        return e
def getDmzTasks(url):
    """Scrape the DMZ task manager at *url* and format the open tasks.

    For each task card the index, title, description, assigned users and a
    direct link are folded into one human-readable string. On any scraping
    failure the caught exception object itself is returned (callers
    stringify it when sending to chat).
    """
    try:
        soup = getSoup(url)
        tasks = soup.find_all(class_="task")
        result = "\nActive tasks:\n"
        for task in tasks:
            # Card layout: first <div> is the index, second is the title.
            taskIndex = task.select("div")[0].text
            taskTitle = task.select("div")[1].text
            result += taskIndex + " " + taskTitle
            # Fetch the task's own page for details; [1:] strips the leading
            # "/" from the href so it concatenates cleanly onto url.
            taskSoup = getSoup(url + task.find("a")["href"][1:])
            description = taskSoup.find("main").select("section")[0].find("p").text
            result += "\n\tDescription:\n" + "\t\t" + description + "\n"
            result += "\tAssigned users:\n" + "\t\t"
            assignedUsers = taskSoup.find_all(class_="user-info-wrap")
            if len(assignedUsers) == 0:
                result += "None! Be the first :)\n"
                result += "\tLink: " + url + task.find("a")["href"][1:] + "\n\n"
                continue
            usersList = ""
            for user in assignedUsers:
                # Each wrap renders "Label: username"; keep the username part.
                usersList += user.find("div").text.split(": ")[1] + ", "
            result += usersList[:-2] + "\n"  # [:-2] drops the trailing ", "
            result += "\tLink: " + url + task.find("a")["href"][1:] + "\n\n"
        return result
    except Exception as e:
        return e
def get_weather(city: str) -> str:
    """Return a one-line weather report for *city* from wttr.in (format=4).

    City names may contain only letters and spaces; anything else yields
    the string "no such city" without touching the network.
    """
    # Validate before doing any network work; spaces allowed ("New York").
    if not city.replace(" ", "").isalpha():
        return "no such city"
    # Percent-encode the city so spaces / non-ASCII letters form a valid URL.
    url = f"https://wttr.in/{quote(city)}?format=4"
    # Bounded timeout so a slow upstream can't hang the bot indefinitely.
    resp = requests.get(url, timeout=10)
    return resp.content.decode("utf-8").strip()