forked from Decentrala/chatbot
Merge pull request 'scraper_functions' (#1) from scraper_functions into master
Reviewed-on: Decentrala/chatbot#1
commit 5b4ae05582
@@ -1,10 +1,11 @@
import ollama
import scraper_functions as sf

def processmsg(msg, rcpt):
    if msg.startswith("!"):
        return command(msg, rcpt)
    elif "youtube.com/watch" in msg:
        if "youtube.com/watch" in msg:
            return msg.replace("youtube.com", "iv.datura.network")
    elif msg.startswith("!wiki"):
        return sf.query_external_website("https://en.wikipedia.org/wiki/", msg.split(" ")[1])
    elif "good bot" in msg:
        return "^_^"
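For orientation, a short sketch of how messages flow through the updated processmsg. The inputs are hypothetical, and command() is assumed to be defined elsewhere in the bot:

# Hypothetical messages run through the routing above.
processmsg("!help", "user@example.org")
# -> command("!help", "user@example.org"); note that "!wiki ..." also hits this
#    first branch, because it starts with "!".
processmsg("https://www.youtube.com/watch?v=abc123", "user@example.org")
# -> "https://www.iv.datura.network/watch?v=abc123"
processmsg("good bot", "user@example.org")
# -> "^_^"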
@@ -1,2 +1,4 @@
slixmpp
ollama
requests
beautifulsoup4
scraper_functions.py (new file)
@@ -0,0 +1,9 @@
import requests
from bs4 import BeautifulSoup

def query_external_website(base_url, query):
    page = requests.get(base_url + query)
    soup = BeautifulSoup(page.content, "html.parser")
    title = soup.find("span", class_="mw-page-title-main").text
    content = soup.find(id="mw-content-text").select("p")[2].text
    return "\nTITLE:\n" + title + "\n\nCONTENT:\n" + content + "\n\nFULL LINK:\n" + base_url + query
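As a usage sketch (not part of the commit): the selectors target Wikipedia's page markup (span.mw-page-title-main and #mw-content-text), so query_external_website is effectively Wikipedia-specific. The article slug below is hypothetical:

import scraper_functions as sf

# Hypothetical article slug; a page without these elements, fewer than three
# paragraphs, or a failed request will raise an exception instead of returning text.
reply = sf.query_external_website("https://en.wikipedia.org/wiki/", "Python_(programming_language)")
print(reply)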