diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml
index 4b7764e..8988f25 100644
--- a/.github/workflows/docker-publish.yml
+++ b/.github/workflows/docker-publish.yml
@@ -47,7 +47,7 @@ jobs:
       # https://github.com/docker/metadata-action
       - name: Extract Docker metadata
         id: meta
-        uses: docker/metadata-action@57396166ad8aefe6098280995947635806a0e6ea
+        uses: docker/metadata-action@507c2f2dc502c992ad446e3d7a5dfbe311567a96
         with:
           images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
           flavor: latest=true
@@ -55,7 +55,7 @@
       # Build and push Docker image with Buildx (don't push on PR)
       # https://github.com/docker/build-push-action
       - name: Build and push Docker image
-        uses: docker/build-push-action@c56af957549030174b10d6867f20e78cfd7debc5
+        uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671
         with:
           context: .
           push: ${{ github.event_name != 'pull_request' }}
diff --git a/README.md b/README.md
index 3626eea..f44e612 100644
--- a/README.md
+++ b/README.md
@@ -717,6 +717,14 @@ Reads Twitter links from room, replaces domain with nitter, removes query parame
 * !nitter enable - enable converting twitter links to nitter links in this room (must be done as room admin)
 * !nitter disable - disable converting twitter links to nitter links in this room (must be done as room admin)
 
+### Wikipedia
+
+Searches Wikipedia for a given query and returns the first result summary and link.
+
+#### Usage
+
+* !wikipedia [query] - Search Wikipedia for query
+
 ## Bot setup
 
 * Create a Matrix user
diff --git a/modules/wikipedia.py b/modules/wikipedia.py
new file mode 100644
index 0000000..e59be5a
--- /dev/null
+++ b/modules/wikipedia.py
@@ -0,0 +1,64 @@
+import re
+
+import requests
+
+from modules.common.module import BotModule
+
+# This module searches Wikipedia for a query and returns the page summary and a link.
+class MatrixModule(BotModule):
+    def __init__(self, name):
+        super().__init__(name)
+        self.api_url = 'https://en.wikipedia.org/w/api.php'
+
+    async def matrix_message(self, bot, room, event):
+        args = event.body.split()
+
+        if len(args) > 1:
+            query = event.body[len(args[0])+1:]
+            try:
+                response = requests.get(self.api_url, params={
+                    'action': 'query',
+                    'prop': 'extracts',
+                    'exintro': True,
+                    'explaintext': True,
+                    'titles': query,
+                    'format': 'json',
+                    'formatversion': 2
+                })
+
+                response.raise_for_status()
+                data = response.json()
+                if 'query' not in data or 'pages' not in data['query'] or len(data['query']['pages']) == 0:
+                    await bot.send_text(room, 'No results found')
+                    return
+
+                page = data['query']['pages'][0]
+
+                if 'extract' not in page:
+                    await bot.send_text(room, 'No results found')
+                    return
+
+                # Remove all html tags
+                extract = re.sub('<[^<]+?>', '', page['extract'])
+                # Remove any multiple spaces
+                extract = re.sub(' +', ' ', extract)
+                # Remove any new lines
+                extract = re.sub('\n', '', extract)
+                # Remove any tabs
+                extract = re.sub('\t', '', extract)
+
+                # Truncate to 500 chars
+                extract = extract[:500]
+
+                # Add a link to the page
+                extract = extract + '\nhttps://en.wikipedia.org/?curid=' + str(page['pageid'])
+
+                await bot.send_text(room, extract)
+                return
+            except Exception as exc:
+                await bot.send_text(room, str(exc))
+        else:
+            await bot.send_text(room, 'Usage: !wikipedia <query>')
+
+    def help(self):
+        return ('Wikipedia bot')
\ No newline at end of file
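
Review note, not part of the patch: with formatversion=2 the MediaWiki API returns query.pages as a JSON list rather than a title-keyed object, which is why the module reads data['query']['pages'][0]. The following is a minimal standalone sketch of the same extracts request so the call can be tried outside the bot; the example title "Python (programming language)" and the 10-second timeout are illustrative choices, not taken from the patch.

# Standalone sketch of the MediaWiki "extracts" query used by modules/wikipedia.py.
# The title below is an arbitrary example; the module passes the user's query instead.
import requests

params = {
    'action': 'query',
    'prop': 'extracts',
    'exintro': True,
    'explaintext': True,
    'titles': 'Python (programming language)',  # example title, not from the patch
    'format': 'json',
    'formatversion': 2,
}
response = requests.get('https://en.wikipedia.org/w/api.php', params=params, timeout=10)
response.raise_for_status()
page = response.json()['query']['pages'][0]
print(page.get('extract', 'No results found')[:500])
print('https://en.wikipedia.org/?curid=' + str(page['pageid']))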