Skip to content

Commit

Permalink
Merge pull request sopel-irc#1508 from Exirel/url-callbacks-interface
Browse files Browse the repository at this point in the history
core: URL Callbacks new interface
  • Loading branch information
dgw authored Mar 21, 2019
2 parents d2e7a16 + 2c81f72 commit 4aba0f2
Show file tree
Hide file tree
Showing 7 changed files with 142 additions and 40 deletions.
98 changes: 95 additions & 3 deletions sopel/bot.py
Original file line number Diff line number Diff line change
Expand Up @@ -247,10 +247,8 @@ def register(self, callables, jobs, shutdowns, urls):
job = sopel.tools.jobs.Job(interval, func)
self.scheduler.add_job(job)

if not self.memory.contains('url_callbacks'):
self.memory['url_callbacks'] = tools.SopelMemory()
for func in urls:
self.memory['url_callbacks'][func.url_regex] = func
self.register_url_callback(func.url_regex, func)

def part(self, channel, msg=None):
"""Part a channel."""
Expand Down Expand Up @@ -648,3 +646,97 @@ def cap_req(self, module_name, capability, arg=None, failure_callback=None,
entry.append(_CapReq(prefix, module_name, failure_callback, arg,
success_callback))
self._cap_reqs[cap] = entry

def register_url_callback(self, pattern, callback):
    """Register a ``callback`` to run on URLs matching the regex ``pattern``.

    :param pattern: regex pattern to register (compiled, or a string
                    that will be compiled with :func:`re.compile`)
    :param callback: callable object to handle matching URLs

    .. versionadded:: 7.0

        This method replaces manual management of ``url_callbacks`` in
        Sopel's plugins, so instead of doing this in ``setup()``::

            if not bot.memory.contains('url_callbacks'):
                bot.memory['url_callbacks'] = tools.SopelMemory()

            regex = re.compile(r'http://example.com/path/.*')
            bot.memory['url_callbacks'][regex] = callback

        use this much more concise pattern::

            regex = re.compile(r'http://example.com/path/.*')
            bot.register_url_callback(regex, callback)

    """
    # Accept plain strings for convenience; only compiled patterns are
    # stored, since search_url_callbacks() calls pattern.search().
    if isinstance(pattern, basestring):
        pattern = re.compile(pattern)

    # Create the shared registry lazily, on first registration.
    if not self.memory.contains('url_callbacks'):
        self.memory['url_callbacks'] = tools.SopelMemory()

    self.memory['url_callbacks'][pattern] = callback

def unregister_url_callback(self, pattern):
    """Unregister the callback for URLs matching the regex ``pattern``.

    :param pattern: regex pattern whose callback should be removed
                    (compiled, or a string that will be compiled)

    Unknown patterns are ignored silently, so plugins can call this
    from ``shutdown()`` without checking whether ``setup()`` ever ran.

    .. versionadded:: 7.0

        This method replaces manual management of ``url_callbacks`` in
        Sopel's plugins, so instead of doing this in ``shutdown()``::

            regex = re.compile(r'http://example.com/path/.*')
            try:
                del bot.memory['url_callbacks'][regex]
            except KeyError:
                pass

        use this much more concise pattern::

            regex = re.compile(r'http://example.com/path/.*')
            bot.unregister_url_callback(regex)

    """
    if not self.memory.contains('url_callbacks'):
        return  # registry was never created; nothing to unregister

    if isinstance(pattern, basestring):
        pattern = re.compile(pattern)

    callbacks = self.memory['url_callbacks']
    try:
        del callbacks[pattern]
    except KeyError:
        pass  # pattern was never registered (or already removed)

def search_url_callbacks(self, url):
    """Yield callbacks found for ``url`` matching their regex pattern.

    :param str url: URL found in a trigger
    :return: yield 2-value tuples of ``(callback, match)``

    For each pattern that matches the ``url`` parameter, it yields a
    2-value tuple of ``(callable, match)`` for that pattern. The
    ``callable`` is the one registered with
    :meth:`register_url_callback`, and the ``match`` is the result of
    the regex pattern's ``search`` method.

    .. versionadded:: 7.0

    .. seealso::

        The Python documentation for the `re.search`__ function and
        the `match object`__.

        .. __: https://docs.python.org/3.6/library/re.html#re.search
        .. __: https://docs.python.org/3.6/library/re.html#match-objects

    """
    # Robustness fix: if no URL callback was ever registered, the
    # 'url_callbacks' key does not exist in memory; yield nothing
    # instead of raising KeyError.  register_url_callback() and
    # unregister_url_callback() already guard this key the same way.
    if not self.memory.contains('url_callbacks'):
        return

    for regex, function in tools.iteritems(self.memory['url_callbacks']):
        match = regex.search(url)
        if match:
            yield function, match
12 changes: 2 additions & 10 deletions sopel/modules/bugzilla.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@

import xmltodict

from sopel import tools
from sopel.config.types import StaticSection, ListAttribute
from sopel.logger import get_logger
from sopel.module import rule
Expand Down Expand Up @@ -46,24 +45,17 @@ def setup(bot):

if not bot.config.bugzilla.domains:
return
if not bot.memory.contains('url_callbacks'):
bot.memory['url_callbacks'] = tools.SopelMemory()

domains = '|'.join(bot.config.bugzilla.domains)
regex = re.compile((r'https?://(%s)'
r'(/show_bug.cgi\?\S*?)'
r'(id=\d+)')
% domains)
bot.memory['url_callbacks'][regex] = show_bug
bot.register_url_callback(regex, show_bug)


def shutdown(bot):
    """Unregister the Bugzilla URL callback on plugin unload."""
    # unregister_url_callback() ignores unknown patterns, so no
    # try/except is needed if setup() returned early (e.g. when
    # bot.config.bugzilla.domains was empty on startup).
    # NOTE(review): this relies on ``regex`` being a module-level name
    # assigned in setup(); confirm it is not merely a local there.
    bot.unregister_url_callback(regex)


@rule(r'.*https?://(\S+?)'
Expand Down
8 changes: 3 additions & 5 deletions sopel/modules/instagram.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@

from requests import get

from sopel import module, tools
from sopel import module

try:
from ujson import loads
Expand All @@ -26,13 +26,11 @@


def setup(bot):
    """Register the Instagram URL callback."""
    # register_url_callback() creates bot.memory['url_callbacks']
    # itself, so no manual memory setup is needed here.
    bot.register_url_callback(instagram_pattern, instaparse)


def shutdown(bot):
    """Unregister the Instagram URL callback on plugin unload."""
    bot.unregister_url_callback(instagram_pattern)

# TODO: Parse Instagram profile page

Expand Down
12 changes: 5 additions & 7 deletions sopel/modules/reddit.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from sopel.module import commands, rule, example, require_chanmsg, NOLIMIT, OP
from sopel.formatting import bold, color, colors
from sopel.web import USER_AGENT
from sopel.tools import SopelMemory, time
from sopel.tools import time
import datetime as dt
import praw
import re
Expand Down Expand Up @@ -34,15 +34,13 @@


def setup(bot):
    """Register the reddit URL callbacks (submissions and user pages)."""
    # register_url_callback() creates bot.memory['url_callbacks']
    # itself, so no manual memory setup is needed here.
    bot.register_url_callback(post_regex, rpost_info)
    bot.register_url_callback(user_regex, redditor_info)


def shutdown(bot):
    """Unregister the reddit URL callbacks on plugin unload."""
    bot.unregister_url_callback(post_regex)
    bot.unregister_url_callback(user_regex)


@rule('.*%s.*' % post_url)
Expand Down
16 changes: 6 additions & 10 deletions sopel/modules/url.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,9 +88,7 @@ def setup(bot):
exclude.extend(regexes)
bot.memory['url_exclude'] = exclude

# Ensure that url_callbacks and last_seen_url are in memory
if not bot.memory.contains('url_callbacks'):
bot.memory['url_callbacks'] = tools.SopelMemory()
# Ensure last_seen_url is in memory
if not bot.memory.contains('last_seen_url'):
bot.memory['last_seen_url'] = tools.SopelMemory()

Expand Down Expand Up @@ -237,13 +235,11 @@ def check_callbacks(bot, trigger, url, run=True):
# Check if it matches the exclusion list first
matched = any(regex.search(url) for regex in bot.memory['url_exclude'])
# Then, check if there's anything in the callback list
for regex, function in tools.iteritems(bot.memory['url_callbacks']):
match = regex.search(url)
if match:
# Always run ones from @url; they don't run on their own.
if run or hasattr(function, 'url_regex'):
function(bot, trigger, match)
matched = True
for function, match in bot.search_url_callbacks(url):
# Always run ones from @url; they don't run on their own.
if run or hasattr(function, 'url_regex'):
function(bot, trigger, match)
matched = True
return matched


Expand Down
10 changes: 5 additions & 5 deletions sopel/modules/wikipedia.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
# Licensed under the Eiffel Forum License 2.
from __future__ import unicode_literals, absolute_import, print_function, division

from sopel import tools
from sopel.config.types import StaticSection, ValidatedAttribute
from sopel.module import NOLIMIT, commands, example, rule
from requests import get
Expand All @@ -19,6 +18,7 @@
from urllib.parse import quote, unquote

REDIRECT = re.compile(r'^REDIRECT (.*)')
# Escape the literal dots: the original pattern used bare '.', which
# matches ANY character, so strings like 'enXwikipediaXorg/wiki/x'
# would incorrectly match.  Real Wikipedia URLs still match.
WIKIPEDIA_REGEX = re.compile(r'([a-z]+)\.(wikipedia\.org/wiki/)([^ ]+)')


class WikipediaSection(StaticSection):
Expand All @@ -30,11 +30,11 @@ class WikipediaSection(StaticSection):

def setup(bot):
    """Define the wikipedia config section and register the URL callback."""
    bot.config.define_section('wikipedia', WikipediaSection)
    # register_url_callback() creates bot.memory['url_callbacks']
    # itself, so no manual memory setup is needed here.
    bot.register_url_callback(WIKIPEDIA_REGEX, mw_info)

def shutdown(bot):
    # Remove the callback registered in setup() so plugin reloads do
    # not leave a stale handler behind.
    bot.unregister_url_callback(WIKIPEDIA_REGEX)


def configure(config):
Expand Down
26 changes: 26 additions & 0 deletions sopel/test_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,10 @@
import sopel.trigger


# Python 3 removed ``basestring``; alias it to ``str`` so the
# isinstance() checks on URL patterns below work on both Python 2
# (str/unicode) and Python 3.
if sys.version_info.major >= 3:
    basestring = str

class MockConfig(sopel.config.Config):
def __init__(self):
self.filename = tempfile.mkstemp()[1]
Expand Down Expand Up @@ -52,6 +56,7 @@ def __init__(self, nick, admin=False, owner=False):
self.channels[channel] = sopel.tools.target.Channel(channel)

self.memory = sopel.tools.SopelMemory()
self.memory['url_callbacks'] = sopel.tools.SopelMemory()

self.ops = {}
self.halfplus = {}
Expand All @@ -74,6 +79,27 @@ def _init_config(self):
os.mkdir(home_dir)
cfg.parser.set('core', 'homedir', home_dir)

def register_url_callback(self, pattern, callback):
    # Test double mirroring bot.Sopel.register_url_callback: compile
    # string patterns, then store the callback in the shared registry.
    compiled = re.compile(pattern) if isinstance(pattern, basestring) else pattern
    self.memory['url_callbacks'][compiled] = callback

def unregister_url_callback(self, pattern):
    # Test double mirroring bot.Sopel.unregister_url_callback: unknown
    # patterns are ignored silently, matching the real implementation.
    compiled = re.compile(pattern) if isinstance(pattern, basestring) else pattern
    if compiled in self.memory['url_callbacks']:
        del self.memory['url_callbacks'][compiled]

def search_url_callbacks(self, url):
    # Test double mirroring bot.Sopel.search_url_callbacks: yield a
    # (callback, match) pair for every registered pattern that matches.
    for pattern, handler in sopel.tools.iteritems(self.memory['url_callbacks']):
        found = pattern.search(url)
        if found:
            yield handler, found


class MockSopelWrapper(object):
def __init__(self, bot, pretrigger):
Expand Down

0 comments on commit 4aba0f2

Please sign in to comment.