#!/usr/bin/python3
# -*- coding: utf-8 -*-
import sys, os, stat, re, time, pickle, random
import urllib.request, urllib.parse, urllib.error, html.parser
from local_config import conf, set_conf
from common import *
from strsim import str_sim
# rate limiting to 5 messages per 10 minutes
hist_ts = []
hist_flag = True
parser = None
def fetch_page(url):
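    """Fetch a URL with a browser-like User-Agent and return a tuple
    (status, body, headers): 0 on success with at most BUFSIZ bytes of the
    body, or 1 with the exception text if the request failed."""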
    logger('info', 'fetching page ' + url)
    try:
        request = urllib.request.Request(url)
        request.add_header('User-Agent', '''Mozilla/5.0 (X11; Linux x86_64; rv:31.0) Gecko/20100101 Firefox/31.0 Iceweasel/31.0''')
        response = urllib.request.urlopen(request)
        html_text = response.read(BUFSIZ) # ignore more than BUFSIZ
        response.close()
        return (0, html_text, response.headers)
    except Exception as e:
        logger('warn', 'failed: ' + str(e))
        return (1, str(e), 'dummy')

    return (-1, None, None)

def extract_title(url):
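    """Fetch `url` and return (status, text): 0 with the unescaped <title>
    contents, 1 with the content-type for non-text responses, 2 if no title
    was found, 3 for special-cased or failed URLs, -1 on other errors."""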
    global parser

    if 'repo/urlbot.git' in url:
        logger('info', 'repo URL found: ' + url)
        return (3, 'wee, that looks like my home repo!')

    logger('info', 'extracting title from ' + url)

    (code, html_text, headers) = fetch_page(url)

    if 1 == code:
        return (3, 'failed: %s for %s' % (html_text, url))

    if html_text:
        charset = ''

        if 'content-type' in headers:
            logger('debug', 'content-type: ' + headers['content-type'])

            if 'text/' != headers['content-type'][:len('text/')]:
                return (1, headers['content-type'])

            charset = re.sub(r'.*charset=(?P<charset>\S+).*',
                r'\g<charset>', headers['content-type'], flags=re.IGNORECASE)

        if '' != charset:
            try:
                html_text = html_text.decode(charset)
            except LookupError:
                logger('warn', 'invalid charset in ' + headers['content-type'])

        if str != type(html_text):
            html_text = str(html_text)

        result = re.match(r'.*?<title.*?>(.*?)</title>.*?', html_text, re.S | re.M | re.IGNORECASE)

        if result:
            match = result.groups()[0]

            if None == parser:
                parser = html.parser.HTMLParser()

            try:
                expanded_html = parser.unescape(match)
            except UnicodeDecodeError as e: # idk why this can happen, but it does
                logger('warn', 'parser.unescape() exploded here: ' + str(e))
                expanded_html = match

            return (0, expanded_html)
        else:
            return (2, 'no title')

    return (-1, 'error')

def chat_write(message, prefix='/say '):
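    """Send `message` to mcabber through its command FIFO (or just print it
    when debugging is enabled), counting the request in the config."""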
    set_conf('request_counter', conf('request_counter') + 1)

    if debug_enabled():
        print(message)
    else:
        try:
            fd = open(fifo_path, 'wb')
            # FIXME 2to3
            # FIXME: somehow, unicode chars can end up inside a <str> message,
            # which seems to make both unicode() and ''.encode('utf8') fail.
            try:
                msg = str(prefix) + str(message) + '\n'
                msg = msg.encode('utf8')
            except UnicodeDecodeError as e:
                logger('warn', 'encoding msg failed: ' + str(e))
                # fall back to a lossy encoding so the binary-mode FIFO still
                # receives a bytes object
                msg = (str(prefix) + str(message) + '\n').encode('utf8', errors='replace')
            fd.write(msg)
            fd.close()
        except IOError as e:
            logger('err', "couldn't print to fifo %s: %s" % (fifo_path, str(e)))

def ratelimit_touch(ignored=None): # FIXME: separate counters
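    """Record the current time in the rate-limiting history, keeping at most
    conf('hist_max_count') entries."""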
    hist_ts.append(time.time())

    if conf('hist_max_count') < len(hist_ts):
        hist_ts.pop(0)

def ratelimit_exceeded(ignored=None): # FIXME: separate counters
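    """Return True if more than conf('hist_max_count') messages were sent
    within conf('hist_max_time') seconds, warning the channel once per burst."""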
    global hist_flag

    if conf('hist_max_count') < len(hist_ts):
        first = hist_ts.pop(0)

        if (time.time() - first) < conf('hist_max_time'):
            if hist_flag:
                hist_flag = False
                chat_write('(rate limited to %d messages in %d seconds, try again at %s)' % (
                    conf('hist_max_count'), conf('hist_max_time'),
                    time.strftime('%T %Z', time.localtime(hist_ts[0] + conf('hist_max_time')))
                ))
            logger('warn', 'rate limiting exceeded: ' + repr(hist_ts))
            return True

    hist_flag = True
    return False

def extract_url(data):
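    """Find all HTTP(S) URLs in `data`, fetch their titles and announce them
    via chat_write(). Returns True if at least one message was sent, False
    when the rate limit was exceeded, None otherwise."""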
    ret = None
    result = re.findall(r"(https?://[^\s>]+)", data)

    if result:
        for url in result:
            ratelimit_touch()
            if ratelimit_exceeded():
                return False

            # urllib.request is broken:
            # >>> '.'.encode('idna')
            # ....
            # UnicodeError: label empty or too long
            # >>> '.a.'.encode('idna')
            # ....
            # UnicodeError: label empty or too long
            # >>> 'a.a.'.encode('idna')
            # b'a.a.'
            try:
                (status, title) = extract_title(url)
            except UnicodeError as e:
                (status, title) = (4, str(e))

            if 0 == status:
                title = title.strip()

                lev_url = re.sub(r'https?://[^/]*/', '', url)
                lev_res = levenshtein(lev_url, title)

                sim = str_sim(title, lev_url)
                sim_len_title = len(sim)
                sim_len_url = len(sim[0])
                sim_sum = sum([sum(a) for a in sim])

                obj = conf_load()
                obj['lev'].append((lev_res, title, url))
                obj['sim'].append((sim_sum, sim_len_title, sim_len_url, title, url))
                conf_save(obj)

                message = 'Title: %s: %s' % (title, url)
            elif 1 == status:
                if conf('image_preview'):
                    # of course it's fake, but it looks interesting at least
                    char = r""",._-+=\|/*`~"'"""
                    message = 'No text but %s, 1-bit ASCII art preview: [%c] %s' % (
                        title, random.choice(char), url
                    )
                else:
                    logger('info', 'no message sent for non-text %s (%s)' % (url, title))
                    continue
            elif 2 == status:
                message = 'No title: %s' % url
            elif 3 == status:
                message = title
            elif 4 == status:
                message = 'Bug triggered (%s), invalid URL/domain part: %s' % (title, url)
                logger('warn', message)
            else:
                message = 'some error occurred when fetching %s' % url

            message = message.replace('\n', '\\n')

            logger('info', 'printing ' + message)
            chat_write(message)
            ret = True

    return ret

def parse_pn(data):
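    """Handle a private message; currently only logged, never answered."""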
    ## reply_user = data.split(' ')[0].strip('<>')
    # since we can't determine if a user named 'foo> ' just wrote ' > bar'
    # or a user 'foo' just wrote '> > bar', we can't safely answer here
    logger('warn', 'received PN: ' + data)
    return False

def parse_delete(filepath):
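    """Read and delete one mcabber event file, then dispatch its content:
    ignore own messages, subject changes and 'nospoiler' lines, pass private
    messages to parse_pn() and everything else to the URL/plugin handlers."""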
    try:
        fd = open(filepath, 'r')
    except IOError as e:
        logger('err', 'file has vanished: %s: %s' % (filepath, e))
        return False

    content = fd.read(BUFSIZ) # ignore more than BUFSIZ
    fd.close()
    os.remove(filepath) # probably better crash here

    if content[1:1+len(conf('bot_user'))] == conf('bot_user'):
        return

    if 'has set the subject to:' in content:
        return

    if content.startswith('PRIV#'):
        parse_pn(content)
        return

    if 'nospoiler' in content:
        # logger('info', "no spoiler for: " + content)
        return

    if sys.argv[0] in content:
        logger('info', 'silenced, this is my own log')
        return

    if True != extract_url(content):
        plugins.data_parse_commands(content)
        plugins.data_parse_other(content)

    return

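# hand the chat and rate-limiting helpers to the plugin module, then register all plugins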
import plugins
plugins.chat_write = chat_write
plugins.ratelimit_exceeded = ratelimit_exceeded
plugins.ratelimit_touch = ratelimit_touch
plugins.register_all()
if '__main__' == __name__:
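    # print the version, sanity-check the mcabber command FIFO, then poll the
    # event spool directory for mcabber-* files until interrupted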
    print(sys.argv[0] + ' ' + VERSION)

    if not os.path.exists(fifo_path):
        logger('error', 'fifo_path "%s" does not exist, exiting' % fifo_path)
        exit(1)

    if not stat.S_ISFIFO(os.stat(fifo_path).st_mode):
        logger('error', 'fifo_path "%s" is not a FIFO, exiting' % fifo_path)
        exit(1)

    while 1:
        try:
            for f in os.listdir(event_files_dir):
                if 'mcabber-' == f[:8]:
                    parse_delete(os.path.join(event_files_dir, f))

            plugins.event_trigger()
            time.sleep(delay)
        except KeyboardInterrupt:
            print('')
            exit(130)