2014-09-27 09:19:46 +02:00
|
|
|
# -*- coding: utf-8 -*-
|
2015-11-20 21:07:48 +01:00
|
|
|
import html.parser
|
|
|
|
|
import logging
|
|
|
|
|
import os
|
|
|
|
|
import pickle
|
|
|
|
|
import re
|
|
|
|
|
import sys
|
2015-11-28 01:08:26 +01:00
|
|
|
import time
|
2015-11-20 21:07:48 +01:00
|
|
|
import urllib.request
|
2015-11-28 01:08:26 +01:00
|
|
|
from collections import namedtuple
|
|
|
|
|
from threading import Lock
|
2014-09-27 16:06:26 +02:00
|
|
|
from local_config import conf
|
2014-09-27 09:19:46 +02:00
|
|
|
|
2015-11-28 01:08:26 +01:00
|
|
|
# Rate-limit classes: bit flags identifying what kind of output a message
# is.  They can be OR-ed together; rate_limit() then checks every
# contained class.  RATE_NO_LIMIT (0) bypasses limiting entirely.
RATE_NO_LIMIT = 0x00
RATE_GLOBAL = 0x01
RATE_NO_SILENCE = 0x02
RATE_INTERACTIVE = 0x04
RATE_CHAT = 0x08
RATE_URL = 0x10
RATE_EVENT = 0x20
RATE_FUN = 0x40

# Maximum number of bytes read from a fetched web page (see fetch_page()).
BUFSIZ = 8192
EVENTLOOP_DELAY = 0.100  # seconds

# Browser-like User-Agent header sent with every HTTP request so that
# servers don't special-case us as a bot.
USER_AGENT = '''Mozilla/5.0 (X11; Linux x86_64; rv:31.0) Gecko/20100101 Firefox/31.0 Iceweasel/31.0'''
|
2014-09-27 09:19:46 +02:00
|
|
|
|
|
|
|
|
# Working directory for the bot's data; an optional single command-line
# argument overrides the default of the current directory.
if len(sys.argv) == 2:
    basedir = sys.argv[1]
else:
    basedir = '.'
|
2014-09-27 09:19:46 +02:00
|
|
|
|
|
|
|
|
|
2014-09-27 16:06:26 +02:00
|
|
|
def conf_save(obj):
    """Pickle *obj* into the configured persistent-storage file.

    :param obj: any picklable object (in practice the config dict)
    :return: the result of pickle.dump(), i.e. None
    """
    with open(conf('persistent_storage'), 'wb') as storage:
        return pickle.dump(obj, storage)
|
2014-09-27 16:06:26 +02:00
|
|
|
|
2015-11-20 21:07:48 +01:00
|
|
|
|
2014-09-27 16:06:26 +02:00
|
|
|
def conf_load():
    """Load the pickled persistent-storage blob.

    :return: the unpickled object, or an empty dict when the storage
             file does not exist yet.
    """
    path = conf('persistent_storage')

    # First run: nothing persisted yet.
    if not os.path.isfile(path):
        return {}

    # NOTE: pickle is only acceptable here because the file is written
    # locally by conf_save(); never point this at untrusted data.
    # (The previous fd.seek(0) right after open() was a no-op and is gone.)
    with open(path, 'rb') as fd:
        return pickle.load(fd)
|
2014-09-27 16:06:26 +02:00
|
|
|
|
2015-11-20 21:07:48 +01:00
|
|
|
|
2015-11-28 13:11:22 +01:00
|
|
|
def conf_set(key, value):
    """Persist *value* under *key* in the storage blob.

    Loads the whole blob, updates one entry and writes it back.
    """
    data = conf_load()
    data[key] = value
    conf_save(data)
|
2015-11-28 13:11:22 +01:00
|
|
|
|
|
|
|
|
|
2015-11-28 18:41:40 +01:00
|
|
|
def conf_get(key, default=None):
    """Look up *key* in the persistent storage blob.

    :param key: key to look up
    :param default: returned when the key is absent
    :return: the stored value or *default*
    """
    return conf_load().get(key, default)
|
2015-11-28 13:11:22 +01:00
|
|
|
|
2015-11-28 01:08:26 +01:00
|
|
|
# Per-class rate-limiter state: `history` holds timestamps of recent
# emissions, `period` is the sliding window in seconds, `max_hist_len`
# is how many emissions are allowed inside that window.
# NOTE(review): the namedtuple typename "BucketConfig" differs from the
# binding name `Bucket` — presumably historical; confirm before changing.
Bucket = namedtuple("BucketConfig", ["history", "period", "max_hist_len"])

# One mutable bucket per RATE_* class; the `history` lists are shared
# module state mutated in place by rate_limit().
buckets = {
    # everything else
    RATE_GLOBAL: Bucket(history=[], period=60, max_hist_len=10),

    # bot writes with no visible stimuli
    RATE_NO_SILENCE: Bucket(history=[], period=10, max_hist_len=5),

    # interactive stuff like ping
    RATE_INTERACTIVE: Bucket(history=[], period=30, max_hist_len=5),

    # chitty-chat, master volume control
    RATE_CHAT: Bucket(history=[], period=10, max_hist_len=5),

    # reacting on URLs
    RATE_URL: Bucket(history=[], period=10, max_hist_len=5),

    # triggering events
    RATE_EVENT: Bucket(history=[], period=60, max_hist_len=10),

    # bot blames people, produces cake and entertains
    RATE_FUN: Bucket(history=[], period=180, max_hist_len=5),
}

# Live view over the bucket keys: every single (non-composite) rate class.
rate_limit_classes = buckets.keys()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def rate_limit(rate_class=RATE_GLOBAL):
    """
    Sliding-window rate limiter over the module-level ``buckets``.

    Remember up to ``max_hist_len`` timestamps per bucket;
    if the oldest remembered timestamp is newer than now()-period,
    do not output and do not record, otherwise record and allow.

    A composite ``rate_class`` (bitwise OR of several RATE_* flags) is
    allowed only if every individual class it contains allows it.

    :param rate_class: the type of message to verify
    :return: False if blocked, True if allowed
    """
    if rate_class not in rate_limit_classes:
        # composite class: recurse into every contained single class
        return all(rate_limit(c) for c in rate_limit_classes if c & rate_class)

    now = time.time()
    bucket = buckets[rate_class]
    # lazy %-args: the message is only formatted when DEBUG is enabled
    logging.getLogger(__name__).debug(
        "[ratelimit][bucket=%x][time=%s]%s", rate_class, now, bucket.history)

    if len(bucket.history) >= bucket.max_hist_len and bucket.history[0] > (now - bucket.period):
        # window is full of recent events -> blocked
        return False

    # Trim so the history never exceeds max_hist_len entries.  (The old
    # code popped only when len > max_hist_len, letting the list grow to
    # max_hist_len + 1 and making the blocking check look one event too
    # far into the past.)
    while len(bucket.history) >= bucket.max_hist_len:
        bucket.history.pop(0)
    bucket.history.append(now)
    return True
|
2015-11-28 01:08:26 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def rate_limited(max_per_second):
    """
    Very simple flow-control decorator factory.

    Calls to the wrapped function are delayed (via time.sleep) so that at
    most ``max_per_second`` invocations happen per second.

    :param max_per_second: how many events per second may be executed - more are delayed
    :return: a decorator enforcing that rate
    """
    import functools

    min_interval = 1.0 / float(max_per_second)

    def decorate(func):
        # single-element list so the closure can mutate it in place
        lasttimecalled = [0.0]

        @functools.wraps(func)
        def ratelimitedfunction(*args, **kargs):
            # time.clock() was removed in Python 3.8; time.monotonic() is
            # the supported replacement and is immune to wall-clock jumps.
            elapsed = time.monotonic() - lasttimecalled[0]
            lefttowait = min_interval - elapsed
            if lefttowait > 0:
                time.sleep(lefttowait)
            ret = func(*args, **kargs)
            lasttimecalled[0] = time.monotonic()
            return ret

        return ratelimitedfunction

    return decorate
|
2015-11-28 01:08:26 +01:00
|
|
|
|
|
|
|
|
|
2014-09-27 09:41:29 +02:00
|
|
|
def get_version_git():
    """
    Derive a human-readable version string from the Git history.

    Counts the commits reachable from HEAD and shows the newest one.

    :return: "version (Git, <n>th rev) '<oneline>'" on success,
             "(unknown version)" when git exits non-zero,
             "cannot determine version" when git cannot be run at all.
    """
    import subprocess

    cmd = ['git', 'log', '--oneline', '--abbrev-commit']

    try:
        # capture stdout as bytes; check=False so a non-zero exit is
        # handled below instead of raising
        result = subprocess.run(cmd, stdout=subprocess.PIPE, check=False)
        lines = result.stdout.splitlines()

        if 0 == result.returncode and lines:
            # skip this 1st, 2nd, 3rd stuff and use always [0-9]th
            return "version (Git, %dth rev) '%s'" % (
                len(lines), str(lines[0].strip(), encoding='utf8')
            )
        else:
            return "(unknown version)"
    except Exception:
        # e.g. FileNotFoundError when git is not installed; the bare
        # except of the original hid even KeyboardInterrupt
        return "cannot determine version"
|
2014-09-27 09:41:29 +02:00
|
|
|
|
2015-11-20 21:07:48 +01:00
|
|
|
|
2014-09-27 09:41:29 +02:00
|
|
|
# Resolved once at import time; spawns `git log` (see get_version_git()).
VERSION = get_version_git()
|
2015-11-20 21:07:48 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def fetch_page(url):
    """
    Download the beginning (at most BUFSIZ bytes) of *url*.

    :param url: address to fetch
    :return: (0, page_bytes, headers) on success,
             (1, error_message, 'dummy') on any failure
    """
    log = logging.getLogger(__name__)
    log.info('fetching page ' + url)
    try:
        request = urllib.request.Request(url)
        request.add_header('User-Agent', USER_AGENT)
        response = urllib.request.urlopen(request)
        try:
            html_text = response.read(BUFSIZ)  # ignore more than BUFSIZ
        finally:
            # close even when read() raises -- the original leaked the
            # connection in that case
            response.close()
        return 0, html_text, response.headers
    except Exception as e:
        # logging's warn() is deprecated; warning() is the supported name
        log.warning('failed: %s' % e)
        return 1, str(e), 'dummy'
|
2015-11-20 21:07:48 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def extract_title(url):
    """
    Fetch *url* and extract the contents of its <title> tag.

    :param url: the page to inspect
    :return: (status, text) where status is
             0  - title extracted, text is the unescaped title
             1  - non-text content type, text is that content type
             2  - page has no <title>
             3  - fetch failed or special-cased URL, text is a message
             -1 - fetched document was empty
    """
    log = logging.getLogger(__name__)

    if 'repo/urlbot-native.git' in url:
        log.info('repo URL found: ' + url)
        return 3, 'wee, that looks like my home repo!'

    log.info('extracting title from ' + url)

    (code, html_text, headers) = fetch_page(url)

    if 1 == code:
        return 3, 'failed: %s for %s' % (html_text, url)

    if not html_text:
        return -1, 'error'

    charset = ''
    if 'content-type' in headers:
        log.debug('content-type: ' + headers['content-type'])

        if 'text/' != headers['content-type'][:len('text/')]:
            return 1, headers['content-type']

        # BUG FIX: re.IGNORECASE used to be passed as the positional
        # ``count`` argument of re.sub(), so the flag was never applied;
        # it must go in ``flags=``.
        charset = re.sub(
            r'.*charset=(?P<charset>\S+).*',
            r'\g<charset>', headers['content-type'], flags=re.IGNORECASE
        )

    if '' != charset:
        try:
            html_text = html_text.decode(charset)
        except LookupError:
            log.warning("invalid charset in '%s': '%s'" % (headers['content-type'], charset))

    if str != type(html_text):
        # falls back to the repr of the bytes ("b'...'"); ugly, but keeps
        # the regex below working when no/unknown charset was announced
        html_text = str(html_text)

    result = re.match(r'.*?<title.*?>(.*?)</title>.*?', html_text, re.S | re.M | re.IGNORECASE)
    if result:
        match = result.groups()[0]

        # HTMLParser.unescape() was removed in Python 3.9;
        # html.unescape() is the supported replacement and removes the
        # need for the module-level ``global parser`` instance.
        try:
            expanded_html = html.unescape(match)
        except UnicodeDecodeError as e:  # idk why this can happen, but it does
            log.warning('html.unescape() exploded here: ' + str(e))
            expanded_html = match
        return 0, expanded_html
    else:
        return 2, 'no title'
|