grab-site/libgrabsite/wpull_hooks.py


import re
import os
import sys
import json
import pprint
import signal
import trollius as asyncio
from urllib.request import urlopen
from autobahn.asyncio.websocket import WebSocketClientFactory, WebSocketClientProtocol
from libgrabsite.ignoracle import Ignoracle, parameterize_record_info
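# wpull hook script used by grab-site: it applies ArchiveBot-style ignore
# patterns to decide which URLs to skip, and streams job progress to the
# grab-site WebSocket server.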
realStdoutWrite = sys.stdout.buffer.write
realStderrWrite = sys.stderr.buffer.write
def printToReal(s):
    realStdoutWrite((s + "\n").encode("utf-8"))
    sys.stdout.buffer.flush()
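# WebSocket client that pushes "download", "ignore", "stdout"/"stderr", and
# job status messages to the WebSocket server; it reconnects whenever the
# connection drops (see onClose).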
class GrabberClientProtocol(WebSocketClientProtocol):
    def onOpen(self):
        self.factory.client = self
        printToReal("{} connected to WebSocket server".format(self.__class__.__name__))
        self.sendMessage(json.dumps({
            "type": "hello",
            "mode": "grabber",
            "url": jobData["url"]
        }).encode('utf-8'))

    def onClose(self, wasClean, code, reason):
        self.factory.client = None
        printToReal("{} disconnected from WebSocket server".format(self.__class__.__name__))
        # TODO: exponentially increasing delay (copy Decayer from dashboard)
        asyncio.ensure_future(connectToServer())

    def sendObject(self, obj):
        self.sendMessage(json.dumps(obj).encode("utf-8"))

class GrabberClientFactory(WebSocketClientFactory):
    protocol = GrabberClientProtocol

    def __init__(self):
        super().__init__()
        self.client = None

wsFactory = GrabberClientFactory()


@asyncio.coroutine
def connectToServer():
    host = os.environ.get('GRAB_SITE_WS_HOST', '127.0.0.1')
    port = int(os.environ.get('GRAB_SITE_WS_PORT', 29001))
    while True:
        try:
            coro = yield from loop.create_connection(wsFactory, host, port)
        except OSError:
            printToReal("Could not connect to WebSocket server, retrying in 2 seconds...")
            yield from asyncio.sleep(2)
        else:
            break

loop = asyncio.get_event_loop()
asyncio.ensure_future(connectToServer())
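# SIGINT (^C) writes a `stop` file to the working directory so the crawl can
# finish cleanly; SIGTERM stops the event loop immediately.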
def gracefulStopCallback():
printToReal("\n^C detected, creating 'stop' file, please wait for exit...")
with open(os.path.join(workingDir, "stop"), "wb") as f:
pass
def forcefulStopCallback():
    loop.stop()

loop.add_signal_handler(signal.SIGINT, gracefulStopCallback)
loop.add_signal_handler(signal.SIGTERM, forcefulStopCallback)
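# Ignore sets are fetched from the ArchiveTeam/ArchiveBot repository and
# cached in memory, keyed by ignore set name.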
igsetCache = {}
def getPatternsForIgnoreSet(name):
    assert name != "", name
    if name in igsetCache:
        return igsetCache[name]
    printToReal("Fetching ArchiveBot/master/db/ignore_patterns/%s.json" % name)
    igsetCache[name] = json.loads(urlopen(
        "https://raw.githubusercontent.com/ArchiveTeam/ArchiveBot/" +
        "master/db/ignore_patterns/%s.json" % name).read().decode("utf-8")
    )["patterns"]
    return igsetCache[name]

workingDir = os.environ['GRAB_SITE_WORKING_DIR']
def mtime(f):
    return os.stat(f).st_mtime
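# Watches a file's mtime so that the `igsets` and `ignores` files can be
# edited while the crawl is running; acceptURL() reloads the patterns when
# either file changes.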
class FileChangedWatcher(object):
    def __init__(self, fname):
        self.fname = fname
        self.lastModificationTime = mtime(fname)

    def hasChanged(self):
        nowModificationTime = mtime(self.fname)
        changed = nowModificationTime != self.lastModificationTime
        self.lastModificationTime = nowModificationTime
        return changed

igsetsWatcher = FileChangedWatcher(os.path.join(workingDir, "igsets"))
ignoresWatcher = FileChangedWatcher(os.path.join(workingDir, "ignores"))
ignoracle = Ignoracle()
def updateIgnoracle():
    with open(os.path.join(workingDir, "igsets"), "r") as f:
        igsets = f.read().strip("\r\n\t ,").split(',')

    with open(os.path.join(workingDir, "ignores"), "r") as f:
        ignores = set(ig for ig in f.read().strip("\r\n").split('\n') if ig != "")

    for igset in igsets:
        ignores.update(getPatternsForIgnoreSet(igset))

    printToReal("Using these %d ignores:" % len(ignores))
    printToReal(pprint.pformat(ignores))

    ignoracle.set_patterns(ignores)

updateIgnoracle()
def shouldIgnoreURL(url, recordInfo):
"""
Returns whether a URL should be ignored.
"""
2015-07-18 04:31:54 +00:00
parameters = parameterize_record_info(recordInfo)
return ignoracle.ignores(url, **parameters)
def acceptURL(urlInfo, recordInfo, verdict, reasons):
    if igsetsWatcher.hasChanged() or ignoresWatcher.hasChanged():
        updateIgnoracle()

    url = urlInfo['url']
    if url.startswith('data:'):
        # data: URLs aren't something you can grab, so drop them to avoid ignore
        # checking and ignore logging.
        return False

    pattern = shouldIgnoreURL(url, recordInfo)
    if pattern:
        maybeLogIgnore(url, pattern)
        return False

    # If we get here, none of our ignores apply.  Return the original verdict.
    return verdict

def queuedURL(urlInfo):
jobData["items_queued"] += 1
def dequeuedURL(urlInfo, recordInfo):
    jobData["items_downloaded"] += 1

jobData = {
"ident": open(os.path.join(workingDir, "id")).read().strip(),
"url": open(os.path.join(workingDir, "start_url")).read().strip(),
"started_at": os.stat(os.path.join(workingDir, "start_url")).st_mtime,
"suppress_ignore_reports": True,
"concurrency": int(open(os.path.join(workingDir, "concurrency")).read().strip()),
2015-07-18 08:21:34 +00:00
"bytes_downloaded": 0,
"items_queued": 0,
"items_downloaded": 0,
"delay_min": 0,
"delay_max": 0,
"r1xx": 0,
"r2xx": 0,
"r3xx": 0,
"r4xx": 0,
"r5xx": 0,
"runk": 0,
}
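# Called via handleResponse/handleError after each item is processed: updates
# the response-code counters and bytes_downloaded, reports the result over the
# WebSocket, and stops the crawl once a `stop` file exists.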
def handleResult(urlInfo, recordInfo, errorInfo={}, httpInfo={}):
#print("urlInfo", urlInfo)
#print("recordInfo", recordInfo)
#print("errorInfo", errorInfo)
#print("httpInfo", httpInfo)
updateIgoffInJobData()
2015-07-18 09:49:03 +00:00
response_code = 0
2015-07-18 08:21:34 +00:00
if httpInfo.get("response_code"):
2015-07-18 09:49:03 +00:00
response_code = httpInfo.get("response_code")
response_code_str = str(httpInfo["response_code"])
if len(response_code_str) == 3 and response_code_str[0] in "12345":
jobData["r%sxx" % response_code_str[0]] += 1
2015-07-18 09:49:50 +00:00
else:
jobData["runk"] += 1
2015-07-18 08:21:34 +00:00
2015-07-18 04:31:54 +00:00
if httpInfo.get("body"):
2015-07-18 08:21:34 +00:00
jobData["bytes_downloaded"] += httpInfo["body"]["content_size"]
2015-07-18 04:31:54 +00:00
2015-07-18 08:53:38 +00:00
stop = shouldStop()
2015-07-18 09:49:03 +00:00
response_message = httpInfo.get("response_message")
if errorInfo:
response_code = 0
response_message = errorInfo["error"]
2015-07-18 03:17:27 +00:00
if wsFactory.client:
wsFactory.client.sendObject({
"type": "download",
"job_data": jobData,
"url": urlInfo["url"],
2015-07-18 09:49:03 +00:00
"response_code": response_code,
"response_message": response_message,
})
2015-07-18 08:53:38 +00:00
if stop:
return wpull_hook.actions.STOP
return wpull_hook.actions.NORMAL
def handleResponse(urlInfo, recordInfo, httpInfo):
    return handleResult(urlInfo, recordInfo, httpInfo=httpInfo)

def handleError(urlInfo, recordInfo, errorInfo):
    return handleResult(urlInfo, recordInfo, errorInfo=errorInfo)

# TODO: check only every 5 seconds max
def shouldStop():
    return os.path.exists(os.path.join(workingDir, "stop"))

# TODO: check only every 5 seconds max
def updateIgoffInJobData():
    igoff = os.path.exists(os.path.join(workingDir, "igoff"))
    jobData["suppress_ignore_reports"] = igoff
    return igoff

def maybeLogIgnore(url, pattern):
    if not updateIgoffInJobData():
        printToReal("IGNOR %s by %s" % (url, pattern))
        if wsFactory.client:
            wsFactory.client.sendObject({
                "type": "ignore",
                "job_data": jobData,
                "url": url,
                "pattern": pattern
            })

# Regular expressions for server headers go here
ICY_FIELD_PATTERN = re.compile('icy-|ice-|x-audiocast-', re.IGNORECASE)
ICY_VALUE_PATTERN = re.compile('icecast', re.IGNORECASE)
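# handlePreResponse aborts responses that look like Icecast/SHOUTcast streams
# (an "ICY" protocol version or icy-*/ice-* headers) before any body is
# downloaded.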
def handlePreResponse(urlInfo, urlRecord, responseInfo):
    url = urlInfo['url']

    # Check if server version starts with ICY
    if responseInfo.get('version', '') == 'ICY':
        maybeLogIgnore(url, '[icy version]')
        return wpull_hook.actions.FINISH

    # Loop through all the server headers for matches
    for field, value in responseInfo.get('fields', []):
        if ICY_FIELD_PATTERN.match(field):
            maybeLogIgnore(url, '[icy field]')
            return wpull_hook.actions.FINISH

        if field == 'Server' and ICY_VALUE_PATTERN.match(value):
            maybeLogIgnore(url, '[icy server]')
            return wpull_hook.actions.FINISH

    # Nothing matched, allow download
    printToReal(url + " ...")
    return wpull_hook.actions.NORMAL

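# Wrap sys.stdout/sys.stderr so that wpull's output is written to the real
# terminal and also mirrored to the WebSocket server.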
def stdoutWriteToBoth(message):
    assert isinstance(message, bytes), message
    try:
        realStdoutWrite(message)
        if wsFactory.client:
            wsFactory.client.sendObject({
                "type": "stdout",
                "job_data": jobData,
                "message": message.decode("utf-8")
            })
    except Exception as e:
        realStderrWrite((str(e) + "\n").encode("utf-8"))

def stderrWriteToBoth(message):
    assert isinstance(message, bytes), message
    try:
        realStderrWrite(message)
        if wsFactory.client:
            wsFactory.client.sendObject({
                "type": "stderr",
                "job_data": jobData,
                "message": message.decode("utf-8")
            })
    except Exception as e:
        realStderrWrite((str(e) + "\n").encode("utf-8"))

sys.stdout.buffer.write = stdoutWriteToBoth
sys.stderr.buffer.write = stderrWriteToBoth
def exitStatus(code):
print("Finished grab {} {} with exit code {}".format(jobData["ident"], jobData["url"], code))
return code
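# Register the hooks with wpull via the global `wpull_hook` object, using
# version 2 of the callback API.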
assert 2 in wpull_hook.callbacks.AVAILABLE_VERSIONS
wpull_hook.callbacks.version = 2
wpull_hook.callbacks.accept_url = acceptURL
wpull_hook.callbacks.queued_url = queuedURL
wpull_hook.callbacks.dequeued_url = dequeuedURL
wpull_hook.callbacks.handle_response = handleResponse
wpull_hook.callbacks.handle_error = handleError
wpull_hook.callbacks.handle_pre_response = handlePreResponse
wpull_hook.callbacks.exit_status = exitStatus