taroved/pol (mirror of https://github.com/taroved/pol)

sanitize feed preview

Alexandr Nesterenko 2018-02-07 19:27:33 +03:00
parent d03cbe3478
commit 94b7e53a40
2 changed files with 113 additions and 78 deletions

View File

@@ -30,7 +30,7 @@
<script type="text/javascript">
function tryGetFeed() {
$.getFeed({
url: '{{ feed_url }}',
url: '{{ feed_url }}?sanitize=Y',
success: function (feed) {
if ('items' in feed) {
$('#preview').empty()
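
For context: with this change the preview page always asks the backend for a sanitized copy of the feed. A minimal sketch of the request the template now issues, with Python 2 urllib2 standing in for the jQuery call (host, port, and feed id are hypothetical):

import urllib2  # Python 2 stdlib, matching the project's Python 2 codebase

feed_url = 'http://localhost:8088/feed/123'  # hypothetical feed URL
preview_xml = urllib2.urlopen(feed_url + '?sanitize=Y').read()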

View File

@@ -5,7 +5,6 @@ import json
import pickle
import time, sys, traceback
import re
from urlparse import urlparse
from lxml import etree
@@ -14,6 +13,7 @@ from twisted.internet import reactor, endpoints, defer
from twisted.web.client import Agent, BrowserLikeRedirectAgent, readBody, PartialDownloadError, HTTPConnectionPool
from twisted.web.server import NOT_DONE_YET
from twisted.web.http_headers import Headers
from twisted.web.http import INTERNAL_SERVER_ERROR
from twisted.web.html import escape
twisted_headers = Headers
from twisted.logger import Logger
@@ -37,12 +37,18 @@ log = Logger()
class Downloader(object):
def __init__(self, feed, debug, snapshot_dir='/tmp', stat_tool=None, memon=None):
def __init__(self, feed, debug, snapshot_dir, stat_tool, memon, request,
url, feed_config, selector_defer, sanitize):
self.feed = feed
self.debug = debug
self.snapshot_dir = snapshot_dir
self.stat_tool = stat_tool
self.memon = memon
self.request = request
self.url = url
self.feed_config=feed_config
self.selector_defer = selector_defer
self.sanitize = sanitize
def html2json(self, el):
return [
@@ -63,6 +69,34 @@ class Downloader(object):
f.write('\n\n' + etree.tostring(tree, encoding='utf-8', method='html'))
return file_name
def sanitizeAndNumerate(self, selector, numerate=True, sanitize_anchors=True):
tree = selector.root.getroottree()
i = 1
for bad in tree.xpath("//*"):
# remove scripts and iframes
if bad.tag in ['script', 'iframe']:
bad.getparent().remove(bad)
elif numerate:
# set tag-id attribute
bad.attrib['tag-id'] = str(i)
i += 1
# sanitize anchors
if sanitize_anchors and bad.tag == 'a' and 'href' in bad.attrib:
bad.attrib['origin-href'] = bad.attrib['href']
del bad.attrib['href']
# remove html events
for attr in bad.attrib:
if attr.startswith('on'):
del bad.attrib[attr]
# sanitize forms
if bad.tag == 'form':
bad.attrib['onsubmit'] = "return false"
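
For reference, a self-contained sketch of the same sanitization pass over a plain lxml tree (the input HTML is hypothetical). One detail differs deliberately: the attribute names are copied into a list before deletion, as a precaution against mutating the mapping while iterating it:

from lxml import etree

html = ('<html><body><script>track()</script>'
        '<a href="/post" onclick="go()">post</a>'
        '<form action="/search"><input/></form></body></html>')
tree = etree.HTML(html).getroottree()

for el in tree.xpath('//*'):
    if el.tag in ('script', 'iframe'):         # drop active content outright
        el.getparent().remove(el)
        continue
    if el.tag == 'a' and 'href' in el.attrib:  # keep the target, disable the link
        el.attrib['origin-href'] = el.attrib['href']
        del el.attrib['href']
    for attr in list(el.attrib):               # strip on* event handlers
        if attr.startswith('on'):
            del el.attrib[attr]
    if el.tag == 'form':                       # neutralize form submission
        el.attrib['onsubmit'] = 'return false'

print(etree.tostring(tree, method='html'))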
def setBaseAndRemoveScriptsAndMore(self, selector, headers, url):
selector.remove_namespaces()
@@ -83,29 +117,7 @@ class Downloader(object):
head.insert(0, base)
base.set('href', url)
i = 1
for bad in tree.xpath("//*"):
# remove scripts
if bad.tag == 'script':
bad.getparent().remove(bad)
else:
# set tag-id attribute
bad.attrib['tag-id'] = str(i)
i += 1
# sanitize anchors
if bad.tag == 'a' and 'href' in bad.attrib:
bad.attrib['origin-href'] = bad.attrib['href']
del bad.attrib['href']
# remove html events
for attr in bad.attrib:
if attr.startswith('on'):
del bad.attrib[attr]
# sanitize forms
if bad.tag == 'form':
bad.attrib['onsubmit'] = "return false"
self.sanitizeAndNumerate(selector)
body = tree.xpath("//body")
if body:
@@ -127,36 +139,37 @@ class Downloader(object):
return respcls(url=url, status=status, headers=headers, body=body)
def error_html(self, msg):
return "<html><body>%s</body></html" % msg.replace("\n", "<br/>\n")
return "<html><body>%s</body></html>" % msg.replace("\n", "<br/>\n")
def downloadError(self, error, request=None, url=None, response=None, feed_config=None, selector_defer=None):
def downloadError(self, error):
# read for details: https://stackoverflow.com/questions/29423986/twisted-giving-twisted-web-client-partialdownloaderror-200-ok
if error.type is PartialDownloadError and error.value.status == '200':
d = defer.Deferred()
reactor.callLater(0, d.callback, error.value.response) # error.value.response is response_str
d.addCallback(self.downloadDone, request=request, response=response, feed_config=feed_config, selector_defer=selector_defer)
d.addErrback(self.downloadError, request=request, url=url, response=response, feed_config=feed_config, selector_defer=selector_defer)
d.addCallback(self.downloadDone)
d.addErrback(self.downloadError)
return
if selector_defer:
selector_defer.errback(error)
if self.selector_defer:
self.selector_defer.errback(error)
else:
self.request.setResponseCode(INTERNAL_SERVER_ERROR)
if self.debug:
request.write('Downloader error: ' + error.getErrorMessage())
request.write('Traceback: ' + error.getTraceback())
self.request.write('Downloader error: ' + error.getErrorMessage())
self.request.write('Traceback: ' + error.getTraceback())
else:
request.write(self.error_html('<h1>PolitePol says: "Something wrong"</h1> <p><b>Try to refresh page or contact us by email: <a href="mailto:politepol.com@gmail.com">politepol.com@gmail.com</a></b>\n(Help us to improve our service with your feedback)</p> <p><i>Scary mantra: %s</i></p>' % escape(error.getErrorMessage())))
request.finish()
self.request.write(self.error_html('<h1>PolitePol says: "Something wrong"</h1> <p><b>Try to refresh page or contact us by email: <a href="mailto:politepol.com@gmail.com">politepol.com@gmail.com</a></b>\n(Help us to improve our service with your feedback)</p> <p><i>Scary mantra: %s</i></p>' % escape(error.getErrorMessage())))
self.request.finish()
try:
if self.stat_tool:
feed_id = feed_config and feed_config['id']
feed_id = self.feed_config and self.feed_config['id']
s_url = None
if not feed_id:
feed_id = 0
s_url = url
s_url = self.url
self.stat_tool.trace(
ip = request.getHeader('x-real-ip') or request.client.host,
ip = self.request.getHeader('x-real-ip') or self.request.client.host,
feed_id = feed_id,
post_cnt=0,
new_post_cnt=0,
@@ -166,48 +179,60 @@ class Downloader(object):
)
else:
sys.stderr.write('\n'.join(
[str(datetime.utcnow()), request.uri, url, 'Downloader error: ' + error.getErrorMessage(),
[str(datetime.utcnow()), self.request.uri, url, 'Downloader error: ' + error.getErrorMessage(),
'Traceback: ' + error.getTraceback()]))
except:
traceback.print_exc(file=sys.stdout)
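
A note on the PartialDownloadError branch near the top of downloadError (see the Stack Overflow link in the code): some servers close the connection without sending Content-Length, so Twisted reports a partial download even though the whole body arrived. A minimal sketch of unwrapping that case in an errback (function name hypothetical):

from twisted.web.client import PartialDownloadError

def body_or_refail(failure):
    # Re-raise anything that is not a PartialDownloadError.
    failure.trap(PartialDownloadError)
    if failure.value.status == '200':
        return failure.value.response  # bytes read before the connection closed
    return failure  # propagate real failures down the errback chain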
def downloadStarted(self, response, request, url, feed_config, selector_defer):
def downloadStarted(self, response):
self.response = response
d = readBody(response)
d.addCallback(self.downloadDone, request=request, response=response, feed_config=feed_config, selector_defer=selector_defer)
d.addErrback(self.downloadError, request=request, url=url, response=response, feed_config=feed_config, selector_defer=selector_defer)
d.addCallback(self.downloadDone)
d.addErrback(self.downloadError)
return response
def downloadDone(self, response_str, request, response, feed_config, selector_defer):
url = response.request.absoluteURI
def downloadDone(self, response_str):
url = self.response.request.absoluteURI
print('Response <%s> ready (%s bytes)' % (url, len(response_str)))
response = self.buildScrapyResponse(response, response_str, url)
sresponse = self.buildScrapyResponse(self.response, response_str, url)
if selector_defer:
selector_defer.callback(response)
if self.selector_defer:
self.selector_defer.callback(sresponse)
else:
self.writeResponse(request, response, feed_config, response_str)
self.writeResponse(sresponse)
self.run_memon()
def writeResponse(self, request, response, feed_config, response_str='PolitePol: Local page processing is failed'):
response = HttpCompressionMiddleware().process_response(Request(response.url), response, None)
response = DecompressionMiddleware().process_response(None, response, None)
def writeResponse(self, sresponse, response_str='PolitePol: Local page processing is failed'):
sresponse = HttpCompressionMiddleware().process_response(Request(sresponse.url), sresponse, None)
sresponse = DecompressionMiddleware().process_response(None, sresponse, None)
if (isinstance(response, TextResponse)):
ip = request.getHeader('x-real-ip') or request.client.host
response_str = self.prepare_response_str(response.selector, response.headers, response.body_as_unicode(), response.url, feed_config, ip)
if feed_config:
request.setHeader(b"Content-Type", b'text/xml; charset=utf-8')
response_headers = self.prepare_response_headers(sresponse.headers)
request.write(response_str)
request.finish()
if (isinstance(sresponse, TextResponse)):
ip = self.request.getHeader('x-real-ip') or self.request.client.host
response_str = self.prepare_response_str(sresponse.selector, sresponse.headers, sresponse.body_as_unicode(), sresponse.url, ip)
if self.feed_config:
response_headers = {b"Content-Type": b'text/xml; charset=utf-8'}
def prepare_response_str(self, selector, headers, page_unicode, url, feed_config, ip=None):
if feed_config:
[response_str, post_cnt, new_post_cnt] = self.feed.buildFeed(selector, page_unicode, feed_config)
for k, v in response_headers.items():
self.request.setHeader(k, v)
self.request.write(response_str)
self.request.finish()
def prepare_response_headers(self, headers):
return {}
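
The empty prepare_response_headers above looks like an override seam, consistent with the new downloadercls hook further down. A hypothetical subclass, purely for illustration and not part of the commit:

class XmlDownloader(Downloader):
    # Hypothetical override: force the feed content type on every response.
    def prepare_response_headers(self, headers):
        return {b'Content-Type': b'text/xml; charset=utf-8'}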
def prepare_response_str(self, selector, headers, page_unicode, url, ip=None):
if self.feed_config:
if self.sanitize:
self.sanitizeAndNumerate(selector, numerate=False, sanitize_anchors=False)
[response_str, post_cnt, new_post_cnt] = self.feed.buildFeed(selector, page_unicode, self.feed_config)
if self.stat_tool:
self.stat_tool.trace(ip=ip, feed_id=feed_config['id'], post_cnt=post_cnt, new_post_cnt=new_post_cnt)
self.stat_tool.trace(ip=ip, feed_id=self.feed_config['id'], post_cnt=post_cnt, new_post_cnt=new_post_cnt)
else:
response_str, file_name = self.setBaseAndRemoveScriptsAndMore(selector, headers, url)
if self.stat_tool:
@@ -226,9 +251,10 @@ class Downloader(object):
class Site(resource.Resource):
isLeaf = True
feed_regexp = re.compile('^/feed1?/(\d{1,10})$')
feed_regexp = re.compile(b'^/feed/(\d{1,10})')
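
The replacement pattern is a bytes regex (request.uri is a byte string) and drops both the optional "1" and the trailing "$" of the old pattern, so a query string such as ?sanitize=Y no longer breaks the match; the flag is then read separately in render_GET below. A quick sketch (feed id hypothetical):

import re

feed_regexp = re.compile(b'^/feed/(\d{1,10})')
uri = b'/feed/123?sanitize=Y'
match = feed_regexp.match(uri)        # still matches: no trailing anchor
print(match.groups()[0])              # b'123'
print(uri.endswith(b'?sanitize=Y'))   # True, so the preview gets sanitized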
def __init__(self, db_creds, snapshot_dir, user_agent, debug=False, limiter=None, memon=None, stat_tool=None, prefetch_dir=None, feed=None):
def __init__(self, db_creds, snapshot_dir, user_agent, debug=False, limiter=None, memon=None, stat_tool=None, prefetch_dir=None, feed=None, downloadercls=None):
self.db_creds = db_creds
self.snapshot_dir = snapshot_dir
self.user_agent = user_agent
@@ -236,15 +262,22 @@ class Site(resource.Resource):
self.prefetch_dir = prefetch_dir
self.feed = feed or Feed(db_creds)
self.downloader = Downloader(self.feed, debug, snapshot_dir, stat_tool, memon)
self.debug = debug
self.stat_tool = stat_tool
self.memon= memon
self.downloadercls = downloadercls or Downloader
def startRequest(self, request, url, feed_config = None, selector_defer=None, sanitize=False):
downloader = self.downloadercls(self.feed, self.debug, self.snapshot_dir, self.stat_tool, self.memon,
request=request, url=url, feed_config=feed_config,
selector_defer=selector_defer, sanitize=sanitize)
def startRequest(self, request, url, feed_config = None, selector_defer=None):
sresponse = self.tryLocalPage(url)
if sresponse:
if selector_defer:
reactor.callLater(0, selector_defer.callback, sresponse)
else:
self.downloader.writeResponse(request, sresponse, feed_config)
downloader.writeResponse(request, sresponse, feed_config)
else:
agent = BrowserLikeRedirectAgent(
Agent(reactor,
@@ -265,8 +298,8 @@ class Site(resource.Resource):
None
)
print('Request <GET %s> started' % (url,))
d.addCallback(self.downloader.downloadStarted, request=request, url=url, feed_config=feed_config, selector_defer=selector_defer)
d.addErrback(self.downloader.downloadError, request=request, url=url, feed_config=feed_config, selector_defer=selector_defer)
d.addCallback(downloader.downloadStarted)
d.addErrback(downloader.downloadError)
def tryLocalPage(self, url):
if self.prefetch_dir:
@@ -283,19 +316,21 @@ class Site(resource.Resource):
'''
Render page for frontend or RSS feed
'''
if 'url' in request.args: # page for frontend
url = request.args['url'][0]
if b'url' in request.args: # page for frontend
url = request.args[b'url'][0]
self.startRequest(request, url)
self.startRequest(request, url, sanitize=True)
return NOT_DONE_YET
elif self.feed_regexp.match(request.uri) is not None: # feed
feed_id = self.feed_regexp.match(request.uri).groups()[0]
sanitize = request.uri.endswith(b'?sanitize=Y')
time_left = self.limiter.check_request_time_limit(request.uri) if self.limiter else 0
if time_left:
request.setResponseCode(429)
request.setHeader('Retry-After', str(time_left) + ' seconds')
return 'Too Many Requests. Retry after %s seconds' % (str(time_left))
return b'Too Many Requests. Retry after %s seconds' % (str(time_left))
else:
res = self.feed.getFeedData(feed_id)
@@ -303,15 +338,15 @@ class Site(resource.Resource):
return res
url, feed_config = res
self.startRequest(request, url, feed_config)
self.startRequest(request, url, feed_config, sanitize=sanitize)
return NOT_DONE_YET
else: # neither page and feed
return 'Url is required'
return 'Url is invalid'
class Server(object):
def __init__(self, port, db_creds, snapshot_dir, user_agent, debug=False, limiter=None, memon=None, stat_tool=None, prefetch_dir=None, feed=None):
def __init__(self, port, db_creds, snapshot_dir, user_agent, debug=False, limiter=None, memon=None, stat_tool=None, prefetch_dir=None, feed=None, site=None):
self.port = port
self.db_creds = db_creds
self.snapshot_dir = snapshot_dir
@@ -324,7 +359,7 @@ class Server(object):
self.log_handler = LogHandler()
self.site = Site(self.db_creds, self.snapshot_dir, self.user_agent, self.debug, self.limiter, self.memon, self.stat_tool, self.prefetch_dir, feed)
self.site = site or Site(self.db_creds, self.snapshot_dir, self.user_agent, self.debug, self.limiter, self.memon, self.stat_tool, self.prefetch_dir, feed)
def requestSelector(self, url=None, feed_config=None):
d = defer.Deferred()