Mirror of https://github.com/taroved/pol

Commit 5cda918fa4 (parent cefe9e79d4): xpath editor
@@ -138,7 +138,7 @@ def downloadDone(response_str, request, response, feed_config):
     if (isinstance(response, TextResponse)):
         if feed_config:
             response_str = buildFeed(response, feed_config)
-            request.setHeader(b"Content-Type", b'text/xml')
+            request.setHeader(b"Content-Type", b'text/xml; charset=utf-8')
         else:
             response_str, file_name = setBaseAndRemoveScriptsAndMore(response, url)
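
Note: the only change here is an explicit charset on the Content-Type header; without it, feed readers have to guess the encoding of the XML body. A minimal standalone sketch of the same idea in Twisted (resource name, body, and port are assumptions, not from this commit):

    from twisted.internet import reactor
    from twisted.web import resource, server

    class FeedResource(resource.Resource):
        isLeaf = True

        def render_GET(self, request):
            # declare the body encoding explicitly so clients don't guess
            request.setHeader(b"Content-Type", b"text/xml; charset=utf-8")
            return u'<rss version="2.0"></rss>'.encode('utf-8')  # body must be bytes

    reactor.listenTCP(8080, server.Site(FeedResource()))
    reactor.run()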
feed.py (6 lines changed)
@@ -26,6 +26,7 @@ def save_post(conn, created, feed_id, post_fields):
     post_id = conn.insert_id()
     for key in ['title', 'description', 'title_link']:
         if key in post_fields:
+            #import pdb;pdb.set_trace()
             cur.execute("""insert into frontend_postfield (field_id, post_id, `text`)
                             values (%s, %s, %s)""", (FIELD_IDS[key], post_id, post_fields[key].encode('utf-8')))
             print(cur._last_executed)
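
Note: the insert above relies on driver-side parameter binding. A standalone sketch of the same pattern with MySQLdb (connection parameters, ids, and text are assumptions):

    import MySQLdb

    conn = MySQLdb.connect(db='pol', charset='utf8mb4')  # assumed credentials
    cur = conn.cursor()
    # %s placeholders are escaped by the driver, so arbitrary text is safe to pass
    cur.execute("insert into frontend_postfield (field_id, post_id, `text`) "
                "values (%s, %s, %s)", (1, 42, u'example'.encode('utf-8')))
    print(cur._last_executed)  # MySQLdb keeps the final interpolated SQL here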
@@ -96,6 +97,7 @@ def buildFeed(response, feed_config):
     tree = selector.root.getroottree()
     # get data from html
     items = []
+    #import pdb;pdb.set_trace()
     for node in selector.xpath(feed_config['xpath']):
         item = {}
         required_count = 0
@@ -116,7 +118,7 @@ def buildFeed(response, feed_config):
         if required_count == required_found:
             items.append(item)

-    title = response.selector.xpath('//title/text()').extract()
+    title = selector.xpath('//title/text()').extract()

     #build feed
     feed = Rss201rev2Feed(
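
Note: the title is now read through the locally built selector (parsed from the decoded body) instead of response.selector, so every XPath query runs against the same document. A sketch, assuming Scrapy's Selector as the surrounding code appears to use:

    from scrapy.selector import Selector

    html = u'<html><head><title>Example</title></head><body/></html>'
    selector = Selector(text=html)
    print(selector.xpath('//title/text()').extract())  # [u'Example']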
@@ -176,6 +178,6 @@ def getFeedData(request, feed_id):

 def get_conn():
     creds = DATABASES['default']
-    db = MySQLdb.connect(host=creds['HOST'], port=int(creds['PORT']), user=creds['USER'], passwd=creds['PASSWORD'], db=creds['NAME'], init_command='SET NAMES UTF8')
+    db = MySQLdb.connect(host=creds['HOST'], port=int(creds['PORT']), user=creds['USER'], passwd=creds['PASSWORD'], db=creds['NAME'], init_command='SET NAMES utf8mb4')
     db.autocommit(True)
     return db
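
Note: MySQL's "utf8" charset is a 3-byte subset that cannot store 4-byte characters (emoji, some CJK); utf8mb4 is the real UTF-8, which is why the connection switches to it. A minimal connection sketch (credentials are assumptions); MySQLdb's charset argument covers the same ground as the SET NAMES init_command:

    import MySQLdb

    db = MySQLdb.connect(host='localhost', port=3306, user='pol',
                         passwd='secret', db='pol', charset='utf8mb4')
    db.autocommit(True)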
tests.py (75 lines changed)
@@ -1,98 +1,116 @@
 from lxml import etree
 import sys
 import requests
+from datetime import datetime

 from feed import element_to_unicode

-def element_to_string(element):
+def tostring(el, fields):
+    if fields:
+        s = ''
+        for f in fields:
+            for e in el.xpath(f+'/text()'):
+                s += e
+        return s
+    else:
+        etree.tostring(el)
+
+def element_to_string(element, fields=None):
     if isinstance(element, basestring): # attribute
         return element

     s = [element.text] if element.text else []
-    for sub_element in element:
-        s.append(etree.tostring(sub_element))
-    return ''.join(s)
+    if fields:
+        return tostring(element, fields).encode('utf-8')
+    else:
+        for sub_element in element:
+            s.append(etree.tostring(sub_element))
+        return ''.join(s)

 def test1_get_inner_html():
     root = etree.fromstring('<a>1<b>2</b>3<c>4</c>5</a>')
     assert element_to_unicode(root, 'utf-8') == u'1<b>2</b>3<c>4</c>5'

-ids = [1,2,3,5,6,54,100,101,113,118,123,124,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,166,167]
+ids = [1,2,3,5,6,8,44,54,99,100,101,103,113,118,120,123,124,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,249,250,251,252,253,255,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410] # 254 timeout 344 pp gatevway timeout
 domain = "politepol.com"

 def parse_feed(text):
-    ch = etree.fromstring(text).xpath('/rss/channel')
+    ch = etree.fromstring(text.encode('utf-8')).xpath('/rss/channel')
     title = ch[0].xpath('title')[0].text
+    link = ch[0].xpath('link')[0].text
     items = ch[0].xpath('item')
-    return [title.encode('utf-8'), items]
+    return [title, link, items]

 def crawl(extention):
     number = 0
     for id in ids:
-        print "ID: %s (%s of %s)" % (id, number, len(ids))
+        print "ID: %s (%s of %s) %s" % (id, number, len(ids), datetime.utcnow())
         r = requests.get("http://%s/feed/%s" % (domain, id))
+        #r.encoding = 'utf-8'
         text = r.text.encode('utf-8')
         with open("tests/%s.%s" % (id, extention), 'w') as f:
             f.write(text)
-        title, items = parse_feed(text)
+        title, link, items = parse_feed(text)
         print "Title: %s" % title
+        print "Link: %s" % link
         print "Items count: %s" % len(items)
         number += 1

-def diff(ext1, ext2):
+def diff(ext1, ext2, fields):
     diff = []
     number = 0
     for id in ids:
         print "ID: %s" % (id,)
         text1 = None
         with open("tests/%s.%s" % (id, ext1), 'r') as f:
-            text1 = f.read()
+            text1 = f.read().decode('utf-8')
         text2 = None
         with open("tests/%s.%s" % (id, ext2), 'r') as f:
-            text2 = f.read()
-
+            text2 = f.read().decode('utf-8')

         if text1 == text2:
             print "Identical"
         else:
             diff.append(id)
             posts_diff = 0
             with open("tests/%s.diff" % (id,), 'w') as f:
-                title1, items1 = parse_feed(text1)
-                title2, items2 = parse_feed(text2)
+                title1, link, items1 = parse_feed(text1)
+                title2, link, items2 = parse_feed(text2)
                 if title1 != title2:
                     print "Different titles"
                     f.write("<<<<<<<<<<<<<<< Different titles >>>>>>>>>>>>>>>\n")
                     f.write(">>>>>>>>>>>>>>> %s.%s <<<<<<<<<<<<<\n" % (id, ext1))
-                    f.write(title1 + "\n")
+                    f.write(title1.encode('utf-8') + "\n")
                     f.write(">>>>>>>>>>>>>>> %s.%s <<<<<<<<<<<<<\n" % (id, ext2))
-                    f.write(title2 + "\n")
+                    f.write(title2.encode('utf-8') + "\n")
                 posts1 = {}
                 posts2 = {}
                 if len(items1) != len(items2):
                     print "Different post count: %s vs %s" % (len(items1), len(items2))
                     f.write("<< Different posts count: %s.%s:%s vs %s.%s:%s >>\n" % (id, ext1, len(items1), id, ext2, len(items2)))
                 for post in items1:
-                    posts1[element_to_string(post)] = True
+                    posts1[element_to_string(post, fields)] = True
                 for post in items2:
-                    posts2[element_to_string(post)] = True
-
+                    posts2[element_to_string(post, fields)] = True

                 for post in items1:
-                    if not (element_to_string(post) in posts2):
+                    if not (element_to_string(post, fields) in posts2):
                         posts_diff += 1
                         f.write("<<<<<<<<<<<<<<< Different posts (%s) >>>>>>>>>>>>>>>\n" % posts_diff)
                         f.write(">>>>>>>>>>>>>>> %s.%s <<<<<<<<<<<<<\n" % (id, ext1))
-                        f.write(element_to_string(post) + "\n")
+                        #import pdb;pdb.set_trace()
+                        f.write(element_to_string(post, fields) + "\n")
                         f.write(">>>>>>>>>>>>>>> %s.%s <<<<<<<<<<<<<\n" % (id, ext2))
                         f.write("*** Not found ***\n")

                 for post in items2:
-                    if not (element_to_string(post) in posts1):
+                    if not (element_to_string(post, fields) in posts1):
                         posts_diff += 1
                         f.write("<<<<<<<<<<<<<<< Different posts (%s) >>>>>>>>>>>>>>>\n" % posts_diff)
                         f.write(">>>>>>>>>>>>>>> %s.%s <<<<<<<<<<<<<\n" % (id, ext1))
                         f.write("*** Not found ***\n")
                         f.write(">>>>>>>>>>>>>>> %s.%s <<<<<<<<<<<<<\n" % (id, ext2))
-                        f.write(element_to_string(post) + "\n")
+                        f.write(element_to_string(post, fields) + "\n")
             print "Content of files %s.%s and %s.%s is different. Diff: %s.diff" % (id, ext1, id, ext2, id)
     if posts_diff > 0:
         print "Different feeds: %s" % posts_diff
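
Note: the new fields parameter restricts the comparison to selected child elements of each <item>, so volatile children (dates, counters) stop registering as differences between two feed snapshots. A standalone sketch of the idea (the sample data is made up):

    from lxml import etree

    a = etree.fromstring('<item><title>Hi</title><pubDate>Mon</pubDate></item>')
    b = etree.fromstring('<item><title>Hi</title><pubDate>Tue</pubDate></item>')

    def key(el, fields):
        # concatenate the text of the selected children only
        return ''.join(''.join(el.xpath(f + '/text()')) for f in fields)

    print(key(a, ['title']) == key(b, ['title']))                        # True
    print(key(a, ['title', 'pubDate']) == key(b, ['title', 'pubDate']))  # False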
@@ -110,8 +128,11 @@ elif len(sys.argv) > 2:
     else:
         raise Exception("Invalid argument count for crawl")
 elif sys.argv[1] == 'diff':
-    if len(sys.argv) == 4:
-        diff(sys.argv[2], sys.argv[3])
+    if len(sys.argv) >= 4:
+        fields = None
+        if len(sys.argv) == 5:
+            fields = sys.argv[4].split(',')
+        diff(sys.argv[2], sys.argv[3], fields)
     else:
         raise Exception("Invalid argument count for diff")
 else:
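
Note: the diff subcommand now takes an optional comma-separated field list as its fourth argument. Assumed invocations (the extensions and field names are examples, not from the repo):

    python tests.py diff old new                    # full-XML comparison, as before
    python tests.py diff old new title,description  # compare only the listed fields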