Mirror of https://github.com/taroved/pol, synced 2025-05-28 12:00:09 -07:00
mon lib + mysql fix
commit 6aefb2dbcd (parent 74547224d6)
@@ -29,7 +29,6 @@ import re
 from feed import getFeedData, buildFeed
 
 from settings import DOWNLOADER_USER_AGENT, FEED_REQUEST_PERIOD_LIMIT, DEBUG, SNAPSHOT_DIR
-from mlm import pgc
 
 
 class bcolors:
@@ -71,14 +70,14 @@ def check_feed_request_time_limit(url):
     return 0
 
 
-pool = HTTPConnectionPool(reactor, persistent=False)
-pool.cachedConnectionTimeout = 3
+#pool = HTTPConnectionPool(reactor, persistent=False)
+#pool.cachedConnectionTimeout = 3
 
 agent = BrowserLikeRedirectAgent(
             Agent(reactor,
                 contextFactory=ScrapyClientContextFactory(), # skip certificate verification
-                connectTimeout=10,
-                pool=pool),
+                connectTimeout=10),
+                #pool=pool),
             redirectLimit=5
             )
 
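The hunk above comments out the module-level HTTPConnectionPool and the pool= argument, so the Agent falls back to its own non-persistent pool. A minimal sketch of the resulting construction, assuming the usual Twisted and Scrapy import locations (the diff itself only shows the class names):

# Sketch only: Twisted Agent built without an explicit connection pool, as the
# edited code does. The Scrapy import path is an assumption; the diff only
# shows ScrapyClientContextFactory being used to skip certificate verification.
from twisted.internet import reactor
from twisted.web.client import Agent, BrowserLikeRedirectAgent
from scrapy.core.downloader.contextfactory import ScrapyClientContextFactory

agent = BrowserLikeRedirectAgent(
    Agent(reactor,
          contextFactory=ScrapyClientContextFactory(),  # skip certificate verification
          connectTimeout=10),  # no pool= argument: Agent creates a non-persistent pool itself
    redirectLimit=5)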
@@ -188,28 +187,22 @@ def downloadDone(response_str, request, response, feed_config):
     request.finish()
     run_pgc()
 
-from pympler import summary, muppy, tracker, refbrowser
+from pympler import tracker
 import gc
-#sum = None
 tr = tracker.SummaryTracker()
-iterator = 0
+MON_PERIOD_SECONDS = 5#3 * 60 * 60 # 3 hours
+mon_time = None
 def mon(none):
-    global pool
-    pool.closeCachedConnections()
-    #gc.collect()
-    global tr
-    tr.print_diff()
-    global iterator
-    iterator += 1
-    if iterator % 4 == 0:
-        global reactor
-        ib = refbrowser.InteractiveBrowser(reactor)
-        ib.main()
-    #cb = refbrowser.ConsoleBrowser(reactor, maxdepth=2, str_func=output_function)
-    #cb.print_tree()
-
-def output_function(o):
-    return str(type(o))
+    global mon_time
+    tm = int(time.time())
+    if not mon_time or tm - mon_time >= MON_PERIOD_SECONDS:
+        #global pool
+        #pool.closeCachedConnections()
+        gc.collect()
+        global tr
+        tr.print_diff()
+        mon_time = tm
 
 def run_pgc():
     d = defer.Deferred()
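The rewritten mon() drops the iteration counter and the refbrowser inspection in favour of a wall-clock throttle around pympler's summary diff. A self-contained sketch of that pattern, assuming pympler is installed; the pass-through return value is an addition here so the function can sit in a Deferred callback chain, the original simply takes and ignores one argument:

# Minimal sketch of the time-throttled memory monitor. tr.print_diff() prints
# the per-type object count and size delta since the previous snapshot, which
# is what makes a slow leak visible between periods.
import gc
import time
from pympler import tracker

MON_PERIOD_SECONDS = 5          # value checked in by the diff; the commented-out value is 3 hours
tr = tracker.SummaryTracker()
mon_time = None

def mon(result=None):
    global mon_time
    tm = int(time.time())
    if not mon_time or tm - mon_time >= MON_PERIOD_SECONDS:
        gc.collect()            # collect garbage first so the diff reflects live objects
        tr.print_diff()         # objects allocated since the last snapshot
        mon_time = tm
    return result               # pass-through, usable as a Deferred callback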
feed.py (106 changed lines)
@@ -9,8 +9,10 @@ from feedgenerator import Rss201rev2Feed, Enclosure
 import datetime
 
 import MySQLdb
+from contextlib import closing
 from settings import DATABASES, DOWNLOADER_USER_AGENT
 
+
 url_hash_regexp = re.compile('(#.*)?$')
 
 POST_TIME_DISTANCE = 15 # minutes, RSS Feed Reader skip same titles created in 10 min interval
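The mysql half of the commit shows up in the following hunks of feed.py: instead of holding a bare connection, every database use is wrapped in contextlib.closing() plus the MySQLdb connection context manager. A minimal sketch of the pattern, assuming the MySQLdb behaviour of this era where "with conn as cur" yields a cursor and commits on success (rolls back on error), while closing() guarantees conn.close(); get_conn() below is a hypothetical stand-in for the project's helper that reads settings.DATABASES:

import MySQLdb
from contextlib import closing

def get_conn():
    # hypothetical stand-in for the project's get_conn(); the real one takes
    # its credentials from settings.DATABASES
    return MySQLdb.connect(host='127.0.0.1', user='pol', passwd='secret', db='pol', charset='utf8')

def fetch_one_feed(feed_id):
    with closing(get_conn()) as conn:      # connection is always closed
        with conn as cur:                  # cursor; commit/rollback on exit
            cur.execute("select uri from frontend_feed where id=%s", (feed_id,))
            return cur.fetchone()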
@@ -18,18 +20,18 @@ POST_TIME_DISTANCE = 15 # minutes, RSS Feed Reader skip same titles created in 10 min interval
 FIELD_IDS = {'title': 1, 'description': 2, 'link': 3}
 
 def save_post(conn, created, feed_id, post_fields):
-    cur = conn.cursor()
-    cur.execute("""insert into frontend_post (md5sum, created, feed_id)
-                    values (%s, %s, %s)""", (post_fields['md5'], created, feed_id))
-    print(cur._last_executed)
+    with conn as cur:
+        cur.execute("""insert into frontend_post (md5sum, created, feed_id)
+                        values (%s, %s, %s)""", (post_fields['md5'], created, feed_id))
+        print(cur._last_executed)
 
-    post_id = conn.insert_id()
-    for key in ['title', 'description', 'title_link']:
-        if key in post_fields:
-            #import pdb;pdb.set_trace()
-            cur.execute("""insert into frontend_postfield (field_id, post_id, `text`)
-                            values (%s, %s, %s)""", (FIELD_IDS[key], post_id, post_fields[key].encode('utf-8')))
-            print(cur._last_executed)
+        post_id = conn.insert_id()
+        for key in ['title', 'description', 'title_link']:
+            if key in post_fields:
+                #import pdb;pdb.set_trace()
+                cur.execute("""insert into frontend_postfield (field_id, post_id, `text`)
+                                values (%s, %s, %s)""", (FIELD_IDS[key], post_id, post_fields[key].encode('utf-8')))
+                print(cur._last_executed)
 
 def fill_time(feed_id, items):
     if not items:
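A detail worth noting in save_post(): post_id comes from conn.insert_id(), MySQLdb's connection-level last-AUTO_INCREMENT value, and is reused as the foreign key for the frontend_postfield rows. A compressed sketch of that parent/child insert flow (the fields argument is a hypothetical list of (field_id, text) pairs):

# Sketch of the parent/child insert flow in save_post(); conn is a MySQLdb
# connection, argument values are supplied by the caller.
def save_post_sketch(conn, md5, created, feed_id, fields):
    with conn as cur:
        cur.execute("insert into frontend_post (md5sum, created, feed_id) values (%s, %s, %s)",
                    (md5, created, feed_id))
        post_id = conn.insert_id()   # AUTO_INCREMENT id of the row just inserted
        for field_id, text in fields:
            cur.execute("insert into frontend_postfield (field_id, post_id, `text`) values (%s, %s, %s)",
                        (field_id, post_id, text))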
@@ -44,31 +46,31 @@ def fill_time(feed_id, items):
 
     #fetch dates from db
     fetched_dates = {}
-    db = get_conn()
-    with db:
-        quoted_hashes = ','.join(["'%s'" % (i['md5']) for i in items])
+    with closing(get_conn()) as conn:
+        with conn as cur:
+            quoted_hashes = ','.join(["'%s'" % (i['md5']) for i in items])
 
-        cur = db.cursor()
-        cur.execute("""select p.md5sum, p.created, p.id
-                        from frontend_post p
-                        where p.md5sum in (%s)
-                        and p.feed_id=%s""" % (quoted_hashes, feed_id,))
-        rows = cur.fetchall()
-        print(cur._last_executed)
-        for row in rows:
-            md5hash = row[0]
-            created = row[1]
-            post_id = row[2]
-            fetched_dates[md5hash] = created
-        cur_time = datetime.datetime.utcnow()
-        new_posts = []
-        for item in items:
-            if item['md5'] in fetched_dates:
-                item['time'] = fetched_dates[item['md5']]
-            else:
-                item['time'] = cur_time
-                save_post(db, cur_time, feed_id, item)
-                cur_time -= datetime.timedelta(minutes=POST_TIME_DISTANCE)
+            cur.execute("""select p.md5sum, p.created, p.id
+                            from frontend_post p
+                            where p.md5sum in (%s)
+                            and p.feed_id=%s""" % (quoted_hashes, feed_id,))
+            rows = cur.fetchall()
+            print(cur._last_executed)
+            for row in rows:
+                md5hash = row[0]
+                created = row[1]
+                post_id = row[2]
+                fetched_dates[md5hash] = created
+
+            cur_time = datetime.datetime.utcnow()
+            new_posts = []
+            for item in items:
+                if item['md5'] in fetched_dates:
+                    item['time'] = fetched_dates[item['md5']]
+                else:
+                    item['time'] = cur_time
+                    save_post(conn, cur_time, feed_id, item)
+                    cur_time -= datetime.timedelta(minutes=POST_TIME_DISTANCE)
 
 
 def decode(text, encoding): # it's strange but true
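A side note on the cur_time arithmetic above: items not already present in frontend_post are stamped with decreasing timestamps, each one POST_TIME_DISTANCE minutes earlier than the previous, which per the comment next to POST_TIME_DISTANCE keeps feed readers from collapsing posts with near-identical times. A reduced sketch of just that staggering:

# Reduced sketch of the timestamp staggering; new_items stands in for the
# items that were not found in frontend_post.
import datetime

POST_TIME_DISTANCE = 15  # minutes

def stagger_times(new_items):
    cur_time = datetime.datetime.utcnow()
    for item in new_items:
        item['time'] = cur_time
        cur_time -= datetime.timedelta(minutes=POST_TIME_DISTANCE)
    return new_items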
@@ -152,24 +154,24 @@ def buildFeed(response, feed_config):
 def getFeedData(request, feed_id):
     # get url, xpathes
     feed = {}
-    db = get_conn()
-    with db:
-        cur = db.cursor()
-        cur.execute("""select f.uri, f.xpath, fi.name, ff.xpath, fi.required from frontend_feed f
-                        right join frontend_feedfield ff on ff.feed_id=f.id
-                        left join frontend_field fi on fi.id=ff.field_id
-                        where f.id=%s""", (feed_id,))
-        rows = cur.fetchall()
-
-        for row in rows:
-            if not feed:
-                feed['id'] = feed_id
-                feed['uri'] = row[0]
-                feed['xpath'] = row[1]
-                feed['fields'] = {}
-                feed['required'] = {}
-            feed['fields'][row[2]] = row[3]
-            feed['required'][row[2]] = row[4]
+    with closing(get_conn()) as conn:
+        with conn as cur:
+            cur.execute("""select f.uri, f.xpath, fi.name, ff.xpath, fi.required from frontend_feed f
+                            right join frontend_feedfield ff on ff.feed_id=f.id
+                            left join frontend_field fi on fi.id=ff.field_id
+                            where f.id=%s""", (feed_id,))
+            rows = cur.fetchall()
+
+            for row in rows:
+                if not feed:
+                    feed['id'] = feed_id
+                    feed['uri'] = row[0]
+                    feed['xpath'] = row[1]
+                    feed['fields'] = {}
+                    feed['required'] = {}
+                feed['fields'][row[2]] = row[3]
+                feed['required'][row[2]] = row[4]
 
     if feed:
         return [feed['uri'], feed]
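For orientation, the dict that getFeedData() assembles from the frontend_feed / frontend_feedfield / frontend_field join has roughly the shape below (values are invented, keys match the code above); when a feed is found the function returns [feed['uri'], feed], otherwise it falls through and returns None:

# Illustrative shape only; values are made up.
example_feed = {
    'id': 7,                                    # feed_id passed in
    'uri': 'http://example.com/news',           # page to fetch
    'xpath': '//div[@class="item"]',            # item-level xpath
    'fields': {'title': './/h2/a', 'description': './/p'},
    'required': {'title': 1, 'description': 0},
}
# getFeedData(request, 7) would then return [example_feed['uri'], example_feed]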