Mirror of https://github.com/taroved/pol

mon lib + mysql fix

Alexandr Nesterenko 2017-09-22 16:41:48 +03:00
parent 74547224d6
commit 6aefb2dbcd
2 changed files with 70 additions and 75 deletions


@@ -29,7 +29,6 @@ import re
 from feed import getFeedData, buildFeed
 from settings import DOWNLOADER_USER_AGENT, FEED_REQUEST_PERIOD_LIMIT, DEBUG, SNAPSHOT_DIR
-from mlm import pgc

 class bcolors:
@@ -71,14 +70,14 @@ def check_feed_request_time_limit(url):
         return 0

-pool = HTTPConnectionPool(reactor, persistent=False)
-pool.cachedConnectionTimeout = 3
+#pool = HTTPConnectionPool(reactor, persistent=False)
+#pool.cachedConnectionTimeout = 3

 agent = BrowserLikeRedirectAgent(
     Agent(reactor,
         contextFactory=ScrapyClientContextFactory(), # skip certificate verification
-        connectTimeout=10,
-        pool=pool),
+        connectTimeout=10),
+        #pool=pool),
     redirectLimit=5
 )
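Note: this hunk turns off the shared persistent HTTPConnectionPool that was suspected of holding connections (and memory) between requests. A minimal sketch of the resulting client setup, assuming a stock Twisted stack; the ScrapyClientContextFactory used in this file is left out for brevity:

from twisted.internet import reactor
from twisted.web.client import Agent, BrowserLikeRedirectAgent

# With no pool= argument, Agent creates its own non-persistent
# HTTPConnectionPool, so sockets are not cached between requests.
agent = BrowserLikeRedirectAgent(
    Agent(reactor, connectTimeout=10),
    redirectLimit=5)

d = agent.request(b'GET', b'http://example.com/')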
@@ -188,28 +187,22 @@ def downloadDone(response_str, request, response, feed_config):
         request.finish()
         run_pgc()

-from pympler import summary, muppy, tracker, refbrowser
+from pympler import tracker
 import gc
 #sum = None
 tr = tracker.SummaryTracker()
-iterator = 0
+MON_PERIOD_SECONDS = 5#3 * 60 * 60 # 3 hours
+mon_time = None
 def mon(none):
-    global pool
-    pool.closeCachedConnections()
-    #gc.collect()
-    global tr
-    tr.print_diff()
-    global iterator
-    iterator += 1
-    if iterator % 4 == 0:
-        global reactor
-        ib = refbrowser.InteractiveBrowser(reactor)
-        ib.main()
-    #cb = refbrowser.ConsoleBrowser(reactor, maxdepth=2, str_func=output_function)
-    #cb.print_tree()
-
-def output_function(o):
-    return str(type(o))
+    global mon_time
+    tm = int(time.time())
+    if not mon_time or tm - mon_time >= MON_PERIOD_SECONDS:
+        #global pool
+        #pool.closeCachedConnections()
+        gc.collect()
+        global tr
+        tr.print_diff()
+        mon_time = tm

 def run_pgc():
     d = defer.Deferred()
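Note: the rewritten mon() drops the refbrowser reference walk entirely. It rate-limits itself with MON_PERIOD_SECONDS, forces a gc.collect(), and prints the pympler SummaryTracker diff. A self-contained sketch of that throttling pattern; the names mirror the diff, and how mon() gets scheduled (here it is simply called from request handling) is an assumption:

import gc
import time

from pympler import tracker

MON_PERIOD_SECONDS = 3 * 60 * 60  # the 3-hour production value hinted at in the diff
tr = tracker.SummaryTracker()
mon_time = None

def mon(_):
    """Print an object-count diff at most once per MON_PERIOD_SECONDS."""
    global mon_time
    tm = int(time.time())
    if not mon_time or tm - mon_time >= MON_PERIOD_SECONDS:
        gc.collect()        # drop unreachable cycles before sampling
        tr.print_diff()     # objects allocated/freed since the last sample
        mon_time = tm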

feed.py

@@ -9,8 +9,10 @@ from feedgenerator import Rss201rev2Feed, Enclosure
 import datetime
 import MySQLdb

+from contextlib import closing
+
 from settings import DATABASES, DOWNLOADER_USER_AGENT

 url_hash_regexp = re.compile('(#.*)?$')

 POST_TIME_DISTANCE = 15 # minutes, RSS Feed Reader skip same titles created in 10 min interval
@@ -18,18 +20,18 @@ POST_TIME_DISTANCE = 15 # minutes, RSS Feed Reader skip same titles created in 1
 FIELD_IDS = {'title': 1, 'description': 2, 'link': 3}

 def save_post(conn, created, feed_id, post_fields):
-    cur = conn.cursor()
-    cur.execute("""insert into frontend_post (md5sum, created, feed_id)
-                values (%s, %s, %s)""", (post_fields['md5'], created, feed_id))
-    print(cur._last_executed)
-    post_id = conn.insert_id()
-
-    for key in ['title', 'description', 'title_link']:
-        if key in post_fields:
-            #import pdb;pdb.set_trace()
-            cur.execute("""insert into frontend_postfield (field_id, post_id, `text`)
-                        values (%s, %s, %s)""", (FIELD_IDS[key], post_id, post_fields[key].encode('utf-8')))
-            print(cur._last_executed)
+    with conn as cur:
+        cur.execute("""insert into frontend_post (md5sum, created, feed_id)
+                    values (%s, %s, %s)""", (post_fields['md5'], created, feed_id))
+        print(cur._last_executed)
+        post_id = conn.insert_id()
+
+        for key in ['title', 'description', 'title_link']:
+            if key in post_fields:
+                #import pdb;pdb.set_trace()
+                cur.execute("""insert into frontend_postfield (field_id, post_id, `text`)
+                            values (%s, %s, %s)""", (FIELD_IDS[key], post_id, post_fields[key].encode('utf-8')))
+                print(cur._last_executed)

 def fill_time(feed_id, items):
     if not items:
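Note: the MySQL side of the fix leans on two context managers. "with conn as cur:" uses MySQLdb's connection as a context manager, which in the driver version used here yields a cursor and commits on success (rolls back on an exception), while contextlib.closing() guarantees the connection itself gets closed. A hedged sketch of the combined pattern; insert_post and db_kwargs are hypothetical names for illustration:

from contextlib import closing

import MySQLdb

def insert_post(db_kwargs, md5sum, created, feed_id):
    # closing() ensures conn.close() runs even if the insert raises;
    # "with conn as cur" hands back a cursor and commits on success.
    with closing(MySQLdb.connect(**db_kwargs)) as conn:
        with conn as cur:
            cur.execute(
                "insert into frontend_post (md5sum, created, feed_id) "
                "values (%s, %s, %s)",
                (md5sum, created, feed_id))
            return conn.insert_id()  # id of the row just inserted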
@@ -44,31 +46,31 @@ def fill_time(feed_id, items):
     #fetch dates from db
     fetched_dates = {}
-    db = get_conn()
-    with db:
-        quoted_hashes = ','.join(["'%s'" % (i['md5']) for i in items])
-        cur = db.cursor()
-        cur.execute("""select p.md5sum, p.created, p.id
-                    from frontend_post p
-                    where p.md5sum in (%s)
-                    and p.feed_id=%s""" % (quoted_hashes, feed_id,))
-        rows = cur.fetchall()
-        print(cur._last_executed)
-        for row in rows:
-            md5hash = row[0]
-            created = row[1]
-            post_id = row[2]
-            fetched_dates[md5hash] = created
+    with closing(get_conn()) as conn:
+        with conn as cur:
+            quoted_hashes = ','.join(["'%s'" % (i['md5']) for i in items])
+            cur.execute("""select p.md5sum, p.created, p.id
+                        from frontend_post p
+                        where p.md5sum in (%s)
+                        and p.feed_id=%s""" % (quoted_hashes, feed_id,))
+            rows = cur.fetchall()
+            print(cur._last_executed)
+            for row in rows:
+                md5hash = row[0]
+                created = row[1]
+                post_id = row[2]
+                fetched_dates[md5hash] = created

-    cur_time = datetime.datetime.utcnow()
-    new_posts = []
-    for item in items:
-        if item['md5'] in fetched_dates:
-            item['time'] = fetched_dates[item['md5']]
-        else:
-            item['time'] = cur_time
-            save_post(db, cur_time, feed_id, item)
-        cur_time -= datetime.timedelta(minutes=POST_TIME_DISTANCE)
+        cur_time = datetime.datetime.utcnow()
+        new_posts = []
+        for item in items:
+            if item['md5'] in fetched_dates:
+                item['time'] = fetched_dates[item['md5']]
+            else:
+                item['time'] = cur_time
+                save_post(conn, cur_time, feed_id, item)
+            cur_time -= datetime.timedelta(minutes=POST_TIME_DISTANCE)

 def decode(text, encoding): # it's strange but true
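Note: apart from the connection handling, fill_time's backdating logic is unchanged: items already in the database keep their stored created time, new items are stamped with "now", and every subsequent item is pushed POST_TIME_DISTANCE minutes further into the past so feed readers that collapse identical timestamps keep the posts apart. A minimal standalone illustration of that spacing; backdate is a hypothetical extraction with the database calls stripped out:

import datetime

POST_TIME_DISTANCE = 15  # minutes

def backdate(items, fetched_dates):
    """Assign unseen items descending timestamps, POST_TIME_DISTANCE apart."""
    cur_time = datetime.datetime.utcnow()
    for item in items:
        if item['md5'] in fetched_dates:
            item['time'] = fetched_dates[item['md5']]
        else:
            item['time'] = cur_time
        cur_time -= datetime.timedelta(minutes=POST_TIME_DISTANCE)

items = [{'md5': 'aaa'}, {'md5': 'bbb'}]
backdate(items, fetched_dates={})
# items[0]['time'] is roughly "now", items[1]['time'] is 15 minutes earlier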
@@ -152,24 +154,24 @@ def buildFeed(response, feed_config):
 def getFeedData(request, feed_id):
     # get url, xpathes
     feed = {}

-    db = get_conn()
-    with db:
-        cur = db.cursor()
-        cur.execute("""select f.uri, f.xpath, fi.name, ff.xpath, fi.required from frontend_feed f
-                    right join frontend_feedfield ff on ff.feed_id=f.id
-                    left join frontend_field fi on fi.id=ff.field_id
-                    where f.id=%s""", (feed_id,))
-        rows = cur.fetchall()
-
-        for row in rows:
-            if not feed:
-                feed['id'] = feed_id
-                feed['uri'] = row[0]
-                feed['xpath'] = row[1]
-                feed['fields'] = {}
-                feed['required'] = {}
-            feed['fields'][row[2]] = row[3]
-            feed['required'][row[2]] = row[4]
+    with closing(get_conn()) as conn:
+        with conn as cur:
+            cur.execute("""select f.uri, f.xpath, fi.name, ff.xpath, fi.required from frontend_feed f
+                        right join frontend_feedfield ff on ff.feed_id=f.id
+                        left join frontend_field fi on fi.id=ff.field_id
+                        where f.id=%s""", (feed_id,))
+            rows = cur.fetchall()
+
+            for row in rows:
+                if not feed:
+                    feed['id'] = feed_id
+                    feed['uri'] = row[0]
+                    feed['xpath'] = row[1]
+                    feed['fields'] = {}
+                    feed['required'] = {}
+                feed['fields'][row[2]] = row[3]
+                feed['required'][row[2]] = row[4]

     if feed:
         return [feed['uri'], feed]
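Note: get_conn() itself is not part of this diff; under the new closing()-based call sites it only needs to return a fresh MySQLdb connection on every call. A plausible sketch, assuming DATABASES follows the Django-style layout that the settings import suggests (the exact keys are an assumption):

import MySQLdb

from settings import DATABASES

def get_conn():
    # Hypothetical reconstruction: open a new connection per call so that
    # closing(get_conn()) at the call sites can dispose of it cleanly.
    db = DATABASES['default']
    return MySQLdb.connect(
        host=db['HOST'],
        user=db['USER'],
        passwd=db['PASSWORD'],
        db=db['NAME'],
        charset='utf8')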