Coverage for bookie.bcelery.tasks: 32%

# Imports reconstructed from the names used in this module; the
# bookie-internal paths are best guesses from the project layout. INI is
# assumed to be the application's parsed .ini settings, loaded at worker
# startup, and the @celery.task decorators are implied by the .delay() and
# .retry() calls below.
import transaction

from celery.utils.log import get_task_logger

from bookie.bcelery.celery import celery
from bookie.lib.importer import Importer
from bookie.lib.readable import ReadUrl
from bookie.models import Bmark
from bookie.models import Readable
from bookie.models.auth import UserMgr
from bookie.models.fulltext import get_writer
from bookie.models.queue import ImportQueueMgr
from bookie.models.stats import StatBookmarkMgr

from whoosh.index import LockError
from whoosh.writing import IndexingError

logger = get_task_logger(__name__)


class BookmarkNotFoundException(Exception):
    """Assumed helper: signals a retry when a bookmark id can't be loaded."""


@celery.task(ignore_result=True)
def hourly_stats():
    """Hourly we want to run a series of numbers to track.

    Currently we're monitoring:
    - Total number of bookmarks in the system
    - Unique number of urls in the system
    - Total number of tags in the system

    """
    count_total.delay()
    count_unique.delay()
    count_tags.delay()

@celery.task(ignore_result=True)
def count_total():
    """Count the total number of bookmarks in the system."""
    trans = transaction.begin()
    StatBookmarkMgr.count_total_bookmarks()
    trans.commit()


@celery.task(ignore_result=True)
def count_unique():
    """Count the unique number of bookmarks/urls in the system."""
    trans = transaction.begin()
    StatBookmarkMgr.count_unique_bookmarks()
    trans.commit()


@celery.task(ignore_result=True)
def count_tags():
    """Count the total number of tags in the system."""
    trans = transaction.begin()
    StatBookmarkMgr.count_total_tags()
    trans.commit()
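
# The three counters above share the same explicit begin/commit pattern from
# the zope `transaction` package. A minimal sketch of that pattern, with an
# abort-on-error added that the tasks above do not have:
import transaction

def run_in_transaction(work):
    """Run a callable inside its own transaction, aborting on failure."""
    trans = transaction.begin()
    try:
        work()
    except Exception:
        # Roll back anything the work changed before re-raising.
        trans.abort()
        raise
    else:
        trans.commit()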

@celery.task(ignore_result=True)
def importer_process(import_id):
    """Start the process of running the import.

    We load it, mark it as running, and begin a task to process.

    :param import_id: import id we need to pull and work on

    """
    trans = transaction.begin()
    imp = ImportQueueMgr.get(import_id)
    import_id = imp.id

    # Log that we've scheduled it.
    logger = get_task_logger('importer_process')
    logger.info("IMPORT: SCHEDULED for {0}.".format(imp.username))

    # We need to mark that it's running to prevent it getting picked up
    # again.
    imp.mark_running()
    trans.commit()
    importer_process_worker.delay(import_id)
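
# The split into importer_process and importer_process_worker keeps the
# mark_running() bookkeeping in its own short transaction before the slow
# parsing begins, so a second scheduler pass cannot pick the same job up.
# Callers elsewhere in the app would only ever enqueue the first task, e.g.:
#
#     importer_process.delay(queued_import.id)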

@celery.task(ignore_result=True)
def importer_process_worker(import_id):
    """Do the real import work.

    :param import_id: import id we need to pull and work on

    """
    logger = get_task_logger('importer_process_worker')
    trans = transaction.begin()
    import_job = ImportQueueMgr.get(import_id)
    logger.info("IMPORT: RUNNING for {username}".format(**dict(import_job)))

    try:
        # Process the file using the import script.
        import_file = open(import_job.file_path)
        importer = Importer(import_file, import_job.username)
        importer.process()

        # Processing kills off our transaction so we need to start a new one
        # to update that our import is complete.
        trans = transaction.begin()
        import_job = ImportQueueMgr.get(import_id)
        import_job.mark_done()
        user = UserMgr.get(username=import_job.username)
        from bookie.lib.message import UserImportSuccessMessage
        msg = UserImportSuccessMessage(
            user.email,
            'Bookie: Your requested import has completed.',
            INI)
        msg.send({
            'username': import_job.username,
        })

        logger.info(
            "IMPORT: COMPLETE for {username}".format(**dict(import_job)))
        trans.commit()
    except Exception, exc:
        # We need to log this and probably send an error email to the
        # admin.
        from bookie.lib.message import ImportFailureMessage
        from bookie.lib.message import UserImportFailureMessage

        trans = transaction.begin()
        import_job = ImportQueueMgr.get(import_id)
        user = UserMgr.get(username=import_job.username)

        msg = ImportFailureMessage(
            INI.get('email.from'),
            'Import failure!',
            INI)
        msg.send({
            'username': import_job.username,
            'file_path': import_job.file_path,
            'exc': str(exc)
        })

        # Also send an email to the user that their import failed.
        msg = UserImportFailureMessage(
            user.email,
            'Bookie: We are sorry, your import failed.',
            INI)
        msg.send({
            'username': import_job.username,
            'exc': str(exc)
        })

        logger.error(exc)
        import_job.mark_error()
        logger.info(
            "IMPORT: ERROR for {username}".format(**dict(import_job)))
        trans.commit()
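
# importer_process_worker opens import_job.file_path without an explicit
# close. A hypothetical standalone variant of that step (not code from this
# module) using a context manager so the handle is released even on error:
from bookie.lib.importer import Importer

def run_import(file_path, username):
    """Run one import with deterministic file cleanup."""
    with open(file_path) as import_file:
        importer = Importer(import_file, username)
        importer.process()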

@celery.task(ignore_result=True)
def email_signup_user(email, msg, settings, message_data):
    """Send the invitation email out to a new signup.

    :param email: address the invitation should go to

    """
    from bookie.lib.message import InvitationMsg
    msg = InvitationMsg(email, msg, settings)
    status = msg.send(message_data)
    # A status of 4 is treated as an smtp send failure.
    if status == 4:
        from bookie.lib.applog import SignupLog
        trans = transaction.begin()
        SignupLog(
            SignupLog.ERROR,
            'Could not send smtp email to signup: ' + email)
        trans.commit()

@celery.task(ignore_result=True)
def fulltext_index_bookmark(bid, content):
    """Insert bookmark data into the fulltext index."""
    b = Bmark.query.get(bid)

    if not b:
        logger.error(
            'Could not load bookmark to fulltext index: ' + str(bid))
        fulltext_index_bookmark.retry(exc=BookmarkNotFoundException())
    else:
        writer = get_writer()

        if content:
            found_content = content
        else:
            # Assumed fallback: index an empty readable body when no content
            # was handed to the task.
            found_content = u""

        try:
            writer.update_document(
                bid=unicode(b.bid),
                description=b.description if b.description else u"",
                extended=b.extended if b.extended else u"",
                tags=b.tag_str if b.tag_str else u"",
                readable=found_content,
            )
            writer.commit()
        except (IndexingError, LockError), exc:
            # There was an issue saving into the index.
            logger.error(exc)
            logger.warning('sending back to the queue')
            # This should send the work over to a celery task that will try
            # again in that space.
            writer.cancel()
            fulltext_index_bookmark.retry(exc=exc, countdown=60)
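
# The index task leans on a get_writer() helper that is not shown in this
# listing. A minimal sketch of what such a helper could look like with
# whoosh, which the IndexingError/LockError handling above suggests; the
# schema fields mirror the update_document() call, but the helper name and
# schema details are illustrative, not bookie's actual code.
from whoosh.fields import ID, KEYWORD, TEXT, Schema
from whoosh.index import create_in

def make_example_writer(index_dir):
    """Build a writer over a tiny bookmark schema (index_dir must exist)."""
    schema = Schema(
        bid=ID(unique=True, stored=True),  # unique key for update_document
        description=TEXT,
        extended=TEXT,
        tags=KEYWORD,
        readable=TEXT,
    )
    ix = create_in(index_dir, schema)
    return ix.writer()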
"""Rebuild the fulltext index with all bookmarks."""
else: fulltext_index_bookmark.delay(b.bid, None)
"""Check the db for any unfetched content. Fetch and index.""" logger = get_task_logger('fetch_unfetched_bmark_content') logger.info("Checking for unfetched bookmarks")
url_list = Bmark.query.outerjoin( Readable, Bmark.readable).\ filter(Readable.imported == None).all()
for bmark in url_list: fetch_bmark_content.delay(bmark.bid)
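
# Note the `Readable.imported == None` comparison in the query above: with
# SQLAlchemy this is deliberate, since `==` is overloaded to emit `IS NULL`
# in SQL; Python's `is None` would not build the filter expression.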

@celery.task(ignore_result=True)
def fetch_bmark_content(bid):
    """Given a bookmark, fetch its content and index it."""
    trans = transaction.begin()
    logger = get_task_logger('fetch_bmark_content')

    if not bid:
        raise Exception('missing bookmark id')
    bmark = Bmark.query.get(bid)
    if not bmark:
        raise Exception('Bookmark not found: ' + str(bid))
    hashed = bmark.hashed

    try:
        read = ReadUrl.parse(hashed.url)
    except ValueError, exc:
        # We hit this where urllib2 choked trying to get the protocol type of
        # this url to fetch it.
        logger.error('Could not parse url: ' + hashed.url)
        logger.error(str(exc))
        read = None

    if read:
        logger.debug(read)
        logger.debug(read.content)

        logger.debug("%s: %s %d %s %s" % (
            hashed.hash_id,
            read.url,
            len(read.content) if read.content else -1,
            read.is_error(),
            read.status_message))

        if not read.is_image():
            if not bmark.readable:
                bmark.readable = Readable()

            bmark.readable.content = read.content
        else:
            if not bmark.readable:
                bmark.readable = Readable()
            bmark.readable.content = None

        # Set some of the extra metadata.
        bmark.readable.content_type = read.content_type
        bmark.readable.status_code = read.status
        bmark.readable.status_message = read.status_message
        trans.commit()
        fulltext_index_bookmark.delay(
            bid,
            read.content if read else None)
    else:
        logger.error(
            'No readable record for bookmark: {0} {1}'.format(
                bid, bmark.hashed.url))

        # There was a failure reading the thing.
        bmark.readable = Readable()
        bmark.readable.status = '900'
        bmark.readable.status_message = (
            'No readable record '
            'during existing processing')
        trans.commit()
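
# Taken together, the content pipeline reads: fetch_unfetched_bmark_content
# finds bookmarks whose Readable record was never imported, fans out one
# fetch_bmark_content task per bookmark, and each successful fetch queues
# fulltext_index_bookmark. Kicking the whole chain off by hand (celery app
# and database setup assumed) would look like:
#
#     from bookie.bcelery import tasks
#     tasks.fetch_unfetched_bmark_content.delay()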