Skip to content

Commit

Permalink
Merge pull request #37 from ptcNOP/feature/ssdeep_celery_v2
Browse files Browse the repository at this point in the history
 Move ssdeep.compare to Celery task and add celery crontab
  • Loading branch information
awest1339 authored Nov 13, 2017
2 parents 07fb8f4 + 974ed68 commit c9e275e
Show file tree
Hide file tree
Showing 2 changed files with 35 additions and 4 deletions.
13 changes: 9 additions & 4 deletions utils/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,7 @@ def default(self, obj):
api_config = multiscanner.common.parse_config(api_config_object)

# Needs api_config in order to function properly
from celery_worker import multiscanner_celery
from celery_worker import multiscanner_celery, ssdeep_compare_celery
from ssdeep_analytics import SSDeepAnalytic

db = database.Database(config=api_config.get('Database'))
Expand Down Expand Up @@ -852,9 +852,14 @@ def run_ssdeep_compare():
Runs ssdeep compare analytic and returns success / error message.
'''
try:
ssdeep_analytic = SSDeepAnalytic()
ssdeep_analytic.ssdeep_compare()
return make_response(jsonify({ 'Message': 'Success' }))
if DISTRIBUTED:
# Publish task to Celery
ssdeep_compare_celery.delay()
return make_response(jsonify({ 'Message': 'Success' }))
else:
ssdeep_analytic = SSDeepAnalytic()
ssdeep_analytic.ssdeep_compare()
return make_response(jsonify({ 'Message': 'Success' }))
except Exception as e:
return make_response(
jsonify({'Message': 'Unable to complete request.'}),
Expand Down
26 changes: 26 additions & 0 deletions utils/celery_worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,12 +19,17 @@
# Add the libs dir to the sys.path. Allows import of common, celery_batches modules
if os.path.join(MS_WD, 'libs') not in sys.path:
sys.path.insert(0, os.path.join(MS_WD, 'libs'))
# Add the analytics dir to the sys.path. Allows import of ssdeep_analytics
if os.path.join(MS_WD, 'analytics') not in sys.path:
sys.path.insert(0, os.path.join(MS_WD, 'analytics'))
import multiscanner
import common
import sql_driver as database
from celery_batches import Batches
from ssdeep_analytics import SSDeepAnalytic

from celery import Celery
from celery.schedules import crontab

DEFAULTCONF = {
'protocol': 'pyamqp',
Expand All @@ -34,6 +39,7 @@
'vhost': '/',
'flush_every': '100',
'flush_interval': '10',
'tz': 'US/Eastern',
}

config_object = configparser.SafeConfigParser()
Expand Down Expand Up @@ -61,8 +67,16 @@
worker_config.get('host'),
worker_config.get('vhost'),
))
app.conf.timezone = worker_config.get('tz')
db = database.Database(config=db_config)

@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
    '''
    Register the recurring ssdeep comparison job on the Celery beat
    schedule once the app has been configured.

    Runs ssdeep_compare_celery daily at 02:00 (interpreted in the
    timezone set via app.conf.timezone above).
    '''
    # NOTE(review): on_after_configure is fine here because the task is
    # defined in this same module; tasks from other modules would need
    # the on_after_finalize signal instead — confirm if this moves.
    nightly = crontab(hour=2, minute=0)
    sender.add_periodic_task(nightly, ssdeep_compare_celery.s())

def celery_task(files, config=multiscanner.CONFIG):
'''
Expand Down Expand Up @@ -155,6 +169,18 @@ def multiscanner_celery(requests, *args, **kwargs):

celery_task(files)

@app.task()
def ssdeep_compare_celery():
    '''
    Celery task that runs the ssdeep.compare analytic over new samples.

    Usage:
        from celery_worker import ssdeep_compare_celery
        ssdeep_compare_celery.delay()
    '''
    # Delegate all comparison work to the analytics helper.
    SSDeepAnalytic().ssdeep_compare()


# Allow launching the Celery worker by running this module directly
# (equivalent to invoking the configured app from the command line).
if __name__ == '__main__':
    app.start()

0 comments on commit c9e275e

Please sign in to comment.