This repository has been archived by the owner on Jul 22, 2020. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 3
/
scheduler.py
51 lines (39 loc) · 1.61 KB
/
scheduler.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
# -*- coding: utf-8 -*-
# @Author: lzc
# @Time : 2019/9/9
import os
import subprocess
from datetime import datetime, timedelta
from runpy import run_path
from apscheduler.schedulers.blocking import BlockingScheduler
from rm_log import rm_log
# Paths must be absolute; os.path.abspath('.') resolves against the current
# working directory at import time, so this script must be launched from the
# project root. (Original note: using dirname errors out in the terminal.)
project_path = os.path.abspath('.')
# Django site directory, served by enable_server().
server_path = os.path.join(project_path, 'ip_proxy_site')
# Spider project directory containing start.py / start_verify.py
# (presumably a Scrapy layout: a nested ip_proxies/ module holds settings.py).
spider_path = os.path.join(project_path, 'spider', 'ip_proxies')
# Log directory inside the nested module (spider/ip_proxies/ip_proxies/log),
# pruned periodically by remove_log().
log_path = os.path.join(spider_path, 'ip_proxies', 'log')
def enable_crawl_spider():
    """Run the proxy-crawling spider (start.py) in the spider directory.

    Uses subprocess.run(cwd=...) instead of the original os.chdir +
    subprocess.run: os.chdir mutates process-global state and races with
    other scheduled jobs that may run concurrently in this process.
    Blocks until the spider process exits.
    """
    subprocess.run(['python', 'start.py'], cwd=spider_path)
def enable_verify_spider():
    """Run the proxy-verification spider (start_verify.py) in the spider directory.

    Uses subprocess.run(cwd=...) instead of the original os.chdir +
    subprocess.run: os.chdir mutates process-global state and races with
    other scheduled jobs that may run concurrently in this process.
    Blocks until the spider process exits.
    """
    subprocess.run(['python', 'start_verify.py'], cwd=spider_path)
def enable_server():
    """Start the Django development server on 0.0.0.0:8000.

    Uses subprocess.run(cwd=...) instead of the original os.chdir +
    subprocess.run: os.chdir mutates process-global state and races with
    other scheduled jobs that may run concurrently in this process.
    Blocks for as long as the server process runs.
    """
    subprocess.run(['python', 'manage.py', 'runserver', '0.0.0.0:8000'],
                   cwd=server_path)
def remove_log():
    """Prune old files under log_path.

    Executes the spider's settings.py via runpy to read its TIME_FORMAT
    value, then delegates the actual deletion to rm_log().
    """
    settings_file = os.path.join(spider_path, 'ip_proxies', 'settings.py')
    loaded = run_path(settings_file)
    rm_log(log_path, loaded.get('TIME_FORMAT'))
# Wire up all jobs and hand control to the scheduler; start() blocks the
# main thread until interrupted. (Removed a commented-out debug call to
# enable_crawl_spider() that was left above this block.)
sched = BlockingScheduler()
# No trigger given: APScheduler runs the job once, immediately — this keeps
# the Django dev server up alongside the periodic jobs.
sched.add_job(enable_server, name='Django server')
# Crawl for new proxies every 4 hours; first run kicks off 10 seconds after
# startup instead of waiting a full interval.
sched.add_job(enable_crawl_spider, 'interval', hours=4,
              next_run_time=datetime.now() + timedelta(seconds=10),
              name='定时代理爬取')
# Re-verify collected proxies every 2 hours.
sched.add_job(enable_verify_spider, 'interval', hours=2, name='定时代理有效性验证')
# Prune log files every 2 days.
sched.add_job(remove_log, 'interval', days=2, name='定时删除2天前的日志文件')
sched.start()