Skip to content

Commit

Permalink
Release: 2.0.0
Browse files Browse the repository at this point in the history
  • Loading branch information
lyy289065406 committed Oct 22, 2022
1 parent 32a3ee5 commit 5e41015
Show file tree
Hide file tree
Showing 8 changed files with 42 additions and 162 deletions.
17 changes: 0 additions & 17 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -18,20 +18,3 @@
5. 修改 [`autorun.yml`](./.github/workflows/autorun.yml),可通过 Github Actions 自动运行
6. 开启 Github Pages,指定目录为 master/docs


## 赞助途径

| 支付宝 | 微信 |
|:---:|:---:|
| ![](docs/imgs/alipay.png) | ![](docs/imgs/wechat.png) |


## 版权声明

 [![Copyright (C) EXP,2016](https://img.shields.io/badge/Copyright%20(C)-EXP%202016-blue.svg)](http://exp-blog.com) [![License: GPL v3](https://img.shields.io/badge/License-GPL%20v3-blue.svg)](https://www.gnu.org/licenses/gpl-3.0)

- Site: [http://exp-blog.com](http://exp-blog.com)
- Mail: <a href="mailto:[email protected]?subject=[EXP's Github]%20Your%20Question%20(请写下您的疑问)&amp;body=What%20can%20I%20help%20you?%20(需要我提供什么帮助吗?)">[email protected]</a>


------
2 changes: 1 addition & 1 deletion gen_pdm.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
from pypdm.dbc._sqlite import SqliteDBC
from pypdm.builder import build
from src import config
from src.utils import log
from color_log.clog import log



Expand Down
83 changes: 34 additions & 49 deletions main.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,35 +3,48 @@
# @Author : EXP
# -----------------------------------------------

import argparse
import sys
from pypdm.dbc._sqlite import SqliteDBC
from src.core.demo_crawler import DemoCrawler
from src import config
from src.core import pager
from src.utils import log


def help_info() :
    # Legacy CLI usage text; superseded by the argparse-based args() in this commit.
    # NOTE(review): captured from a diff page — the original literal's leading
    # whitespace/tabs may have been lost by the scrape; confirm against the repo.
    return '''
-h 查看帮助信息
-p <pages> 爬取页数,默认 10
-z <zone> 指定爬取地区
'''


def main(is_help, pages, zone) :
    # Legacy entry point, replaced by main(args) in this commit.
    # When -h was given, print the usage text and stop.
    # NOTE(review): only this leading fragment is visible here — the rest of the
    # old body is interleaved into the new main()'s diff below.
    if is_help :
        log.info(help_info())
        return

from color_log.clog import log


def args() :
    """Parse the crawler's command-line options.

    Returns:
        argparse.Namespace: parsed options with attributes
            ``pages`` (int, default 10) and ``zone`` (str, default 'china').
    """
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        prog='Python 爬虫开发模板',
        description='使用此模板可以快速搭建一个爬虫框架',
        # FIX: the previous epilog was copy-pasted from an unrelated
        # screen-capture project (单机/联机/截屏 modes) and described options
        # this parser does not define; the examples below match -p/-z.
        epilog='\r\n'.join([
            '示例: ',
            '  python main.py',
            '  python main.py -p 20',
            '  python main.py -p 20 -z china',
        ])
    )
    parser.add_argument('-p', '--pages', dest='pages', type=int, default=10, help='爬取页数')
    parser.add_argument('-z', '--zone', dest='zone', type=str, default='china', help='爬取地区')
    return parser.parse_args()



def main(args) :
    # New entry point: run every registered crawler with the parsed CLI options.
    log.info('+++++++++++++++++++++++++++++++++++++++')
    options = {
        # Crawler parameters — replace as needed for your own crawlers.
        # ... ...
        'pages': args.pages,
        'zone': args.zone
    }
    crawlers = [
        DemoCrawler(options=options),
        # ... ... register other crawler implementations here
    ]

    # Accumulates data returned by each crawler.
    # NOTE(review): the remainder of this function is hidden behind a collapsed
    # diff fold ("Expand All @@ -47,44 +47,16 @@") and is not visible here.
    all_cache_datas = []
Expand All @@ -47,44 +60,16 @@ def main(is_help, pages, zone) :


def init() :
    """Initialize the SQLite database by executing the project's SQL script.

    Uses the connection settings from ``config.settings.database`` and the
    script path from ``config.settings.base['sqlpath']``.

    NOTE(review): the stray ``log.init()`` call was a leftover from the old
    ``src.utils.log`` module (deleted in this commit); ``color_log``'s logger
    needs no explicit initialization, so it is dropped here.
    """
    sdbc = SqliteDBC(options=config.settings.database)
    sdbc.conn()
    try :
        sdbc.exec_script(config.settings.base['sqlpath'])
    finally :
        # FIX: always release the connection, even if the SQL script fails
        # (previously a raise in exec_script leaked the open connection).
        sdbc.close()



def sys_args(sys_args) :
    """Parse raw command-line arguments (legacy hand-rolled parser).

    Args:
        sys_args: the raw argument vector, e.g. ``sys.argv``
            (element 0 is the program name and is skipped).

    Returns:
        list: ``[is_help, pages, zone]`` — help flag (bool),
            page count (int, default 10), zone code (str, default 'CN').
    """
    is_help = False
    pages = 10
    # NOTE(review): default here is 'CN' while the new argparse version
    # defaults to 'china' — the two defaults are inconsistent; confirm intent.
    zone = 'CN'

    idx = 1
    size = len(sys_args)
    while idx < size :
        try :
            if sys_args[idx] in ('-h', '--help') :
                is_help = True
                break

            elif sys_args[idx] in ('-p', '--pages') :
                idx += 1
                pages = int(sys_args[idx])

            elif sys_args[idx] in ('-z', '--zone') :
                idx += 1
                zone = sys_args[idx]
        except (ValueError, IndexError) :
            # FIX: was a bare ``except: pass``, which also swallowed
            # KeyboardInterrupt/SystemExit. Only a malformed page number
            # (ValueError) or a missing option value (IndexError) are
            # meant to be ignored here; defaults are kept in that case.
            pass
        idx += 1
    return [ is_help, pages, zone ]



if __name__ == "__main__" :
    # FIX: the rendered diff interleaved the old and new entry points
    # (``init()``/``main()`` each appeared twice, mixing
    # ``main(*sys_args(sys.argv))`` with ``main(args())``); this is the
    # coherent post-commit version.
    try :
        init()
        main(args())
    except Exception :
        # FIX: was a bare ``except:``, which also swallowed the SystemExit
        # argparse raises for ``-h``/invalid options and KeyboardInterrupt;
        # catch only real runtime errors.
        log.error('未知异常')
6 changes: 4 additions & 2 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
argparse>=1.4.0  # NOTE: argparse ships in the standard library since Python 3.2; this PyPI backport pin is only needed on Python 2.6/3.1
requests==2.22.0
bs4==0.0.1  # NOTE: 'bs4' is a dummy shim that merely installs beautifulsoup4; consider depending on beautifulsoup4 directly
pypdm-db==1.1.2
pyyaml-erb==1.0.4
py-color-log>=1.0.4
pypdm-db>=1.1.2
pyyaml-erb>=1.0.5
2 changes: 1 addition & 1 deletion src/core/_base_crawler.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@

from abc import ABCMeta, abstractmethod # python不存在抽象类的概念, 需要引入abc模块实现
from src import config
from src.utils import log
from color_log.clog import log
from pypdm.dbc._sqlite import SqliteDBC
from src.dao.t_crawler import TCrawlerDao

Expand Down
2 changes: 1 addition & 1 deletion src/core/demo_crawler.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
from bs4 import BeautifulSoup
from src.core._base_crawler import BaseCrawler
from src.bean.cache_info import CacheInfo
from src.utils import log
from color_log.clog import log


class DemoCrawler(BaseCrawler):
Expand Down
2 changes: 1 addition & 1 deletion src/core/pager.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
from src.bean.t_crawler import TCrawler
from src.dao.t_crawler import TCrawlerDao
from src import config
from src.utils import log
from color_log.clog import log

HTML_HOME_PATH = '%s/docs/home.html' % config.PRJ_DIR
TPL_HOME_PATH = '%s/tpl/home.tpl' % config.PRJ_DIR
Expand Down
90 changes: 0 additions & 90 deletions src/utils/log.py

This file was deleted.

0 comments on commit 5e41015

Please sign in to comment.