zhaoyang 2 years ago
Parent
Current commit a6718bffc0
2 files changed, 0 insertions and 125 deletions
  1. + 0 - 0
      ydyspider/spiders/zhaoniu/__init__.py
  2. + 0 - 125
      ydyspider/spiders/zhaoniu/zhaoniu.py

+ 0 - 0
ydyspider/spiders/zhaoniu/__init__.py


+ 0 - 125
ydyspider/spiders/zhaoniu/zhaoniu.py

@@ -1,125 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from ydyspider.baseSpider import baseSpider
-from ydyspider.baseSpider import baseUpdateSpider
-from ydyspider.baseSpider import fixChapterSpider
-from ydyspider.baseSpider import sign
-from urllib.parse import urlencode
-import json
-import time
-
-allowed_domains = ['book.zhuishuyun.com']
-cp = 'cp'
-key = ''
-base_url = 'http://book.zhuishuyun.com/api/book/{}?'
-source = 'zhaoniu'
-
-
-class zhaoniu(object):
-    allowed_domains = allowed_domains
-    base_url = base_url
-    source = source
-
-    def get_start_url(self):
-        param = {'cp': cp, 'is_enable': 1, 'page': 1, 'timestamp': int(time.time())}
-        param['sign'] = sign(param, key)
-        return self.base_url.format('getBookList') + urlencode(param)
-
-    def bid_list_result(self, response):
-        result = json.loads(response.text)
-        if result is None:
-            return []
-        result_list = []
-        for item in result['data']:
-            result_list.append({'id': item['articleid']})
-        return result_list
-
-    def get_book_info_url(self, bid):
-        param = {'cp': cp, 'book_id': bid, 'timestamp': int(time.time())}
-        param['sign'] = sign(param, key)
-        return self.base_url.format('getBook') + urlencode(param)
-
-    def book_info_result(self, response):
-        result = json.loads(response.text)
-        if result is None:
-            return None
-        result = result['data']
-        return {
-            'bid': result['book_id'], 'name': result['book_name'], 'author': result['book_author'],
-            'intro': result['book_summary'], 'cover': result['cover_url'], 'keyword': result['book_roles'],
-            'status': result['book_end_status'], 'category_name': result['book_category'],
-            'size': result['book_word_count'], 'last_chapter': result['last_chapter'],
-            'chapter_count': result['book_chapter_total'],
-            'gender': result['book_category_pid']
-        }
-
-    def get_chapter_list_url(self, bid):
-        param = {'cp': cp, 'book_id': bid, 'timestamp': int(time.time()), 'page': 1}
-        param['sign'] = sign(param, key)
-        return self.base_url.format('getCatalog') + urlencode(param)
-
-    def chapter_list_result(self, response):
-        result = json.loads(response.text)
-        if result is None:
-            return []
-
-        result_list = []
-        for chapter_item in result['data']:
-            result_list.append({
-                'source_chapter_id': chapter_item['chapterid'], 'name': chapter_item['chaptername'],
-                'sequence': chapter_item['chapterorder'], 'is_vip': chapter_item['isvip'],
-                'size': chapter_item['words'], 'recent_update_at': chapter_item['lastupdate']
-            })
-        return result_list
-
-    def get_chapter_content_url(self, bid, cid):
-        param = {'cp': cp, 'chapter_id': cid, 'timestamp': int(time.time())}
-        param['sign'] = sign(param, key)
-        return self.base_url.format('getChapter') + urlencode(param)
-
-    def chapter_content_result(self, response):
-        result = json.loads(response.text)
-        if result is None:
-            return {'content': ''}
-        return {
-            'content': result['data']['content']
-        }
-
-
-class zhaoniuSpider(zhaoniu, baseSpider):
-    name = 'zhaoniu'
-    allowed_domains = allowed_domains
-    base_url = base_url
-    source = source
-
-    custom_settings = {
-        'DOWNLOAD_DELAY': 0.05,
-        'SOURCE': source,
-        'LOG_FILE': 'ydyspider/log/' + name + time.strftime("%Y-%m-%d", time.localtime()) + '.log'
-    }
-
-
-class zhaoniuupdateSpider(zhaoniu, baseUpdateSpider):
-    name = 'zhaoniuupdate'
-    allowed_domains = allowed_domains
-    base_url = base_url
-    source = source
-
-    custom_settings = {
-        'DOWNLOAD_DELAY': 0.05,
-        'SOURCE': source,
-        'LOG_FILE': 'ydyspider/log/' + name + time.strftime("%Y-%m-%d", time.localtime()) + '.log'
-    }
-
-
-class zhaoniufixSpider(zhaoniu, fixChapterSpider):
-    name = 'zhaoniufix'
-    allowed_domains = allowed_domains
-    base_url = base_url
-    source = source
-
-    custom_settings = {
-        'DOWNLOAD_DELAY': 0.05,
-        'SOURCE': source,
-        'LOG_FILE': 'ydyspider/log/' + name + time.strftime("%Y-%m-%d", time.localtime()) + '.log'
-    }
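
The deleted zhaoniu.py wired one API-mapping class (zhaoniu) into three Scrapy spiders named zhaoniu, zhaoniuupdate and zhaoniufix. Below is a minimal sketch of how spiders like these are typically launched, assuming ydyspider is a standard Scrapy project and the base classes ultimately derive from scrapy.Spider; only the spider names come from the deleted file, the rest is illustrative:

    # Sketch: run the spiders by name via Scrapy's CrawlerProcess;
    # equivalent to `scrapy crawl zhaoniu` from the project root.
    # Assumes the ydyspider project settings expose these spiders
    # through the standard SpiderLoader.
    from scrapy.crawler import CrawlerProcess
    from scrapy.utils.project import get_project_settings

    process = CrawlerProcess(get_project_settings())
    process.crawl('zhaoniu')          # full crawl: book list -> book info -> chapter list -> content
    # process.crawl('zhaoniuupdate')  # update run (per baseUpdateSpider)
    # process.crawl('zhaoniufix')     # chapter-repair run (per fixChapterSpider)
    process.start()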