
zy content

zhaoyang, 2 years ago
commit 77a6d81832

3 files changed, 83 insertions(+), 0 deletions(-)
  1. ydyspider/.gitignore (+1 -0)
  2. ydyspider/mysql.py (+9 -0)
  3. ydyspider/spiders/zycontentSpider.py (+73 -0)

+ 1 - 0
ydyspider/.gitignore

@@ -2,3 +2,4 @@
 *.pyc
 */__pycache__
 */*.pyc
+.vscode/

+ 9 - 0
ydyspider/mysql.py

@@ -35,6 +35,15 @@ class msyqlHelper(object):
 			id = int(cursor.lastrowid)
 		self.conn.commit()
 		return id
+	
+	def insertZyBook(self, data):
+		now = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
+		sql = "insert into books (zhiyu_book_id,name,author,intro,cover,category_name,category_id,status,sequence,chapter_count,first_cid,last_cid,size,last_chapter,`created_at`,`updated_at`,source_name) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
+		with self.conn.cursor() as cursor:
+			res = cursor.execute(sql,(data['zhiyu_book_id'],data['name'],data['author'],data['intro'],data['cover'],data['category_name'],data['category_id'],data['status'],data['sequence'],'0','0','0','0','0',now,now,data['source_name']))
+			id = int(cursor.lastrowid)
+		self.conn.commit()
+		return id
 
 	def inseraAll(self,data):
 		now = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
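
For orientation, a minimal sketch of how the new insertZyBook method would be called. The dict keys mirror exactly what the method reads; the sample values and the bare msyqlHelper() construction are illustrative assumptions, not part of this commit:

from ydyspider.mysql import msyqlHelper

helper = msyqlHelper()
book = {
    'zhiyu_book_id': 1001,            # upstream book id (illustrative)
    'name': 'Example Book',
    'author': 'Jane Doe',
    'intro': 'A short synopsis.',
    'cover': 'http://example.com/cover.jpg',
    'category_name': 'fantasy',
    'category_id': 0,
    'status': 1,
    'sequence': 0,
    'source_name': 'zy_content',
}
# insertZyBook stamps created_at/updated_at itself and writes '0'
# placeholders for chapter_count, first_cid, last_cid, size, last_chapter.
book_id = helper.insertZyBook(book)   # returns cursor.lastrowid as int
helper.close()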

+ 73 - 0
ydyspider/spiders/zycontentSpider.py

@@ -0,0 +1,73 @@
+# -*- coding: utf-8 -*-
+import scrapy
+from ydyspider.mysql import msyqlHelper
+import json
+
+
+class zycontentSpider(scrapy.Spider):
+    name = 'zycontent'
+    allowed_domains = ['cp.yqsd.cn']
+    query = '?channel_name=zhuishuyun&channel_key=123456'
+    base_url = 'http://cp.yqsd.cn/api/output'  # no trailing slash; the request paths below start with '/'
+
+    def start_requests(self):
+        self.crawler.stats.set_value('bid_list', [])
+        param = self.bid
+        bid_list = param.split(',')
+        for bid in bid_list:
+            url = self.base_url + '/bookdetail/{}'.format(bid) + self.query
+            yield scrapy.Request(url, callback=self.parse2, meta={"zhiyu_book_id": bid, "i": 0})
+
+    def parse2(self, response):
+        mysql = msyqlHelper()
+        res = response.text
+        res = self.json_encode(res)
+        data = dict()
+        data['zhiyu_book_id'] = res['data']['bid']
+        data['source_name'] = 'zy_content'
+        data['name'] = res['data']['name']
+        data['author'] = res['data']['author']
+        data['intro'] = res['data']['intro']
+        data['cover'] = res['data']['cover']
+        data['category_name'] = res['data']['category_name']
+        data['category_id'] = 0
+        data['status'] = res['data']['status']
+        data['sequence'] = response.meta['i']
+        bid = mysql.insertZyBook(data)
+        self.crawler.stats.get_value('bid_list').append(bid)
+        mysql.close()
+        url = self.base_url + '/chapterlist/{}'.format(res['data']['bid']) + self.query
+        yield scrapy.Request(url, meta={"bid": bid, "book_id": res['data']['bid']}, callback=self.parse3)
+
+    def parse3(self, response):
+        res = response.text
+        res = self.json_encode(res)
+        if res['code'] == 0:
+            for chapter in res['data']:
+                chapter['bid'] = response.meta['bid']
+                chapter['source_chapter_id'] = chapter['chapter_id']
+                url = self.base_url + '/chaptercontent/{}/chapterid/{}'.format(response.meta['book_id'], chapter['chapter_id']) + self.query
+                yield scrapy.Request(url, meta=chapter, callback=self.parse4)
+
+    def parse4(self, response):
+        res = response.text
+        res = self.json_encode(res)
+        if res['code'] == 0:
+            mysql = msyqlHelper()
+            meta = response.meta
+            data = dict()
+            data['bid'] = meta['bid']
+            data['name'] = res['data']['chapter_name']
+            data['sequence'] = meta['sequence']
+            data['size'] = meta['size']
+            data['is_vip'] = meta['is_vip']
+            data['prev_cid'] = 0
+            data['next_cid'] = 0
+            data['recent_update_at'] = meta['updated_at']
+            data['content'] = res['data']['content']
+            data['ly_chapter_id'] = meta['source_chapter_id']
+            mysql.inseraAll(data)
+            mysql.close()
+
+    def json_encode(self, jsonstr):
+        return json.loads(jsonstr)  # despite the name, parses a JSON string into a dict
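
A usage note: the spider takes its book ids from self.bid, which Scrapy fills from a spider argument, so a crawl is launched with something like scrapy crawl zycontent -a bid=1001,1002. The same run can be driven from Python; this sketch assumes the standard Scrapy project layout, and the bid values are illustrative:

# Hypothetical driver, equivalent to: scrapy crawl zycontent -a bid=1001,1002
from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings

from ydyspider.spiders.zycontentSpider import zycontentSpider

process = CrawlerProcess(get_project_settings())
# Keyword arguments to crawl() become spider attributes, so bid arrives as
# the comma-separated string that start_requests() splits.
process.crawl(zycontentSpider, bid='1001,1002')
process.start()  # blocks until the crawl finishes

After the run, the bid_list stat initialized in start_requests holds the database id of every book row the crawl inserted, available through the crawler's stats collector.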