上传文件至 'dagongPro'

This commit is contained in:
link_1999 2022-04-07 18:49:57 +08:00
parent 576dc30851
commit ee7f33c4c9
3 changed files with 3 additions and 2 deletions

View File

@@ -11,3 +11,4 @@ class DagongwangproItem(scrapy.Item):
     content = scrapy.Field()
     date = scrapy.Field()
     src = scrapy.Field()
+    author = scrapy.Field()

View File

@@ -34,7 +34,7 @@ class mysqlPileLine(object):
         self.cursor = self.conn.cursor()
         try:
-            self.cursor.execute('insert into junshi_news values("%s", "%s", "%s", "%s")'%(item['title'], item['date'], item['content'], item['src']))
+            self.cursor.execute('insert into junshi_news values("%s", "%s", "%s", "%s", "%s")'%(item['title'], item['date'], item['author'], item['content'], item['src']))
             self.conn.commit()
         except Exception as e:
             print(e)

View File

@@ -64,7 +64,7 @@ DOWNLOADER_MIDDLEWARES = {
 # Configure item pipelines
 # See https://docs.scrapy.org/en/latest/topics/item-pipeline.html
 ITEM_PIPELINES = {
-    'dagongwangPro.pipelines.DagongwangproPipeline': 300,
+    # 'dagongwangPro.pipelines.DagongwangproPipeline': 300,
     'dagongwangPro.pipelines.mysqlPileLine': 301,
 }