上传文件至 'dagongPro'

# 4.11新增:新闻板块代码index
在首页获取到新闻板块的代码index并传递到管道中,在管道中基于index判断该新闻属于哪个板块,并存入mysql的对应新闻表中。
This commit is contained in:
link_1999 2022-04-11 10:30:02 +08:00
parent 7c9cfd3292
commit ed11bcace2
2 changed files with 27 additions and 2 deletions

View File

@ -13,3 +13,5 @@ class DagongwangproItem(scrapy.Item):
src = scrapy.Field()
author = scrapy.Field()
news_id = scrapy.Field()
# Added 2022-04-11: front-page section code; the MySQL pipeline uses it
# to decide which per-section news table the item is stored in.
index = scrapy.Field()

View File

@ -37,8 +37,31 @@ class mysqlPileLine(object):
def process_item(self, item, spider):
    """Persist a scraped news item into MySQL.

    Every item is written to the master table ``tw_news``; additionally,
    the front-page section code ``item['index']`` (added 2022-04-11)
    routes the item into a dedicated per-section table.

    :param item: item with keys news_id/title/date/author/content/src/index.
    :param spider: the spider that produced the item (unused here).
    :return: the item, so subsequent pipeline components keep receiving it
             (the original returned None, starving later pipelines).
    """
    # Section code -> per-section table.  NOTE(review): the original code
    # mapped index 4 to tw_news, which is also the master table written
    # unconditionally below, so the same row was inserted into tw_news
    # twice; index 4 is therefore covered by the master insert alone.
    section_tables = {0: 'nd_news', 2: 'xg_news', 6: 'gj_news', 8: 'js_news'}
    self.cursor = self.conn.cursor()
    values = (item['news_id'], item['title'], item['date'],
              item['author'], item['content'], item['src'])
    try:
        # Parameterized queries instead of %-string interpolation: the
        # original interpolated scraped text straight into the SQL, which
        # is an SQL-injection hole and breaks on quotes in the content.
        self.cursor.execute(
            'insert into tw_news values(%s, %s, %s, %s, %s, %s)', values)
        table = section_tables.get(item['index'])
        if table is not None:
            # The table name comes from the fixed dict above, never from
            # scraped data, so composing it into the SQL text is safe.
            self.cursor.execute(
                'insert into ' + table + ' values(%s, %s, %s, %s, %s, %s)',
                values)
        # Commit once, covering both inserts.  The original committed only
        # inside a matching section branch, so an unrecognized index left
        # the master-table insert uncommitted.
        self.conn.commit()
    except Exception as e:
        # Best-effort persistence: log and continue, matching the
        # original pipeline's behavior of never dropping the crawl.
        print(e)
    return item