import requests
import xlwt
from bs4 import BeautifulSoup
import DBUtils
def getCid():
    """Scrape the Baidu "top boards" page for category link codes.

    Fetches http://top.baidu.com/boards?fr=topindex, locates the
    ``<div class="links">`` element, and collects the ``href`` of every
    anchor inside it except the last one (the original code deliberately
    skipped the final anchor; that behavior is kept). Leading/trailing
    '.' characters are stripped from each href, turning relative links
    like ``./c?c=...`` into ``/c?c=...``.

    Returns:
        tuple[list[str], list[str]]: ``(code, type_L)`` where ``code`` is
        the list of cleaned href fragments and ``type_L`` is a fixed list
        of Chinese genre labels (love, comedy, thriller, sci-fi, drama).

    Raises:
        requests.HTTPError: if the server returns an error status.
        ValueError: if the expected links div is missing from the page.
    """
    hd = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.89 Safari/537.36 SLBrowser/7.0.0.4071 SLBChan/21"}
    url = "http://top.baidu.com/boards?fr=topindex"
    # timeout prevents an indefinite hang; raise_for_status surfaces
    # HTTP errors instead of silently parsing an error page.
    resp = requests.get(url, headers=hd, timeout=10)
    resp.raise_for_status()
    # Page is served in a GBK-family encoding — decode explicitly.
    page = resp.content.decode("GBK")
    soup = BeautifulSoup(page, 'html.parser')
    dIv = soup.find('div', attrs={'class': "links"})
    if dIv is None:
        # Fail loudly with context rather than an opaque AttributeError.
        raise ValueError('expected <div class="links"> not found on page')
    anchors = dIv.find_all('a')
    # All anchors except the last (original behavior: range(0, len(a)-1)).
    code = [a['href'].strip('.') for a in anchors[:-1]]
    type_L = ["爱情", "喜剧", "惊悚", "科幻", "剧情"]
    return code, type_L
def getItem(page,type_L):
hd = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.89 Safari/537.36 SLBrowser/7.0.0.4071 SLBChan/21"}
url = "http://top.baidu.com%s"
url = url%page
page = requests.get(url,headers=hd).content.decode("GBK")
soup = BeautifulSoup(page, 'html.parser')
type_M = type_L