Bored and not sure what to write, so here is a small scraper that pulls each city's minimum temperature from the text-forecast pages on weather.com.cn.
import requests
from fake_useragent import UserAgent
from bs4 import BeautifulSoup


def city_parse(url):
    """Fetch one region page and print the minimum temperature for every city."""
    headers = {"User-Agent": UserAgent().random}
    response = requests.get(url=url, headers=headers).content.decode("utf-8")
    # The pages contain malformed HTML, so use the more tolerant html5lib parser.
    soup = BeautifulSoup(response, "html5lib")
    conMidtab = soup.find("div", attrs={"class": "conMidtab"})
    tables = conMidtab.find_all("table")
    for table in tables:
        # The first two rows of each province table are header rows, skip them.
        trs = table.find_all("tr")[2:]
        for index, tr in enumerate(trs):
            tds = tr.find_all("td")
            # In the first data row the province name occupies td[0],
            # so the city name sits in td[1]; in later rows it is td[0].
            if index == 0:
                city_td = tds[1]
            else:
                city_td = tds[0]
            city_temp = tds[-2]  # second-to-last column holds the minimum temperature
            city = list(city_td.stripped_strings)[0]
            temp = list(city_temp.stripped_strings)[0]
            print(city + " - min temperature: " + temp)


def choice_area():
    print("Choose a region by number\n"
          "1. North China\n2. Northeast\n3. East China\n4. Central China\n"
          "5. South China\n6. Northwest\n7. Southwest\n"
          "8. Hong Kong/Macao/Taiwan\n9. Quit\n")
    num = int(input("Enter a number: "))
    if num == 1:
        city_parse("http://www.weather.com.cn/textFC/hb.shtml")
    if num == 2:
        city_parse("http://www.weather.com.cn/textFC/db.shtml")
    if num == 3:
        city_parse("http://www.weather.com.cn/textFC/hd.shtml")
    if num == 4:
        city_parse("http://www.weather.com.cn/textFC/hz.shtml")
    if num == 5:
        city_parse("http://www.weather.com.cn/textFC/hn.shtml")
    if num == 6:
        city_parse("http://www.weather.com.cn/textFC/xb.shtml")
    if num == 7:
        city_parse("http://www.weather.com.cn/textFC/xn.shtml")
    if num == 8:
        city_parse("http://www.weather.com.cn/textFC/gat.shtml")
    if num == 9:
        exit()


if __name__ == "__main__":
    # Keep showing the menu until the user picks 9, which calls exit().
    while True:
        choice_area()
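The long chain of if num == ... branches only maps a menu number to a region code in the textFC URL, so one possible cleanup is to keep that mapping in a dict. The sketch below is an alternative version of choice_area(), not part of the original script; the REGIONS name and the English region labels are my own, and it assumes the city_parse() function defined above.

# Sketch: table-driven menu, assuming the same textFC region codes as in the script above.
REGIONS = {
    1: ("North China", "hb"),
    2: ("Northeast", "db"),
    3: ("East China", "hd"),
    4: ("Central China", "hz"),
    5: ("South China", "hn"),
    6: ("Northwest", "xb"),
    7: ("Southwest", "xn"),
    8: ("Hong Kong/Macao/Taiwan", "gat"),
}


def choice_area():
    # Print the menu from the mapping instead of a hard-coded string.
    for num, (name, _) in REGIONS.items():
        print(f"{num}. {name}")
    print("9. Quit")
    num = int(input("Enter a number: "))
    if num == 9:
        exit()
    if num in REGIONS:
        code = REGIONS[num][1]
        city_parse(f"http://www.weather.com.cn/textFC/{code}.shtml")

With the mapping in one place, adding or removing a region means editing a single dict entry rather than another if branch.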