import requests

def crawl_weather():
    # 1. Target URL: Moji Weather city-list API
    url = 'https://h5ctywhr.api.moji.com/weatherthird/cityList'
    # 2. Spoof the User-Agent so the request looks like a normal browser
    headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36'
    }
    # 3. Query parameters for the city-list request
    params = {
        'platform': 'sogou',
        'cityid': '1045',
        'ad': '2'
    }
    # 4. Send the request like a browser and parse the JSON response
    response = requests.get(url=url, headers=headers, params=params)
    content = response.json()
    # 5. Parse the data: city IDs are grouped by the initial letter of the city name
    letters = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'J', 'K', 'L', 'M', 'N',
               'P', 'Q', 'R', 'S', 'T', 'W', 'X', 'Y', 'Z']
    detail_url = 'https://h5ctywhr.api.moji.com/weatherDetail'
    # Request the weather detail for every city under each initial letter
    for letter in letters:
        for city in content[letter]:
            city_id = city['cityId']
            data = {
                'cityId': city_id,
                'cityType': '0'
            }
            detail_response = requests.post(url=detail_url, data=data, headers=headers)
            detail_content = detail_response.text
            print(detail_content)

crawl_weather()
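
The loop above sends one POST per city with no error handling, so a single timeout or non-200 response aborts the whole crawl. Below is a minimal sketch of a hardened request helper, assuming the same detail_url and payload as above; the helper name fetch_city_detail and the retry/delay parameters are my own additions, not part of the original script.

import time
import requests

DETAIL_URL = 'https://h5ctywhr.api.moji.com/weatherDetail'

def fetch_city_detail(city_id, headers, retries=3, delay=1.0):
    """Fetch the weather detail for one city, retrying on transient errors (hypothetical helper)."""
    payload = {'cityId': city_id, 'cityType': '0'}
    for attempt in range(retries):
        try:
            resp = requests.post(DETAIL_URL, data=payload, headers=headers, timeout=10)
            resp.raise_for_status()  # raise on HTTP 4xx/5xx
            return resp.text
        except requests.RequestException:
            # Back off briefly before retrying; give up after the last attempt
            time.sleep(delay)
    return None

Calling fetch_city_detail(city_id, headers) inside the inner loop, plus a short time.sleep between cities, keeps the crawler from hammering the API and from dying on the first failed request.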