import json
import urllib.parse
import requests

testId = 'J1DaRdD'

def getFansNum(strSecUid):
    """
    :param strSecUid: sec_uid of the Douyin user
    :return: follower count
    """
    strUrl = 'https://www.iesdouyin.com/web/api/v2/user/info/?sec_uid='+strSecUid
    res = requests.get(strUrl)
    strText = res.text
    json_dict = json.loads(strText)
    strJson = json_dict['user_info']
    print("Followers :"+ str(strJson["follower_count"]))
    fans_num = strJson["follower_count"]
    return fans_num

def ScrapyDouYin(userId):
    """
    :param userId: short code from the share link, e.g. https://v.douyin.com/<userId>/
    :return: follower count
    """
    strUrl = 'https://v.douyin.com/'+userId + '/'
    res = requests.get(strUrl)  # the short link redirects to a URL whose query string carries sec_uid
    strConvertUrl = str(res.url)
    query = dict(urllib.parse.parse_qsl(urllib.parse.urlsplit(strConvertUrl).query))
    strSecUid = query['sec_uid']
    print('sec_uid:'+strSecUid)
    nFans = getFansNum(strSecUid)
    return nFans


if __name__ == '__main__':
    print('Crawl followers')
    ScrapyDouYin(testId)

Usage: given a user ID (the short code from a share link), the script obtains that user's follower count, as sketched below.
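For example, a share link such as https://v.douyin.com/J1DaRdD/ yields the ID J1DaRdD. A minimal usage sketch, assuming the script above is saved as douyin_fans.py (a hypothetical file name) and that ScrapyDouYin returns the count as shown:

# Usage sketch: the module name douyin_fans is an assumption, not part of the original script.
from douyin_fans import ScrapyDouYin

fans = ScrapyDouYin('J1DaRdD')   # short code taken from the share link
print('Follower count:', fans)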

Dependency: pip install requests

In essence, the script gets its data from the endpoint https://www.iesdouyin.com/web/api/v2/user/info/?sec_uid=<sec_uid>.
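A minimal sketch of calling that endpoint directly, assuming the response layout (user_info.follower_count) matches what the script above relies on; the placeholder sec_uid must be replaced with a real value:

import requests

# Placeholder: obtain a real sec_uid by resolving a share link as ScrapyDouYin does above.
sec_uid = '<sec_uid>'
url = 'https://www.iesdouyin.com/web/api/v2/user/info/?sec_uid=' + sec_uid
data = requests.get(url).json()
# The field names below mirror the script's assumptions; the API response may change.
print(data['user_info']['follower_count'])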

If this was helpful, consider buying the author a cup of tea.