import requests
import json
from urllib import parse
import time
import subprocess

#---------------2024.1117--------------------
#---------------2025.0506--------------------
# 1117: added 0/3/6/9-minute recording
# 0506: winktv is about to shut down its service

now = str(int(time.time()))

# Proxy server
sock = 'http://127.0.0.1:7890'

proxy = {
    'http': sock,
    'https': sock,
}

# Variables for the live/play (address) request
a_url = 'https://api.pandalive.co.kr/v1/live/play'
a_cookie = 'userLoginSaveYN=Y; userLoginSaveID=Wm1KaVptSmpZZz09; _tt_enable_cookie=1; _ttp=01JTJS3SC1MF42KGP2DFH3B7B1_.tt.2; sessKey=3b37839a-6d91-4dda-8fbd-664b496becfa; _gcl_au=1.1.1821231707.1762172484; exelbid-uid=qVFTcrM1jStgSWsCjnsl; userLoginIdx=17217562; userLoginYN=Y; partner=winktv; __rtbh.uid=%7B%22eventType%22%3A%22uid%22%2C%22id%22%3A17217562%2C%22expiryDate%22%3A%222026-11-03T12%3A22%3A51.153Z%22%7D; __rtbh.lid=%7B%22eventType%22%3A%22lid%22%2C%22id%22%3A%22M7yRxwcKpyeBJJ6kC8ru%22%2C%22expiryDate%22%3A%222026-11-03T12%3A22%3A51.154Z%22%7D; _gid=GA1.3.1542007801.1764675462; 79b0c6d4081955eb1c9b2417b9015e73b776b816f05c1f6e3cbf8c71c57259ee=hkXgymG8df%2BHQJeuAPwM5CJJvEQ8u934L9dv4t%2B0d3ETS6VH4yhFLDqDEfTVMzIGPxLaMZfX0JuOavYCK1HiySWk9kUfoG4Sv0MXR2ViP345hn48aAYJR4SfzYcr4QQe; _ga_W91XDLC3YE=GS2.1.s1764675461$o21$g1$t1764675532$j59$l0$h646732215; _gat_UA-193540779-1=1; _gat_gtag_UA_194444882_2=1; _ga=GA1.1.1926602821.1746534365; _ga_NGSHFJTQS1=GS2.1.s1764675462$o21$g1$t1764675532$j53$l0$h0; _ga_0J8HGTPY46=GS2.1.s1764675462$o15$g1$t1764675532$j60$l0$h0; _ga_ZJ51R4C39H=GS2.3.s1764675462$o20$g1$t1764675532$j60$l0$h0; _ga_Z58JX68TPR=GS2.1.s1764675460$o21$g1$t1764675535$j47$l0$h1770090577; ttcsid_CPNT7B3C77U5K3OPINOG=1764675463028::qQg7o480IRIFZIgskB69.18.1764675535333.0; ttcsid=1764675463029::2UTGLMwei0BSk4qdZUBu.18.1764675535333.0'
a_headers = {
            "Accept": "application/json, text/plain, */*",
            "Accept-Encoding": "gzip, deflate, br, zstd",
            "Accept-Language": "en-US,en;q=0.9",
            "Content-Type": "application/x-www-form-urlencoded",
            "Cookie": a_cookie,
            "Origin": "https://www.pandalive.co.kr",
            "Referer": "https://www.pandalive.co.kr/",
            "sec-ch-ua": '"Not)A;Brand";v="99", "Google Chrome";v="127", "Chromium";v="127"',
            "sec-ch-ua-mobile": "?0",
            "sec-ch-ua-platform": '"macOS"',
            "sec-fetch-dest": "empty",
            "sec-fetch-mode": "cors",
            "sec-fetch-site": "same-site",
            "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36",
            "x-device-info": '{"t":"webPc","v":"1.0","ui":17217562}',
}


# DingTalk notification
def send_message(title, text):
    # Request URL: the DingTalk webhook address
    webhook = 'https://oapi.dingtalk.com/robot/send?access_token=4b8e7de0defb55fdf001b6c928b258512f9af6ebbb565713e834c97d7f81f730'
    # Build the request headers
    header = {
        'Content-Type': 'application/json',
        'Charset': 'UTF-8'
    }
    message = {
        'msgtype': 'markdown',
        'markdown': {
            'title': title,
            'text': text
        }
    }

    message = json.dumps(message)

    info = requests.post(url=webhook, data=message, headers=header)
    # print(info.text)
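
# Example usage of send_message (hypothetical values, mirroring the calls made
# later in autoRec() and main()):
#   send_message('Start', '# Start---some_bj_id')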


# file_path = '/home/uftp/bj_address.json'


# Read the BJ names and attributes from a JSON file
def readList(file_path):
    with open(file_path, 'r') as f:
        bj_list = json.load(f)  # read and parse the JSON data
    return bj_list


# Update the BJ state stored in the JSON file
def writeList(file_path, bj_list):
    with open(file_path, 'w') as f:
        json.dump(bj_list, f, indent=4)


def writeSet(file_path, bj_set):
    """Write a set to a txt file, one element per line."""
    with open(file_path, 'w') as file:
        for item in bj_set:
            file.write(f'{item}\n')  # one element per line

def readSet(file_path):
    """Read a txt file and return its lines as a set."""
    bj_set = set()  # start with an empty set
    with open(file_path, 'r') as file:
        for line in file:
            bj_set.add(line.strip())  # strip the trailing newline and add to the set
    return bj_set
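
# Example round trip for the two helpers above (hypothetical path, shown only
# to illustrate the one-element-per-line format):
#   writeSet('/tmp/fan_set_demo.txt', {'bj_a', 'bj_b'})
#   readSet('/tmp/fan_set_demo.txt')  # -> {'bj_a', 'bj_b'}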

def autoRec(name, url):
    '''Start an automatic recording, saved under the premium directory.'''
    subprocess.getstatusoutput(f'bash /home/uftp/rkawai.sh {name} {url}')
    text = f'# Start---{name}'
    send_message('Start', text)


# Get the currently live BJs and the fan-room BJs from the bookmark list
def getLiveList(now, proxy):
    url = 'https://api.pandalive.co.kr/v1/live/bookmark'
    cookie = 'userLoginSaveYN=Y; userLoginSaveID=Wm1KaVptSmpZZz09; _tt_enable_cookie=1; _ttp=01JTJS3SC1MF42KGP2DFH3B7B1_.tt.2; sessKey=3b37839a-6d91-4dda-8fbd-664b496becfa; _gcl_au=1.1.1821231707.1762172484; exelbid-uid=qVFTcrM1jStgSWsCjnsl; userLoginIdx=17217562; userLoginYN=Y; partner=winktv; __rtbh.uid=%7B%22eventType%22%3A%22uid%22%2C%22id%22%3A17217562%2C%22expiryDate%22%3A%222026-11-03T12%3A22%3A51.153Z%22%7D; __rtbh.lid=%7B%22eventType%22%3A%22lid%22%2C%22id%22%3A%22M7yRxwcKpyeBJJ6kC8ru%22%2C%22expiryDate%22%3A%222026-11-03T12%3A22%3A51.154Z%22%7D; _gid=GA1.3.1542007801.1764675462; 79b0c6d4081955eb1c9b2417b9015e73b776b816f05c1f6e3cbf8c71c57259ee=hkXgymG8df%2BHQJeuAPwM5CJJvEQ8u934L9dv4t%2B0d3ETS6VH4yhFLDqDEfTVMzIGPxLaMZfX0JuOavYCK1HiySWk9kUfoG4Sv0MXR2ViP345hn48aAYJR4SfzYcr4QQe; _ga_W91XDLC3YE=GS2.1.s1764675461$o21$g1$t1764675532$j59$l0$h646732215; _gat_UA-193540779-1=1; _gat_gtag_UA_194444882_2=1; _ga=GA1.1.1926602821.1746534365; _ga_NGSHFJTQS1=GS2.1.s1764675462$o21$g1$t1764675532$j53$l0$h0; _ga_0J8HGTPY46=GS2.1.s1764675462$o15$g1$t1764675532$j60$l0$h0; _ga_ZJ51R4C39H=GS2.3.s1764675462$o20$g1$t1764675532$j60$l0$h0; _ga_Z58JX68TPR=GS2.1.s1764675460$o21$g1$t1764675535$j47$l0$h1770090577; ttcsid_CPNT7B3C77U5K3OPINOG=1764675463028::qQg7o480IRIFZIgskB69.18.1764675535333.0; ttcsid=1764675463029::2UTGLMwei0BSk4qdZUBu.18.1764675535333.0'
    data = 'offset=0&limit=60&isLive=Y&width=280&height=158&imageResize=crop'
    data = parse.parse_qs(data)
    headers = {
        "accept": "application/json, text/plain, */*",
        "accept-language": "en-US,en;q=0.9",
        "content-type": "application/x-www-form-urlencoded",
        # "sec-ch-ua": "\"Chromium\";v=\"104\", \" Not A;Brand\";v=\"99\", \"Google Chrome\";v=\"104\"",
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": "\"Windows\"",
        "sec-fetch-dest": "empty",
        "sec-fetch-mode": "cors",
        "sec-fetch-site": "same-site",
        "x-device-info": "{\"t\":\"webPc\",\"v\":\"1.0\",\"ui\":17217562}",
        "cookie": cookie,
        "Referer": "https://www.pandalive.co.kr/",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36",
        "Referrer-Policy": "strict-origin-when-cross-origin"
    }
    ###  ------- the 3 values returned --------
    bj_set = set()   # BJ ids currently live
    fan_set = set()  # fan-room BJ ids
    fan_list = {}    # info needed for the fan-room BJs
    try:
        response = requests.post(url=url, headers=headers, data=data, proxies=proxy)
        if response.status_code == 200:
            content = response.json()
            for bookmark in content['list']:
                media = bookmark['media']
                # Only keep entries that are actually live
                if media['liveType'] == 'live':
                    # Add the live BJ id to the set
                    bj_set.add(media['userId'])
                    if 'fanLevel' in media:
                        fan_set.add(media['userId'])  # add the fan-room BJ id to the set
                        fan_list[media['userId']] = {
                            'startTime':    media['startTime'],
                            'user':         media['user'],
                            'fanLevel':     media['fanLevel']
                        }
            # Keep only the non-fan-room BJs in bj_set
            bj_set = bj_set - fan_set
            return bj_set, fan_set, fan_list
        else:
            return 404, '# ERROR', response.status_code
    except requests.exceptions.RequestException as e:
        return 404, '# ERROR', e
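
# Note: getLiveList() returns (bj_set, fan_set, fan_list) on success, and
# (404, '# ERROR', <status code or exception>) on failure; main() only checks
# bj_set != 404 before using the other two values.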


def getAddress(name, url, headers, proxy):
    data = f'action=watch&userId={name}&password=&shareLinkType='
    data = parse.parse_qs(data)
    try:
        response = requests.post(url=url, headers=headers, data=data, proxies=proxy)
        if response.status_code == 200:
            content = response.json()
            address = content['PlayList']['hls'][0]['url']
            return address
        else:
            return ''
    except Exception:
        # Network errors and unexpected response shapes both fall back to an empty address
        return ''
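
# Abridged shape of the live/play response assumed by getAddress(), inferred
# only from the keys it reads; the real payload contains more fields:
#   {"PlayList": {"hls": [{"url": "<hls stream url>"}]}}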


def main():
    date = time.strftime('%H:%M', time.localtime())
    bj_list = {}
    # Fan-room BJ set saved locally by the previous run
    local_set = readSet('/home/uftp/fan_set.txt')
    # Addresses stored in the local list
    local_list = readList('/home/uftp/bj_list.json') # 3
    # Older locally stored addresses
    backup_list = readList('/home/uftp/bj_backup.json') # 6
    backup_list9 = readList('/home/uftp/bj_backup9.json') # 9
    # Rotate the old bj lists into the backups
    writeList('/home/uftp/bj_backup.json', local_list)
    writeList('/home/uftp/bj_backup9.json', backup_list)
    bj_set, fan_set, fan_list = getLiveList(now, proxy)
    if bj_set != 404:
        # Check whether the fan-room BJ set has changed
        if fan_set != local_set:
            # DingTalk notification text
            text = f'# {date}\n# *{len(local_set)} ---> {len(fan_set)}*\n'
            # Find the newly added fan-room BJs
            diff_set = fan_set - local_set
            for bj in diff_set:
                text += f'- {bj}\n'
                text += f'> - {fan_list[bj]["startTime"]}\n'
                text += f'> - {fan_list[bj]["user"]}\n'
                text += f'> - {fan_list[bj]["fanLevel"]}\n'
                if bj in local_list and local_list[bj]["address"]:
                    text += f'> - {local_list[bj]["time"]}\n'
                    text += f'> - {local_list[bj]["address"]}\n'
                    autoRec(bj, local_list[bj]['address'])  # start automatic recording
                elif bj in backup_list and backup_list[bj]["address"]:
                    text += f'> - {backup_list[bj]["time"]}\n'
                    text += f'> - {backup_list[bj]["address"]}\n'
                    autoRec(bj, backup_list[bj]['address'])  # start automatic recording
                elif bj in backup_list9 and backup_list9[bj]["address"]:
                    text += f'> - {backup_list9[bj]["time"]}\n'
                    text += f'> - {backup_list9[bj]["address"]}\n'
                    autoRec(bj, backup_list9[bj]['address'])  # start automatic recording
                else:
                    text += '> - NULL\n'
            writeSet('/home/uftp/fan_set.txt', fan_set)
            send_message('Bookmark-new', text)
        # Iterate over the live BJs and store their urls in bj_list
        for bj in bj_set:
            bj_list[bj] = {
                    'address':  getAddress(bj, a_url, a_headers, proxy),
                    'time':     time.strftime("%H:%M:%S", time.localtime())
                }
            time.sleep(0.3)
        # Write out the new bj list
        writeList('/home/uftp/bj_list.json', bj_list)
        # ---------------- add this script to crontab --------------------
        job = subprocess.getoutput('crontab -l | grep getpandalive')
        if 'getpandalive' not in job:
            subprocess.getstatusoutput('( crontab -l | grep -v getpandalive) | crontab - && (crontab -l | cat; echo "*/3 * * * * python3 /home/uftp/getpandalive.py >/dev/null 2>&1") | crontab -')
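        # The installed cron entry ("*/3 * * * *") re-runs this script every 3 minutes,
        # which is presumably what keeps bj_list.json / bj_backup.json / bj_backup9.json
        # roughly 3 / 6 / 9 minutes apart (the "# 3 / # 6 / # 9" markers above).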

if __name__ == "__main__":
    main()
