patch-1
Khan 2021-09-01 02:57:54 +05:00
parent 9df940f1fd
commit bf3c3712dd
222 changed files with 1007430 additions and 0 deletions

13
movie.bat 100644
View File

@ -0,0 +1,13 @@
@ECHO OFF
ECHO Enter the Netflix ID:
set /p MPD=
ECHO Select the quality:
set /p qu=
NFripper.py %MPD% -o Downloads -q %qu% --main --prv ca-tor.pvdata.host --alang hin eng
echo.
pause
@ECHO OFF

16
series.bat 100644
View File

@ -0,0 +1,16 @@
@ECHO OFF
ECHO Enter the Netflix ID:
set /p MPD=
ECHO Enter the season number:
set /p se=
ECHO Select the quality:
set /p qu=
NFripper.py %MPD% -o Downloads -q %qu% -s %se% --main --prv ca-tor.pvdata.host --alang hin eng
echo.
pause
@ECHO OFF

128
NFripper.py 100644
View File

@ -0,0 +1,128 @@
import argparse, json, os, logging
from configs.config import tool
from helpers.proxy_environ import proxy_env
from datetime import datetime
from services.netflix import netflix

script_name = "NF Ripper"
script_ver = "2.0.1.0"

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description=f">>> {script_name} {script_ver} <<<")
    parser.add_argument("content", nargs="?", help="Content URL or ID")
    parser.add_argument("-q", dest="customquality", nargs=1, help="configure the video quality.", default=[])
    parser.add_argument("-o", dest="output", help="download all assets to the directory provided.")
    parser.add_argument("-f", dest="output_folder", help="force mux .mkv files to the directory provided", action="store", default=None)
    parser.add_argument("--nv", dest="novideo", help="don't download video", action="store_true")
    parser.add_argument("--na", dest="noaudio", help="don't download audio", action="store_true")
    parser.add_argument("--ns", dest="nosubs", help="don't download subtitles", action="store_true")
    parser.add_argument("-e", dest="episodeStart", help="start downloading the season from this episode.", default=None)
    parser.add_argument("-s", dest="season", help="start downloading from this season.", default=None)
    parser.add_argument("--keep", dest="keep", help="keep all files after muxing; by default they are erased.", action="store_true")
    parser.add_argument("--only-2ch-audio", dest="only_2ch_audio", help="force getting only eac3 2.0 ch audio tracks.", action="store_true")
    parser.add_argument("--alang", dest="audiolang", nargs="*", help="download only the selected audio languages", default=[])
    parser.add_argument("--AD", "--adlang", dest="AD", nargs="*", help="download only the selected audio description languages", default=[])
    parser.add_argument("--slang", dest="sublang", nargs="*", help="download only the selected subtitle languages", default=[])
    parser.add_argument("--flang", dest="forcedlang", nargs="*", help="download only the selected forced subtitle languages", default=[])
    parser.add_argument("-t", "--title", dest="titlecustom", nargs=1, help="customize the title of the show", default=[])
    parser.add_argument("-p", "--prompt", dest="prompt", help="enable the yes/no prompt when URLs are grabbed.", action="store_true")
    parser.add_argument("-keys", "--license", dest="license", help="print all profile keys and exit.", action="store_true")
    parser.add_argument("--audio-bitrate", dest="custom_audio_bitrate", nargs=1, help="configure the audio bitrate.", default=[])
    parser.add_argument("--aformat-2ch", "--audio-format-2ch", dest="aformat_2ch", nargs=1, help="configure the 2.0 audio format.", default=[])
    parser.add_argument("--aformat-51ch", "--audio-format-51ch", dest="aformat_51ch", nargs=1, help="configure the 5.1 audio format.", default=[])
    parser.add_argument("--android-login", dest="android_login", help="log in to Netflix using the Android API and save cookies and build.", action="store_true")
    parser.add_argument("--search", action="store", dest="search", help="download using Netflix search for the movie/show.", default=0)
    parser.add_argument("--hevc", dest="hevc", help="return the HEVC profile", action="store_true")
    parser.add_argument("--hdr", dest="hdr", help="return the HDR profile", action="store_true")
    parser.add_argument("--high", dest="video_high", help="return the MSL High video manifest (hpl videos, usually smaller size and lower bitrate).", action="store_true")
    parser.add_argument("--main", dest="video_main", help="return the MSL Main video manifest (mpl videos, usually bigger size and higher bitrate).", action="store_true")
    parser.add_argument("--check", dest="check", help="compare hpl vs mpl.", action="store_true")
    parser.add_argument("--all-audios", dest="allaudios", help="download all audio tracks of the movie/show", action="store_true")
    parser.add_argument("--all-forced", dest="allforcedlang", help="download all forced subtitles of the movie/show", action="store_true")
    parser.add_argument("--no-aria2c", dest="noaria2c", help="don't use aria2c for downloading; use the Python downloader instead.", action="store_true")
    # PROXY
    parser.add_argument("--nrd", action="store", dest="nordvpn", help="add a country for NordVPN proxies.", default=0)
    parser.add_argument("--prv", action="store", dest="privtvpn", help="add a country for privtvpn proxies.", default=0)
    parser.add_argument("--no-dl-proxy", dest="no_download_proxy", help="do not use a proxy while downloading files", action="store_true", default=False)
    # PACK
    parser.add_argument("--gr", dest="muxer_group", help="add a group name that overrides the one in config", action="store", default=None)
    parser.add_argument("--upload", dest="upload_ftp", help="upload the release after packing", action="store_true", default=None)
    parser.add_argument("--pack", dest="muxer_pack", help="pack the release", action="store_true", default=None)
    parser.add_argument("--confirm", dest="confirm_upload", help="ask for confirmation before uploading the packed release", action="store_true", default=None)
    parser.add_argument("--imdb", dest="muxer_imdb", help="add an IMDb id to the title for packing", action="store", default=None)
    parser.add_argument("--scheme", dest="muxer_scheme", help="set the muxer scheme name", default=None)
    # cleaner
    parser.add_argument("--clean-add", dest="clean_add", nargs="*", help="add more file extensions to be deleted", default=[])
    parser.add_argument("--clean-exclude", dest="clean_exclude", nargs="*", help="add more file extensions to exclude from deletion", default=[])
    parser.add_argument("--log-level", default="info", dest="log_level", choices=["debug", "info", "error", "warning"], help="choose the logging level")
    parser.add_argument("--log-file", dest="log_file", help="set a log file for debugging", default=None)
    args = parser.parse_args()

    start = datetime.now()

    if args.log_file:
        logging.basicConfig(
            filename=args.log_file,
            format="%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s",
            datefmt="%Y-%m-%d %I:%M:%S %p",
            level=logging.DEBUG,
        )
    else:
        if args.log_level.lower() == "info":
            logging.basicConfig(format="%(message)s", level=logging.INFO)
        elif args.log_level.lower() == "debug":
            logging.basicConfig(
                format="%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s",
                datefmt="%Y-%m-%d %I:%M:%S %p",
                level=logging.DEBUG,
            )
        elif args.log_level.lower() == "warning":
            logging.basicConfig(
                format="%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s",
                datefmt="%Y-%m-%d %I:%M:%S %p",
                level=logging.WARNING,
            )
        elif args.log_level.lower() == "error":
            logging.basicConfig(
                format="%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s",
                datefmt="%Y-%m-%d %I:%M:%S %p",
                level=logging.ERROR,
            )

    logging.getLogger(__name__)

    group = {
        "UPLOAD": args.upload_ftp,
        "IMDB": args.muxer_imdb,
        "SCHEME": args.muxer_scheme,
        "PACK": args.muxer_pack,
        "GROUP": args.muxer_group,
        "CONFIRM": args.confirm_upload,
        "EXTRA_FOLDER": args.output_folder,
    }

    # ~ commands
    proxy, ip = proxy_env(args).Load()
    commands = {"aria2c_extra_commands": proxy, "group": group}
    logging.debug(commands)

    if args.license:
        args.prompt = False

    l = "\n__________________________\n"
    print(f"\n-- {script_name} --{l}\nVERSION: {script_ver}{l}\nIP: {ip}{l}")

    netflix_ = netflix(args, commands)
    netflix_.main_netflix()

    print(
        "\nNFripper took {} Sec".format(
            int(float((datetime.now() - start).total_seconds()))
        )
    )  # total seconds
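A minimal sketch of driving NFripper.py from Python instead of the .bat wrappers, mirroring the arguments movie.bat passes; it assumes NFripper.py sits in the current directory, and the title ID below is a placeholder, not one from the repo.

import subprocess, sys

title_id = "80117470"  # placeholder Netflix title ID
cmd = [
    sys.executable, "NFripper.py", title_id,
    "-o", "Downloads",
    "-q", "1080",
    "--main",
    "--alang", "hin", "eng",
]
subprocess.run(cmd, check=True)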

Binary files not shown.

13
checker.bat 100644
View File

@ -0,0 +1,13 @@
@ECHO OFF
ECHO Enter the Netflix ID:
set /p MPD=
ECHO Select the quality:
set /p qu=
NFripper.py %MPD% -o Downloads -q %qu% --check
echo.
pause
@ECHO OFF

View File

@ -0,0 +1,113 @@
{
"BUILD_IDENTIFIER": "v970d0ec4",
"cookies": {
"_ga": [
"GA1.2.1140108570.1617215940",
0
],
"NetflixId": [
"v%3D2%26ct%3DBQAOAAEBEHOzu6KP28kdADXLobUUrROB0PoBjq3JxcX1vGfDcXUv4xVDQq5H6XsQyE1Frj5SPGO99pNqlZl9A5O3CZUdxiur6u9F5dqAyA0veRgT2bVpQnJ-h7kBmZdPSr3HyGHD9fMnj07AjgioUxeWFsJPky9pVuFRADdnSQXFFqK1NpW2TpDRpGEFRInaAZ9Hr-y6y4J7ggTgHQhbL8CI6NgAHFs8QgPEC9jcyvyFKQmRIBUeU-HS2-WSNlNwSxquZ4bNGBasoTIMGE_8A3R2Mo4lc-qio2Df8Qdv-elVwj8S6VeH8v-brCDpPk-VcDbQp1O2sPb9oKnbCS0DNYL3TqGXzMBNVrL_nHfxDEbPss8929Tc9Ra5HA3635DKbxEJc-tq3u9xUeb_DszXHIS5O-sg6595Pn-LPWBxphkPKdftdLYsQD1RzR0ena_UNrvr-8lIE5TxCXJhLXK0msAfypuevVH1XKXN2FsHJyR0JbbzkOYN2KNxCmRfNkYezY28fKRoufM1F6gF5HDMxuLzw_KjrDV6is18lbNbcmP32_TY2aJnWfHqSaGw5DWnQ3shDKo4SltLUjCuJzYGY7YJAEM-3ifbtwN-QgKyWTUsCwtrrYlDnpUy4rs3rnJyAu8EElWymMkV%26bt%3Ddbl%26ch%3DAQEAEAABABSprqjfXbW6YL0tVF0tz16hvWyTr9LF4jE.%26mac%3DAQEAEAABABRoyiGmz8qurX494b0O21l13oiiV94g9CU.",
0
],
"OptanonConsent": [
"isIABGlobal=false&amp;datestamp=Fri+Jun+18+2021+07%3A58%3A49+GMT-0700+(Pacific+Daylight+Time)&amp;version=6.6.0&amp;consentId=a93027f9-5a1c-410f-8272-e0a1ea3e658d&amp;interactionCount=1&amp;landingPath=NotLandingPage&amp;groups=C0001%3A1%2CC0002%3A1%2CC0004%3A0&amp;hosts=H1%3A1%2CH12%3A1%2CH13%3A1%2CH27%3A0%2CH28%3A0%2CH30%3A0&amp;AwaitingReconsent=false",
0
],
"SecureNetflixId": [
"v%3D2%26mac%3DAQEAEQABABS0Wg3y7pHcIuzNaFVA9OL1hOBszVcpdKc.%26dt%3D1623992368587",
0
],
"cf_token": [
"5d16d7e4-f422-4505-b81a-f13f81d06b71",
0
],
"flwssn": [
"f30c6737-15f8-4a97-8102-a0162bac69bc",
0
],
"memclid": [
"5eb11c58-f94f-46a1-9916-a877c78cc950",
0
],
"nfvdid": [
"BQFmAAEBEHVhRA5F47wt0VzEXxA4lotgv2JbGEnTuU3t2ACmhEmOKuDHWsixD22MEvRfk1SyBSysA4pNyu4UnaMbEBYYAAhCvmGD6sG8LGI_wNxCLpENpUPoF1FF8p3ZOUGBcLkMl5UDunPJGTSHibJ6rP5eZwgX",
0
],
"pas": [
"%7B%22supplementals%22%3A%7B%22muted%22%3Atrue%7D%7D",
0
],
"playerPerfMetrics": [
"%7B%22uiValue%22%3A%7B%22throughput%22%3A24045%2C%22throughputNiqr%22%3A0.1387332986123521%7D%2C%22mostRecentValue%22%3A%7B%22throughput%22%3A24044.9%2C%22throughputNiqr%22%3A0.1387332986123521%7D%7D",
0
],
"profilesNewSession": [
"0",
0
],
"__cfduid": [
"ddc5ba21ab87d546424a73381309e8af61619034211",
0
],
"_admrla": [
"2.0-a88ab21a-c44f-b78a-d6e3-05b3cde370fc",
0
],
"_awl": [
"2.1619452424.0.4-7dc0a353-a88ab21ac44fb78ad6e305b3cde370fc-6763652d617369612d6561737431-6086e208-1",
0
],
"_gid": [
"GA1.2.962657418.1619405560",
0
],
"ccpaApplies": [
"false",
0
],
"ccpaUUID": [
"e05cd352-14f3-4d80-9500-b1d81fee7cd0",
0
],
"dnsDisplayed": [
"true",
0
],
"signedLspa": [
"false",
0
],
"thx_guid": [
"8a067b33cfaa47539ea40975e2b80069",
0
],
"_sp_v1_csv": [
"null",
0
],
"_sp_v1_data": [
"2:334049:1619034213:0:9:0:9:0:0:_:-1",
0
],
"_sp_v1_lt": [
"1:",
0
],
"_sp_v1_opt": [
"1:",
0
],
"_sp_v1_ss": [
"1:H4sIAAAAAAAAAItWqo5RKimOUbKKhjHySnNydGKUUpHYJWCJ6traWFwSSjrUNwiffqVYAG6Fhl26AAAA",
0
],
"_sp_v1_uid": [
"1:365:378b6189-c59f-4807-abd1-95bcd2c5779d",
0
],
"consentUUID": [
"ef33150e-dde3-41af-b14c-18d858e5a51b",
0
]
}
}

File diff suppressed because it is too large.

View File

@ -0,0 +1,11 @@
{
"mastertoken": {
"tokendata": "eyJzZXNzaW9uZGF0YSI6IkJRQ0FBQUVCRUF6SENReUt4ZklvZ3dQZ2FvMndhNGVCd0xTckZoK29QTGJHL20vWU9oQjQyZHloUUYxTmxkZm5NamNSTy96NUVKdnZJOGp4MllFYytWZk0rSlJ0RURMZFA1VTcyUWx5bS9RbnJXTmFuYlEyM3Fpa2YvbHRGYWFqbXlJU3NzdnpRU2VvL3hPL1p0TFZieGR6cWhYMG9DdmtLWlk1RFRXN3Y2aVh3dERZZlJhR0JRYXVLZjBpdERsY0ZWVVN1alExR2xSVklGZk42YVE3WEFKd3JFTFIvTlVqQmJmYnl6K2FrN1ZZVEhBZnFScHQ4WjBXZk0xa0VySjVJYjgvcE9iUU9FZHdPTlY3YW9lSjRMT2hNTnMycE9sZ3MwNkd6eUZ1Nk13RytPSHFqMURaVzdNL3hUOWdkM01ncGdKejFkY0c4ODlTU0hUdVVSc2JST0hmVy80akU2SlhySTUrSjZLT0UyeGRBdVVoNjY3TnlNMmtoOURBM25lOFBLaHRrTjlmdG1HdEszZ2l1S01xTERmdlQ3RDVGUGVaU1RKY25wenFjMk9YWW12RHBoY0JmWFVXZEhTUWRUbUtjU01zL1o4TUt5UVB3emhLMzFVVldWWllUbEIyODMzWG1WNDArSXZrK1gzdUN1MGlMUjJaVkRXYzh2T1V4dEFSWi9vM3ZEOC9heUtQMTh3UmI1WndzNkl2R0dRSkhPU2MwTHB6RS8zUmdxUXU2Z2FYVU5tWjdlMTEzWEVMdFZZQktzRmFZMktJQVViNFVvT1F1VnVkVnNMNTExUk5OQzJZYnA4Q1hvWlZlY2tNaU9ydDZNS3hNUFE9IiwicmVuZXdhbHdpbmRvdyI6MTYyNDA4NjI2Miwic2VyaWFsbnVtYmVyIjo1MDkzNTMyMzUxNTM2MjQsImV4cGlyYXRpb24iOjE2MjUyMDk0NjIsInNlcXVlbmNlbnVtYmVyIjoxfQ==",
"signature": "AQEAgQABASD1vbC9zQfnxwNr/7nLFkWyxRW7Z52QGWLEGk97hYBSYtNcJnM="
},
"sequence_number": 1,
"encryption_key": "YAeCFbGwVf/aukgoxE3tSg==",
"sign_key": "75QOynO/ysXBPimd9vCBSCI5/JQ7VCxAe4Ev/6hLJc4=",
"RSA_KEY": "-----BEGIN RSA PRIVATE KEY-----\nMIIEogIBAAKCAQEAvMQdqQ7cG30cZdxYRamhySuvTWvUtsZp6EVmV4U1msqzceb5\nevK1D8rac87mdUUL9e5syaKTarVJe9bzl5lPnugSDPUEWuXS3vAynJA9pzUHEJdN\nZVkv43IdyXI+eBV+aJ4f+DB06MYvUxyDgPyfvCIKJPZ1Pj0ZHN2rhpK2bx2xnZh4\nBAoSIqlSYeChWB9O55sDYkCooAXd9zNJ7EBerqNlQr5sImY0izD1sPdfJiEfZLKj\n/+xr0PPrdTKeQmcR/r30K+4lVD1tYVPSy4MZsDVu3KgHtMBDPvx+DyaXdtdAdxpC\nkR6uqw+Q0H7KoedzdmlOMyvTVHLhgbVtxDEkuwIDAQABAoH/P6ydjoIypF12y3oE\nPDElo+f3dA14Dj/4geVauSLTCvifj1CUQpqUFeCyU9okZyhW+UvOsNqiOlyJjiwU\n9wYStIekZdNI1lhddsXRhorfSW8VoGpQf6WeWH3LK1o9I5+H0uu1eeoYXEvIP5Nm\n+iPqlKykv1+MwZsk1TTHyFLu1Afd7hG80c2UHHvvGioD0YregsrKXcLXi5OEkAS+\n5SiwKpbelwLcqK58SVN+ajnRvz6na6Fm4wjWq4wynrtX0RPD2P8+aw0X805o9fsP\nU3zRx4Dy3huazIBnadTqAEGbF73OWSA+Tow+LMRvUSqdWZMiX2ikLRwkTKFaaq+N\nObPhAoGBANqo5TbysRdRTerXxXqNlewEiYNH3Vn5pz4t1W/iRIT8BDKAD2FHcZ25\n4bfGcRmQ8wO3gcPPgVyXGf100RU0IR6lVjrcJT3w29lPs7AOhElDWMBC0fcA5JSd\nkWFMDUSobaXxZ8HYQJG6eQTEuQebm2PffppZUnVKTY2rhDaOCpmLAoGBAN0AXWiC\ngA8/6P48EYc8sQq/7KZ1g2Urxqxhc/wNU6FdKs582HMEeRYBA2PJsamYh0n/Wp5k\nfFJfb7HU4yCISzT7iHHjhLuuUR0UMmMkwY/x4YE5Ri1hlrS3SdGdfV6ufqe9jiGg\nboJ1rI0ieJmdyVWmhTK9CXuF/gB/6ik55CeRAoGBAMOr9orIfW9HY7mvY1n7T9lI\naiJf8hZtUZtT+rdHvVdgCwWCEcFU5LhnujTx0Q425zFBS0+F5taLpUdp/RzDbIv3\nGwZLMMyQOLzsFPmM1BaXvNk4MpqeYu8XXhy6qPjy3ERulhIiyg1e2KNKw+Wp+1FR\nlALdweuSFXqcrREA5T1nAoGBAIYoeou+7M5VFbN/84QNK8xCxf4myCTadjie0DHq\nRSJn1FyVHTB1PqxE4THqdpdlqHsbMH+GsJGwrbVebqKJGl6Hc0TvwNvN7h+g6xWU\ncoxXYXV4t0lFPJ9nxMAiwsB/XROm1mlDYtJ/bMggbOWUC2ybMbCjYOZDaPYUsKlm\nI0KBAoGAC0EsDBZZ6/ymQgenaNbTErN04i6Zii0U9XZ4Do72cRQTYsRELKqXyNwF\nwy//GzwqOXPFTaaY/RL0ISaV3xWU2MkI2Scmr+oGyEEKeE1uul9ZCo9l/2Jin2X9\n6gsH4QryiP+we7L1p2e1sdYmY3QWufuWjYI4e6iJ/vJX+WHM95g=\n-----END RSA PRIVATE KEY-----",
"expiration": 1625209462
}
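A small sketch of how MSLClient.load_tokens (further down in this commit) reads a saved token file like the one above, decoding the base64 session keys back to raw bytes; the path is the "token_file" entry from configs/config.py, and the stated key sizes match the sample values shown here.

import base64, json

with open("configs/Tokens/netflix_token.json", "r", encoding="utf-8") as f:
    tokens = json.load(f)

encryption_key = base64.standard_b64decode(tokens["encryption_key"])  # 16-byte AES-CBC key
sign_key = base64.standard_b64decode(tokens["sign_key"])              # 32-byte HMAC-SHA256 key
print(len(encryption_key), len(sign_key))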

View File

Binary files not shown.

163
configs/config.py 100644
View File

@ -0,0 +1,163 @@
import sys, os, random, string, platform
from os.path import dirname
from os.path import join
from pywidevine.cdm import cdm, deviceconfig
dirPath = dirname(dirname(__file__)).replace("\\", "/")
class utils:
def __init__(self):
self.dir = dirPath
def random_hex(self, length: int) -> str:
"""return {length} of random string"""
return "".join(random.choice("0123456789ABCDEF") for _ in range(length))
utils_ = utils()
#####################################(DEVICES)#####################################
devices_dict = {
"android_general": deviceconfig.device_android_general,
}
DEVICES = {
"NETFLIX-MANIFEST": devices_dict["android_general"],
"NETFLIX-LICENSE": devices_dict["android_general"],
}
#####################################(MUXER)#####################################
MUXER = {
"muxer_file": f"{dirPath}/bin/muxer.json",
"mkv_folder": None,
"DEFAULT": False, # to use the normal renaming. EX: Stranger Things S01E01 [1080p].mkv
"AUDIO": "hin", # default audio language.
"SUB": "None", # default subtitle language. EX: "eng" or "spa"
"GROUP": "Tandav", # to change the group name!. it's also possible to use this "--gr LOL", on the ripping commands.
"noTitle": False, # this will remove titles from the episodes EX: (The Witcher S01E01). insstead of (The Witcher S01E01 The End's Beginning).
"scheme": "p2p", # add/change any needed scheme naming. it's also possible to use this "--muxscheme repack", on the ripping commands.
"schemeslist": {
"p2p": "{t}.{r}.{s}.WEB-DL.{ac}.{vc}-{gr}",
"test": "{t}.{r}.{s}.WEB-DL-{gr}",
},
"EXTRAS": [], # extra mkvmerge.exe commands.
"FPS24": [],
}
#####################################(PATHS)#####################################
PATHS = {
"DL_FOLDER": "E:/#rips", #
"DIR_PATH": f"{dirPath}",
"BINARY_PATH": f"{dirPath}/bin",
"COOKIES_PATH": f"{dirPath}/configs/Cookies",
"KEYS_PATH": f"{dirPath}/configs/KEYS",
"TOKENS_PATH": f"{dirPath}/configs/Tokens",
"JSON_PATH": f"{dirPath}/json",
"LOGA_PATH": f"{dirPath}/bin/tools/aria2c",
}
ARIA2C = {
"enable_logging": False, # True
}
SETTINGS = {
"skip_video_demux": [],
}
#####################################(VPN)#####################################
VPN = {
"proxies": None, # "http://151.253.165.70:8080",
"nordvpn": {
"port": "80",
"email": "xxx",
"passwd": "xxx",
"http": "http://{email}:{passwd}@{ip}:{port}",
},
"private": {
"port": "8080",
"email": "abdalhmohmd8@gmail.com",
"passwd": "123456",
"http": "http://{email}:{passwd}@{ip}:{port}",
},
}
#####################################(BIN)#####################################
BIN = {
"mp4decrypt_moded": f"{dirPath}/bin/tools/mp4decrypt.exe",
"mp4dump": f"{dirPath}/bin/tools/mp4dump.exe",
"ffmpeg": f"{dirPath}/bin/tools/ffmpeg.exe",
"ffprobe": f"{dirPath}/bin/tools/ffprobe.exe",
"MediaInfo": f"{dirPath}/bin/tools/MediaInfo.exe",
"mkvmerge": f"{dirPath}/bin/tools/mkvmerge.exe",
"aria2c": f"{dirPath}/bin/tools/aria2c.exe",
}
#####################################(Config)#####################################
Config = {}
Config["NETFLIX"] = {
"cookies_file": f"{dirPath}/configs/Cookies/cookies_nf.txt",
"cookies_txt": f"{dirPath}/configs/Cookies/cookies.txt",
"keys_file": f"{dirPath}/configs/KEYS/netflix.keys",
"token_file": f"{dirPath}/configs/Tokens/netflix_token.json",
"email": "Cfklop@max07.club",
"password": "1111",
"manifest_language": "en-US",
"metada_language": "en",
"manifestEsn": "NFCDIE-03-{}".format(utils().random_hex(30)),
"androidEsn": "NFANDROID1-PRV-P-GOOGLEPIXEL=4=XL-8162-" + utils_.random_hex(64),
}
#####################################(DIRS & FILES)##############################
def make_dirs():
FILES = []
DIRS = [
f"{dirPath}/configs/Cookies",
f"{dirPath}/configs/Tokens",
f"{dirPath}/bin/tools/aria2c",
]
for dirs in DIRS:
if not os.path.exists(dirs):
os.makedirs(dirs)
for files in FILES:
if not os.path.isfile(files):
with open(files, "w") as f:
f.write("\n")
make_dirs()
#####################################(tool)#####################################
class tool:
def config(self, service):
return Config[service]
def bin(self):
return BIN
def vpn(self):
return VPN
def paths(self):
return PATHS
def muxer(self):
return MUXER
def devices(self):
return DEVICES
def aria2c(self):
return ARIA2C
def video_settings(self):
return SETTINGS
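A short sketch of how the "schemeslist" templates above are expanded, mirroring the .replace() chain in helpers/Muxer.py further down in this commit; the sample values are illustrative only (the "NF" source tag is an assumed example, not taken from the repo).

scheme = "{t}.{r}.{s}.WEB-DL.{ac}.{vc}-{gr}"
name = (
    scheme.replace("{t}", "Stranger.Things.S01E01")  # cleaned title
    .replace("{r}", "1080p")                         # resolution bucket from Heigh()
    .replace("{s}", "NF")                            # source tag (assumed value)
    .replace("{ac}", "DDP5.1")                       # audio id from get_audio_id()
    .replace("{vc}", "H.264")                        # video id from get_video_id()
    .replace("{gr}", "Tandav")                       # group from MUXER["GROUP"]
)
print(name + ".mkv")
# Stranger.Things.S01E01.1080p.NF.WEB-DL.DDP5.1.H.264-Tandav.mkv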

629
helpers/Muxer.py 100644
View File

@ -0,0 +1,629 @@

import re, os, sys, subprocess, contextlib, json, glob
from configs.config import tool
from helpers.ripprocess import ripprocess
from pymediainfo import MediaInfo
import logging
class Muxer(object):
def __init__(self, **kwargs):
self.logger = logging.getLogger(__name__)
self.CurrentName_Original = kwargs.get("CurrentName", None)
self.CurrentName = kwargs.get("CurrentName", None)
self.SeasonFolder = kwargs.get("SeasonFolder", None)
self.CurrentHeigh = kwargs.get("CurrentHeigh", None)
self.CurrentWidth = kwargs.get("CurrentWidth", None)
self.source_tag = kwargs.get("Source", None)
self.AudioProfile = self.get_audio_id() # kwargs.get("AudioProfile", None)
self.VideoProfile = self.get_video_id() # kwargs.get("VideoProfile", None)
self.mkvmerge = tool().bin()["mkvmerge"]
self.merge = []
self.muxer_settings = tool().muxer()
##############################################################################
self.packer = kwargs.get("group", None)
self.extra_output_folder = self.packer["EXTRA_FOLDER"]
self.Group = (
self.packer["GROUP"]
if self.packer["GROUP"]
else self.muxer_settings["GROUP"]
)
self.muxer_scheme = (
self.packer["SCHEME"]
if self.packer["SCHEME"]
else self.muxer_settings["scheme"]
)
self.scheme = self.muxer_settings["schemeslist"][self.muxer_scheme]
self.Extras = self.muxer_settings["EXTRAS"]
self.fps24 = True if self.source_tag in self.muxer_settings["FPS24"] else False
self.default_mux = True if self.muxer_settings["DEFAULT"] else False
self.PrepareMuxer()
    def is_extra_folder(self):
        extra_folder = None
        if self.extra_output_folder:
            if not os.path.isabs(self.extra_output_folder):
                raise ValueError("Error: you should provide a full directory path: {}.".format(self.extra_output_folder))
            if not os.path.exists(self.extra_output_folder):
                try:
                    os.makedirs(self.extra_output_folder)
                except Exception as e:
                    raise ValueError("Error while creating folder [{}]: {}.".format(e, self.extra_output_folder))
            extra_folder = self.extra_output_folder
            return extra_folder
        if self.muxer_settings["mkv_folder"]:
            if not os.path.isabs(self.muxer_settings["mkv_folder"]):
                raise ValueError("Error: you should provide a full directory path: {}.".format(self.muxer_settings["mkv_folder"]))
            if not os.path.exists(self.muxer_settings["mkv_folder"]):
                try:
                    os.makedirs(self.muxer_settings["mkv_folder"])
                except Exception as e:
                    raise ValueError("Error while creating folder [{}]: {}.".format(e, self.muxer_settings["mkv_folder"]))
            extra_folder = self.muxer_settings["mkv_folder"]
            return extra_folder
        return extra_folder
def PrepareMuxer(self):
if self.muxer_settings["noTitle"]:
self.CurrentName = self.noTitle()
extra_folder = self.is_extra_folder()
if extra_folder:
self.SeasonFolder = extra_folder
else:
if not self.default_mux:
if self.SeasonFolder:
self.SeasonFolder = self.setFolder()
return
def SortFilesBySize(self):
file_list = []
audio_tracks = (
glob.glob(f"{self.CurrentName_Original}*.eac3")
+ glob.glob(f"{self.CurrentName_Original}*.ac3")
+ glob.glob(f"{self.CurrentName_Original}*.aac")
+ glob.glob(f"{self.CurrentName_Original}*.m4a")
+ glob.glob(f"{self.CurrentName_Original}*.dts")
)
if audio_tracks == []:
raise FileNotFoundError("no audio files found")
for file in audio_tracks:
file_list.append({"file": file, "size": os.path.getsize(file)})
file_list = sorted(file_list, key=lambda k: int(k["size"]))
return file_list[-1]["file"]
def GetVideoFile(self):
videofiles = [
"{} [{}p]_Demuxed.mp4",
"{} [{}p]_Demuxed.mp4",
"{} [{}p] [UHD]_Demuxed.mp4",
"{} [{}p] [UHD]_Demuxed.mp4",
"{} [{}p] [VP9]_Demuxed.mp4",
"{} [{}p] [HIGH]_Demuxed.mp4",
"{} [{}p] [VP9]_Demuxed.mp4",
"{} [{}p] [HEVC]_Demuxed.mp4",
"{} [{}p] [HDR]_Demuxed.mp4",
"{} [{}p] [HDR-DV]_Demuxed.mp4",
]
for videofile in videofiles:
filename = videofile.format(self.CurrentName_Original, self.CurrentHeigh)
if os.path.isfile(filename):
return filename
return None
def get_video_id(self):
video_file = self.GetVideoFile()
if not video_file:
raise ValueError("No Video file in Dir...")
media_info = MediaInfo.parse(video_file)
track = [track for track in media_info.tracks if track.track_type == "Video"][0]
if track.format == "AVC":
if track.encoding_settings:
return "x264"
return "H.264"
elif track.format == "HEVC":
if track.commercial_name == "HDR10" and track.color_primaries:
return "HDR.HEVC"
if track.commercial_name == "HEVC" and track.color_primaries:
return "HEVC"
return "DV.HEVC"
return None
def get_audio_id(self):
audio_id = None
media_info = MediaInfo.parse(self.SortFilesBySize())
track = [track for track in media_info.tracks if track.track_type == "Audio"][0]
if track.format == "E-AC-3":
audioCodec = "DDP"
elif track.format == "AC-3":
audioCodec = "DD"
elif track.format == "AAC":
audioCodec = "AAC"
elif track.format == "DTS":
audioCodec = "DTS"
elif "DTS" in track.format:
audioCodec = "DTS"
else:
audioCodec = "DDP"
if track.channel_s == 8:
channels = "7.1"
elif track.channel_s == 6:
channels = "5.1"
elif track.channel_s == 2:
channels = "2.0"
elif track.channel_s == 1:
channels = "1.0"
else:
channels = "5.1"
audio_id = (
f"{audioCodec}{channels}.Atmos"
if "Atmos" in track.commercial_name
else f"{audioCodec}{channels}"
)
return audio_id
def Heigh(self):
try:
Width = int(self.CurrentWidth)
Heigh = int(self.CurrentHeigh)
except Exception:
return self.CurrentHeigh
res1080p = "1080p"
res720p = "720p"
sd = ""
if Width >= 3840:
return "2160p"
if Width >= 2560:
return "1440p"
if Width > 1920:
if Heigh > 1440:
return "2160p"
return "1440p"
if Width == 1920:
return res1080p
elif Width == 1280:
return res720p
if Width >= 1400:
return res1080p
if Width < 1400 and Width >= 1100:
return res720p
if Heigh == 1080:
return res1080p
elif Heigh == 720:
return res720p
if Heigh >= 900:
return res1080p
if Heigh < 900 and Heigh >= 700:
return res720p
return sd
def noTitle(self):
regex = re.compile("(.*) [S]([0-9]+)[E]([0-9]+)")
if regex.search(self.CurrentName):
return regex.search(self.CurrentName).group(0)
return self.CurrentName
def Run(self, command):
self.logger.debug("muxing command: {}".format(command))
def unbuffered(proc, stream="stdout"):
newlines = ["\n", "\r\n", "\r"]
stream = getattr(proc, stream)
with contextlib.closing(stream):
while True:
out = []
last = stream.read(1)
# Don't loop forever
if last == "" and proc.poll() is not None:
break
while last not in newlines:
# Don't loop forever
if last == "" and proc.poll() is not None:
break
out.append(last)
last = stream.read(1)
out = "".join(out)
yield out
proc = subprocess.Popen(
command,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
bufsize=1,
universal_newlines=True,
)
self.logger.info("\nStart Muxing...")
for line in unbuffered(proc):
if "Progress:" in line:
sys.stdout.write("\r%s" % (line))
sys.stdout.flush()
elif "Multiplexing" in line:
sys.stdout.write("\r%s" % (line.replace("Multiplexing", "Muxing")))
sys.stdout.flush()
elif "Error" in line:
sys.stdout.write("\r%s" % (line))
sys.stdout.flush()
self.logger.info("")
def setName(self):
outputVideo = (
self.scheme.replace(
"{t}", ripprocess().CleanMyFileNamePlease(self.CurrentName)
)
.replace("{r}", self.Heigh())
.replace("{s}", self.source_tag)
.replace("{ac}", self.AudioProfile)
.replace("{vc}", self.VideoProfile)
.replace("{gr}", self.Group)
)
for i in range(10):
outputVideo = re.sub(r"(\.\.)", ".", outputVideo)
if self.SeasonFolder:
outputVideo = os.path.join(os.path.abspath(self.SeasonFolder), outputVideo)
outputVideo = outputVideo.replace("\\", "/")
return f"{outputVideo}.mkv"
def setFolder(self):
folder = (
self.scheme.replace(
"{t}", ripprocess().CleanMyFileNamePlease(self.SeasonFolder)
)
.replace("{r}", self.Heigh())
.replace("{s}", self.source_tag)
.replace("{ac}", self.AudioProfile)
.replace("{vc}", self.VideoProfile)
.replace("{gr}", self.Group)
)
for i in range(10):
folder = re.sub(r"(\.\.)", ".", folder)
return folder
def LanguageList(self):
LanguageList = [
["Hindi", "hin", "hin", "Hindi"],
["Tamil", "tam", "tam", "Tamil"],
["Telugu", "tel", "tel", "Telugu"],
["English", "eng", "eng", "English"],
["Afrikaans", "af", "afr", "Afrikaans"],
["Arabic", "ara", "ara", "Arabic"],
["Arabic (Syria)", "araSy", "ara", "Arabic Syria"],
["Arabic (Egypt)", "araEG", "ara", "Arabic Egypt"],
["Arabic (Kuwait)", "araKW", "ara", "Arabic Kuwait"],
["Arabic (Lebanon)", "araLB", "ara", "Arabic Lebanon"],
["Arabic (Algeria)", "araDZ", "ara", "Arabic Algeria"],
["Arabic (Bahrain)", "araBH", "ara", "Arabic Bahrain"],
["Arabic (Iraq)", "araIQ", "ara", "Arabic Iraq"],
["Arabic (Jordan)", "araJO", "ara", "Arabic Jordan"],
["Arabic (Libya)", "araLY", "ara", "Arabic Libya"],
["Arabic (Morocco)", "araMA", "ara", "Arabic Morocco"],
["Arabic (Oman)", "araOM", "ara", "Arabic Oman"],
["Arabic (Saudi Arabia)", "araSA", "ara", "Arabic Saudi Arabia"],
["Arabic (Tunisia)", "araTN", "ara", "Arabic Tunisia"],
[
"Arabic (United Arab Emirates)",
"araAE",
"ara",
"Arabic United Arab Emirates",
],
["Arabic (Yemen)", "araYE", "ara", "Arabic Yemen"],
["Armenian", "hye", "arm", "Armenian"],
["Assamese", "asm", "asm", "Assamese"],
["Bengali", "ben", "ben", "Bengali"],
["Basque", "eus", "baq", "Basque"],
["British English", "enGB", "eng", "British English"],
["Bulgarian", "bul", "bul", "Bulgarian"],
["Cantonese", "None", "chi", "Cantonese"],
["Catalan", "cat", "cat", "Catalan"],
["Simplified Chinese", "zhoS", "chi", "Chinese Simplified"],
["Traditional Chinese", "zhoT", "chi", "Chinese Traditional"],
["Croatian", "hrv", "hrv", "Croatian"],
["Czech", "ces", "cze", "Czech"],
["Danish", "dan", "dan", "Danish"],
["Dutch", "nld", "dut", "Dutch"],
["Estonian", "est", "est", "Estonian"],
["Filipino", "fil", "fil", "Filipino"],
["Finnish", "fin", "fin", "Finnish"],
["Flemish", "nlBE", "dut", "Flemish"],
["French", "fra", "fre", "French"],
["French Canadian", "caFra", "fre", "French Canadian"],
["Canadian French", "caFra", "fre", "Canadian French"],
["German", "deu", "ger", "German"],
["Greek", "ell", "gre", "Greek"],
["Gujarati", "guj", "guj", "Gujarati"],
["Hebrew", "heb", "heb", "Hebrew"],
["Hungarian", "hun", "hun", "Hungarian"],
["Icelandic", "isl", "ice", "Icelandic"],
["Indonesian", "ind", "ind", "Indonesian"],
["Italian", "ita", "ita", "Italian"],
["Japanese", "jpn", "jpn", "Japanese"],
["Kannada (India)", "kan", "kan", "Kannada (India)"],
["Khmer", "khm", "khm", "Khmer"],
["Klingon", "tlh", "tlh", "Klingon"],
["Korean", "kor", "kor", "Korean"],
["Lithuanian", "lit", "lit", "Lithuanian"],
["Latvian", "lav", "lav", "Latvian"],
["Malay", "msa", "may", "Malay"],
["Malayalam", "mal", "mal", "Malayalam"],
["Mandarin", "None", "chi", "Mandarin"],
["Mandarin Chinese (Simplified)", "zh-Hans", "chi", "Simplified"],
["Mandarin Chinese (Traditional)", "zh-Hant", "chi", "Traditional"],
["Yue Chinese", "yue", "chi", "(Yue Chinese)"],
["Manipuri", "mni", "mni", "Manipuri"],
["Marathi", "mar", "mar", "Marathi"],
["No Dialogue", "zxx", "zxx", "No Dialogue"],
["Norwegian", "nor", "nor", "Norwegian"],
["Norwegian Bokmal", "nob", "nob", "Norwegian Bokmal"],
["Persian", "fas", "per", "Persian"],
["Polish", "pol", "pol", "Polish"],
["Portuguese", "por", "por", "Portuguese"],
["Brazilian Portuguese", "brPor", "por", "Brazilian Portuguese"],
["Punjabi", "pan", "pan", "Punjabi"],
["Panjabi", "pan", "pan", "Panjabi"],
["Romanian", "ron", "rum", "Romanian"],
["Russian", "rus", "rus", "Russian"],
["Serbian", "srp", "srp", "Serbian"],
["Sinhala", "sin", "sin", "Sinhala"],
["Slovak", "slk", "slo", "Slovak"],
["Slovenian", "slv", "slv", "Slovenian"],
["Spanish", "spa", "spa", "Spanish"],
["European Spanish", "euSpa", "spa", "European Spanish"],
["Swedish", "swe", "swe", "Swedish"],
["Thai", "tha", "tha", "Thai"],
["Tagalog", "tgl", "tgl", "Tagalog"],
["Turkish", "tur", "tur", "Turkish"],
["Ukrainian", "ukr", "ukr", "Ukrainian"],
["Urdu", "urd", "urd", "Urdu"],
["Vietnamese", "vie", "vie", "Vietnamese"],
]
return LanguageList
def ExtraLanguageList(self):
ExtraLanguageList = [
["Polish - Dubbing", "pol", "pol", "Polish - Dubbing"],
["Polish - Lektor", "pol", "pol", "Polish - Lektor"],
]
return ExtraLanguageList
def AddChapters(self):
if os.path.isfile(self.CurrentName_Original + " Chapters.txt"):
self.merge += [
"--chapter-charset",
"UTF-8",
"--chapters",
self.CurrentName_Original + " Chapters.txt",
]
return
def AddVideo(self):
inputVideo = None
videofiles = [
"{} [{}p]_Demuxed.mp4",
"{} [{}p]_Demuxed.mp4",
"{} [{}p] [UHD]_Demuxed.mp4",
"{} [{}p] [UHD]_Demuxed.mp4",
"{} [{}p] [VP9]_Demuxed.mp4",
"{} [{}p] [HIGH]_Demuxed.mp4",
"{} [{}p] [VP9]_Demuxed.mp4",
"{} [{}p] [HEVC]_Demuxed.mp4",
"{} [{}p] [HDR]_Demuxed.mp4",
"{} [{}p] [HDR-DV]_Demuxed.mp4",
]
for videofile in videofiles:
filename = videofile.format(self.CurrentName_Original, self.CurrentHeigh)
if os.path.isfile(filename):
inputVideo = filename
break
if not inputVideo:
self.logger.info("cannot found video file.")
exit(-1)
if self.default_mux:
outputVideo = (
re.compile("|".join([".h264", ".h265", ".vp9", ".mp4"])).sub("", inputVideo)
+ ".mkv"
)
if self.SeasonFolder:
outputVideo = os.path.join(
os.path.abspath(self.SeasonFolder), outputVideo
)
outputVideo = outputVideo.replace("\\", "/")
else:
outputVideo = self.setName()
self.outputVideo = outputVideo
if self.fps24:
self.merge += [
self.mkvmerge,
"--output",
outputVideo,
"--default-duration",
"0:24000/1001p",
"--language",
"0:und",
"--default-track",
"0:yes",
"(",
inputVideo,
")",
]
else:
self.merge += [
self.mkvmerge,
"--output",
outputVideo,
"--title",
'RAB',
"(",
inputVideo,
")",
]
return
def AddAudio(self):
audiofiles = [
"{} {}.ac3",
"{} {} - Audio Description.ac3",
"{} {}.eac3",
"{} {} - Audio Description.eac3",
"{} {}.aac",
"{} {} - Audio Description.aac",
]
for (audio_language, subs_language, language_id, language_name,) in (
self.LanguageList() + self.ExtraLanguageList()
):
for audiofile in audiofiles:
filename = audiofile.format(self.CurrentName_Original, audio_language)
if os.path.isfile(filename):
self.merge += [
"--language",
f"0:{language_id}",
"--track-name",
"0:Audio Description" if 'Audio Description' in filename
else f"0:{language_name}",
"--default-track",
"0:yes"
if subs_language == self.muxer_settings["AUDIO"]
else "0:no",
"(",
filename,
")",
]
return
def AddSubtitles(self):
srts = [
"{} {}.srt",
]
forceds = [
"{} forced-{}.srt",
]
sdhs = [
"{} sdh-{}.srt",
]
for (
audio_language,
subs_language,
language_id,
language_name,
) in self.LanguageList():
for subtitle in srts:
filename = subtitle.format(self.CurrentName_Original, subs_language)
if os.path.isfile(filename):
self.merge += [
"--language",
f"0:{language_id}",
"--track-name",
f"0:{language_name}",
"--forced-track",
"0:no",
"--default-track",
"0:yes"
if subs_language == self.muxer_settings["SUB"]
else "0:no",
"--compression",
"0:none",
"(",
filename,
")",
]
for subtitle in forceds:
filename = subtitle.format(self.CurrentName_Original, subs_language)
if os.path.isfile(filename):
self.merge += [
"--language",
f"0:{language_id}",
"--track-name",
f"0:Forced",
"--forced-track",
"0:yes",
"--default-track",
"0:no",
"--compression",
"0:none",
"(",
filename,
")",
]
for subtitle in sdhs:
filename = subtitle.format(self.CurrentName_Original, subs_language)
if os.path.isfile(filename):
self.merge += [
"--language",
f"0:{language_id}",
"--track-name",
f"0:SDH",
"--forced-track",
"0:no",
"--default-track",
"0:no",
"--compression",
"0:none",
"(",
filename,
")",
]
return
def startMux(self):
self.AddVideo()
self.AddAudio()
self.AddSubtitles()
self.AddChapters()
if not os.path.isfile(self.outputVideo):
self.Run(self.merge + self.Extras)
return self.outputVideo
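A hedged usage sketch for the Muxer class above; the kwargs mirror what __init__ reads, the sample names are placeholders, and it assumes the demuxed video/audio/subtitle files are already present in the working directory (get_audio_id and get_video_id probe them during construction).

from helpers.Muxer import Muxer

muxer = Muxer(
    CurrentName="Stranger Things S01E01",   # base name of the demuxed files (placeholder)
    SeasonFolder="Stranger Things S01",     # season folder, may be rewritten by the scheme
    CurrentHeigh=1080,
    CurrentWidth=1920,
    Source="NF",                            # source tag used for {s} (assumed value)
    group={
        "UPLOAD": None, "IMDB": None, "SCHEME": None, "PACK": None,
        "GROUP": None, "CONFIRM": None, "EXTRA_FOLDER": None,
    },
)
print(muxer.startMux())  # returns the path of the muxed .mkv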

View File

@ -0,0 +1,551 @@
import base64, binascii, json, os, re, random, requests, string, time, traceback, logging
from datetime import datetime
from Cryptodome.Cipher import AES, PKCS1_OAEP
from Cryptodome.Util import Padding
from Cryptodome.Hash import HMAC, SHA256
from Cryptodome.PublicKey import RSA
from pywidevine.cdm import cdm, deviceconfig
from configs.config import tool
class MSLClient:
def __init__(self, profiles=None, wv_keyexchange=True, proxies=None):
self.session = requests.session()
self.logger = logging.getLogger(__name__)
if proxies:
self.session.proxies.update(proxies)
self.nf_endpoints = {
"manifest": "https://www.netflix.com/nq/msl_v1/cadmium/pbo_manifests/^1.0.0/router",
"license": "https://www.netflix.com/nq/msl_v1/cadmium/pbo_licenses/^1.0.0/router",
}
######################################################################
self.config = tool().config("NETFLIX")
self.email = self.config["email"]
self.password = self.config["password"]
self.device = tool().devices()["NETFLIX-MANIFEST"]
self.save_rsa_location = self.config["token_file"]
self.languages = self.config["manifest_language"]
self.license_path = None
######################################################################
if os.path.isfile(self.save_rsa_location):
self.generatePrivateKey = RSA.importKey(
json.loads(open(self.save_rsa_location, "r").read())["RSA_KEY"]
)
else:
self.generatePrivateKey = RSA.generate(2048)
if wv_keyexchange:
self.wv_keyexchange = True
self.cdm = cdm.Cdm()
self.cdm_session = None
else:
self.wv_keyexchange = False
self.cdm = None
self.cdm_session = None
        self.manifest_challenge = ''  # set desired wv data here to override the wv key-exchange data
self.profiles = profiles
self.logger.debug("Using profiles: {}".format(self.profiles))
esn = self.config["androidEsn"]
if esn is None:
self.logger.error(
                "\nAndroid ESN not found; set an ESN with the CDM system ID in config.py"
)
else:
self.esn = esn
self.logger.debug("Using esn: " + self.esn)
self.messageid = random.randint(0, 2 ** 52)
self.session_keys = {} #~
self.header = {
"sender": self.esn,
"handshake": True,
"nonreplayable": 2,
"capabilities": {"languages": [], "compressionalgos": []},
"recipient": "Netflix",
"renewable": True,
"messageid": self.messageid,
"timestamp": time.time(),
}
self.setRSA()
def get_header_extra(self):
if self.wv_keyexchange:
self.cdm_session = self.cdm.open_session(
None,
deviceconfig.DeviceConfig(self.device),
b"\x0A\x7A\x00\x6C\x38\x2B",
True,
)
wv_request = base64.b64encode(
self.cdm.get_license_request(self.cdm_session)
).decode("utf-8")
self.header["keyrequestdata"] = [
{
"scheme": "WIDEVINE",
"keydata": {
"keyrequest": wv_request
}
}
]
else:
self.header["keyrequestdata"] = [
{
"scheme": "ASYMMETRIC_WRAPPED",
"keydata": {
"publickey": base64.b64encode(
self.generatePrivateKey.publickey().exportKey("DER")
).decode("utf8"),
"mechanism": "JWK_RSA",
"keypairid": "rsaKeypairId",
},
}
]
return self.header
def setRSA(self):
if os.path.isfile(self.save_rsa_location):
master_token = self.load_tokens()
expires = master_token["expiration"]
valid_until = datetime.utcfromtimestamp(int(expires))
present_time = datetime.now()
difference = valid_until - present_time
difference = difference.total_seconds() / 60 / 60
if difference < 10:
self.logger.debug("rsa file found. expired soon")
self.session_keys["session_keys"] = self.generate_handshake()
else:
self.logger.debug("rsa file found")
self.session_keys["session_keys"] = {
"mastertoken": master_token["mastertoken"],
"sequence_number": master_token["sequence_number"],
"encryption_key": master_token["encryption_key"],
"sign_key": master_token["sign_key"],
}
else:
self.logger.debug("rsa file not found")
self.session_keys["session_keys"] = self.generate_handshake()
def load_playlist(self, viewable_id):
payload = {
"version": 2,
"url": "/manifest", #"/licensedManifest"
"id": int(time.time()),
"languages": self.languages,
"params": {
#"challenge": self.manifest_challenge,
"type": "standard",
"viewableId": viewable_id,
"profiles": self.profiles,
"flavor": "STANDARD", #'PRE_FETCH'
"drmType": "widevine",
"usePsshBox": True,
"useHttpsStreams": True,
"supportsPreReleasePin": True,
"supportsWatermark": True,
'supportsUnequalizedDownloadables': True,
'requestEligibleABTests': True,
"isBranching": False,
'isNonMember': False,
'isUIAutoPlay': False,
"imageSubtitleHeight": 1080,
"uiVersion": "shakti-v4bf615c3",
'uiPlatform': 'SHAKTI',
"clientVersion": "6.0026.291.011",
'desiredVmaf': 'plus_lts', # phone_plus_exp
"showAllSubDubTracks": True,
#"preferredTextLocale": "ar",
#"preferredAudioLocale": "ar",
#"maxSupportedLanguages": 2,
"preferAssistiveAudio": False,
"deviceSecurityLevel": "3000",
'licenseType': 'standard',
'titleSpecificData': {
str(viewable_id): {
'unletterboxed': True
}
},
"videoOutputInfo": [
{
"type": "DigitalVideoOutputDescriptor",
"outputType": "unknown",
"supportedHdcpVersions": ['2.2'],
"isHdcpEngaged": True,
}
],
},
}
request_data = self.msl_request(payload)
response = self.session.post(self.nf_endpoints["manifest"], data=request_data)
manifest = json.loads(json.dumps(self.decrypt_response(response.text)))
if manifest.get("result"):
#with open('videoTraks.json', 'w', encoding='utf-8') as d:
#["result"]["video_tracks"]
# d.write(json.dumps(manifest, indent=2))
self.license_path = manifest["result"]["links"]["license"]["href"]
return manifest
if manifest.get("errormsg"):
self.logger.info(manifest["errormsg"])
return None
else:
self.logger.info(manifest)
return None
def decrypt_response(self, payload):
errored = False
try:
p = json.loads(payload)
if p.get("errordata"):
return json.loads(base64.b64decode(p["errordata"]).decode())
except:
payloads = re.split(
r',"signature":"[0-9A-Za-z/+=]+"}', payload.split("}}")[1]
)
payloads = [x + "}" for x in payloads]
new_payload = payloads[:-1]
chunks = []
for chunk in new_payload:
try:
payloadchunk = json.loads(chunk)["payload"]
encryption_envelope = payloadchunk
cipher = AES.new(
self.session_keys["session_keys"]["encryption_key"],
AES.MODE_CBC,
base64.b64decode(
json.loads(
base64.b64decode(encryption_envelope).decode("utf8")
)["iv"]
),
)
plaintext = cipher.decrypt(
base64.b64decode(
json.loads(
base64.b64decode(encryption_envelope).decode("utf8")
)["ciphertext"]
)
)
plaintext = json.loads(Padding.unpad(plaintext, 16).decode("utf8"))
data = plaintext["data"]
data = base64.b64decode(data).decode("utf8")
chunks.append(data)
except:
continue
decrypted_payload = "".join(chunks)
try:
return json.loads(decrypted_payload)
except:
traceback.print_exc()
self.logger.info("Unable to decrypt payloads...exiting")
exit(-1)
def generate_handshake(self):
self.logger.debug("generate_handshake")
header = self.get_header_extra()
request = {
"entityauthdata": {
"scheme": "NONE",
"authdata": {"identity": self.esn,}
},
"signature": "",
"headerdata": base64.b64encode(json.dumps(header).encode("utf8")).decode("utf8"),
}
response = self.session.post(
url=self.nf_endpoints["manifest"],
json=request,
)
try:
if response.json().get("errordata"):
self.logger.info("ERROR")
self.logger.info(
base64.b64decode(response.json()["errordata"]).decode()
)
exit(-1)
handshake = self.parse_handshake(response=response.json())
return handshake
except:
traceback.print_exc()
self.logger.info(response.text)
exit(-1)
def load_tokens(self):
with open(self.save_rsa_location, "r", encoding='utf-8') as f:
tokens_data = json.loads(f.read())
data = {
"mastertoken": tokens_data["mastertoken"],
"sequence_number": tokens_data["sequence_number"],
"encryption_key": base64.standard_b64decode(tokens_data["encryption_key"]),
"sign_key": base64.standard_b64decode(tokens_data["sign_key"]),
"RSA_KEY": tokens_data["RSA_KEY"],
"expiration": tokens_data["expiration"],
}
return data
def save_tokens(self, tokens_data):
data = {
"mastertoken": tokens_data["mastertoken"],
"sequence_number": tokens_data["sequence_number"],
"encryption_key": base64.standard_b64encode(
tokens_data["encryption_key"]
).decode("utf-8"),
"sign_key": base64.standard_b64encode(tokens_data["sign_key"]).decode(
"utf-8"
),
"RSA_KEY": tokens_data["RSA_KEY"],
"expiration": tokens_data["expiration"],
}
with open(self.save_rsa_location, 'w', encoding='utf-8') as f:
f.write(json.dumps(data, indent=2))
def parse_handshake(self, response):
headerdata = json.loads(base64.b64decode(response["headerdata"]).decode("utf8"))
keyresponsedata = headerdata["keyresponsedata"]
mastertoken = headerdata["keyresponsedata"]["mastertoken"]
sequence_number = json.loads(
base64.b64decode(mastertoken["tokendata"]).decode("utf8")
)["sequencenumber"]
if self.wv_keyexchange:
expected_scheme = "WIDEVINE"
else:
expected_scheme = "ASYMMETRIC_WRAPPED"
scheme = keyresponsedata["scheme"]
if scheme != expected_scheme:
self.logger.info("Key Exchange failed:")
return False
keydata = keyresponsedata["keydata"]
if self.wv_keyexchange:
encryption_key, sign_key = self.__process_wv_keydata(keydata)
else:
encryption_key, sign_key = self.__parse_rsa_wrapped_crypto_keys(keydata)
tokens_data = {
"mastertoken": mastertoken,
"sequence_number": sequence_number,
"encryption_key": encryption_key,
"sign_key": sign_key,
}
tokens_data_save = tokens_data
tokens_data_save.update(
{"RSA_KEY": self.generatePrivateKey.exportKey().decode()}
)
tokens_data_save.update(
{
"expiration": json.loads(
base64.b64decode(
json.loads(base64.b64decode(response["headerdata"]))[
"keyresponsedata"
]["mastertoken"]["tokendata"]
)
)["expiration"]
}
)
self.save_tokens(tokens_data_save)
return tokens_data
def __process_wv_keydata(self, keydata):
wv_response_b64 = keydata["cdmkeyresponse"] # pass as b64
encryptionkeyid = base64.standard_b64decode(keydata["encryptionkeyid"])
hmackeyid = base64.standard_b64decode(keydata["hmackeyid"])
self.cdm.provide_license(self.cdm_session, wv_response_b64)
keys = self.cdm.get_keys(self.cdm_session)
self.logger.debug("wv key exchange: obtained wv key exchange keys %s" % keys)
return (
self.__find_wv_key(encryptionkeyid, keys, ["AllowEncrypt", "AllowDecrypt"]),
self.__find_wv_key(hmackeyid, keys, ["AllowSign", "AllowSignatureVerify"]),
)
def __find_wv_key(self, kid, keys, permissions):
for key in keys:
if key.kid != kid:
continue
if key.type != "OPERATOR_SESSION":
self.logger.debug(
"wv key exchange: Wrong key type (not operator session) key %s"
% key
)
continue
if not set(permissions) <= set(key.permissions):
self.logger.debug(
"wv key exchange: Incorrect permissions, key %s, needed perms %s"
% (key, permissions)
)
continue
return key.key
return None
def __parse_rsa_wrapped_crypto_keys(self, keydata):
# Init Decryption
encrypted_encryption_key = base64.b64decode(keydata["encryptionkey"])
encrypted_sign_key = base64.b64decode(keydata["hmackey"])
oaep_cipher = PKCS1_OAEP.new(self.generatePrivateKey)
encryption_key_data = json.loads(
oaep_cipher.decrypt(encrypted_encryption_key).decode("utf8")
)
encryption_key = self.base64_check(encryption_key_data["k"])
sign_key_data = json.loads(
oaep_cipher.decrypt(encrypted_sign_key).decode("utf8")
)
sign_key = self.base64_check(sign_key_data["k"])
return (encryption_key, sign_key)
def base64key_decode(self, payload):
l = len(payload) % 4
if l == 2:
payload += "=="
elif l == 3:
payload += "="
elif l != 0:
raise ValueError("Invalid base64 string")
return base64.urlsafe_b64decode(payload.encode("utf-8"))
def base64_check(self, string):
while len(string) % 4 != 0:
string = string + "="
return base64.urlsafe_b64decode(string.encode())
def msl_request(self, data, is_handshake=False):
header = self.header.copy()
header["handshake"] = is_handshake
header["userauthdata"] = {
"scheme": "EMAIL_PASSWORD",
"authdata": {"email": self.email, "password": self.password},
}
header_envelope = self.msl_encrypt(self.session_keys, json.dumps(header))
header_signature = HMAC.new(
self.session_keys["session_keys"]["sign_key"], header_envelope, SHA256
).digest()
encrypted_header = {
"headerdata": base64.b64encode(header_envelope).decode("utf8"),
"signature": base64.b64encode(header_signature).decode("utf8"),
"mastertoken": self.session_keys["session_keys"]["mastertoken"],
}
payload = {
"messageid": self.messageid,
"data": base64.b64encode(json.dumps(data).encode()).decode("utf8"),
"sequencenumber": 1,
"endofmsg": True,
}
payload_envelope = self.msl_encrypt(self.session_keys, json.dumps(payload))
payload_signature = HMAC.new(
self.session_keys["session_keys"]["sign_key"], payload_envelope, SHA256
).digest()
payload_chunk = {
"payload": base64.b64encode(payload_envelope).decode("utf8"),
"signature": base64.b64encode(payload_signature).decode("utf8"),
}
return json.dumps(encrypted_header) + json.dumps(payload_chunk)
def msl_encrypt(self, msl_session, plaintext):
cbc_iv = os.urandom(16)
encryption_envelope = {
"keyid": "%s_%s"
% (self.esn, msl_session["session_keys"]["sequence_number"]),
"sha256": "AA==",
"iv": base64.b64encode(cbc_iv).decode("utf8"),
}
plaintext = Padding.pad(plaintext.encode("utf8"), 16)
cipher = AES.new(
msl_session["session_keys"]["encryption_key"], AES.MODE_CBC, cbc_iv
)
ciphertext = cipher.encrypt(plaintext)
encryption_envelope["ciphertext"] = base64.b64encode(ciphertext).decode("utf8")
return json.dumps(encryption_envelope).encode("utf8")
def get_license(self, challenge, session_id):
if not isinstance(challenge, bytes):
raise TypeError("challenge must be of type bytes")
if not isinstance(session_id, str):
raise TypeError("session_id must be of type string")
timestamp = int(time.time() * 10000)
license_request_data = {
"version": 2,
"url": self.license_path,
"id": timestamp,
"languages": "en_US",
"echo": "drmsessionId",
"params": [
{
"drmSessionId": session_id,
"clientTime": int(timestamp / 10000),
"challengeBase64": base64.b64encode(challenge).decode("utf8"),
"xid": str(timestamp + 1610),
}
]
}
request_data = self.msl_request(license_request_data)
resp = self.session.post(url=self.nf_endpoints["license"],data=request_data)
try:
resp.json()
except ValueError:
msl_license_data = json.loads(json.dumps(self.decrypt_response(resp.text)))
if msl_license_data.get("result"):
return msl_license_data
if msl_license_data.get("errormsg"):
raise ValueError(msl_license_data["errormsg"])
raise ValueError(msl_license_data)
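A hedged sketch of the manifest/license round trip this client exposes, the same flow the key-grabbing helper below uses; the viewable ID is a placeholder and a working Widevine CDM is needed to build the actual challenge bytes.

from helpers.Parsers.Netflix.MSLClient import MSLClient

client = MSLClient(profiles=["playready-h264mpl30-dash", "heaac-2-dash", "webvtt-lssdh-ios8"])
manifest = client.load_playlist(80117470)  # placeholder viewable ID
if manifest:
    pssh_b64 = manifest["result"]["video_tracks"][0]["drmHeader"]["bytes"]
    # A Widevine challenge built from pssh_b64 would then be sent with:
    # client.get_license(challenge_bytes, session_id)  # challenge as bytes, session_id as str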

View File

@ -0,0 +1,159 @@
import time, os, json, logging, base64
from helpers.Parsers.Netflix.MSLClient import MSLClient
from configs.config import tool
from pywidevine.decrypt.wvdecryptcustom import WvDecrypt
logger = logging.getLogger(__name__)
''' "av1-main-L20-dash-cbcs-prk",
"av1-main-L21-dash-cbcs-prk",
"av1-main-L30-dash-cbcs-prk",
"av1-main-L31-dash-cbcs-prk",
"av1-main-L40-dash-cbcs-prk",
"av1-main-L41-dash-cbcs-prk",
"av1-main-L50-dash-cbcs-prk",
"av1-main-L51-dash-cbcs-prk",'''
''' "vp9-profile0-L21-dash-cenc",
"vp9-profile0-L30-dash-cenc",
"vp9-profile0-L31-dash-cenc",
"vp9-profile0-L40-dash-cenc",
"vp9-profile2-L30-dash-cenc-prk",
"vp9-profile2-L31-dash-cenc-prk",
"vp9-profile2-L40-dash-cenc-prk",
"vp9-profile2-L50-dash-cenc-prk",
"vp9-profile2-L51-dash-cenc-prk"'''
def from_kid(kid):
array_of_bytes = bytearray(b"\x00\x00\x002pssh\x00\x00\x00\x00")
array_of_bytes.extend(bytes.fromhex("edef8ba979d64acea3c827dcd51d21ed"))
array_of_bytes.extend(b"\x00\x00\x00\x12\x12\x10")
array_of_bytes.extend(bytes.fromhex(kid.replace("-", "")))
pssh = base64.b64encode(bytes.fromhex(array_of_bytes.hex()))
return pssh.decode()
def __profiles(profile, addHEVCDO=False):
profiles = [
"heaac-2-dash",
"dfxp-ls-sdh",
"webvtt-lssdh-ios8",
"BIF240",
"BIF320",
]
if profile == "High KEYS":
profiles += [
"playready-h264hpl22-dash",
"playready-h264hpl30-dash",
"playready-h264hpl31-dash",
#'playready-h264hpl40-dash'
]
elif profile == "Main KEYS":
profiles += [
"playready-h264mpl30-dash",
]
elif profile == "HEVC KEYS":
profiles += [
"hevc-main-L30-dash-cenc",
"hevc-main10-L30-dash-cenc",
"hevc-main10-L30-dash-cenc-prk",
"hevc-main-L31-dash-cenc"
"hevc-main10-L31-dash-cenc",
"hevc-main10-L31-dash-cenc-prk",
"hevc-main-L40-dash-cenc",
"hevc-main10-L40-dash-cenc",
"hevc-main10-L40-dash-cenc-prk",
"hevc-main-L41-dash-cenc",
"hevc-main10-L41-dash-cenc",
"hevc-main10-L41-dash-cenc-prk"
]
if addHEVCDO:
profiles += [
"hevc-main10-L31-dash-cenc-prk-do",
"hevc-main10-L31-dash-cenc-prk-do",
"hevc-main10-L40-dash-cenc-prk-do",
"hevc-main10-L41-dash-cenc-prk-do",
]
elif profile == 'HDR-10 KEYS':
profiles += [
"hevc-hdr-main10-L30-dash-cenc",
"hevc-hdr-main10-L30-dash-cenc-prk",
"hevc-hdr-main10-L31-dash-cenc",
"hevc-hdr-main10-L31-dash-cenc-prk",
"hevc-hdr-main10-L40-dash-cenc",
"hevc-hdr-main10-L41-dash-cenc",
"hevc-hdr-main10-L40-dash-cenc-prk",
"hevc-hdr-main10-L41-dash-cenc-prk"
]
else:
profiles += [
"playready-h264mpl30-dash",
]
return profiles
def GettingKEYS_Netflixv2(nfID, profile): #
KEYS = []
available_profiles = [
"High KEYS",
"HEVC KEYS",
"HDR-10 KEYS",
"Main KEYS"
]
    if profile not in available_profiles:
logger.info("Error: Unknown profile: {}".format(profile))
exit(1)
logger.info(f"\nGetting {profile}...")
profiles = __profiles(profile)
try:
client = MSLClient(profiles=profiles)
resp = client.load_playlist(int(nfID))
if resp is None:
if profile == 'HEVC KEYS':
profiles = __profiles(profile, addHEVCDO=True)
client = MSLClient(profiles=profiles)
resp = client.load_playlist(int(nfID))
except Exception as e:
logger.error("Manifest Error: {}".format(e))
return KEYS
try:
#init_data_b64 = from_kid('0000000005edabd50000000000000000')
init_data_b64 = resp["result"]["video_tracks"][0]["drmHeader"]["bytes"]
except KeyError:
logger.error("cannot get pssh, {}".format(resp))
return KEYS
cert_data_b64 = "CAUSwwUKvQIIAxIQ5US6QAvBDzfTtjb4tU/7QxiH8c+TBSKOAjCCAQoCggEBAObzvlu2hZRsapAPx4Aa4GUZj4/GjxgXUtBH4THSkM40x63wQeyVxlEEo1D/T1FkVM/S+tiKbJiIGaT0Yb5LTAHcJEhODB40TXlwPfcxBjJLfOkF3jP6wIlqbb6OPVkDi6KMTZ3EYL6BEFGfD1ag/LDsPxG6EZIn3k4S3ODcej6YSzG4TnGD0szj5m6uj/2azPZsWAlSNBRUejmP6Tiota7g5u6AWZz0MsgCiEvnxRHmTRee+LO6U4dswzF3Odr2XBPD/hIAtp0RX8JlcGazBS0GABMMo2qNfCiSiGdyl2xZJq4fq99LoVfCLNChkn1N2NIYLrStQHa35pgObvhwi7ECAwEAAToQdGVzdC5uZXRmbGl4LmNvbRKAA4TTLzJbDZaKfozb9vDv5qpW5A/DNL9gbnJJi/AIZB3QOW2veGmKT3xaKNQ4NSvo/EyfVlhc4ujd4QPrFgYztGLNrxeyRF0J8XzGOPsvv9Mc9uLHKfiZQuy21KZYWF7HNedJ4qpAe6gqZ6uq7Se7f2JbelzENX8rsTpppKvkgPRIKLspFwv0EJQLPWD1zjew2PjoGEwJYlKbSbHVcUNygplaGmPkUCBThDh7p/5Lx5ff2d/oPpIlFvhqntmfOfumt4i+ZL3fFaObvkjpQFVAajqmfipY0KAtiUYYJAJSbm2DnrqP7+DmO9hmRMm9uJkXC2MxbmeNtJHAHdbgKsqjLHDiqwk1JplFMoC9KNMp2pUNdX9TkcrtJoEDqIn3zX9p+itdt3a9mVFc7/ZL4xpraYdQvOwP5LmXj9galK3s+eQJ7bkX6cCi+2X+iBmCMx4R0XJ3/1gxiM5LiStibCnfInub1nNgJDojxFA3jH/IuUcblEf/5Y0s1SzokBnR8V0KbA=="
device = tool().devices()["NETFLIX-LICENSE"]
wvdecrypt = WvDecrypt(
init_data_b64=init_data_b64, cert_data_b64=cert_data_b64, device=device
)
challenge = wvdecrypt.get_challenge()
current_sessionId = str(time.time()).replace(".", "")[0:-2]
data = client.get_license(challenge, current_sessionId)
try:
license_b64 = data["result"][0]["licenseResponseBase64"]
except Exception:
logger.error("MSL LICENSE Error Message: {}".format(data))
return KEYS
wvdecrypt.update_license(license_b64)
Correct, keyswvdecrypt = wvdecrypt.start_process()
KEYS = keyswvdecrypt
return KEYS
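A tiny usage sketch for the from_kid() helper above, which wraps a bare KID in a Widevine PSSH box and base64-encodes it; it assumes from_kid from this file is in scope, and the KID shown is the one left commented out in the source.

pssh_b64 = from_kid("0000000005edabd50000000000000000")
print(pssh_b64)  # base64 PSSH usable as init_data_b64 for WvDecrypt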

View File

@ -0,0 +1,736 @@
from helpers.ripprocess import ripprocess
from helpers.Parsers.Netflix.MSLClient import MSLClient
from configs.config import tool
import re, os, json, logging
def MSLprofiles():
PROFILES = {
"BASICS": ["BIF240", "BIF320", "webvtt-lssdh-ios8", "dfxp-ls-sdh"],
"MAIN": {
"SD": [
"playready-h264bpl30-dash",
"playready-h264mpl22-dash",
"playready-h264mpl30-dash",
],
"HD": [
"playready-h264bpl30-dash",
"playready-h264mpl22-dash",
"playready-h264mpl30-dash",
"playready-h264mpl31-dash",
],
"FHD": [
"playready-h264bpl30-dash",
"playready-h264mpl22-dash",
"playready-h264mpl30-dash",
"playready-h264mpl31-dash",
"playready-h264mpl40-dash",
],
"ALL": [
"playready-h264bpl30-dash",
"playready-h264mpl22-dash",
"playready-h264mpl30-dash",
"playready-h264mpl31-dash",
"playready-h264mpl40-dash",
],
},
"HIGH": {
"SD": [
"playready-h264hpl22-dash",
"playready-h264hpl30-dash",
],
"HD": [
"playready-h264hpl22-dash",
"playready-h264hpl30-dash",
"playready-h264hpl31-dash",
],
"FHD": [
"playready-h264hpl22-dash",
"playready-h264hpl30-dash",
"playready-h264hpl31-dash",
"playready-h264hpl40-dash",
],
"ALL": [
"playready-h264hpl22-dash",
"playready-h264hpl30-dash",
"playready-h264hpl31-dash",
"playready-h264hpl40-dash",
],
},
"HEVC": {
"SD": [
"hevc-main-L30-dash-cenc",
"hevc-main10-L30-dash-cenc",
"hevc-main10-L30-dash-cenc-prk",
],
"HD": [
"hevc-main-L30-dash-cenc",
"hevc-main10-L30-dash-cenc",
"hevc-main10-L30-dash-cenc-prk",
"hevc-main-L31-dash-cenc",
"hevc-main10-L31-dash-cenc",
"hevc-main10-L31-dash-cenc-prk",
],
"FHD": [
"hevc-main-L30-dash-cenc",
"hevc-main10-L30-dash-cenc",
"hevc-main10-L30-dash-cenc-prk",
"hevc-main-L31-dash-cenc"
"hevc-main10-L31-dash-cenc",
"hevc-main10-L31-dash-cenc-prk",
"hevc-main-L40-dash-cenc",
"hevc-main10-L40-dash-cenc",
"hevc-main10-L40-dash-cenc-prk",
"hevc-main-L41-dash-cenc",
"hevc-main10-L41-dash-cenc",
"hevc-main10-L41-dash-cenc-prk",
],
"ALL": [
"hevc-main-L30-dash-cenc",
"hevc-main10-L30-dash-cenc",
"hevc-main10-L30-dash-cenc-prk",
"hevc-main-L31-dash-cenc"
"hevc-main10-L31-dash-cenc",
"hevc-main10-L31-dash-cenc-prk",
"hevc-main-L40-dash-cenc",
"hevc-main10-L40-dash-cenc",
"hevc-main10-L40-dash-cenc-prk",
"hevc-main-L41-dash-cenc",
"hevc-main10-L41-dash-cenc",
"hevc-main10-L41-dash-cenc-prk",
],
},
"HEVCDO": {
"SD": [
"hevc-main10-L30-dash-cenc-prk-do",
],
"HD": [
"hevc-main10-L30-dash-cenc-prk-do",
"hevc-main10-L31-dash-cenc-prk-do"
],
"FHD": [
"hevc-main10-L31-dash-cenc-prk-do",
"hevc-main10-L31-dash-cenc-prk-do",
"hevc-main10-L40-dash-cenc-prk-do",
"hevc-main10-L41-dash-cenc-prk-do",
],
"ALL": [
"hevc-main10-L31-dash-cenc-prk-do",
"hevc-main10-L31-dash-cenc-prk-do",
"hevc-main10-L40-dash-cenc-prk-do",
"hevc-main10-L41-dash-cenc-prk-do",
],
},
"HDR": {
"SD": [
"hevc-hdr-main10-L30-dash-cenc",
"hevc-hdr-main10-L30-dash-cenc-prk",
],
"HD": [
"hevc-hdr-main10-L30-dash-cenc",
"hevc-hdr-main10-L30-dash-cenc-prk",
"hevc-hdr-main10-L31-dash-cenc",
"hevc-hdr-main10-L31-dash-cenc-prk",
],
"FHD": [
"hevc-hdr-main10-L30-dash-cenc",
"hevc-hdr-main10-L30-dash-cenc-prk",
"hevc-hdr-main10-L31-dash-cenc",
"hevc-hdr-main10-L31-dash-cenc-prk",
"hevc-hdr-main10-L40-dash-cenc",
"hevc-hdr-main10-L41-dash-cenc",
"hevc-hdr-main10-L40-dash-cenc-prk",
"hevc-hdr-main10-L41-dash-cenc-prk",
],
"ALL": [
"hevc-hdr-main10-L30-dash-cenc",
"hevc-hdr-main10-L30-dash-cenc-prk",
"hevc-hdr-main10-L31-dash-cenc",
"hevc-hdr-main10-L31-dash-cenc-prk",
"hevc-hdr-main10-L40-dash-cenc",
"hevc-hdr-main10-L41-dash-cenc",
"hevc-hdr-main10-L40-dash-cenc-prk",
"hevc-hdr-main10-L41-dash-cenc-prk",
],
},
}
return PROFILES
class get_manifest:
def __init__(self, args, nfid):
self.logger = logging.getLogger(__name__)
self.args = args
self.nfid = nfid
self.ripprocess = ripprocess()
self.profiles = MSLprofiles()
self.config = tool().config("NETFLIX")
def LoadProfies(self, addHEVCDO=False):
getHigh = False
profiles = self.profiles["BASICS"]
if self.args.video_main:
if self.args.customquality:
if int(self.args.customquality[0]) == 1080:
profiles += self.profiles["MAIN"]["FHD"]
elif (
int(self.args.customquality[0]) < 1080
and int(self.args.customquality[0]) >= 720
):
profiles += self.profiles["MAIN"]["HD"]
elif int(self.args.customquality[0]) < 720:
profiles += self.profiles["MAIN"]["SD"]
else:
profiles += self.profiles["MAIN"]["ALL"]
else:
if self.args.video_high:
if self.args.customquality:
if int(self.args.customquality[0]) == 1080:
profiles += self.profiles["HIGH"]["FHD"]
elif (
int(self.args.customquality[0]) < 1080
and int(self.args.customquality[0]) >= 720
):
profiles += self.profiles["HIGH"]["HD"]
elif int(self.args.customquality[0]) < 720:
profiles += self.profiles["HIGH"]["SD"]
else:
profiles += self.profiles["HIGH"]["ALL"]
else:
if self.args.hdr:
if self.args.customquality:
if int(self.args.customquality[0]) == 1080:
profiles += self.profiles["HDR"]["FHD"]
elif (
int(self.args.customquality[0]) < 1080
and int(self.args.customquality[0]) >= 720
):
profiles += self.profiles["HDR"]["HD"]
elif int(self.args.customquality[0]) < 720:
profiles += self.profiles["HDR"]["SD"]
else:
profiles += self.profiles["HDR"]["ALL"]
elif self.args.hevc:
if self.args.customquality:
if int(self.args.customquality[0]) == 1080:
profiles += self.profiles["HEVC"]["FHD"]
if addHEVCDO:
profiles += self.profiles['HEVCDO']['FHD']
elif (
int(self.args.customquality[0]) < 1080
and int(self.args.customquality[0]) >= 720
):
profiles += self.profiles["HEVC"]["HD"]
if addHEVCDO:
profiles += self.profiles['HEVCDO']['HD']
elif int(self.args.customquality[0]) < 720:
profiles += self.profiles["HEVC"]["SD"]
if addHEVCDO:
profiles += self.profiles['HEVCDO']['SD']
else:
profiles += self.profiles["HEVC"]["ALL"]
if addHEVCDO:
profiles += self.profiles['HEVCDO']['ALL']
else:
getHigh = True
if self.args.customquality:
if int(self.args.customquality[0]) == 1080:
profiles += self.profiles["MAIN"]["FHD"]
elif (
int(self.args.customquality[0]) < 1080
and int(self.args.customquality[0]) >= 720
):
profiles += self.profiles["MAIN"]["HD"]
elif int(self.args.customquality[0]) < 720:
profiles += self.profiles["MAIN"]["SD"]
else:
profiles += self.profiles["MAIN"]["ALL"]
if self.args.aformat_2ch:
if str(self.args.aformat_2ch[0]) == "aac":
profiles.append("heaac-2-dash")
profiles.append("heaac-2hq-dash")
elif str(self.args.aformat_2ch[0]) == "eac3":
profiles.append("ddplus-2.0-dash")
elif str(self.args.aformat_2ch[0]) == "ogg":
profiles.append("playready-oggvorbis-2-dash")
else:
if self.args.only_2ch_audio:
profiles.append("ddplus-2.0-dash")
else:
if self.args.aformat_51ch:
if str(self.args.aformat_51ch[0]) == "aac":
profiles.append("heaac-5.1-dash")
profiles.append("heaac-5.1hq-dash")
elif str(self.args.aformat_51ch[0]) == "eac3":
profiles.append("ddplus-5.1-dash")
profiles.append("ddplus-5.1hq-dash")
elif str(self.args.aformat_51ch[0]) == "ac3":
profiles.append("dd-5.1-dash")
elif str(self.args.aformat_51ch[0]) == "atmos":
profiles.append("dd-5.1-dash")
profiles.append("ddplus-atmos-dash")
else:
profiles.append("dd-5.1-dash")
profiles.append("ddplus-5.1-dash")
profiles.append("ddplus-5.1hq-dash")
else:
profiles.append("ddplus-2.0-dash")
profiles.append("dd-5.1-dash")
profiles.append("ddplus-5.1-dash")
profiles.append("ddplus-5.1hq-dash")
profiles.append("ddplus-atmos-dash")
return list(set(profiles)), getHigh
def PyMSL(self, profiles):
client = MSLClient(profiles=profiles)
try:
resp = client.load_playlist(int(self.nfid))
return resp
except Exception as e:
self.logger.error("Manifest Error: {}".format(e))
return None
def HighVideoMSL(self):
# fetch the HIGH profile ladder so its top bitrate can be compared against MAIN
self.logger.info("Getting High Profile Manifest...")
profiles = self.profiles["BASICS"]
if self.args.customquality:
if int(self.args.customquality[0]) == 1080:
profiles += self.profiles["HIGH"]["FHD"]
elif (
int(self.args.customquality[0]) < 1080
and int(self.args.customquality[0]) >= 720
):
profiles += self.profiles["HIGH"]["HD"]
elif int(self.args.customquality[0]) < 720:
profiles += self.profiles["HIGH"]["SD"]
else:
profiles += self.profiles["HIGH"]["ALL"]
resp = self.PyMSL(profiles=profiles)
VideoList = list()
manifest = resp["result"]
for video_track in manifest["video_tracks"]:
for downloadable in video_track["streams"]:
size_in_bytes = int(float(downloadable["size"]))
vid_size = (
f"{size_in_bytes/1048576:0.2f} MiB"
if size_in_bytes < 1073741824
else f"{size_in_bytes/1073741824:0.2f} GiB"
)
vid_url = downloadable["urls"][0]["url"]
L3 = 'L3' if 'SEGMENT_MAP_2KEY' in str(downloadable['tags']) else '' #
VideoList.append(
{
"Type": "video",
"Drm": downloadable["isDrm"],
"vmaf": downloadable["vmaf"],
"FrameRate": downloadable["framerate_value"],
"Height": downloadable["res_h"],
"Width": downloadable["res_w"],
"Size": vid_size,
"Url": vid_url,
"Bitrate": str(downloadable["bitrate"]),
"Profile": downloadable["content_profile"],
"L3": L3 #
}
)
VideoList = sorted(VideoList, key=lambda k: int(k["Bitrate"]))
if self.args.customquality:
inp_height = int(self.args.customquality[0])
top_height = sorted(VideoList, key=lambda k: int(k["Height"]))[-1]["Height"]
if top_height >= inp_height:
height = [x for x in VideoList if int(x["Height"]) >= inp_height]
if not height == []:
VideoList = height
return VideoList
def ParseVideo(self, resp, getHigh):
manifest = resp["result"]
VideoList = []
checkerinfo = ""
for video_track in manifest["video_tracks"]:
for downloadable in video_track["streams"]:
size_in_bytes = int(float(downloadable["size"]))
vid_size = (
f"{size_in_bytes/1048576:0.2f} MiB"
if size_in_bytes < 1073741824
else f"{size_in_bytes/1073741824:0.2f} GiB"
)
vid_url = downloadable["urls"][0]["url"]
VideoList.append(
{
"Type": "video",
"Drm": downloadable["isDrm"],
"vmaf": downloadable["vmaf"],
"FrameRate": downloadable["framerate_value"],
"Height": downloadable["res_h"],
"Width": downloadable["res_w"],
"Size": vid_size,
"Url": vid_url,
"Bitrate": str(downloadable["bitrate"]),
"Profile": downloadable["content_profile"],
}
)
VideoList = sorted(VideoList, key=lambda k: int(k["Bitrate"]))
self.logger.debug("VideoList: {}".format(VideoList))
if self.args.customquality:
inp_height = int(self.args.customquality[0])
top_height = sorted(VideoList, key=lambda k: int(k["Height"]))[-1]["Height"]
if top_height >= inp_height:
height = [x for x in VideoList if int(x["Height"]) >= inp_height]
if not height == []:
VideoList = height
if getHigh:
HighVideoList = self.HighVideoMSL()
if not HighVideoList == []:
checkerinfo = "\nNetflix Profile Checker v1.0\nMAIN: {}kbps | {}\nHIGH: {}kbps | {}\n\n{}\n"
checkerinfo = checkerinfo.format(
str(dict(VideoList[-1])["Bitrate"]),
str(dict(VideoList[-1])["Profile"]),
str(dict(HighVideoList[-1])["Bitrate"]),
str(dict(HighVideoList[-1])["Profile"]),
"result: MAIN is Better"
if int(dict(VideoList[-1])["Bitrate"])
>= int(dict(HighVideoList[-1])["Bitrate"])
else "result: HIGH is Better",
)
VideoList += HighVideoList
self.logger.debug("HighVideoList: {}".format(HighVideoList))
VideoList = sorted(VideoList, key=lambda k: int(k["Bitrate"]))
return VideoList, checkerinfo
def ParseAudioSubs(self, resp):
def remove_dups(List, keyword=""):
# remove duplicate entries based on the given key
Added_ = set()
Proper_ = []
for L in List:
if L[keyword] not in Added_:
Proper_.append(L)
Added_.add(L[keyword])
return Proper_
def isOriginal(language_text):
# function to detect the original audio ~
if "Original" in language_text:
return True
brackets = re.search(r"\[(.*)\]", language_text)
if brackets:
return True
return False
def noOriginal(language_text):
# strip the "[Original]" bracket tag from the language description so it still matches --alang input
brackets = re.search(r"\[(.*)\]", language_text)
if brackets:
return language_text.replace(brackets[0], "").strip()
return language_text
# parse audio and subtitle tracks
manifest = resp["result"]
AudioList, SubtitleList, ForcedList = list(), list(), list()
# parse audios and return all (AD, non AD) as a list
for audio_track in manifest["audio_tracks"]:
AudioDescription = 'Audio Description' if "audio description" in \
audio_track["languageDescription"].lower() else 'Audio'
Original = isOriginal(audio_track["languageDescription"])
LanguageName, LanguageCode = self.ripprocess.countrycode(
audio_track["language"]
)
LanguageName = noOriginal(audio_track["languageDescription"])
for downloadable in audio_track["streams"]:
aud_url = downloadable["urls"][0]["url"]
size = (
str(format(float(int(downloadable["size"])) / 1058816, ".2f"))
+ " MiB"
)
audioDict = {
"Type": AudioDescription,
"Drm": downloadable["isDrm"],
"Original": Original,
"Language": LanguageName,
"langAbbrev": LanguageCode,
"Size": size,
"Url": aud_url,
"channels": str(downloadable["channels"]),
"Bitrate": str(downloadable["bitrate"]),
"Profile": downloadable["content_profile"],
}
if self.args.custom_audio_bitrate:
# append only audio streams at or below the requested bitrate
if int(downloadable["bitrate"]) <= \
int(self.args.custom_audio_bitrate[0]):
AudioList.append(audioDict)
else:
AudioList.append(audioDict)
AudioList = sorted(AudioList, key=lambda k: int(k["Bitrate"]), reverse=True)
self.logger.debug("AudioList: {}".format(AudioList))
#################################################################################
AudioList = sorted( # keep only highest bitrate for every language
remove_dups(AudioList, keyword="Language"),
key=lambda k: int(k["Bitrate"]),
reverse=True,
)
OriginalAudioList = (  # used later to auto-select forced subs in the original language
AudioList
if len(AudioList) == 1
else [x for x in AudioList if x["Original"]]
)
#################################################################################
# now filter AudioList based on user input:
# --alang X X / --AD X X, or the original audio if neither is given
if self.args.AD:
ADlist = list()
UserLanguagesLower = list(map(lambda x: x.lower(), self.args.AD))
for aud in AudioList:
if aud['Type'] == 'Audio':
if self.args.allaudios:
ADlist.append(aud)
else:
if aud["Original"]:
ADlist.append(aud)
if aud['Type'] == 'Audio Description':
if (
aud["Language"].lower() in UserLanguagesLower
or aud["langAbbrev"].lower() in UserLanguagesLower
):
ADlist.append(aud)
AudioList = ADlist
if self.args.audiolang:
NewAudioList = list()
UserLanguagesLower = list(map(lambda x: x.lower(), self.args.audiolang))
for aud in AudioList:
if self.args.AD:
# I already have AD langs parsed
if aud['Type'] == 'Audio Description':
NewAudioList.append(aud)
if aud['Type'] == 'Audio':
if (
aud["Language"].lower() in UserLanguagesLower
or aud["langAbbrev"].lower() in UserLanguagesLower
):
NewAudioList.append(aud)
AudioList = NewAudioList
else:
# at this point the complete AudioList is available
if self.args.allaudios: # remove AD tracks if not --AD X X
AllaudiosList = list()
if self.args.AD:
for aud in AudioList:
AllaudiosList.append(aud)
AudioList = AllaudiosList
else:
for aud in AudioList:
if aud['Type'] == 'Audio':
AllaudiosList.append(aud)
AudioList.clear()
AudioList = AllaudiosList
else:
if self.args.AD:
AudioList = AudioList  # keep the AD selection built above
else:
# no audio options were given, so fall back to the original audio track
AudioList = [x for x in AudioList if x["Original"] or len(AudioList) == 1]
#####################################(Subtitles)#####################################
for text_track in manifest["timedtexttracks"]:
if (
not text_track["languageDescription"] == "Off"
and text_track["language"] is not None
):
Language, langAbbrev = self.ripprocess.countrycode(
text_track["language"]
)
Language = text_track["languageDescription"]
Type = text_track["trackType"]
rawTrackType = (
text_track["rawTrackType"]
.replace("closedcaptions", "CC")
.replace("subtitles", "SUB")
)
isForced = "NO"
if (
"CC" in rawTrackType
and langAbbrev != "ara"
and "dfxp-ls-sdh" in str(text_track["ttDownloadables"])
):
Profile = "dfxp-ls-sdh"
Url = next(
iter(
text_track["ttDownloadables"]["dfxp-ls-sdh"][
"downloadUrls"
].values()
)
)
else:
Profile = "webvtt-lssdh-ios8"
Url = next(
iter(
text_track["ttDownloadables"]["webvtt-lssdh-ios8"][
"downloadUrls"
].values()
)
)
SubtitleList.append(
{
"Type": Type,
"rawTrackType": rawTrackType,
"Language": Language,
"isForced": isForced,
"langAbbrev": langAbbrev,
"Url": Url,
"Profile": Profile,
}
)
self.logger.debug("SubtitleList: {}".format(SubtitleList))
SubtitleList = remove_dups(SubtitleList, keyword="Language")
if self.args.sublang:
NewSubtitleList = list()
UserLanguagesLower = list(map(lambda x: x.lower(), self.args.sublang))
for sub in SubtitleList:
if (
sub["Language"].lower() in UserLanguagesLower
or sub["langAbbrev"].lower() in UserLanguagesLower
):
NewSubtitleList.append(sub)
SubtitleList = remove_dups(NewSubtitleList, keyword="Language")
#####################################(Forced Subtitles)###############################
for text_track in manifest["timedtexttracks"]:
if text_track["isForcedNarrative"] and text_track["language"] is not None:
LanguageName, LanguageCode = self.ripprocess.countrycode(
text_track["language"]
)
# LanguageName = text_track["languageDescription"]  # use pycountry instead; the manifest description is often inaccurate
ForcedList.append(
{
"Type": text_track["trackType"],
"rawTrackType": text_track["rawTrackType"]
.replace("closedcaptions", "CC ")
.replace("subtitles", "SUB"),
"Language": LanguageName,
"isForced": "YES",
"langAbbrev": LanguageCode,
"Url": next(
iter(
text_track["ttDownloadables"]["webvtt-lssdh-ios8"][
"downloadUrls"
].values()
)
),
"Profile": "webvtt-lssdh-ios8",
}
)
ForcedList = remove_dups(ForcedList, keyword="Language")
if self.args.forcedlang:
NewForcedList = []
UserLanguagesLower = list(map(lambda x: x.lower(), self.args.forcedlang))
for sub in ForcedList:
if (
sub["Language"].lower() in UserLanguagesLower
or sub["langAbbrev"].lower() in UserLanguagesLower
):
NewForcedList.append(sub)
ForcedList = remove_dups(NewForcedList, keyword="Language")
else:
if not self.args.allforcedlang:
if len(OriginalAudioList) != 0:
OriginalLanguage = OriginalAudioList[0]["langAbbrev"]
ForcedList = [
x for x in ForcedList if x["langAbbrev"] == OriginalLanguage
]
return AudioList, SubtitleList, ForcedList
def LoadManifest(self):
profiles, getHigh = self.LoadProfies()
if self.args.hevc:
self.logger.info("Getting HEVC Manifest...")
elif self.args.hdr:
self.logger.info("Getting HDR-10 Manifest...")
elif self.args.video_high:
self.logger.info("Getting High Profile Manifest...")
else:
self.logger.info("Getting Main Profile Manifest...")
resp = self.PyMSL(profiles=profiles)
if not resp:
if self.args.hevc:
profiles, getHigh = self.LoadProfies(addHEVCDO=True)
self.logger.info('\nGetting HEVC DO Manifest...')
resp = self.PyMSL(profiles=profiles)
if not resp:
self.logger.info("Failed getting Manifest")
exit(-1)
VideoList, checkerinfo = self.ParseVideo(resp, getHigh)
AudioList, SubtitleList, ForcedList = self.ParseAudioSubs(resp)
return VideoList, AudioList, SubtitleList, ForcedList, checkerinfo
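# --- Usage sketch (not part of the original file) ---------------------------
# A minimal example of how get_manifest might be driven. The Namespace below
# only mimics the argparse flags referenced in this class; the attribute values
# and the title id are placeholders, not taken from the real CLI.
if __name__ == "__main__":
    import argparse
    example_args = argparse.Namespace(
        customquality=["1080"], video_main=True, video_high=False,
        hevc=False, hdr=False, aformat_2ch=[], aformat_51ch=[],
        only_2ch_audio=False, custom_audio_bitrate=[], AD=[],
        audiolang=["English"], sublang=["English"], forcedlang=[],
        allaudios=False, allforcedlang=False,
    )
    videos, audios, subs, forced, info = get_manifest(example_args, 80000000).LoadManifest()
    print(info or "no profile-checker info")
    print("best video:", videos[-1]["Profile"], videos[-1]["Bitrate"], "kbps")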

369
helpers/aria2.py 100644

@ -0,0 +1,369 @@
import os
import shutil
import subprocess
import sys
import re
import logging
from configs.config import tool
from helpers.ripprocess import ripprocess
class aria2Error(Exception):
pass
class aria2_moded:
def __init__(self, aria2_download_command):
self.logger = logging.getLogger(__name__)
self.aria2_download_command = aria2_download_command
self.env = self.aria2DisableProxies()
self.ripprocess = ripprocess()
self.tool = tool()
self.LOGA_PATH = self.tool.paths()["LOGA_PATH"]
self.bin = self.tool.bin()
self.aria2c_exe = self.bin["aria2c"]
self.last_message_printed = 0
self.speed_radar = "0kbps"
def aria2DisableProxies(self):
env = os.environ.copy()
if env.get("http_proxy"):
del env["http_proxy"]
if env.get("HTTP_PROXY"):
del env["HTTP_PROXY"]
if env.get("https_proxy"):
del env["https_proxy"]
if env.get("HTTPS_PROXY"):
del env["HTTPS_PROXY"]
return env
def read_stdout(self, line):
speed = re.search(r"DL:(.+?)ETA", line)
eta = re.search(r"ETA:(.+?)]", line)
connection = re.search(r"CN:(.+?)DL", line)
percent = re.search(r"\((.*?)\)", line)
size = re.search(r" (.*?)/(.*?)\(", line)
if speed and eta and connection and percent and size:
percent = percent.group().strip().replace(")", "").replace("(", "")
size = size.group().strip().replace(")", "").replace("(", "")
complete, total = size.split("/")
connection = connection.group(1).strip()
eta = eta.group(1).strip()
speed = speed.group(1).strip()
self.speed_radar = speed
stdout_data = {
"percent": str(percent),
"size": str(total),
"complete": str(complete),
"total": str(total),
"connection": str(connection),
"eta": str(eta),
"speed": str(speed),
}
return stdout_data
return None
def if_errors(self, line):
if "exception" in str(line).lower() or "errorcode" in str(line).lower():
return line
return None
def delete_last_message_printed(self):
print(" " * len(str(self.last_message_printed)), end="\r")
def get_status(self, stdout_data: dict):
return "Aria2c_Status; Size: {Size} | Speed: {Speed} | ETA: {ETA} | Progress: {Complete} -> {Total} ({Percent})".format(
Size=stdout_data.get("size"),
Speed=stdout_data.get("speed"),
ETA=stdout_data.get("eta"),
Complete=stdout_data.get("complete"),
Total=stdout_data.get("total"),
Percent=stdout_data.get("percent"),
)
def is_download_completed(self, line):
if "(ok):download completed." in str(line).lower():
return "Download completed: (OK) ({}\\s)".format(self.speed_radar)
return None
def start_download(self):
proc = subprocess.Popen(
self.aria2_download_command,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
bufsize=1,
universal_newlines=True,
env=self.env,
)
check_errors = True
for line in getattr(proc, "stdout"):
if check_errors:
if self.if_errors(line):
raise aria2Error("Aria2c Error {}".format(self.if_errors(line)))
check_errors = False
stdout_data = self.read_stdout(line)
if stdout_data:
status_text = self.get_status(stdout_data)
self.delete_last_message_printed()
print(status_text, end="\r", flush=True)
self.last_message_printed = status_text
else:
download_finished = self.is_download_completed(line)
if download_finished:
self.delete_last_message_printed()
print(download_finished, end="\r", flush=True)
self.last_message_printed = download_finished
self.logger.info("")
return
class aria2:
def __init__(self,):
self.env = self.aria2DisableProxies()
self.ripprocess = ripprocess()
self.tool = tool()
self.bin = self.tool.bin()
self.LOGA_PATH = self.tool.paths()["LOGA_PATH"]
self.config = self.tool.aria2c()
self.aria2c_exe = self.bin["aria2c"]
self.logger = logging.getLogger(__name__)
def convert_args(self, arg):
if arg is True:
return "true"
elif arg is False:
return "false"
elif arg is None:
return "none"
else:
return str(arg)
def append_commands(self, command, option_define, option):
if option == "skip":
return []
return ["{}{}".format(option_define, option)]
def append_two_commands(self, command, cmd1, cmd2):
if cmd2 == "skip":
return []
return [cmd1] + [cmd2]
def aria2Options(
self,
allow_overwrite=True,
file_allocation=None,
auto_file_renaming=False,
async_dns=False,
retry_wait=5,
summary_interval=0,
enable_color=False,
connection=16,
concurrent_downloads=16,
split=16,
header="skip",
user_agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.61 Safari/537.36",
uri_selector="inorder",
console_log_level="skip",
download_result="hide",
quiet="false",
extra_commands=[],
):
options = [] + extra_commands
allow_overwrite = self.convert_args(allow_overwrite)
quiet = self.convert_args(quiet)
file_allocation = self.convert_args(file_allocation)
auto_file_renaming = self.convert_args(auto_file_renaming)
async_dns = self.convert_args(async_dns)
retry_wait = self.convert_args(retry_wait)
enable_color = self.convert_args(enable_color)
connection = self.convert_args(connection)
concurrent_downloads = self.convert_args(concurrent_downloads)
split = self.convert_args(split)
header = self.convert_args(header)
uri_selector = self.convert_args(uri_selector)
console_log_level = self.convert_args(console_log_level)
download_result = self.convert_args(download_result)
##############################################################################
options += self.append_commands(options, "--allow-overwrite=", allow_overwrite)
options += self.append_commands(options, "--quiet=", quiet)
options += self.append_commands(options, "--file-allocation=", file_allocation)
options += self.append_commands(
options, "--auto-file-renaming=", auto_file_renaming
)
options += self.append_commands(options, "--async-dns=", async_dns)
options += self.append_commands(options, "--retry-wait=", retry_wait)
options += self.append_commands(options, "--enable-color=", enable_color)
options += self.append_commands(
options, "--max-connection-per-server=", connection
)
options += self.append_commands(
options, "--max-concurrent-downloads=", concurrent_downloads
)
options += self.append_commands(options, "--split=", split)
options += self.append_commands(options, "--header=", header)
options += self.append_commands(options, "--uri-selector=", uri_selector)
options += self.append_commands(
options, "--console-log-level=", console_log_level
)
options += self.append_commands(options, "--download-result=", download_result)
return options
def aria2DisableProxies(self):
env = os.environ.copy()
if env.get("http_proxy"):
del env["http_proxy"]
if env.get("HTTP_PROXY"):
del env["HTTP_PROXY"]
if env.get("https_proxy"):
del env["https_proxy"]
if env.get("HTTPS_PROXY"):
del env["HTTPS_PROXY"]
return env
def aria2DownloadUrl(self, url, output, options, debug=False, moded=False):
self.debug = debug
aria2_download_command = [self.aria2c_exe] + options
if self.config["enable_logging"]:
LogFile = os.path.join(self.LOGA_PATH, output.replace(".mp4", ".log"))
if os.path.isfile(LogFile):
os.remove(LogFile)
aria2_download_command.append("--log={}".format(LogFile))
if not url.startswith("http"):
raise aria2Error("Url does not start with http/https: {}".format(url))
aria2_download_command.append(url)
aria2_download_command += self.append_two_commands(
aria2_download_command, "-o", output
)
self.aria2Debug("Sending Commands to aria2c...")
self.aria2Debug(aria2_download_command)
self.logger.debug("aria2_download_command: {}".format(aria2_download_command))
if moded:
aria2_moded_download = aria2_moded(aria2_download_command)
aria2_moded_download.start_download()
else:
try:
aria = subprocess.call(aria2_download_command, env=self.env)
except FileNotFoundError:
self.logger.info("UNABLE TO FIND {}".format(self.aria2c_exe))
exit(-1)
if aria != 0:
raise aria2Error("Aria2c exited with code {}".format(aria))
return
def aria2DownloadDash(
self, segments, output, options, debug=False, moded=False, fixbytes=False
):
self.debug = debug
aria2_download_command = [self.aria2c_exe] + options
if self.config["enable_logging"]:
LogFile = os.path.join(self.LOGA_PATH, output.replace(".mp4", ".log"))
if os.path.isfile(LogFile):
os.remove(LogFile)
aria2_download_command.append("--log={}".format(LogFile))
if not isinstance(segments, list) or segments == []:
raise aria2Error("invalid list of urls: {}".format(segments))
if moded:
raise aria2Error("moded version not supported for dash downloads atm...")
txt = output.replace(".mp4", ".txt")
folder = output.replace(".mp4", "")
segments = list(dict.fromkeys(segments))
if os.path.exists(folder):
shutil.rmtree(folder)
if not os.path.exists(folder):
os.makedirs(folder)
segments_location = []
opened_txt = open(txt, "w+")
for num, url in enumerate(segments, start=1):
segment_name = str(num).zfill(5) + ".mp4"
segments_location.append(os.path.join(*[os.getcwd(), folder, segment_name]))
opened_txt.write(url + f"\n out={segment_name}" + f"\n dir={folder}" + "\n")
opened_txt.close()
aria2_download_command += self.append_commands(
aria2_download_command, "--input-file=", txt
)
try:
aria = subprocess.call(aria2_download_command, env=self.env)
except FileNotFoundError:
self.logger.info("UNABLE TO FIND {}".format(self.aria2c_exe))
exit(-1)
if aria != 0:
raise aria2Error("Aria2c exited with code {}".format(aria))
self.logger.info("\nJoining files...")
openfile = open(output, "wb")
total = int(len(segments_location))
for current, fragment in enumerate(segments_location):
if os.path.isfile(fragment):
if fixbytes:
with open(fragment, "rb") as f:
wvdll = f.read()
if (
re.search(
b"tfhd\x00\x02\x00\x1a\x00\x00\x00\x01\x00\x00\x00\x02",
wvdll,
re.MULTILINE | re.DOTALL,
)
is not None
):
fw = open(fragment, "wb")
m = re.search(
b"tfhd\x00\x02\x00\x1a\x00\x00\x00\x01\x00\x00\x00",
wvdll,
re.MULTILINE | re.DOTALL,
)
segment_fixed = (
wvdll[: m.end()] + b"\x01" + wvdll[m.end() + 1 :]
)
fw.write(segment_fixed)
fw.close()
shutil.copyfileobj(open(fragment, "rb"), openfile)
os.remove(fragment)
self.ripprocess.updt(total, current + 1)
openfile.close()
if os.path.isfile(txt):
os.remove(txt)
if os.path.exists(folder):
shutil.rmtree(folder)
def aria2Debug(self, txt):
if self.debug:
self.logger.info(txt)
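# --- Usage sketch (not part of the original file) ---------------------------
# How the wrapper above is typically wired together; the URL and output name
# are placeholders. aria2Options() builds the CLI flags and aria2DownloadUrl()
# runs aria2c with the proxy variables stripped from the environment.
if __name__ == "__main__":
    downloader = aria2()
    opts = downloader.aria2Options(connection=16, split=16)
    downloader.aria2DownloadUrl(
        "https://example.com/sample.mp4", "sample.mp4", opts, debug=True, moded=False
    )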


@ -0,0 +1,116 @@
import codecs
import math
import os
import re
class dfxp_to_srt:
def __init__(self):
self.__replace__ = "empty_line"
def leading_zeros(self, value, digits=2):
value = "000000" + str(value)
return value[-digits:]
def convert_time(self, raw_time):
if int(raw_time) == 0:
return "{}:{}:{},{}".format(0, 0, 0, 0)
ms = "000"
if len(raw_time) > 4:
ms = self.leading_zeros(int(raw_time[:-4]) % 1000, 3)
time_in_seconds = int(raw_time[:-7]) if len(raw_time) > 7 else 0
second = self.leading_zeros(time_in_seconds % 60)
minute = self.leading_zeros(int(math.floor(time_in_seconds / 60)) % 60)
hour = self.leading_zeros(int(math.floor(time_in_seconds / 3600)))
return "{}:{}:{},{}".format(hour, minute, second, ms)
def xml_id_display_align_before(self, text):
align_before_re = re.compile(
u'<region.*tts:displayAlign="before".*xml:id="(.*)"/>'
)
has_align_before = re.search(align_before_re, text)
if has_align_before:
return has_align_before.group(1)
return u""
def xml_to_srt(self, text):
def append_subs(start, end, prev_content, format_time):
subs.append(
{
"start_time": self.convert_time(start) if format_time else start,
"end_time": self.convert_time(end) if format_time else end,
"content": u"\n".join(prev_content),
}
)
display_align_before = self.xml_id_display_align_before(text)
begin_re = re.compile(u"\s*<p begin=")
sub_lines = (l for l in text.split("\n") if re.search(begin_re, l))
subs = []
prev_time = {"start": 0, "end": 0}
prev_content = []
start = end = ""
start_re = re.compile(u'begin\="([0-9:\.]*)')
end_re = re.compile(u'end\="([0-9:\.]*)')
content_re = re.compile(u'">(.*)</p>')
# span tags are only used for italics, so we'll get rid of them
# and replace them by <i> and </i>, which is the standard for .srt files
span_start_re = re.compile(u'(<span style="[a-zA-Z0-9_.]+">)+')
span_end_re = re.compile(u"(</span>)+")
br_re = re.compile(u"(<br\s*\/?>)+")
fmt_t = True
for s in sub_lines:
span_start_tags = re.search(span_start_re, s)
if span_start_tags:
s = u"<i>".join(s.split(span_start_tags.group()))
string_region_re = (
r'<p(.*region="' + display_align_before + r'".*")>(.*)</p>'
)
s = re.sub(string_region_re, r"<p\1>{\\an8}\2</p>", s)
content = re.search(content_re, s).group(1)
br_tags = re.search(br_re, content)
if br_tags:
content = u"\n".join(content.split(br_tags.group()))
span_end_tags = re.search(span_end_re, content)
if span_end_tags:
content = u"</i>".join(content.split(span_end_tags.group()))
prev_start = prev_time["start"]
start = re.search(start_re, s).group(1)
end = re.search(end_re, s).group(1)
if len(start.split(":")) > 1:
fmt_t = False
start = start.replace(".", ",")
end = end.replace(".", ",")
if (prev_start == start and prev_time["end"] == end) or not prev_start:
# Fix for multiple lines starting at the same time
prev_time = {"start": start, "end": end}
prev_content.append(content)
continue
append_subs(prev_time["start"], prev_time["end"], prev_content, fmt_t)
prev_time = {"start": start, "end": end}
prev_content = [content]
append_subs(start, end, prev_content, fmt_t)
lines = (
u"{}\n{} --> {}\n{}\n".format(
s + 1, subs[s]["start_time"], subs[s]["end_time"], subs[s]["content"]
)
for s in range(len(subs))
)
return u"\n".join(lines)
def convert(self, Input, Output):
with codecs.open(Input, "rb", "utf-8") as f:
text = f.read()
with codecs.open(Output, "wb", "utf-8") as f:
f.write(self.xml_to_srt(text))
return
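# --- Usage sketch (not part of the original file) ---------------------------
# Converts a DFXP/TTML caption file into SubRip; file names are placeholders.
if __name__ == "__main__":
    dfxp_to_srt().convert("episode.dfxp", "episode.srt")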


@ -0,0 +1,76 @@
import os, json, sys
from helpers.ripprocess import ripprocess
class keysaver:
def __init__(self, **kwargs):
self.keys_file = kwargs.get("keys_file", None)
self.stored = self.get_stored()
def read_(self):
with open(self.keys_file, "r") as fr:
return json.load(fr)
def write_(self, data):
with open(self.keys_file, "w") as fr:
fr.write(json.dumps(data, indent=4))
def get_stored(self):
stored = []
if os.path.isfile(self.keys_file):
return self.read_()
return stored
def formatting(self, keys_list, pssh, name):
return [
{
"NAME": name,
"PSSH": pssh,
"ID": idx,
"KID": key.split(":")[0],
"KEY": key.split(":")[1],
}
for idx, key in enumerate(keys_list, start=1)
]
def dump_keys(self, keys, pssh=None, name=None):
old_keys = list(self.stored)
new_keys = list(self.formatting(keys, pssh, name))
self.write_(old_keys + new_keys)
self.stored = self.get_stored() # to update stored keys
return new_keys
def get_key_by_pssh(self, pssh):
keys = []
added = set()
for key in self.get_stored(): # read file again...
if key["PSSH"]:
if not key["KEY"] in added and pssh in key["PSSH"]:
keys.append(key)
added.add(key["KEY"])
return keys
def get_key_by_kid(self, kid):
keys = []
added = set()
for key in self.get_stored(): # read file again...
if not key["KEY"] in added and key["KID"] == kid:
keys.append(key)
added.add(key["KEY"])
return keys
def generate_kid(self, encrypted_file):
return ripprocess().getKeyId(encrypted_file)
def set_keys(self, keys, no_kid=False):
command_keys = []
for key in keys:
command_keys.append("--key")
command_keys.append(
"{}:{}".format(key["ID"] if no_kid else key["KID"], key["KEY"])
)
return command_keys
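# --- Usage sketch (not part of the original file) ---------------------------
# Stores freshly obtained KID:KEY pairs and turns them back into decrypter
# arguments. The file name, PSSH and key material below are placeholders.
if __name__ == "__main__":
    store = keysaver(keys_file="KEYS.json")
    new = store.dump_keys(
        ["0123456789abcdef0123456789abcdef:00112233445566778899aabbccddeeff"],
        pssh="AAAAAnBzc2g=", name="Example Title",
    )
    print(store.set_keys(new, no_kid=False))  # ['--key', '<KID>:<KEY>']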


@ -0,0 +1,112 @@
import os
import requests
import sys, json
import random
from configs.config import tool
from helpers.vpn import connect
import logging
class hold_proxy(object):
def __init__(self):
self.proxy = os.environ.get("http_proxy")
self.logger = logging.getLogger(__name__)
def disable(self):
os.environ["http_proxy"] = ""
os.environ["HTTP_PROXY"] = ""
os.environ["https_proxy"] = ""
os.environ["HTTPS_PROXY"] = ""
def enable(self):
if self.proxy:
os.environ["http_proxy"] = self.proxy
os.environ["HTTP_PROXY"] = self.proxy
os.environ["https_proxy"] = self.proxy
os.environ["HTTPS_PROXY"] = self.proxy
class proxy_env(object):
def __init__(self, args):
self.logger = logging.getLogger(__name__)
self.args = args
self.vpn = tool().vpn()
def Load(self):
proxies = None
proxy = {}
aria2c_proxy = []
if self.vpn["proxies"]:
proxies = self.vpn["proxies"]
if not self.vpn["proxies"]:
if self.args.privtvpn:
self.logger.info("Proxy Status: Activated-PrivateVpn")
proxy.update({"port": self.vpn["private"]["port"]})
proxy.update({"user": self.vpn["private"]["email"]})
proxy.update({"pass": self.vpn["private"]["passwd"]})
if "pvdata.host" in self.args.privtvpn:
proxy.update({"host": self.args.privtvpn})
else:
proxy.update(
{"host": connect(code=self.args.privtvpn).privateVPN()}
)
proxies = self.vpn["private"]["http"].format(
email=proxy["user"],
passwd=proxy["pass"],
ip=proxy["host"],
port=proxy["port"],
)
else:
if self.args.nordvpn:
self.logger.info("Proxy Status: Activated-NordVpn")
proxy.update({"port": self.vpn["nordvpn"]["port"]})
proxy.update({"user": self.vpn["nordvpn"]["email"]})
proxy.update({"pass": self.vpn["nordvpn"]["passwd"]})
if "nordvpn.com" in self.args.nordvpn:
proxy.update({"host": self.args.nordvpn})
else:
proxy.update(
{"host": connect(code=self.args.nordvpn).nordVPN()}
)
proxies = self.vpn["nordvpn"]["http"].format(
email=proxy["user"],
passwd=proxy["pass"],
ip=proxy["host"],
port=proxy["port"],
)
else:
self.logger.info("Proxy Status: Off")
if proxy.get("host"):
aria2c_proxy.append(
"--https-proxy={}:{}".format(proxy.get("host"), proxy.get("port"))
)
if proxy.get("user"):
aria2c_proxy.append("--https-proxy-user={}".format(proxy.get("user")))
if proxy.get("pass"):
aria2c_proxy.append("--https-proxy-passwd={}".format(proxy.get("pass")))
if proxies:
os.environ["http_proxy"] = proxies
os.environ["HTTP_PROXY"] = proxies
os.environ["https_proxy"] = proxies
os.environ["HTTPS_PROXY"] = proxies
ip = None
try:
self.logger.info("Getting IP...")
r = requests.get("https://ipinfo.io/json", timeout=5)
data = r.json()
ip = f'{data["ip"]} ({data["country"]})'
except Exception as e:
self.logger.info(f"({e.__class__.__name__}: {e})")
sys.exit(1)
return aria2c_proxy, ip
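# --- Usage sketch (not part of the original file) ---------------------------
# Resolves proxy/VPN settings before a rip. Assumes the tool() config provides
# the "private" credentials; the host below is a placeholder, and only the
# privtvpn/nordvpn attributes are read from the args object.
if __name__ == "__main__":
    import argparse
    example_args = argparse.Namespace(privtvpn="ca-tor.pvdata.host", nordvpn=None)
    aria2c_proxy_args, ip = proxy_env(example_args).Load()
    print("aria2c proxy flags:", aria2c_proxy_args)
    print("current exit IP:", ip)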


@ -0,0 +1,132 @@
from utils.modules.pymp4.parser import Box
from io import BytesIO
import base64
import requests
import uuid
import binascii
import subprocess
import logging
import json
class pssh_generator(object):
def __init__(self, init, **kwargs):
self.init = init
self.logger = logging.getLogger(__name__)
self.proxies = kwargs.get("proxies", None)
self.mp4dumpexe = kwargs.get("mp4dumpexe", None)
def from_kid(self):
array_of_bytes = bytearray(b"\x00\x00\x002pssh\x00\x00\x00\x00")
array_of_bytes.extend(bytes.fromhex("edef8ba979d64acea3c827dcd51d21ed"))
array_of_bytes.extend(b"\x00\x00\x00\x12\x12\x10")
array_of_bytes.extend(bytes.fromhex(self.init.replace("-", "")))
pssh = base64.b64encode(bytes.fromhex(array_of_bytes.hex()))
return pssh.decode()
def Get_PSSH(self):
WV_SYSTEM_ID = "[ed ef 8b a9 79 d6 4a ce a3 c8 27 dc d5 1d 21 ed]"
pssh = None
data = subprocess.check_output(
[self.mp4dumpexe, "--format", "json", "--verbosity", "1", self.init]
)
data = json.loads(data)
for atom in data:
if atom["name"] == "moov":
for child in atom["children"]:
if child["name"] == "pssh":
if child["system_id"] == WV_SYSTEM_ID:
pssh = child["data"][1:-1].replace(" ", "")
pssh = binascii.unhexlify(pssh)
if pssh.startswith(b"\x08\x01"):
pssh = pssh[0:]
pssh = base64.b64encode(pssh).decode("utf-8")
return pssh
if not pssh:
self.logger.error("Error while generate pssh from file.")
return pssh
def get_moov_pssh(self, moov):
while True:
x = Box.parse_stream(moov)
if x.type == b"moov":
for y in x.children:
if y.type == b"pssh" and y.system_ID == uuid.UUID(
"edef8ba9-79d6-4ace-a3c8-27dcd51d21ed"
):
data = base64.b64encode(y.init_data)
return data
def build_init_segment_mp4(self, bytes_):
moov = BytesIO(bytes_)
data = self.get_moov_pssh(moov)
pssh = data.decode("utf-8")
return pssh
def getInitWithRange2(self, headers):
initbytes = requests.get(url=self.init, proxies=self.proxies, headers=headers,)
try:
pssh = self.build_init_segment_mp4(initbytes.content)
return pssh
except Exception as e:
self.logger.info("Error: " + str(e))
return None
def getInitWithRange(self, start: int, end: int):
initbytes = requests.get(
url=self.init,
proxies=self.proxies,
headers={"Range": "bytes={}-{}".format(start, end)},
)
try:
pssh = self.build_init_segment_mp4(initbytes.content)
return pssh
except Exception as e:
self.logger.info("Error: " + str(e))
return None
def loads(self):
req = requests.get(url=self.init, proxies=self.proxies)
initbytes = req.content
try:
pssh = self.build_init_segment_mp4(initbytes)
return pssh
except Exception as e:
self.logger.error("Error: " + str(e))
return None
def load(self):
with open(self.init, "rb") as f:
initbytes = f.read()
try:
pssh = self.build_init_segment_mp4(initbytes)
return pssh
except Exception as e:
self.logger.error("Error: " + str(e))
return None
def from_str(self):
initbytes = self.init
try:
pssh = self.build_init_segment_mp4(initbytes)
return pssh
except Exception as e:
self.logger.info("Error: " + str(e))
return None
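# --- Usage sketch (not part of the original file) ---------------------------
# Builds a Widevine PSSH box directly from a key id; the KID is a placeholder.
if __name__ == "__main__":
    kid = "0123456789abcdef0123456789abcdef"
    print(pssh_generator(kid).from_kid())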


@ -0,0 +1,819 @@
import ffmpy, json, os, sys, unidecode, requests, subprocess, time, pycountry, html, tqdm, re, glob, base64, binascii
from titlecase import titlecase
from configs.config import tool
from helpers.proxy_environ import hold_proxy
import tldextract
from collections import namedtuple
from collections.abc import Sequence  # collections.Sequence was removed in Python 3.10
from natsort import natsorted
import logging
import unicodedata, string
class EpisodesNumbersHandler:
def __init__(self):
return
def numberRange(self, start: int, end: int):
if list(range(start, end + 1)) != []:
return list(range(start, end + 1))
if list(range(end, start + 1)) != []:
return list(range(end, start + 1))
return [start]
def ListNumber(self, Number: str):
if Number.isdigit():
return [int(Number)]
if Number.strip() == "~" or Number.strip() == "":
return self.numberRange(1, 999)
if "-" in Number:
start, end = Number.split("-")
if start.strip() == "" or end.strip() == "":
raise ValueError("wrong Number: {}".format(Number))
return self.numberRange(int(start), int(end))
if "~" in Number:
start, _ = Number.split("~")
if start.strip() == "":
raise ValueError("wrong Number: {}".format(Number))
return self.numberRange(int(start), 999)
return
def sortNumbers(self, Numbers):
SortedNumbers = []
for Number in Numbers.split(","):
SortedNumbers += self.ListNumber(Number.strip())
return natsorted(list(set(SortedNumbers)))
class ripprocess(object):
def __init__(self):
self.tool = tool()
self.logger = logging.getLogger(__name__)
self.bin = self.tool.bin()
def sort_list(self, media_list, keyword1=None, keyword2=None):
if keyword1:
if keyword2:
return sorted(
media_list, key=lambda k: (int(k[keyword1]), int(k[keyword2]))
)
else:
sorted(media_list, key=lambda k: int(k[keyword1]))
return media_list
def yt2json(self, url, proxies=None):
jsonfile = "info.info.json"
yt_cmd = [
self.bin["youtube"],
"--skip-download",
"--write-info-json",
"--quiet",
"--no-warnings",
"-o",
"info",
url,
]
if proxies:
yt_cmd += ["--proxy", proxies.get("https")]
subprocess.call(yt_cmd)
while not os.path.isfile(jsonfile):
time.sleep(0.2)
with open(jsonfile) as js:
data = json.load(js)
if os.path.isfile(jsonfile):
os.remove(jsonfile)
return data
def getKeyId(self, mp4_file):
data = subprocess.check_output(
[self.bin["mp4dump"], "--format", "json", "--verbosity", "1", mp4_file]
)
try:
return re.sub(
" ",
"",
re.compile(r"default_KID.*\[(.*)\]").search(data.decode()).group(1),
)
except AttributeError:
return None
def flatten(self, l):
return list(self.flatten_g(l))
def flatten_g(self, l):
basestring = (str, bytes)
for el in l:
if isinstance(el, Sequence) and not isinstance(el, basestring):
for sub in self.flatten_g(el):
yield sub
else:
yield el
def removeExtentsion(self, string: str):
if "." in string:
return ".".join(string.split(".")[:-1])
else:
raise ValueError("string has no extentsion: {}".format(string))
def replaceExtentsion(self, string: str, ext: str):
if "." in string:
return ".".join(string.split(".")[:-1]) + f".{ext}"
else:
raise ValueError("string has no extentsion: {}".format(string))
def domain(self, url):
return "{0.domain}.{0.suffix}".format(tldextract.extract(url))
def remove_dups(self, List, keyword=""):
Added_ = set()
Proper_ = []
for L in List:
if L[keyword] not in Added_:
Proper_.append(L)
Added_.add(L[keyword])
return Proper_
def find_str(self, s, char):
index = 0
if char in s:
c = char[0]
for ch in s:
if ch == c:
if s[index : index + len(char)] == char:
return index
index += 1
return -1
def updt(self, total, progress):
barLength, status = 80, ""
progress = float(progress) / float(total)
if progress >= 1.0:
progress, status = 1, "\r\n"
block = int(round(barLength * progress))
text = "\rProgress: {} | {:.0f}% {}".format(
"" * block + "" * (barLength - block), round(progress * 100, 0), status,
)
sys.stdout.write(text)
sys.stdout.flush()
def Get_PSSH(self, mp4_file):
WV_SYSTEM_ID = "[ed ef 8b a9 79 d6 4a ce a3 c8 27 dc d5 1d 21 ed]"
pssh = None
data = subprocess.check_output(
[self.bin["mp4dump"], "--format", "json", "--verbosity", "1", mp4_file]
)
data = json.loads(data)
for atom in data:
if atom["name"] == "moov":
for child in atom["children"]:
if child["name"] == "pssh":
if child["system_id"] == WV_SYSTEM_ID:
pssh = child["data"][1:-1].replace(" ", "")
pssh = binascii.unhexlify(pssh)
if pssh.startswith(b"\x08\x01"):
pssh = pssh[0:]
pssh = base64.b64encode(pssh).decode("utf-8")
return pssh
return None
def SubtitleEdit(
self, contain=None, file=None, removeSDH=False, silent=True, extra_commands=[]
):
if file:
subtitle_command = [
self.bin["SubtitleEdit"],
"/convert",
file,
"srt",
"/overwrite",
"/multiplereplace:.",
"/MergeShortLines",
"/FixCommonErrors",
]
subtitle_command += extra_commands
if removeSDH:
subtitle_command.append("/RemoveTextForHI")
subprocess.call(
subtitle_command, stdout=open(os.devnull, "wb")
) if silent else subprocess.call(subtitle_command)
if contain:
subtitle_command = [
self.bin["SubtitleEdit"],
"/convert",
"{}*.srt".format(contain),
"srt",
"/overwrite",
"/multiplereplace:.",
"/MergeShortLines",
"/FixCommonErrors",
]
subtitle_command += extra_commands
if removeSDH:
subtitle_command.append("/removetextforhi")
subprocess.call(
subtitle_command, stdout=open(os.devnull, "wb")
) if silent else subprocess.call(subtitle_command)
return
def parseCookieFile(self, cookiesfile):
cookies = {}
with open(cookiesfile, "r") as fp:
for line in fp:
if not re.match(r"^\#", line):
lineFields = line.strip().split("\t")
try:
cookies[lineFields[5]] = lineFields[6]
except Exception:
pass
return cookies
def ReplaceCodeLanguages(self, X):
X = X.lower()
X = (
X.replace("_subtitle_dialog_0", "")
.replace("_narrative_dialog_0", "")
.replace("_caption_dialog_0", "")
.replace("_dialog_0", "")
.replace("_descriptive_0", "_descriptive")
.replace("_descriptive", "_descriptive")
.replace("_sdh", "-sdh")
.replace("es-es", "es")
.replace("en-es", "es")
.replace("kn-in", "kn")
.replace("gu-in", "gu")
.replace("ja-jp", "ja")
.replace("mni-in", "mni")
.replace("si-in", "si")
.replace("as-in", "as")
.replace("ml-in", "ml")
.replace("sv-se", "sv")
.replace("hy-hy", "hy")
.replace("sv-sv", "sv")
.replace("da-da", "da")
.replace("fi-fi", "fi")
.replace("nb-nb", "nb")
.replace("is-is", "is")
.replace("uk-uk", "uk")
.replace("hu-hu", "hu")
.replace("bg-bg", "bg")
.replace("hr-hr", "hr")
.replace("lt-lt", "lt")
.replace("et-et", "et")
.replace("el-el", "el")
.replace("he-he", "he")
.replace("ar-ar", "ar")
.replace("fa-fa", "fa")
.replace("ro-ro", "ro")
.replace("sr-sr", "sr")
.replace("cs-cs", "cs")
.replace("sk-sk", "sk")
.replace("mk-mk", "mk")
.replace("hi-hi", "hi")
.replace("bn-bn", "bn")
.replace("ur-ur", "ur")
.replace("pa-pa", "pa")
.replace("ta-ta", "ta")
.replace("te-te", "te")
.replace("mr-mr", "mr")
.replace("kn-kn", "kn")
.replace("gu-gu", "gu")
.replace("ml-ml", "ml")
.replace("si-si", "si")
.replace("as-as", "as")
.replace("mni-mni", "mni")
.replace("tl-tl", "tl")
.replace("id-id", "id")
.replace("ms-ms", "ms")
.replace("vi-vi", "vi")
.replace("th-th", "th")
.replace("km-km", "km")
.replace("ko-ko", "ko")
.replace("zh-zh", "zh")
.replace("ja-ja", "ja")
.replace("ru-ru", "ru")
.replace("tr-tr", "tr")
.replace("it-it", "it")
.replace("es-mx", "es-la")
.replace("ar-sa", "ar")
.replace("zh-cn", "zh")
.replace("nl-nl", "nl")
.replace("pl-pl", "pl")
.replace("pt-pt", "pt")
.replace("hi-in", "hi")
.replace("mr-in", "mr")
.replace("bn-in", "bn")
.replace("te-in", "te")
.replace("cmn-hans", "zh-hans")
.replace("cmn-hant", "zh-hant")
.replace("ko-kr", "ko")
.replace("en-au", "en")
.replace("es-419", "es-la")
.replace("es-us", "es-la")
.replace("en-us", "en")
.replace("en-gb", "en")
.replace("fr-fr", "fr")
.replace("de-de", "de")
.replace("las-419", "es-la")
.replace("ar-ae", "ar")
.replace("da-dk", "da")
.replace("yue-hant", "yue")
.replace("bn-in", "bn")
.replace("ur-in", "ur")
.replace("ta-in", "ta")
.replace("sl-si", "sl")
.replace("cs-cz", "cs")
.replace("hi-jp", "hi")
.replace("-001", "")
.replace("en-US", "en")
.replace("deu", "de")
.replace("eng", "en")
.replace("ca-es", "cat")
.replace("fil-ph", "fil")
.replace("en-ca", "en")
.replace("eu-es", "eu")
.replace("ar-eg", "ar")
.replace("he-il", "he")
.replace("el-gr", "he")
.replace("nb-no", "nb")
.replace("es-ar", "es-la")
.replace("en-ph", "en")
.replace("sq-al", "sq")
.replace("bs-ba", "bs")
)
return X
def countrycode(self, code, site_domain="None"):
languageCodes = {
"zh-Hans": "zhoS",
"zh-Hant": "zhoT",
"pt-BR": "brPor",
"es-ES": "euSpa",
"en-GB": "enGB",
"en-PH": "enPH",
"nl-BE": "nlBE",
"fil": "enPH",
"yue": "zhoS",
"fr-CA": "caFra",
}
if code == "cmn-Hans":
return "Mandarin Chinese (Simplified)", "zh-Hans"
elif code == "cmn-Hant":
return "Mandarin Chinese (Traditional)", "zh-Hant"
elif code == "es-419":
return "Spanish", "spa"
elif code == "es-ES":
return "European Spanish", "euSpa"
elif code == "pt-BR":
return "Brazilian Portuguese", "brPor"
elif code == "pt-PT":
return "Portuguese", "por"
elif code == "fr-CA":
return "French Canadian", "caFra"
elif code == "fr-FR":
return "French", "fra"
elif code == "iw":
return "Modern Hebrew", "heb"
elif code == "es" and site_domain == "google":
return "European Spanish", "euSpa"
lang_code = code[: code.index("-")] if "-" in code else code
lang = pycountry.languages.get(alpha_2=lang_code)
if lang is None:
lang = pycountry.languages.get(alpha_3=lang_code)
try:
languagecode = languageCodes[code]
except KeyError:
languagecode = lang.alpha_3
return lang.name, languagecode
def tqdm_downloader(self, url, file_name, proxies=None):
# self.logger.info(file_name)
r = requests.get(url, stream=True)
file_size = int(r.headers["Content-Length"])
chunk = 1
chunk_size = 1024
num_bars = int(file_size / chunk_size)
with open(file_name, "wb") as fp:
for chunk in tqdm.tqdm(
r.iter_content(chunk_size=chunk_size),
total=num_bars,
unit="KB",
desc=file_name,
leave=True, # progressbar stays
):
fp.write(chunk)
return
def silent_aria2c_download(self, url, file_name, disable_proxy=True):
holder = hold_proxy()
if disable_proxy:
holder.disable()
commands = [
self.bin["aria2c"],
url,
'--user-agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.132 Safari/537.36"',
"--allow-overwrite=true",
"--auto-file-renaming=false",
"--retry-wait=5",
"-x16",
"-j16",
"-s16",
"-o",
file_name,
]
try:
aria = subprocess.call(commands, stdout=open(os.devnull, "wb"),)
except FileNotFoundError:
self.logger.info("UNABLE TO FIND {}".format("aria2c.exe"))
exit(-1)
if aria != 0:
raise ValueError("Aria2c exited with code {}".format(aria))
if disable_proxy:
holder.enable()
def aria2c_download(self, commands, extra_commands, disable_proxy=False):
LogFile = self.bin["aria2c"].replace("exe", "log")
if os.path.isfile(LogFile):
os.remove(LogFile)
aria2_commands = []
aria2_commands.append(self.bin["aria2c"])
aria2_commands.append("--log={}".format(LogFile))
aria2_commands += commands + extra_commands
holder = hold_proxy()
if disable_proxy:
holder.disable()
try:
aria = subprocess.call(aria2_commands)
except FileNotFoundError:
self.logger.info("UNABLE TO FIND {}".format("aria2c.exe"))
exit(-1)
if aria != 0:
self.logger.info("Aria2c exited with code {}".format(aria))
exit(-1)
if disable_proxy:
holder.enable()
self.logger.info("")
def isduplelist(self, a, b):
return set(a) == set(b) and len(a) == len(b)
def readfile(self, file, lines=False):
read = ""
if os.path.isfile(file):
with open(file, "r") as f:
if lines:
read = f.readlines()
return read
read = f.read()
else:
self.logger.info("File: %s, is not found" % file)
return None
return read
def strip(self, inputint, left=True, right=False):
if left:
return str(inputint.lstrip("0"))
if right:
return str(inputint.rstrip("0"))
return
def CleanMyFileNamePlease(self, filename):
# edit here...
filename = filename.replace("666", "666")
################################################################################################################################
# dont edit here...
filename = (
filename.replace(" ", ".")
.replace("'", "")
.replace(",", "")
.replace("-", "")
.replace("-.", ".")
.replace(".-.", ".")
)
filename = re.sub(" +", ".", filename)
for i in range(10):
filename = re.sub(r"(\.\.)", ".", filename)
return filename
def RemoveExtraWords(self, name):
if re.search("[eE]pisode [0-9]+", name):
name = name.replace((re.search("[eE]pisode [0-9]+", name)).group(0), "")
if re.search(r"(\(.+?)\)", name):
name = name.replace(re.search(r"(\(.+?)\)", name).group(), "")
name = re.sub(" +", " ", name)
name = name.strip()
name = (
name.replace(" : ", " - ")
.replace(": ", " - ")
.replace(":", " - ")
.replace("&", "and")
.replace("ó", "o")
.replace("*", "x")
)
return name
def DecodeString(self, text):
for encoding in ("utf-8-sig", "utf-8", "utf-16"):
try:
return text.decode(encoding)
except UnicodeDecodeError:
continue
return text.decode("latin-1")
def EncodeString(self, text):
for encoding in ("utf-8-sig", "utf-8", "utf-16"):
try:
return text.encode(encoding)
except UnicodeDecodeError:
continue
return text.encode("latin-1")
def clean_text(self, text):
whitelist = (
"-_.() %s%s" % (string.ascii_letters, string.digits) + "',&#$%@`~!^&+=[]{}"
)
cleaned_text = (
unicodedata.normalize("NFKD", text).encode("ASCII", "ignore").decode()
)
return "".join(c for c in cleaned_text if c in whitelist)
def RemoveCharcters(self, text):
text = self.EncodeString(text)
text = self.DecodeString(text)
text = self.RemoveExtraWords(text)
text = self.clean_text(text)
text = unidecode.unidecode(titlecase(text))
return text
def do_clean(self, contain, exclude=[], added=[]):
"""contain= string name in the file/files you want to delete.
exclude= the files that has a specified extension you do not want to delete. send by list like ['.sfv', '.whatever']
added= another extensions not in the default extension. send by list like ['.sfv', '.whatever']"""
error = []
extensions = [
".mp4",
".h265",
".h264",
".eac3",
".m4a",
".ac3",
".srt",
".vtt",
".txt",
".aac",
".m3u8",
".mpd",
]
extensions += added
erased_files = []
for ext in extensions:
if ext not in exclude:
erased_files += glob.glob(contain + f"*{ext}")
if not erased_files == []:
for files in erased_files:
try:
os.remove(files)
except Exception:
error.append(files)
if not error == []:
self.logger.info(
f"some files not deleted with extensions: "
+ ", ".join(str(x) for x in error)
+ "."
)
return
def mediainfo_(self, file):
mediainfo_output = subprocess.Popen(
[self.bin["MediaInfo"], "--Output=JSON", "-f", file],
stdout=subprocess.PIPE,
)
mediainfo_json = json.load(mediainfo_output.stdout)
return mediainfo_json
def DemuxAudio(self, inputName, replace_str):
if os.path.isfile(inputName):
self.logger.info("\nDemuxing audio...")
mediainfo = self.mediainfo_(inputName)
for m in mediainfo["media"]["track"]:
if m["@type"] == "Audio":
codec_name = m["Format"]
ext = ".ac3"
if codec_name == "AAC":
ext = ".m4a"
else:
if codec_name == "E-AC-3":
ext = ".eac3"
else:
if codec_name == "AC-3":
ext = ".ac3"
if codec_name == "DTS":
ext = ".dts"
outputName = inputName.replace(replace_str, ext)
self.logger.info(("{} -> {}").format(inputName, outputName))
ff = ffmpy.FFmpeg(
executable=self.bin["ffmpeg"],
inputs={inputName: None},
outputs={outputName: "-c:a copy"},
global_options="-vn -sn -y -hide_banner -loglevel panic",
)
ff.run()
time.sleep(0.05)
if os.path.isfile(outputName) and os.path.getsize(outputName) > 1024 * 1024:
os.remove(inputName)
self.logger.info("Done!")
return
def shaka_decrypt(self, encrypted, decrypted, keys, stream):
self.logger.info("\nDecrypting: {}".format(encrypted))
decrypt_command = [
self.bin["shaka-packager"],
"--enable_raw_key_decryption",
"-quiet",
"input={},stream={},output={}".format(encrypted, stream, decrypted),
]
for key in keys:
decrypt_command.append("--keys")
decrypt_command.append(
"key={}:key_id={}".format(
key["KEY"], "00000000000000000000000000000000"
)
)
self.logger.info("\nDecrypting KEYS: ")
for key in keys:
self.logger.info(("{}:{}".format(key["KID"], key["KEY"])))
wvdecrypt_process = subprocess.Popen(decrypt_command)
stdoutdata, stderrdata = wvdecrypt_process.communicate()
wvdecrypt_process.wait()
self.logger.info("Done!")
return True
def mp4_decrypt(
self,
encrypted,
decrypted,
keys,
moded_decrypter=True,
no_kid=True,
silent=False,
):
self.logger.info("\nDecrypting: {}".format(encrypted))
decrypt_command = [
self.bin["mp4decrypt"]
if not moded_decrypter
else self.bin["mp4decrypt_moded"]
]
decrypt_command += ["--show-progress", encrypted, decrypted]
for key in keys:
decrypt_command.append("--key")
decrypt_command.append(
"{}:{}".format(key["ID"] if no_kid else key["KID"], key["KEY"])
)
self.logger.info("\nDecrypting KEYS: ")
for key in keys:
self.logger.info(
("{}:{}".format(key["ID"] if no_kid else key["KID"], key["KEY"]))
)
if silent:
wvdecrypt_process = subprocess.Popen(
decrypt_command, stdout=open(os.devnull, "wb")
)
else:
wvdecrypt_process = subprocess.Popen(decrypt_command)
stdoutdata, stderrdata = wvdecrypt_process.communicate()
wvdecrypt_process.wait()
if wvdecrypt_process.returncode == 0:
self.logger.info("Done!")
return True
return False
def DemuxVideo(
self,
outputVideoTemp,
outputVideo,
ffmpeg=False,
mp4box=False,
ffmpeg_version="ffmpeg",
):
if ffmpeg:
self.logger.info("\nRemuxing video...")
# if not outputVideo.endswith(".h264"):
# os.rename(outputVideoTemp, outputVideo)
# self.logger.info("Done!")
# return True
ff = ffmpy.FFmpeg(
executable=self.bin[ffmpeg_version],
inputs={outputVideoTemp: None},
outputs={outputVideo: "-c copy"},
global_options="-y -hide_banner -loglevel panic",
).run()
time.sleep(0.05)
if (
os.path.isfile(outputVideo)
and os.path.getsize(outputVideo) > 1024 * 1024
):
os.remove(outputVideoTemp)
self.logger.info("Done!")
return True
if mp4box:
self.logger.info("\nRemuxing video...")
if not outputVideo.endswith(".h264"):
os.rename(outputVideoTemp, outputVideo)
self.logger.info("Done!")
return True
subprocess.call(
[
self.bin["mp4box"],
"-quiet",
"-raw",
"1",
"-out",
outputVideo,
outputVideoTemp,
]
)
if (
os.path.isfile(outputVideo)
and os.path.getsize(outputVideo) > 1024 * 1024
):
os.remove(outputVideoTemp)
self.logger.info("Done!")
return True
return False
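# --- Usage sketch (not part of the original file) ---------------------------
# The episode-range parser used for -e/-s style input; "1-3,7,10~" expands to
# episodes 1..3, 7 and everything from 10 upwards.
if __name__ == "__main__":
    handler = EpisodesNumbersHandler()
    print(handler.sortNumbers("1-3,7,10~")[:6])  # [1, 2, 3, 7, 10, 11]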

90
helpers/sdh.py 100644

@ -0,0 +1,90 @@
import codecs
import os
import re
import sys
import pysrt
class sdh_remover:
def __init__(self,):
self.__replace__ = "empty_line"
self.content = []
def cleanLine(self, line, regex):
line = re.sub("</i>", "", line)
line = re.sub("<i>", "", line)
if re.search(r"\[(.*)?\n(.*)?\]", line):
line = re.sub(
re.search(r"\[(.*)?\n(.*)?\]", line).group(), self.__replace__, line
)
if re.search(r"\((.*)?\n(.*)?\)", line):
line = re.sub(
re.search(r"\((.*)?\n(.*)?\)", line).group(), self.__replace__, line
)
try:
# is it inside a markup tag?
match = regex.match(line).group(1)
tag = re.compile("(<[A-z]+[^>]*>)").match(match).group(1)
line = re.sub(match, tag + self.__replace__, line)
except:
try:
line = re.sub(regex, self.__replace__, line)
except:
pass
return line
def _save(self, Output):
file = codecs.open(Output, "w", encoding="utf-8")
for idx, text in enumerate(self.content, start=1):
file.write(
"{}\n{} --> {}\n{}\n\n".format(
str(idx), text["start"], text["end"], text["text"].strip(),
)
)
file.close()
def clean(self):
if not self.content == []:
temp = self.content
self.content = []
for text in temp:
if text["text"].strip() == self.__replace__:
continue
text.update({"text": re.sub(self.__replace__, "", text["text"])})
if not text["text"].strip() == "":
self.content.append(text)
return
def noHI(self, Input=None, Output=None, content=None):
srt = pysrt.open(Input, encoding="utf-8")
for idx, line in enumerate(srt, start=1):
number = str(idx)
start = line.start
end = line.end
text = line.text
text = self.cleanLine(text, re.compile(r"(\[(.+)?\]|\[(.+)?|^(.+)?\])"))
text = self.cleanLine(text, re.compile(r"(\((.+)?\)|\((.+)?|^(.+)?\))"))
text = self.cleanLine(text, re.compile(r"(\[(.+)?\]|\[(.+)?|^(.+)?\])"))
text = self.cleanLine(
text,
re.compile(r"([♩♪♫♭♮♯]+(.+)?[♩♪♫♭♮♯]+|[♩♪♫♭♮♯]+(.+)?|^(.+)?[♩♪♫♭♮♯]+)"),
)
text = self.cleanLine(text, re.compile(r"(<font[^>]*>)|(<\/font>)"))
self.content.append(
{"number": number, "start": start, "end": end, "text": text,}
)
self.clean()
self._save(Output)
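# --- Usage sketch (not part of the original file) ---------------------------
# Strips SDH/hearing-impaired cues from an .srt; file names are placeholders.
if __name__ == "__main__":
    sdh_remover().noHI(Input="episode.srt", Output="episode.nohi.srt")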

135
helpers/vpn.py 100644

@ -0,0 +1,135 @@
import os
import requests
import sys
import random
import logging
class connect(object):
def __init__(self, code):
self.code = code.lower()
self.logger = logging.getLogger(__name__)
self.headers = {
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130 Safari/537.36"
}
def nordVPN(self):
nordvpn_codes = {
"al": "2",
"ar": "10",
"au": "13",
"at": "14",
"be": "21",
"ba": "27",
"br": "30",
"bg": "33",
"ca": "38",
"cl": "43",
"cr": "52",
"hr": "54",
"cy": "56",
"cz": "57",
"dk": "58",
"eg": "64",
"ee": "68",
"fi": "73",
"fr": "74",
"ge": "80",
"de": "81",
"gr": "84",
"hk": "97",
"hu": "98",
"is": "99",
"in": "100",
"id": "101",
"ie": "104",
"il": "105",
"it": "106",
"jp": "108",
"lv": "119",
"lu": "126",
"my": "131",
"mx": "140",
"md": "142",
"nl": "153",
"nz": "156",
"mk": "128",
"no": "163",
"ro": "179",
"pl": "174",
"si": "197",
"za": "200",
"kr": "114",
"rs": "192",
"sg": "195",
"sk": "196",
"es": "202",
"se": "208",
"ch": "209",
"tw": "211",
"th": "214",
"tr": "220",
"ua": "225",
"ae": "226",
"gb": "227",
"us": "228",
"vn": "234",
"uk": "227",
}
nord_proxy = {}
if nordvpn_codes.get(self.code):
resp = requests.get(
url="https://nordvpn.com/wp-admin/admin-ajax.php?action=servers_recommendations&filters={%22country_id%22:"
+ nordvpn_codes.get(self.code)
+ "}",
headers=self.headers,
)
nord_proxy = resp.json()[0]["hostname"]
else:
self.logger.info(
self.code
+ " : not listed in country codes, read country.doc for more info"
)
return nord_proxy
def load_privatevpn(self):
hosts = []
resp = requests.get(
"https://privatevpn.com/serverlist/", headers=self.headers
)
# parse the server list straight from the response body instead of
# round-tripping it through a temporary html file
for p in resp.text.replace("<br>", "").splitlines():
if ".pvdata.host" in p:
hosts.append(p.strip())
return hosts
def privateVPN(self):
private_proxy = {}
private_hosts = self.load_privatevpn()
self.logger.debug("private_hosts: {}".format(private_hosts))
search_host = [host for host in private_hosts if host[:2] == self.code]
if not search_host == []:
self.logger.info(f"Founded {str(len(search_host))} Proxies")
for n, p in enumerate(search_host):
self.logger.info(f"[{str(n+1)}] {p}")
inp = input("\nEnter Proxy Number, or Hit Enter for random one: ").strip()
if inp == "":
return random.choice(search_host)
private_proxy = search_host[int(inp) - 1]
else:
self.logger.info(f"no Proxies Found, you may entered wrong code, or search failed!...")
return private_proxy
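
A short usage sketch for the proxy helper above, assuming the helpers/vpn.py module path; "ca" is just an example country code:

from helpers.vpn import connect

vpn = connect("ca")
nord_host = vpn.nordVPN()    # recommended NordVPN hostname for the country, or {} if the code is unknown
pv_host = vpn.privateVPN()   # scrapes privatevpn.com and interactively picks a *.pvdata.host server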

Binary files not shown (8 files).


@ -0,0 +1,407 @@
import base64
import os
import time
import binascii
from google.protobuf.message import DecodeError
from google.protobuf import text_format
from pywidevine.cdm.formats import wv_proto2_pb2 as wv_proto2
from pywidevine.cdm.session import Session
from pywidevine.cdm.key import Key
from Cryptodome.Random import get_random_bytes
from Cryptodome.Random import random
from Cryptodome.Cipher import PKCS1_OAEP, AES
from Cryptodome.Hash import CMAC, SHA256, HMAC, SHA1
from Cryptodome.PublicKey import RSA
from Cryptodome.Signature import pss
from Cryptodome.Util import Padding
import logging
class Cdm:
def __init__(self):
self.logger = logging.getLogger(__name__)
self.sessions = {}
def open_session(self, init_data_b64, device, raw_init_data=None, offline=False):
self.logger.debug(
"open_session(init_data_b64={}, device={}".format(init_data_b64, device)
)
#self.logger.info("opening new cdm session")
if device.session_id_type == "android":
# format: 16 random hexdigits, 2 digit counter, 14 0s
rand_ascii = "".join(random.choice("ABCDEF0123456789") for _ in range(16))
counter = "01" # this resets regularly so its fine to use 01
rest = "00000000000000"
session_id = rand_ascii + counter + rest
session_id = session_id.encode("ascii")
elif device.session_id_type == "chrome":
rand_bytes = get_random_bytes(16)
session_id = rand_bytes
else:
# other formats NYI
self.logger.error("device type is unusable")
return 1
if raw_init_data and isinstance(raw_init_data, (bytes, bytearray)):
# used for NF key exchange, where they don't provide a valid PSSH
init_data = raw_init_data
self.raw_pssh = True
else:
init_data = self._parse_init_data(init_data_b64)
self.raw_pssh = False
if init_data:
new_session = Session(session_id, init_data, device, offline)
else:
self.logger.error("unable to parse init data")
return 1
self.sessions[session_id] = new_session
#self.logger.info("session opened and init data parsed successfully")
return session_id
def _parse_init_data(self, init_data_b64):
parsed_init_data = wv_proto2.WidevineCencHeader()
try:
self.logger.debug("trying to parse init_data directly")
parsed_init_data.ParseFromString(base64.b64decode(init_data_b64)[32:])
except DecodeError:
self.logger.debug(
"unable to parse as-is, trying with removed pssh box header"
)
try:
id_bytes = parsed_init_data.ParseFromString(
base64.b64decode(init_data_b64)[32:]
)
except DecodeError:
self.logger.error("unable to parse, unsupported init data format")
return None
self.logger.debug("init_data:")
for line in text_format.MessageToString(parsed_init_data).splitlines():
self.logger.debug(line)
return parsed_init_data
def close_session(self, session_id):
self.logger.debug("close_session(session_id={})".format(session_id))
#self.logger.info("closing cdm session")
if session_id in self.sessions:
self.sessions.pop(session_id)
self.logger.info("cdm session closed")
return 0
else:
self.logger.info("session {} not found".format(session_id))
return 1
def set_service_certificate(self, session_id, cert_b64):
self.logger.debug(
"set_service_certificate(session_id={}, cert={})".format(
session_id, cert_b64
)
)
#self.logger.info("setting service certificate")
if session_id not in self.sessions:
self.logger.error("session id doesn't exist")
return 1
session = self.sessions[session_id]
message = wv_proto2.SignedMessage()
try:
message.ParseFromString(base64.b64decode(cert_b64))
except DecodeError:
self.logger.error("failed to parse cert as SignedMessage")
service_certificate = wv_proto2.SignedDeviceCertificate()
if message.Type:
self.logger.debug("service cert provided as signedmessage")
try:
service_certificate.ParseFromString(message.Msg)
except DecodeError:
# self.logger.error("failed to parse service certificate")
return 1
else:
self.logger.debug("service cert provided as signeddevicecertificate")
try:
service_certificate.ParseFromString(base64.b64decode(cert_b64))
except DecodeError:
# self.logger.error("failed to parse service certificate")
return 1
self.logger.debug("service certificate:")
for line in text_format.MessageToString(service_certificate).splitlines():
self.logger.debug(line)
session.service_certificate = service_certificate
session.privacy_mode = True
return 0
def get_license_request(self, session_id):
self.logger.debug("get_license_request(session_id={})".format(session_id))
#self.logger.info("getting license request")
if session_id not in self.sessions:
self.logger.error("session ID does not exist")
return 1
session = self.sessions[session_id]
# raw pssh will be treated as bytes and not parsed
if self.raw_pssh:
license_request = wv_proto2.SignedLicenseRequestRaw()
else:
license_request = wv_proto2.SignedLicenseRequest()
client_id = wv_proto2.ClientIdentification()
if not os.path.exists(session.device_config.device_client_id_blob_filename):
self.logger.error("no client ID blob available for this device")
return 1
with open(session.device_config.device_client_id_blob_filename, "rb") as f:
try:
cid_bytes = client_id.ParseFromString(f.read())
except DecodeError:
self.logger.error("client id failed to parse as protobuf")
return 1
self.logger.debug("building license request")
if not self.raw_pssh:
license_request.Type = wv_proto2.SignedLicenseRequest.MessageType.Value(
"LICENSE_REQUEST"
)
license_request.Msg.ContentId.CencId.Pssh.CopyFrom(session.init_data)
else:
license_request.Type = wv_proto2.SignedLicenseRequestRaw.MessageType.Value(
"LICENSE_REQUEST"
)
license_request.Msg.ContentId.CencId.Pssh = session.init_data # bytes
if session.offline:
license_type = wv_proto2.LicenseType.Value("OFFLINE")
else:
license_type = wv_proto2.LicenseType.Value("DEFAULT")
license_request.Msg.ContentId.CencId.LicenseType = license_type
license_request.Msg.ContentId.CencId.RequestId = session_id
license_request.Msg.Type = wv_proto2.LicenseRequest.RequestType.Value("NEW")
license_request.Msg.RequestTime = int(time.time())
license_request.Msg.ProtocolVersion = wv_proto2.ProtocolVersion.Value("CURRENT")
if session.device_config.send_key_control_nonce:
license_request.Msg.KeyControlNonce = random.randrange(1, 2 ** 31)
if session.privacy_mode:
if session.device_config.vmp:
self.logger.debug("vmp required, adding to client_id")
self.logger.debug("reading vmp hashes")
vmp_hashes = wv_proto2.FileHashes()
with open(session.device_config.device_vmp_blob_filename, "rb") as f:
try:
vmp_bytes = vmp_hashes.ParseFromString(f.read())
except DecodeError:
self.logger.error("vmp hashes failed to parse as protobuf")
return 1
client_id._FileHashes.CopyFrom(vmp_hashes)
self.logger.debug(
"privacy mode & service certificate loaded, encrypting client id"
)
self.logger.debug("unencrypted client id:")
for line in text_format.MessageToString(client_id).splitlines():
self.logger.debug(line)
cid_aes_key = get_random_bytes(16)
cid_iv = get_random_bytes(16)
cid_cipher = AES.new(cid_aes_key, AES.MODE_CBC, cid_iv)
encrypted_client_id = cid_cipher.encrypt(
Padding.pad(client_id.SerializeToString(), 16)
)
service_public_key = RSA.importKey(
session.service_certificate._DeviceCertificate.PublicKey
)
service_cipher = PKCS1_OAEP.new(service_public_key)
encrypted_cid_key = service_cipher.encrypt(cid_aes_key)
encrypted_client_id_proto = wv_proto2.EncryptedClientIdentification()
encrypted_client_id_proto.ServiceId = (
session.service_certificate._DeviceCertificate.ServiceId
)
encrypted_client_id_proto.ServiceCertificateSerialNumber = (
session.service_certificate._DeviceCertificate.SerialNumber
)
encrypted_client_id_proto.EncryptedClientId = encrypted_client_id
encrypted_client_id_proto.EncryptedClientIdIv = cid_iv
encrypted_client_id_proto.EncryptedPrivacyKey = encrypted_cid_key
license_request.Msg.EncryptedClientId.CopyFrom(encrypted_client_id_proto)
else:
license_request.Msg.ClientId.CopyFrom(client_id)
if session.device_config.private_key_available:
key = RSA.importKey(
open(session.device_config.device_private_key_filename).read()
)
session.device_key = key
else:
self.logger.error("need device private key, other methods unimplemented")
return 1
self.logger.debug("signing license request")
hash = SHA1.new(license_request.Msg.SerializeToString())
signature = pss.new(key).sign(hash)
license_request.Signature = signature
session.license_request = license_request
self.logger.debug("license request:")
for line in text_format.MessageToString(session.license_request).splitlines():
self.logger.debug(line)
#self.logger.info("license request created")
self.logger.debug(
"license request b64: {}".format(
base64.b64encode(license_request.SerializeToString())
)
)
return license_request.SerializeToString()
def provide_license(self, session_id, license_b64):
self.logger.debug(
"provide_license(session_id={}, license_b64={})".format(
session_id, license_b64
)
)
#self.logger.info("decrypting provided license")
if session_id not in self.sessions:
self.logger.error("session does not exist")
return 1
session = self.sessions[session_id]
if not session.license_request:
self.logger.error("generate a license request first!")
return 1
license = wv_proto2.SignedLicense()
try:
license.ParseFromString(base64.b64decode(license_b64))
except DecodeError:
self.logger.error("unable to parse license - check protobufs")
return 1
session.license = license
self.logger.debug("license:")
for line in text_format.MessageToString(license).splitlines():
self.logger.debug(line)
self.logger.debug("deriving keys from session key")
oaep_cipher = PKCS1_OAEP.new(session.device_key)
session.session_key = oaep_cipher.decrypt(license.SessionKey)
lic_req_msg = session.license_request.Msg.SerializeToString()
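# Key derivation: each derived key is AES-CMAC(session_key,
# counter byte + label ("ENCRYPTION"/"AUTHENTICATION") + 0x00 +
# serialized license request + output length in bits). This yields one
# 128-bit key ("enc") used to decrypt the content keys, plus two 256-bit
# keys used below as HMAC-SHA256 keys for license/request signatures.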
enc_key_base = b"ENCRYPTION\000" + lic_req_msg + b"\0\0\0\x80"
auth_key_base = b"AUTHENTICATION\0" + lic_req_msg + b"\0\0\2\0"
enc_key = b"\x01" + enc_key_base
auth_key_1 = b"\x01" + auth_key_base
auth_key_2 = b"\x02" + auth_key_base
auth_key_3 = b"\x03" + auth_key_base
auth_key_4 = b"\x04" + auth_key_base
cmac_obj = CMAC.new(session.session_key, ciphermod=AES)
cmac_obj.update(enc_key)
enc_cmac_key = cmac_obj.digest()
cmac_obj = CMAC.new(session.session_key, ciphermod=AES)
cmac_obj.update(auth_key_1)
auth_cmac_key_1 = cmac_obj.digest()
cmac_obj = CMAC.new(session.session_key, ciphermod=AES)
cmac_obj.update(auth_key_2)
auth_cmac_key_2 = cmac_obj.digest()
cmac_obj = CMAC.new(session.session_key, ciphermod=AES)
cmac_obj.update(auth_key_3)
auth_cmac_key_3 = cmac_obj.digest()
cmac_obj = CMAC.new(session.session_key, ciphermod=AES)
cmac_obj.update(auth_key_4)
auth_cmac_key_4 = cmac_obj.digest()
auth_cmac_combined_1 = auth_cmac_key_1 + auth_cmac_key_2
auth_cmac_combined_2 = auth_cmac_key_3 + auth_cmac_key_4
session.derived_keys["enc"] = enc_cmac_key
session.derived_keys["auth_1"] = auth_cmac_combined_1
session.derived_keys["auth_2"] = auth_cmac_combined_2
self.logger.debug("verifying license signature")
lic_hmac = HMAC.new(session.derived_keys["auth_1"], digestmod=SHA256)
lic_hmac.update(license.Msg.SerializeToString())
self.logger.debug(
"calculated sig: {} actual sig: {}".format(
lic_hmac.hexdigest(), binascii.hexlify(license.Signature)
)
)
if lic_hmac.digest() != license.Signature:
self.logger.info(
"license signature doesn't match - writing bin so they can be debugged"
)
with open("original_lic.bin", "wb") as f:
f.write(base64.b64decode(license_b64))
with open("parsed_lic.bin", "wb") as f:
f.write(license.SerializeToString())
self.logger.info("continuing anyway")
self.logger.debug("key count: {}".format(len(license.Msg.Key)))
for key in license.Msg.Key:
if key.Id:
key_id = key.Id
else:
key_id = wv_proto2.License.KeyContainer.KeyType.Name(key.Type).encode(
"utf-8"
)
encrypted_key = key.Key
iv = key.Iv
type = wv_proto2.License.KeyContainer.KeyType.Name(key.Type)
cipher = AES.new(session.derived_keys["enc"], AES.MODE_CBC, iv=iv)
decrypted_key = cipher.decrypt(encrypted_key)
if type == "OPERATOR_SESSION":
permissions = []
perms = key._OperatorSessionKeyPermissions
for (descriptor, value) in perms.ListFields():
if value == 1:
permissions.append(descriptor.name)
# print(permissions)
else:
permissions = []
session.keys.append(
Key(key_id, type, Padding.unpad(decrypted_key, 16), permissions)
)
#self.logger.info("decrypted all keys")
return 0
def get_keys(self, session_id):
if session_id in self.sessions:
return self.sessions[session_id].keys
else:
self.logger.error("session not found")
return 1
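
Taken together, the class above is typically driven in a fixed order: open a session from the PSSH, install a service certificate (privacy mode), build a signed license request, feed the server's response back, and read the decrypted keys. A hedged sketch, assuming the usual pywidevine/cdm module layout (cdm.py, deviceconfig.py); pssh_b64, cert_b64 and send_challenge are placeholders for the service-specific parts handled elsewhere in this repo:

from pywidevine.cdm.cdm import Cdm
from pywidevine.cdm.deviceconfig import DeviceConfig, device_android_general

def fetch_content_keys(pssh_b64, cert_b64, send_challenge):
    # send_challenge is a placeholder callable: challenge bytes -> base64 license string
    cdm = Cdm()
    session_id = cdm.open_session(pssh_b64, DeviceConfig(device_android_general))
    cdm.set_service_certificate(session_id, cert_b64)   # enables privacy mode
    challenge = cdm.get_license_request(session_id)     # serialized SignedLicenseRequest
    cdm.provide_license(session_id, send_challenge(challenge))
    keys = [k for k in cdm.get_keys(session_id) if k.type == "CONTENT"]
    cdm.close_session(session_id)
    return keys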


@ -0,0 +1,115 @@
import os
device_chromecdm_903 = {
"name": "chromecdm_903",
"description": "chrome cdm windows 903",
"security_level": 3,
"session_id_type": "chrome",
"private_key_available": True,
"vmp": False,
"send_key_control_nonce": False,
}
device_android_general = {
"name": "android_general",
"description": "android_general lvl3 security level",
"security_level": 3,
"session_id_type": "android",
"private_key_available": True,
"vmp": False,
"send_key_control_nonce": True,
}
devices_available = [
device_android_general,
device_chromecdm_903,
]
FILES_FOLDER = "devices"
class DeviceConfig:
def __init__(self, device):
self.device_name = device["name"]
self.description = device["description"]
self.security_level = device["security_level"]
self.session_id_type = device["session_id_type"]
self.private_key_available = device["private_key_available"]
self.vmp = device["vmp"]
self.send_key_control_nonce = device["send_key_control_nonce"]
if "keybox_filename" in device:
self.keybox_filename = os.path.join(
os.path.dirname(__file__),
FILES_FOLDER,
device["name"],
device["keybox_filename"],
)
else:
self.keybox_filename = os.path.join(
os.path.dirname(__file__), FILES_FOLDER, device["name"], "keybox"
)
if "device_cert_filename" in device:
self.device_cert_filename = os.path.join(
os.path.dirname(__file__),
FILES_FOLDER,
device["name"],
device["device_cert_filename"],
)
else:
self.device_cert_filename = os.path.join(
os.path.dirname(__file__), FILES_FOLDER, device["name"], "device_cert"
)
if "device_private_key_filename" in device:
self.device_private_key_filename = os.path.join(
os.path.dirname(__file__),
FILES_FOLDER,
device["name"],
device["device_private_key_filename"],
)
else:
self.device_private_key_filename = os.path.join(
os.path.dirname(__file__),
FILES_FOLDER,
device["name"],
"device_private_key",
)
if "device_client_id_blob_filename" in device:
self.device_client_id_blob_filename = os.path.join(
os.path.dirname(__file__),
FILES_FOLDER,
device["name"],
device["device_client_id_blob_filename"],
)
else:
self.device_client_id_blob_filename = os.path.join(
os.path.dirname(__file__),
FILES_FOLDER,
device["name"],
"device_client_id_blob",
)
if "device_vmp_blob_filename" in device:
self.device_vmp_blob_filename = os.path.join(
os.path.dirname(__file__),
FILES_FOLDER,
device["name"],
device["device_vmp_blob_filename"],
)
else:
self.device_vmp_blob_filename = os.path.join(
os.path.dirname(__file__),
FILES_FOLDER,
device["name"],
"device_vmp_blob",
)
def __repr__(self):
return (
"DeviceConfig(name={}, description={}, security_level={}, session_id_type={}, private_key_available={}, vmp={})"
).format(
self.device_name,
self.description,
self.security_level,
self.session_id_type,
self.private_key_available,
self.vmp,
)
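
As a quick illustration of the path resolution above (assuming this module sits next to a devices/ folder, as FILES_FOLDER implies, and keeps the usual deviceconfig.py name), selecting the bundled android profile resolves to per-device blob files:

from pywidevine.cdm.deviceconfig import DeviceConfig, device_android_general

cfg = DeviceConfig(device_android_general)
print(cfg.device_private_key_filename)      # .../devices/android_general/device_private_key
print(cfg.device_client_id_blob_filename)   # .../devices/android_general/device_client_id_blob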


@ -0,0 +1,27 @@
-----BEGIN RSA PRIVATE KEY-----
MIIEpQIBAAKCAQEA4sUKDpvMG/idF8oCH5AVSwFd5Mk+rEwOBsLZMYdliXWe1hn9
mdE6u9pjsr+bLrZjlKxMFqPPxbIUcC1Ii7BFSje2Fd8kxnaIprQWxDPgK+NSSx7v
Un452TyB1L9lx39ZBt0PlRfwjkCodX+I9y+oBga73NRh7hPbtLzXe/r/ubFBaEu+
aRkDZBwYPqHgH1RoFLuyFNMjfqGcPosGxceDtvPysmBxB93Hk2evml5fjdYGg6tx
z510g+XFPDFv7GSy1KuWqit83MqzPls9qAQMkwUc05ggjDhGCKW4/p97fn23WDFE
3TzSSsQvyJLKA3s9oJbtJCD/gOHYqDvnWn8zPwIDAQABAoIBAQDCWe1Mp+o+7sx0
XwWC15HoPruiIXg9YtGCqexLrqcvMEd5Z70Z32BfL8TSpbTyTA78lM6BeNPRs9Yg
bi8GyYQZH7ZG+IAkN+LWPPJmJa+y7ZjSGSkzoksiC+GZ3I/2cwZyA3Qfa+0XfgLi
8PMKJyXyREMt+DgWO57JQC/OakhRdCR19mM6NKd+ynd/IEz/NIbjMLDVKwW8HEPx
N3r5CU9O96nr62DI68KVj3jwUR3cDi/5xfhosYhCQjHJuobNbeFR18dY2nQNLWYd
S0wtskla1fl9eYHwYAzwru4wHT4WJC7+V4pscfCI0YZB6PslxDKrv73l5H1tz4cf
Vy58NRSBAoGBAPSmjoVtQzTvQ6PZIs81SF1ulJI9kUpyFaBoSSgt+2ZkeNtF6Hih
Zm7OVJ9wg9sfjpB3SFBUjuhXz/ts/t6dkA2PgCbrvhBMRKSGbfyhhtM2gRf002I4
bJ7Y0C/ont4WzC/XbXEkAmh+fG2/JRvbdVQaIdyS6MmVHtCtRsHEQZS5AoGBAO1K
IXOKAFA+320+Hkbqskfevmxrv+JHIdetliaREZwQH+VYUUM8u5/Kt3oyMat+mH90
rZOKQK2zM8cz4tKclTUT54nrtICxeo6UHVc56FqXZ6sVvVgm8Cnvt1md4XwG4FwQ
r/OlaM6Hr5HRf8dkzuzqm4ZQYRHGzZ6AMphj8Xu3AoGAdmo7p5dIJVH98kuCDrsi
iJ6iaNpF/buUfiyb5EfFXD0bRj7jE6hDdTSHPxjtqVzv2zrxFHipJwqBz5dlEYlA
FWA0ziHiv+66dsveZp4kLQ0/lMHaorre0E/vDJFSe/qa4DksbsvYIo2+WjxfkMk7
U/bGFwZAiHmWDbkg+16rw3kCgYEAyyodWf9eJVavlakJ404vNrnP8KSQtfyRTUii
toKewTBNHuBvM1JckoPOdCFlxZ+ukfIka56DojU8r+IM4qaOWdOg+sWE1mses9S9
CmHaPzZC3IjQhRlRp5ZHNcOnu7lnf2wKOmH1Sl+CQydMcDwvr0lvv6AyfDXq9zps
F2365CECgYEAmYgs/qwnh9m0aGDw/ZGrASoE0TxlpizPvsVDGx9t9UGC2Z+5QvAE
ZcQeKoLCbktr0BnRLI+W1g+KpXQGcnSF9VX/qwUlf72XA6C6kobQvW+Yd/H/IN5d
jPqoL/m41rRzm+J+9/Tfc8Aiy1kkllUYnVJdC5QLAIswuhI8lkaFTN4=
-----END RSA PRIVATE KEY-----


@ -0,0 +1,466 @@
syntax = "proto2";
// from x86 (partial), most of it from the ARM version:
message ClientIdentification {
enum TokenType {
KEYBOX = 0;
DEVICE_CERTIFICATE = 1;
REMOTE_ATTESTATION_CERTIFICATE = 2;
}
message NameValue {
required string Name = 1;
required string Value = 2;
}
message ClientCapabilities {
enum HdcpVersion {
HDCP_NONE = 0;
HDCP_V1 = 1;
HDCP_V2 = 2;
HDCP_V2_1 = 3;
HDCP_V2_2 = 4;
}
optional uint32 ClientToken = 1;
optional uint32 SessionToken = 2;
optional uint32 VideoResolutionConstraints = 3;
optional HdcpVersion MaxHdcpVersion = 4;
optional uint32 OemCryptoApiVersion = 5;
}
required TokenType Type = 1;
//optional bytes Token = 2; // by default the client treats this as blob, but it's usually a DeviceCertificate, so for usefulness sake, I'm replacing it with this one:
optional SignedDeviceCertificate Token = 2; // use this when parsing, "bytes" when building a client id blob
repeated NameValue ClientInfo = 3;
optional bytes ProviderClientToken = 4;
optional uint32 LicenseCounter = 5;
optional ClientCapabilities _ClientCapabilities = 6; // how should we deal with duped names? will have to look at proto docs later
optional FileHashes _FileHashes = 7; // vmp blob goes here
}
message DeviceCertificate {
enum CertificateType {
ROOT = 0;
INTERMEDIATE = 1;
USER_DEVICE = 2;
SERVICE = 3;
}
required CertificateType Type = 1; // the compiled code reused this as ProvisionedDeviceInfo.WvSecurityLevel, however that is incorrect (compiler aliased it as they're both identical as a structure)
optional bytes SerialNumber = 2;
optional uint32 CreationTimeSeconds = 3;
optional bytes PublicKey = 4;
optional uint32 SystemId = 5;
optional uint32 TestDeviceDeprecated = 6; // is it bool or int?
optional bytes ServiceId = 7; // service URL for service certificates
}
// missing some references,
message DeviceCertificateStatus {
enum CertificateStatus {
VALID = 0;
REVOKED = 1;
}
optional bytes SerialNumber = 1;
optional CertificateStatus Status = 2;
optional ProvisionedDeviceInfo DeviceInfo = 4; // where is 3? is it deprecated?
}
message DeviceCertificateStatusList {
optional uint32 CreationTimeSeconds = 1;
repeated DeviceCertificateStatus CertificateStatus = 2;
}
message EncryptedClientIdentification {
required string ServiceId = 1;
optional bytes ServiceCertificateSerialNumber = 2;
required bytes EncryptedClientId = 3;
required bytes EncryptedClientIdIv = 4;
required bytes EncryptedPrivacyKey = 5;
}
// todo: fill (for this top-level type, it might be impossible/difficult)
enum LicenseType {
ZERO = 0;
DEFAULT = 1; // 1 is STREAMING/temporary license; on recent versions may go up to 3 (latest x86); it might be persist/don't persist type, unconfirmed
OFFLINE = 2;
}
// todo: fill (for this top-level type, it might be impossible/difficult)
// this is just a guess because these globals got lost, but really, do we need more?
enum ProtocolVersion {
CURRENT = 21; // don't have symbols for this
}
message LicenseIdentification {
optional bytes RequestId = 1;
optional bytes SessionId = 2;
optional bytes PurchaseId = 3;
optional LicenseType Type = 4;
optional uint32 Version = 5;
optional bytes ProviderSessionToken = 6;
}
message License {
message Policy {
optional bool CanPlay = 1; // changed from uint32 to bool
optional bool CanPersist = 2;
optional bool CanRenew = 3;
optional uint32 RentalDurationSeconds = 4;
optional uint32 PlaybackDurationSeconds = 5;
optional uint32 LicenseDurationSeconds = 6;
optional uint32 RenewalRecoveryDurationSeconds = 7;
optional string RenewalServerUrl = 8;
optional uint32 RenewalDelaySeconds = 9;
optional uint32 RenewalRetryIntervalSeconds = 10;
optional bool RenewWithUsage = 11; // was uint32
}
message KeyContainer {
enum KeyType {
SIGNING = 1;
CONTENT = 2;
KEY_CONTROL = 3;
OPERATOR_SESSION = 4;
}
enum SecurityLevel {
SW_SECURE_CRYPTO = 1;
SW_SECURE_DECODE = 2;
HW_SECURE_CRYPTO = 3;
HW_SECURE_DECODE = 4;
HW_SECURE_ALL = 5;
}
message OutputProtection {
enum CGMS {
COPY_FREE = 0;
COPY_ONCE = 2;
COPY_NEVER = 3;
CGMS_NONE = 0x2A; // PC default!
}
optional ClientIdentification.ClientCapabilities.HdcpVersion Hdcp = 1; // it's most likely a copy of Hdcp version available here, but compiler optimized it away
optional CGMS CgmsFlags = 2;
}
message KeyControl {
required bytes KeyControlBlock = 1; // what is this?
required bytes Iv = 2;
}
message OperatorSessionKeyPermissions {
optional uint32 AllowEncrypt = 1;
optional uint32 AllowDecrypt = 2;
optional uint32 AllowSign = 3;
optional uint32 AllowSignatureVerify = 4;
}
message VideoResolutionConstraint {
optional uint32 MinResolutionPixels = 1;
optional uint32 MaxResolutionPixels = 2;
optional OutputProtection RequiredProtection = 3;
}
optional bytes Id = 1;
optional bytes Iv = 2;
optional bytes Key = 3;
optional KeyType Type = 4;
optional SecurityLevel Level = 5;
optional OutputProtection RequiredProtection = 6;
optional OutputProtection RequestedProtection = 7;
optional KeyControl _KeyControl = 8; // duped names, etc
optional OperatorSessionKeyPermissions _OperatorSessionKeyPermissions = 9; // duped names, etc
repeated VideoResolutionConstraint VideoResolutionConstraints = 10;
}
optional LicenseIdentification Id = 1;
optional Policy _Policy = 2; // duped names, etc
repeated KeyContainer Key = 3;
optional uint32 LicenseStartTime = 4;
optional uint32 RemoteAttestationVerified = 5; // bool?
optional bytes ProviderClientToken = 6;
// there might be more, check with newer versions (I see field 7-8 in a lic)
// this appeared in latest x86:
optional uint32 ProtectionScheme = 7; // type unconfirmed fully, but it's likely as WidevineCencHeader describes it (fourcc)
}
message LicenseError {
enum Error {
INVALID_DEVICE_CERTIFICATE = 1;
REVOKED_DEVICE_CERTIFICATE = 2;
SERVICE_UNAVAILABLE = 3;
}
//LicenseRequest.RequestType ErrorCode; // clang mismatch
optional Error ErrorCode = 1;
}
message LicenseRequest {
message ContentIdentification {
message CENC {
//optional bytes Pssh = 1; // the client's definition is opaque, it doesn't care about the contents, but the PSSH has a clear definition that is understood and requested by the server, thus I'll replace it with:
optional WidevineCencHeader Pssh = 1;
optional LicenseType LicenseType = 2; // unfortunately the LicenseType symbols are not present, acceptable value seems to only be 1 (is this persist/don't persist? look into it!)
optional bytes RequestId = 3;
}
message WebM {
optional bytes Header = 1; // identical to CENC, aside from PSSH and the parent field number used
optional LicenseType LicenseType = 2;
optional bytes RequestId = 3;
}
message ExistingLicense {
optional LicenseIdentification LicenseId = 1;
optional uint32 SecondsSinceStarted = 2;
optional uint32 SecondsSinceLastPlayed = 3;
optional bytes SessionUsageTableEntry = 4; // interesting! try to figure out the connection between the usage table blob and KCB!
}
optional CENC CencId = 1;
optional WebM WebmId = 2;
optional ExistingLicense License = 3;
}
enum RequestType {
NEW = 1;
RENEWAL = 2;
RELEASE = 3;
}
optional ClientIdentification ClientId = 1;
optional ContentIdentification ContentId = 2;
optional RequestType Type = 3;
optional uint32 RequestTime = 4;
optional bytes KeyControlNonceDeprecated = 5;
optional ProtocolVersion ProtocolVersion = 6; // lacking symbols for this
optional uint32 KeyControlNonce = 7;
optional EncryptedClientIdentification EncryptedClientId = 8;
}
// raw pssh hack
message LicenseRequestRaw {
message ContentIdentification {
message CENC {
optional bytes Pssh = 1; // the client's definition is opaque, it doesn't care about the contents, but the PSSH has a clear definition that is understood and requested by the server, thus I'll replace it with:
//optional WidevineCencHeader Pssh = 1;
optional LicenseType LicenseType = 2; // unfortunately the LicenseType symbols are not present, acceptable value seems to only be 1 (is this persist/don't persist? look into it!)
optional bytes RequestId = 3;
}
message WebM {
optional bytes Header = 1; // identical to CENC, aside from PSSH and the parent field number used
optional LicenseType LicenseType = 2;
optional bytes RequestId = 3;
}
message ExistingLicense {
optional LicenseIdentification LicenseId = 1;
optional uint32 SecondsSinceStarted = 2;
optional uint32 SecondsSinceLastPlayed = 3;
optional bytes SessionUsageTableEntry = 4; // interesting! try to figure out the connection between the usage table blob and KCB!
}
optional CENC CencId = 1;
optional WebM WebmId = 2;
optional ExistingLicense License = 3;
}
enum RequestType {
NEW = 1;
RENEWAL = 2;
RELEASE = 3;
}
optional ClientIdentification ClientId = 1;
optional ContentIdentification ContentId = 2;
optional RequestType Type = 3;
optional uint32 RequestTime = 4;
optional bytes KeyControlNonceDeprecated = 5;
optional ProtocolVersion ProtocolVersion = 6; // lacking symbols for this
optional uint32 KeyControlNonce = 7;
optional EncryptedClientIdentification EncryptedClientId = 8;
}
message ProvisionedDeviceInfo {
enum WvSecurityLevel {
LEVEL_UNSPECIFIED = 0;
LEVEL_1 = 1;
LEVEL_2 = 2;
LEVEL_3 = 3;
}
optional uint32 SystemId = 1;
optional string Soc = 2;
optional string Manufacturer = 3;
optional string Model = 4;
optional string DeviceType = 5;
optional uint32 ModelYear = 6;
optional WvSecurityLevel SecurityLevel = 7;
optional uint32 TestDevice = 8; // bool?
}
// todo: fill
message ProvisioningOptions {
}
// todo: fill
message ProvisioningRequest {
}
// todo: fill
message ProvisioningResponse {
}
message RemoteAttestation {
optional EncryptedClientIdentification Certificate = 1;
optional string Salt = 2;
optional string Signature = 3;
}
// todo: fill
message SessionInit {
}
// todo: fill
message SessionState {
}
// todo: fill
message SignedCertificateStatusList {
}
message SignedDeviceCertificate {
//optional bytes DeviceCertificate = 1; // again, they use a buffer where it's supposed to be a message, so we'll replace it with what it really is:
optional DeviceCertificate _DeviceCertificate = 1; // how should we deal with duped names? will have to look at proto docs later
optional bytes Signature = 2;
optional SignedDeviceCertificate Signer = 3;
}
// todo: fill
message SignedProvisioningMessage {
}
// the root of all messages, from either server or client
message SignedMessage {
enum MessageType {
LICENSE_REQUEST = 1;
LICENSE = 2;
ERROR_RESPONSE = 3;
SERVICE_CERTIFICATE_REQUEST = 4;
SERVICE_CERTIFICATE = 5;
}
optional MessageType Type = 1; // has an incorrect overlap with License_KeyContainer_SecurityLevel
optional bytes Msg = 2; // this has to be cast dynamically, to LicenseRequest, License or LicenseError (? unconfirmed); for a request, no fields other than Type need to be present
// for SERVICE_CERTIFICATE, only Type and Msg are present, and it's just a DeviceCertificate with CertificateType set to SERVICE
optional bytes Signature = 3; // might be different types of signatures (e.g. RSA vs AES CMAC(??), unconfirmed for now)
optional bytes SessionKey = 4; // often RSA wrapped for licenses
optional RemoteAttestation RemoteAttestation = 5;
}
// This message is copied from google's docs, not reversed:
message WidevineCencHeader {
enum Algorithm {
UNENCRYPTED = 0;
AESCTR = 1;
};
optional Algorithm algorithm = 1;
repeated bytes key_id = 2;
// Content provider name.
optional string provider = 3;
// A content identifier, specified by content provider.
optional bytes content_id = 4;
// Track type. Acceptable values are SD, HD and AUDIO. Used to
// differentiate content keys used by an asset.
optional string track_type_deprecated = 5;
// The name of a registered policy to be used for this asset.
optional string policy = 6;
// Crypto period index, for media using key rotation.
optional uint32 crypto_period_index = 7;
// Optional protected context for group content. The grouped_license is a
// serialized SignedMessage.
optional bytes grouped_license = 8;
// Protection scheme identifying the encryption algorithm.
// Represented as one of the following 4CC values:
// 'cenc' (AESCTR), 'cbc1' (AESCBC),
// 'cens' (AESCTR subsample), 'cbcs' (AESCBC subsample).
optional uint32 protection_scheme = 9;
// Optional. For media using key rotation, this represents the duration
// of each crypto period in seconds.
optional uint32 crypto_period_seconds = 10;
}
// remove these when using it outside of protoc:
// from here on, it's just for testing, these messages don't exist in the binaries, I'm adding them to avoid detecting type programmatically
message SignedLicenseRequest {
enum MessageType {
LICENSE_REQUEST = 1;
LICENSE = 2;
ERROR_RESPONSE = 3;
SERVICE_CERTIFICATE_REQUEST = 4;
SERVICE_CERTIFICATE = 5;
}
optional MessageType Type = 1; // has an incorrect overlap with License_KeyContainer_SecurityLevel
optional LicenseRequest Msg = 2; // this has to be cast dynamically, to LicenseRequest, License or LicenseError (? unconfirmed); for a request, no fields other than Type need to be present
// for SERVICE_CERTIFICATE, only Type and Msg are present, and it's just a DeviceCertificate with CertificateType set to SERVICE
optional bytes Signature = 3; // might be different types of signatures (e.g. RSA vs AES CMAC(??), unconfirmed for now)
optional bytes SessionKey = 4; // often RSA wrapped for licenses
optional RemoteAttestation RemoteAttestation = 5;
}
// hack
message SignedLicenseRequestRaw {
enum MessageType {
LICENSE_REQUEST = 1;
LICENSE = 2;
ERROR_RESPONSE = 3;
SERVICE_CERTIFICATE_REQUEST = 4;
SERVICE_CERTIFICATE = 5;
}
optional MessageType Type = 1; // has an incorrect overlap with License_KeyContainer_SecurityLevel
optional LicenseRequestRaw Msg = 2; // this has to be cast dynamically, to LicenseRequest, License or LicenseError (? unconfirmed); for a request, no fields other than Type need to be present
// for SERVICE_CERTIFICATE, only Type and Msg are present, and it's just a DeviceCertificate with CertificateType set to SERVICE
optional bytes Signature = 3; // might be different types of signatures (e.g. RSA vs AES CMAC(??), unconfirmed for now)
optional bytes SessionKey = 4; // often RSA wrapped for licenses
optional RemoteAttestation RemoteAttestation = 5;
}
message SignedLicense {
enum MessageType {
LICENSE_REQUEST = 1;
LICENSE = 2;
ERROR_RESPONSE = 3;
SERVICE_CERTIFICATE_REQUEST = 4;
SERVICE_CERTIFICATE = 5;
}
optional MessageType Type = 1; // has an incorrect overlap with License_KeyContainer_SecurityLevel
optional License Msg = 2; // this has to be cast dynamically, to LicenseRequest, License or LicenseError (? unconfirmed); for a request, no fields other than Type need to be present
// for SERVICE_CERTIFICATE, only Type and Msg are present, and it's just a DeviceCertificate with CertificateType set to SERVICE
optional bytes Signature = 3; // might be different types of signatures (e.g. RSA vs AES CMAC(??), unconfirmed for now)
optional bytes SessionKey = 4; // often RSA wrapped for licenses
optional RemoteAttestation RemoteAttestation = 5;
}
message SignedServiceCertificate {
enum MessageType {
LICENSE_REQUEST = 1;
LICENSE = 2;
ERROR_RESPONSE = 3;
SERVICE_CERTIFICATE_REQUEST = 4;
SERVICE_CERTIFICATE = 5;
}
optional MessageType Type = 1; // has an incorrect overlap with License_KeyContainer_SecurityLevel
optional SignedDeviceCertificate Msg = 2; // this has to be cast dynamically, to LicenseRequest, License or LicenseError (? unconfirmed); for a request, no fields other than Type need to be present
// for SERVICE_CERTIFICATE, only Type and Msg are present, and it's just a DeviceCertificate with CertificateType set to SERVICE
optional bytes Signature = 3; // might be different types of signatures (e.g. RSA vs AES CMAC(??), unconfirmed for now)
optional bytes SessionKey = 4; // often RSA wrapped for licenses
optional RemoteAttestation RemoteAttestation = 5;
}
//vmp support
message FileHashes {
message Signature {
optional string filename = 1;
optional bool test_signing = 2; //0 - release, 1 - testing
optional bytes SHA512Hash = 3;
optional bool main_exe = 4; //0 for dlls, 1 for exe, this is field 3 in file
optional bytes signature = 5;
}
optional bytes signer = 1;
repeated Signature signatures = 2;
}
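
A hedged sketch of using these definitions once compiled: the Cdm class earlier imports the compiled module as pywidevine.cdm.formats.wv_proto2_pb2, so the same import is assumed here; pssh_b64 is a placeholder for a base64 PSSH box from a manifest, and the 32-byte slice mirrors the pssh-box-header skip used in _parse_init_data.

import base64
from pywidevine.cdm.formats import wv_proto2_pb2 as wv_proto2

pssh_b64 = "..."  # placeholder: base64 PSSH box taken from the stream manifest

header = wv_proto2.WidevineCencHeader()
header.ParseFromString(base64.b64decode(pssh_b64)[32:])  # skip the PSSH box header
print(header.provider, [kid.hex() for kid in header.key_id])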

File diff suppressed because one or more lines are too long


@ -0,0 +1,389 @@
// beware proto3 won't show missing fields it seems, need to change to "proto2" and add "optional" before every field, and remove all the dummy enum members I added:
syntax = "proto3";
// from x86 (partial), most of it from the ARM version:
message ClientIdentification {
enum TokenType {
KEYBOX = 0;
DEVICE_CERTIFICATE = 1;
REMOTE_ATTESTATION_CERTIFICATE = 2;
}
message NameValue {
string Name = 1;
string Value = 2;
}
message ClientCapabilities {
enum HdcpVersion {
HDCP_NONE = 0;
HDCP_V1 = 1;
HDCP_V2 = 2;
HDCP_V2_1 = 3;
HDCP_V2_2 = 4;
}
uint32 ClientToken = 1;
uint32 SessionToken = 2;
uint32 VideoResolutionConstraints = 3;
HdcpVersion MaxHdcpVersion = 4;
uint32 OemCryptoApiVersion = 5;
}
TokenType Type = 1;
//bytes Token = 2; // by default the client treats this as blob, but it's usually a DeviceCertificate, so for usefulness sake, I'm replacing it with this one:
SignedDeviceCertificate Token = 2;
repeated NameValue ClientInfo = 3;
bytes ProviderClientToken = 4;
uint32 LicenseCounter = 5;
ClientCapabilities _ClientCapabilities = 6; // how should we deal with duped names? will have to look at proto docs later
}
message DeviceCertificate {
enum CertificateType {
ROOT = 0;
INTERMEDIATE = 1;
USER_DEVICE = 2;
SERVICE = 3;
}
//ProvisionedDeviceInfo.WvSecurityLevel Type = 1; // is this how one is supposed to call it? (it's an enum) there might be a bug here, with CertificateType getting confused with WvSecurityLevel, for now renaming it (verify against other binaries)
CertificateType Type = 1;
bytes SerialNumber = 2;
uint32 CreationTimeSeconds = 3;
bytes PublicKey = 4;
uint32 SystemId = 5;
uint32 TestDeviceDeprecated = 6; // is it bool or int?
bytes ServiceId = 7; // service URL for service certificates
}
// missing some references,
message DeviceCertificateStatus {
enum CertificateStatus {
VALID = 0;
REVOKED = 1;
}
bytes SerialNumber = 1;
CertificateStatus Status = 2;
ProvisionedDeviceInfo DeviceInfo = 4; // where is 3? is it deprecated?
}
message DeviceCertificateStatusList {
uint32 CreationTimeSeconds = 1;
repeated DeviceCertificateStatus CertificateStatus = 2;
}
message EncryptedClientIdentification {
string ServiceId = 1;
bytes ServiceCertificateSerialNumber = 2;
bytes EncryptedClientId = 3;
bytes EncryptedClientIdIv = 4;
bytes EncryptedPrivacyKey = 5;
}
// todo: fill (for this top-level type, it might be impossible/difficult)
enum LicenseType {
ZERO = 0;
DEFAULT = 1; // do not know what this is either, but should be 1; on recent versions may go up to 3 (latest x86)
}
// todo: fill (for this top-level type, it might be impossible/difficult)
// this is just a guess because these globals got lost, but really, do we need more?
enum ProtocolVersion {
DUMMY = 0;
CURRENT = 21; // don't have symbols for this
}
message LicenseIdentification {
bytes RequestId = 1;
bytes SessionId = 2;
bytes PurchaseId = 3;
LicenseType Type = 4;
uint32 Version = 5;
bytes ProviderSessionToken = 6;
}
message License {
message Policy {
uint32 CanPlay = 1;
uint32 CanPersist = 2;
uint32 CanRenew = 3;
uint32 RentalDurationSeconds = 4;
uint32 PlaybackDurationSeconds = 5;
uint32 LicenseDurationSeconds = 6;
uint32 RenewalRecoveryDurationSeconds = 7;
string RenewalServerUrl = 8;
uint32 RenewalDelaySeconds = 9;
uint32 RenewalRetryIntervalSeconds = 10;
uint32 RenewWithUsage = 11;
uint32 UnknownPolicy12 = 12;
}
message KeyContainer {
enum KeyType {
_NOKEYTYPE = 0; // dummy, added to satisfy proto3, not present in original
SIGNING = 1;
CONTENT = 2;
KEY_CONTROL = 3;
OPERATOR_SESSION = 4;
}
enum SecurityLevel {
_NOSECLEVEL = 0; // dummy, added to satisfy proto3, not present in original
SW_SECURE_CRYPTO = 1;
SW_SECURE_DECODE = 2;
HW_SECURE_CRYPTO = 3;
HW_SECURE_DECODE = 4;
HW_SECURE_ALL = 5;
}
message OutputProtection {
enum CGMS {
COPY_FREE = 0;
COPY_ONCE = 2;
COPY_NEVER = 3;
CGMS_NONE = 0x2A; // PC default!
}
ClientIdentification.ClientCapabilities.HdcpVersion Hdcp = 1; // it's most likely a copy of Hdcp version available here, but compiler optimized it away
CGMS CgmsFlags = 2;
}
message KeyControl {
bytes KeyControlBlock = 1; // what is this?
bytes Iv = 2;
}
message OperatorSessionKeyPermissions {
uint32 AllowEncrypt = 1;
uint32 AllowDecrypt = 2;
uint32 AllowSign = 3;
uint32 AllowSignatureVerify = 4;
}
message VideoResolutionConstraint {
uint32 MinResolutionPixels = 1;
uint32 MaxResolutionPixels = 2;
OutputProtection RequiredProtection = 3;
}
bytes Id = 1;
bytes Iv = 2;
bytes Key = 3;
KeyType Type = 4;
SecurityLevel Level = 5;
OutputProtection RequiredProtection = 6;
OutputProtection RequestedProtection = 7;
KeyControl _KeyControl = 8; // duped names, etc
OperatorSessionKeyPermissions _OperatorSessionKeyPermissions = 9; // duped names, etc
repeated VideoResolutionConstraint VideoResolutionConstraints = 10;
}
LicenseIdentification Id = 1;
Policy _Policy = 2; // duped names, etc
repeated KeyContainer Key = 3;
uint32 LicenseStartTime = 4;
uint32 RemoteAttestationVerified = 5; // bool?
bytes ProviderClientToken = 6;
// there might be more, check with newer versions (I see field 7-8 in a lic)
// this appeared in latest x86:
uint32 ProtectionScheme = 7; // type unconfirmed fully, but it's likely as WidevineCencHeader describes it (fourcc)
bytes UnknownHdcpDataField = 8;
}
message LicenseError {
enum Error {
DUMMY_NO_ERROR = 0; // dummy, added to satisfy proto3
INVALID_DEVICE_CERTIFICATE = 1;
REVOKED_DEVICE_CERTIFICATE = 2;
SERVICE_UNAVAILABLE = 3;
}
//LicenseRequest.RequestType ErrorCode; // clang mismatch
Error ErrorCode = 1;
}
message LicenseRequest {
message ContentIdentification {
message CENC {
// bytes Pssh = 1; // the client's definition is opaque, it doesn't care about the contents, but the PSSH has a clear definition that is understood and requested by the server, thus I'll replace it with:
WidevineCencHeader Pssh = 1;
LicenseType LicenseType = 2; // unfortunately the LicenseType symbols are not present, acceptable value seems to only be 1
bytes RequestId = 3;
}
message WebM {
bytes Header = 1; // identical to CENC, aside from PSSH and the parent field number used
LicenseType LicenseType = 2;
bytes RequestId = 3;
}
message ExistingLicense {
LicenseIdentification LicenseId = 1;
uint32 SecondsSinceStarted = 2;
uint32 SecondsSinceLastPlayed = 3;
bytes SessionUsageTableEntry = 4;
}
CENC CencId = 1;
WebM WebmId = 2;
ExistingLicense License = 3;
}
enum RequestType {
DUMMY_REQ_TYPE = 0; // dummy, added to satisfy proto3
NEW = 1;
RENEWAL = 2;
RELEASE = 3;
}
ClientIdentification ClientId = 1;
ContentIdentification ContentId = 2;
RequestType Type = 3;
uint32 RequestTime = 4;
bytes KeyControlNonceDeprecated = 5;
ProtocolVersion ProtocolVersion = 6; // lacking symbols for this
uint32 KeyControlNonce = 7;
EncryptedClientIdentification EncryptedClientId = 8;
}
message ProvisionedDeviceInfo {
enum WvSecurityLevel {
LEVEL_UNSPECIFIED = 0;
LEVEL_1 = 1;
LEVEL_2 = 2;
LEVEL_3 = 3;
}
uint32 SystemId = 1;
string Soc = 2;
string Manufacturer = 3;
string Model = 4;
string DeviceType = 5;
uint32 ModelYear = 6;
WvSecurityLevel SecurityLevel = 7;
uint32 TestDevice = 8; // bool?
}
// todo: fill
message ProvisioningOptions {
}
// todo: fill
message ProvisioningRequest {
}
// todo: fill
message ProvisioningResponse {
}
message RemoteAttestation {
EncryptedClientIdentification Certificate = 1;
string Salt = 2;
string Signature = 3;
}
// todo: fill
message SessionInit {
}
// todo: fill
message SessionState {
}
// todo: fill
message SignedCertificateStatusList {
}
message SignedDeviceCertificate {
//bytes DeviceCertificate = 1; // again, they use a buffer where it's supposed to be a message, so we'll replace it with what it really is:
DeviceCertificate _DeviceCertificate = 1; // how should we deal with duped names? will have to look at proto docs later
bytes Signature = 2;
SignedDeviceCertificate Signer = 3;
}
// todo: fill
message SignedProvisioningMessage {
}
// the root of all messages, from either server or client
message SignedMessage {
enum MessageType {
DUMMY_MSG_TYPE = 0; // dummy, added to satisfy proto3
LICENSE_REQUEST = 1;
LICENSE = 2;
ERROR_RESPONSE = 3;
SERVICE_CERTIFICATE_REQUEST = 4;
SERVICE_CERTIFICATE = 5;
}
MessageType Type = 1; // has an incorrect overlap with License_KeyContainer_SecurityLevel
bytes Msg = 2; // this has to be cast dynamically, to LicenseRequest, License or LicenseError (? unconfirmed); for a request, no fields other than Type need to be present
// for SERVICE_CERTIFICATE, only Type and Msg are present, and it's just a DeviceCertificate with CertificateType set to SERVICE
bytes Signature = 3; // might be different types of signatures (e.g. RSA vs AES CMAC(??), unconfirmed for now)
bytes SessionKey = 4; // often RSA wrapped for licenses
RemoteAttestation RemoteAttestation = 5;
}
// This message is copied from google's docs, not reversed:
message WidevineCencHeader {
enum Algorithm {
UNENCRYPTED = 0;
AESCTR = 1;
};
Algorithm algorithm = 1;
repeated bytes key_id = 2;
// Content provider name.
string provider = 3;
// A content identifier, specified by content provider.
bytes content_id = 4;
// Track type. Acceptable values are SD, HD and AUDIO. Used to
// differentiate content keys used by an asset.
string track_type_deprecated = 5;
// The name of a registered policy to be used for this asset.
string policy = 6;
// Crypto period index, for media using key rotation.
uint32 crypto_period_index = 7;
// Optional protected context for group content. The grouped_license is a
// serialized SignedMessage.
bytes grouped_license = 8;
// Protection scheme identifying the encryption algorithm.
// Represented as one of the following 4CC values:
// 'cenc' (AESCTR), 'cbc1' (AESCBC),
// 'cens' (AESCTR subsample), 'cbcs' (AESCBC subsample).
uint32 protection_scheme = 9;
// Optional. For media using key rotation, this represents the duration
// of each crypto period in seconds.
uint32 crypto_period_seconds = 10;
}
// from here on, it's just for testing, these messages don't exist in the binaries, I'm adding them to avoid detecting type programmatically
message SignedLicenseRequest {
enum MessageType {
DUMMY_MSG_TYPE = 0; // dummy, added to satisfy proto3
LICENSE_REQUEST = 1;
LICENSE = 2;
ERROR_RESPONSE = 3;
SERVICE_CERTIFICATE_REQUEST = 4;
SERVICE_CERTIFICATE = 5;
}
MessageType Type = 1; // has an incorrect overlap with License_KeyContainer_SecurityLevel
LicenseRequest Msg = 2; // this has to be cast dynamically, to LicenseRequest, License or LicenseError (? unconfirmed); for a request, no fields other than Type need to be present
// for SERVICE_CERTIFICATE, only Type and Msg are present, and it's just a DeviceCertificate with CertificateType set to SERVICE
bytes Signature = 3; // might be different types of signatures (e.g. RSA vs AES CMAC(??), unconfirmed for now)
bytes SessionKey = 4; // often RSA wrapped for licenses
RemoteAttestation RemoteAttestation = 5;
}
message SignedLicense {
enum MessageType {
DUMMY_MSG_TYPE = 0; // dummy, added to satisfy proto3
LICENSE_REQUEST = 1;
LICENSE = 2;
ERROR_RESPONSE = 3;
SERVICE_CERTIFICATE_REQUEST = 4;
SERVICE_CERTIFICATE = 5;
}
MessageType Type = 1; // has an incorrect overlap with License_KeyContainer_SecurityLevel
License Msg = 2; // this has to be cast dynamically, to LicenseRequest, License or LicenseError (? unconfirmed); for a request, no fields other than Type need to be present
// for SERVICE_CERTIFICATE, only Type and Msg are present, and it's just a DeviceCertificate with CertificateType set to SERVICE
bytes Signature = 3; // might be different types of signatures (e.g. RSA vs AES CMAC(??), unconfirmed for now)
bytes SessionKey = 4; // often RSA wrapped for licenses
RemoteAttestation RemoteAttestation = 5;
}

File diff suppressed because one or more lines are too long


@ -0,0 +1,19 @@
# uncompyle6 version 3.3.2
# Python bytecode 3.6 (3379)
# Decompiled from: Python 3.7.3 (v3.7.3:ef4ec6ed12, Mar 25 2019, 22:22:05) [MSC v.1916 64 bit (AMD64)]
# Embedded file name: pywidevine\cdm\key.py
import binascii
class Key:
def __init__(self, kid, type, key, permissions=None):
self.kid = kid
self.type = type
self.key = key
# avoid the shared-mutable-default pitfall for the permissions list
self.permissions = permissions if permissions is not None else []
def __repr__(self):
if self.type == 'OPERATOR_SESSION':
return ('key(kid={}, type={}, key={}, permissions={})').format(self.kid, self.type, binascii.hexlify(self.key), self.permissions)
else:
return ('key(kid={}, type={}, key={})').format(self.kid, self.type, binascii.hexlify(self.key))


@ -0,0 +1,23 @@
# uncompyle6 version 3.3.2
# Python bytecode 3.6 (3379)
# Decompiled from: Python 3.7.3 (v3.7.3:ef4ec6ed12, Mar 25 2019, 22:22:05) [MSC v.1916 64 bit (AMD64)]
# Embedded file name: pywidevine\cdm\session.py
class Session:
def __init__(self, session_id, init_data, device_config, offline):
self.session_id = session_id
self.init_data = init_data
self.offline = offline
self.device_config = device_config
self.device_key = None
self.session_key = None
self.derived_keys = {'enc':None,
'auth_1':None,
'auth_2':None}
self.license_request = None
self.license = None
self.service_certificate = None
self.privacy_mode = False
self.keys = []

Some files were not shown because too many files have changed in this diff.