This commit is contained in:
Khan
2021-09-01 02:57:54 +05:00
parent 9df940f1fd
commit bf3c3712dd
222 changed files with 1007430 additions and 0 deletions

629
helpers/Muxer.py Normal file

@@ -0,0 +1,629 @@

import re, os, sys, subprocess, contextlib, json, glob
from configs.config import tool
from helpers.ripprocess import ripprocess
from pymediainfo import MediaInfo
import logging
class Muxer(object):
def __init__(self, **kwargs):
self.logger = logging.getLogger(__name__)
self.CurrentName_Original = kwargs.get("CurrentName", None)
self.CurrentName = kwargs.get("CurrentName", None)
self.SeasonFolder = kwargs.get("SeasonFolder", None)
self.CurrentHeigh = kwargs.get("CurrentHeigh", None)
self.CurrentWidth = kwargs.get("CurrentWidth", None)
self.source_tag = kwargs.get("Source", None)
self.AudioProfile = self.get_audio_id() # kwargs.get("AudioProfile", None)
self.VideoProfile = self.get_video_id() # kwargs.get("VideoProfile", None)
self.mkvmerge = tool().bin()["mkvmerge"]
self.merge = []
self.muxer_settings = tool().muxer()
##############################################################################
self.packer = kwargs.get("group", None)
self.extra_output_folder = self.packer["EXTRA_FOLDER"]
self.Group = (
self.packer["GROUP"]
if self.packer["GROUP"]
else self.muxer_settings["GROUP"]
)
self.muxer_scheme = (
self.packer["SCHEME"]
if self.packer["SCHEME"]
else self.muxer_settings["scheme"]
)
self.scheme = self.muxer_settings["schemeslist"][self.muxer_scheme]
self.Extras = self.muxer_settings["EXTRAS"]
        self.fps24 = self.source_tag in self.muxer_settings["FPS24"]
        self.default_mux = bool(self.muxer_settings["DEFAULT"])
self.PrepareMuxer()
def is_extra_folder(self):
extra_folder = None
if self.extra_output_folder:
if not os.path.isabs(self.extra_output_folder):
raise ValueError("Error you should provide full path dir: {}.".format(self.extra_output_folder))
if not os.path.exists(self.extra_output_folder):
try:
os.makedirs(self.extra_output_folder)
except Exception as e:
raise ValueError("Error when create folder dir [{}]: {}.".format(e, self.extra_output_folder))
extra_folder = self.extra_output_folder
return extra_folder
if self.muxer_settings["mkv_folder"]:
if not os.path.isabs(self.muxer_settings["mkv_folder"]):
raise ValueError("Error you should provide full path dir: {}.".format(self.muxer_settings["mkv_folder"]))
if not os.path.exists(self.muxer_settings["mkv_folder"]):
try:
os.makedirs(self.muxer_settings["mkv_folder"])
except Exception as e:
raise ValueError("Error when create folder dir [{}]: {}.".format(e, self.muxer_settings["mkv_folder"]))
extra_folder = self.muxer_settings["mkv_folder"]
return extra_folder
return extra_folder
def PrepareMuxer(self):
if self.muxer_settings["noTitle"]:
self.CurrentName = self.noTitle()
extra_folder = self.is_extra_folder()
if extra_folder:
self.SeasonFolder = extra_folder
else:
if not self.default_mux:
if self.SeasonFolder:
self.SeasonFolder = self.setFolder()
return
def SortFilesBySize(self):
file_list = []
audio_tracks = (
glob.glob(f"{self.CurrentName_Original}*.eac3")
+ glob.glob(f"{self.CurrentName_Original}*.ac3")
+ glob.glob(f"{self.CurrentName_Original}*.aac")
+ glob.glob(f"{self.CurrentName_Original}*.m4a")
+ glob.glob(f"{self.CurrentName_Original}*.dts")
)
if audio_tracks == []:
raise FileNotFoundError("no audio files found")
for file in audio_tracks:
file_list.append({"file": file, "size": os.path.getsize(file)})
file_list = sorted(file_list, key=lambda k: int(k["size"]))
return file_list[-1]["file"]
def GetVideoFile(self):
videofiles = [
"{} [{}p]_Demuxed.mp4",
"{} [{}p]_Demuxed.mp4",
"{} [{}p] [UHD]_Demuxed.mp4",
"{} [{}p] [UHD]_Demuxed.mp4",
"{} [{}p] [VP9]_Demuxed.mp4",
"{} [{}p] [HIGH]_Demuxed.mp4",
"{} [{}p] [VP9]_Demuxed.mp4",
"{} [{}p] [HEVC]_Demuxed.mp4",
"{} [{}p] [HDR]_Demuxed.mp4",
"{} [{}p] [HDR-DV]_Demuxed.mp4",
]
for videofile in videofiles:
filename = videofile.format(self.CurrentName_Original, self.CurrentHeigh)
if os.path.isfile(filename):
return filename
return None
def get_video_id(self):
video_file = self.GetVideoFile()
if not video_file:
raise ValueError("No Video file in Dir...")
media_info = MediaInfo.parse(video_file)
track = [track for track in media_info.tracks if track.track_type == "Video"][0]
if track.format == "AVC":
if track.encoding_settings:
return "x264"
return "H.264"
elif track.format == "HEVC":
if track.commercial_name == "HDR10" and track.color_primaries:
return "HDR.HEVC"
if track.commercial_name == "HEVC" and track.color_primaries:
return "HEVC"
return "DV.HEVC"
return None
def get_audio_id(self):
audio_id = None
media_info = MediaInfo.parse(self.SortFilesBySize())
track = [track for track in media_info.tracks if track.track_type == "Audio"][0]
if track.format == "E-AC-3":
audioCodec = "DDP"
elif track.format == "AC-3":
audioCodec = "DD"
elif track.format == "AAC":
audioCodec = "AAC"
elif track.format == "DTS":
audioCodec = "DTS"
elif "DTS" in track.format:
audioCodec = "DTS"
else:
audioCodec = "DDP"
if track.channel_s == 8:
channels = "7.1"
elif track.channel_s == 6:
channels = "5.1"
elif track.channel_s == 2:
channels = "2.0"
elif track.channel_s == 1:
channels = "1.0"
else:
channels = "5.1"
audio_id = (
f"{audioCodec}{channels}.Atmos"
if "Atmos" in track.commercial_name
else f"{audioCodec}{channels}"
)
return audio_id
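    # Worked example (values illustrative): an E-AC-3 track with 6 channels whose
    # commercial_name contains "Atmos" yields "DDP5.1.Atmos"; a plain 2-channel AAC
    # track yields "AAC2.0".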
def Heigh(self):
try:
Width = int(self.CurrentWidth)
Heigh = int(self.CurrentHeigh)
except Exception:
return self.CurrentHeigh
res1080p = "1080p"
res720p = "720p"
sd = ""
if Width >= 3840:
return "2160p"
if Width >= 2560:
return "1440p"
if Width > 1920:
if Heigh > 1440:
return "2160p"
return "1440p"
if Width == 1920:
return res1080p
elif Width == 1280:
return res720p
if Width >= 1400:
return res1080p
if Width < 1400 and Width >= 1100:
return res720p
if Heigh == 1080:
return res1080p
elif Heigh == 720:
return res720p
if Heigh >= 900:
return res1080p
if Heigh < 900 and Heigh >= 700:
return res720p
return sd
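    # Worked example (values illustrative): 3840x2160 -> "2160p", 1920x1080 -> "1080p",
    # 1280x720 -> "720p"; widths/heights below the 720p thresholds fall through to "" (SD).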
def noTitle(self):
regex = re.compile("(.*) [S]([0-9]+)[E]([0-9]+)")
if regex.search(self.CurrentName):
return regex.search(self.CurrentName).group(0)
return self.CurrentName
def Run(self, command):
self.logger.debug("muxing command: {}".format(command))
def unbuffered(proc, stream="stdout"):
newlines = ["\n", "\r\n", "\r"]
stream = getattr(proc, stream)
with contextlib.closing(stream):
while True:
out = []
last = stream.read(1)
# Don't loop forever
if last == "" and proc.poll() is not None:
break
while last not in newlines:
# Don't loop forever
if last == "" and proc.poll() is not None:
break
out.append(last)
last = stream.read(1)
out = "".join(out)
yield out
proc = subprocess.Popen(
command,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
bufsize=1,
universal_newlines=True,
)
self.logger.info("\nStart Muxing...")
for line in unbuffered(proc):
if "Progress:" in line:
sys.stdout.write("\r%s" % (line))
sys.stdout.flush()
elif "Multiplexing" in line:
sys.stdout.write("\r%s" % (line.replace("Multiplexing", "Muxing")))
sys.stdout.flush()
elif "Error" in line:
sys.stdout.write("\r%s" % (line))
sys.stdout.flush()
self.logger.info("")
def setName(self):
outputVideo = (
self.scheme.replace(
"{t}", ripprocess().CleanMyFileNamePlease(self.CurrentName)
)
.replace("{r}", self.Heigh())
.replace("{s}", self.source_tag)
.replace("{ac}", self.AudioProfile)
.replace("{vc}", self.VideoProfile)
.replace("{gr}", self.Group)
)
for i in range(10):
outputVideo = re.sub(r"(\.\.)", ".", outputVideo)
if self.SeasonFolder:
outputVideo = os.path.join(os.path.abspath(self.SeasonFolder), outputVideo)
outputVideo = outputVideo.replace("\\", "/")
return f"{outputVideo}.mkv"
def setFolder(self):
folder = (
self.scheme.replace(
"{t}", ripprocess().CleanMyFileNamePlease(self.SeasonFolder)
)
.replace("{r}", self.Heigh())
.replace("{s}", self.source_tag)
.replace("{ac}", self.AudioProfile)
.replace("{vc}", self.VideoProfile)
.replace("{gr}", self.Group)
)
for i in range(10):
folder = re.sub(r"(\.\.)", ".", folder)
return folder
def LanguageList(self):
LanguageList = [
["Hindi", "hin", "hin", "Hindi"],
["Tamil", "tam", "tam", "Tamil"],
["Telugu", "tel", "tel", "Telugu"],
["English", "eng", "eng", "English"],
["Afrikaans", "af", "afr", "Afrikaans"],
["Arabic", "ara", "ara", "Arabic"],
["Arabic (Syria)", "araSy", "ara", "Arabic Syria"],
["Arabic (Egypt)", "araEG", "ara", "Arabic Egypt"],
["Arabic (Kuwait)", "araKW", "ara", "Arabic Kuwait"],
["Arabic (Lebanon)", "araLB", "ara", "Arabic Lebanon"],
["Arabic (Algeria)", "araDZ", "ara", "Arabic Algeria"],
["Arabic (Bahrain)", "araBH", "ara", "Arabic Bahrain"],
["Arabic (Iraq)", "araIQ", "ara", "Arabic Iraq"],
["Arabic (Jordan)", "araJO", "ara", "Arabic Jordan"],
["Arabic (Libya)", "araLY", "ara", "Arabic Libya"],
["Arabic (Morocco)", "araMA", "ara", "Arabic Morocco"],
["Arabic (Oman)", "araOM", "ara", "Arabic Oman"],
["Arabic (Saudi Arabia)", "araSA", "ara", "Arabic Saudi Arabia"],
["Arabic (Tunisia)", "araTN", "ara", "Arabic Tunisia"],
[
"Arabic (United Arab Emirates)",
"araAE",
"ara",
"Arabic United Arab Emirates",
],
["Arabic (Yemen)", "araYE", "ara", "Arabic Yemen"],
["Armenian", "hye", "arm", "Armenian"],
["Assamese", "asm", "asm", "Assamese"],
["Bengali", "ben", "ben", "Bengali"],
["Basque", "eus", "baq", "Basque"],
["British English", "enGB", "eng", "British English"],
["Bulgarian", "bul", "bul", "Bulgarian"],
["Cantonese", "None", "chi", "Cantonese"],
["Catalan", "cat", "cat", "Catalan"],
["Simplified Chinese", "zhoS", "chi", "Chinese Simplified"],
["Traditional Chinese", "zhoT", "chi", "Chinese Traditional"],
["Croatian", "hrv", "hrv", "Croatian"],
["Czech", "ces", "cze", "Czech"],
["Danish", "dan", "dan", "Danish"],
["Dutch", "nld", "dut", "Dutch"],
["Estonian", "est", "est", "Estonian"],
["Filipino", "fil", "fil", "Filipino"],
["Finnish", "fin", "fin", "Finnish"],
["Flemish", "nlBE", "dut", "Flemish"],
["French", "fra", "fre", "French"],
["French Canadian", "caFra", "fre", "French Canadian"],
["Canadian French", "caFra", "fre", "Canadian French"],
["German", "deu", "ger", "German"],
["Greek", "ell", "gre", "Greek"],
["Gujarati", "guj", "guj", "Gujarati"],
["Hebrew", "heb", "heb", "Hebrew"],
["Hungarian", "hun", "hun", "Hungarian"],
["Icelandic", "isl", "ice", "Icelandic"],
["Indonesian", "ind", "ind", "Indonesian"],
["Italian", "ita", "ita", "Italian"],
["Japanese", "jpn", "jpn", "Japanese"],
["Kannada (India)", "kan", "kan", "Kannada (India)"],
["Khmer", "khm", "khm", "Khmer"],
["Klingon", "tlh", "tlh", "Klingon"],
["Korean", "kor", "kor", "Korean"],
["Lithuanian", "lit", "lit", "Lithuanian"],
["Latvian", "lav", "lav", "Latvian"],
["Malay", "msa", "may", "Malay"],
["Malayalam", "mal", "mal", "Malayalam"],
["Mandarin", "None", "chi", "Mandarin"],
["Mandarin Chinese (Simplified)", "zh-Hans", "chi", "Simplified"],
["Mandarin Chinese (Traditional)", "zh-Hant", "chi", "Traditional"],
["Yue Chinese", "yue", "chi", "(Yue Chinese)"],
["Manipuri", "mni", "mni", "Manipuri"],
["Marathi", "mar", "mar", "Marathi"],
["No Dialogue", "zxx", "zxx", "No Dialogue"],
["Norwegian", "nor", "nor", "Norwegian"],
["Norwegian Bokmal", "nob", "nob", "Norwegian Bokmal"],
["Persian", "fas", "per", "Persian"],
["Polish", "pol", "pol", "Polish"],
["Portuguese", "por", "por", "Portuguese"],
["Brazilian Portuguese", "brPor", "por", "Brazilian Portuguese"],
["Punjabi", "pan", "pan", "Punjabi"],
["Panjabi", "pan", "pan", "Panjabi"],
["Romanian", "ron", "rum", "Romanian"],
["Russian", "rus", "rus", "Russian"],
["Serbian", "srp", "srp", "Serbian"],
["Sinhala", "sin", "sin", "Sinhala"],
["Slovak", "slk", "slo", "Slovak"],
["Slovenian", "slv", "slv", "Slovenian"],
["Spanish", "spa", "spa", "Spanish"],
["European Spanish", "euSpa", "spa", "European Spanish"],
["Swedish", "swe", "swe", "Swedish"],
["Thai", "tha", "tha", "Thai"],
["Tagalog", "tgl", "tgl", "Tagalog"],
["Turkish", "tur", "tur", "Turkish"],
["Ukrainian", "ukr", "ukr", "Ukrainian"],
["Urdu", "urd", "urd", "Urdu"],
["Vietnamese", "vie", "vie", "Vietnamese"],
]
return LanguageList
def ExtraLanguageList(self):
ExtraLanguageList = [
["Polish - Dubbing", "pol", "pol", "Polish - Dubbing"],
["Polish - Lektor", "pol", "pol", "Polish - Lektor"],
]
return ExtraLanguageList
def AddChapters(self):
if os.path.isfile(self.CurrentName_Original + " Chapters.txt"):
self.merge += [
"--chapter-charset",
"UTF-8",
"--chapters",
self.CurrentName_Original + " Chapters.txt",
]
return
def AddVideo(self):
inputVideo = None
videofiles = [
"{} [{}p]_Demuxed.mp4",
"{} [{}p]_Demuxed.mp4",
"{} [{}p] [UHD]_Demuxed.mp4",
"{} [{}p] [UHD]_Demuxed.mp4",
"{} [{}p] [VP9]_Demuxed.mp4",
"{} [{}p] [HIGH]_Demuxed.mp4",
"{} [{}p] [VP9]_Demuxed.mp4",
"{} [{}p] [HEVC]_Demuxed.mp4",
"{} [{}p] [HDR]_Demuxed.mp4",
"{} [{}p] [HDR-DV]_Demuxed.mp4",
]
for videofile in videofiles:
filename = videofile.format(self.CurrentName_Original, self.CurrentHeigh)
if os.path.isfile(filename):
inputVideo = filename
break
if not inputVideo:
self.logger.info("cannot found video file.")
exit(-1)
if self.default_mux:
            outputVideo = (
                re.compile(
                    "|".join(map(re.escape, [".h264", ".h265", ".vp9", ".mp4"]))
                ).sub("", inputVideo)
                + ".mkv"
            )
if self.SeasonFolder:
outputVideo = os.path.join(
os.path.abspath(self.SeasonFolder), outputVideo
)
outputVideo = outputVideo.replace("\\", "/")
else:
outputVideo = self.setName()
self.outputVideo = outputVideo
if self.fps24:
self.merge += [
self.mkvmerge,
"--output",
outputVideo,
"--default-duration",
"0:24000/1001p",
"--language",
"0:und",
"--default-track",
"0:yes",
"(",
inputVideo,
")",
]
else:
self.merge += [
self.mkvmerge,
"--output",
outputVideo,
"--title",
'RAB',
"(",
inputVideo,
")",
]
return
def AddAudio(self):
audiofiles = [
"{} {}.ac3",
"{} {} - Audio Description.ac3",
"{} {}.eac3",
"{} {} - Audio Description.eac3",
"{} {}.aac",
"{} {} - Audio Description.aac",
]
for (audio_language, subs_language, language_id, language_name,) in (
self.LanguageList() + self.ExtraLanguageList()
):
for audiofile in audiofiles:
filename = audiofile.format(self.CurrentName_Original, audio_language)
if os.path.isfile(filename):
self.merge += [
"--language",
f"0:{language_id}",
"--track-name",
"0:Audio Description" if 'Audio Description' in filename
else f"0:{language_name}",
"--default-track",
"0:yes"
if subs_language == self.muxer_settings["AUDIO"]
else "0:no",
"(",
filename,
")",
]
return
def AddSubtitles(self):
srts = [
"{} {}.srt",
]
forceds = [
"{} forced-{}.srt",
]
sdhs = [
"{} sdh-{}.srt",
]
for (
audio_language,
subs_language,
language_id,
language_name,
) in self.LanguageList():
for subtitle in srts:
filename = subtitle.format(self.CurrentName_Original, subs_language)
if os.path.isfile(filename):
self.merge += [
"--language",
f"0:{language_id}",
"--track-name",
f"0:{language_name}",
"--forced-track",
"0:no",
"--default-track",
"0:yes"
if subs_language == self.muxer_settings["SUB"]
else "0:no",
"--compression",
"0:none",
"(",
filename,
")",
]
for subtitle in forceds:
filename = subtitle.format(self.CurrentName_Original, subs_language)
if os.path.isfile(filename):
self.merge += [
"--language",
f"0:{language_id}",
"--track-name",
f"0:Forced",
"--forced-track",
"0:yes",
"--default-track",
"0:no",
"--compression",
"0:none",
"(",
filename,
")",
]
for subtitle in sdhs:
filename = subtitle.format(self.CurrentName_Original, subs_language)
if os.path.isfile(filename):
self.merge += [
"--language",
f"0:{language_id}",
"--track-name",
f"0:SDH",
"--forced-track",
"0:no",
"--default-track",
"0:no",
"--compression",
"0:none",
"(",
filename,
")",
]
return
def startMux(self):
self.AddVideo()
self.AddAudio()
self.AddSubtitles()
self.AddChapters()
if not os.path.isfile(self.outputVideo):
self.Run(self.merge + self.Extras)
return self.outputVideo
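# Minimal usage sketch (illustrative; assumes the demuxed video/audio/subtitle files are
# already present in the working directory and configs.config is populated):
#
#   muxer = Muxer(
#       CurrentName="Show S01E01",
#       SeasonFolder="Show Season 1",
#       CurrentWidth=1920,
#       CurrentHeigh=1080,
#       Source="NF",
#       group={"EXTRA_FOLDER": None, "GROUP": None, "SCHEME": None},
#   )
#   output_mkv = muxer.startMux()  # builds the mkvmerge command and runs it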


@@ -0,0 +1,551 @@
import base64, binascii, json, os, re, random, requests, string, time, traceback, logging
from datetime import datetime
from Cryptodome.Cipher import AES, PKCS1_OAEP
from Cryptodome.Util import Padding
from Cryptodome.Hash import HMAC, SHA256
from Cryptodome.PublicKey import RSA
from pywidevine.cdm import cdm, deviceconfig
from configs.config import tool
class MSLClient:
def __init__(self, profiles=None, wv_keyexchange=True, proxies=None):
self.session = requests.session()
self.logger = logging.getLogger(__name__)
if proxies:
self.session.proxies.update(proxies)
self.nf_endpoints = {
"manifest": "https://www.netflix.com/nq/msl_v1/cadmium/pbo_manifests/^1.0.0/router",
"license": "https://www.netflix.com/nq/msl_v1/cadmium/pbo_licenses/^1.0.0/router",
}
######################################################################
self.config = tool().config("NETFLIX")
self.email = self.config["email"]
self.password = self.config["password"]
self.device = tool().devices()["NETFLIX-MANIFEST"]
self.save_rsa_location = self.config["token_file"]
self.languages = self.config["manifest_language"]
self.license_path = None
######################################################################
if os.path.isfile(self.save_rsa_location):
self.generatePrivateKey = RSA.importKey(
json.loads(open(self.save_rsa_location, "r").read())["RSA_KEY"]
)
else:
self.generatePrivateKey = RSA.generate(2048)
if wv_keyexchange:
self.wv_keyexchange = True
self.cdm = cdm.Cdm()
self.cdm_session = None
else:
self.wv_keyexchange = False
self.cdm = None
self.cdm_session = None
        self.manifest_challenge = ''  # set desired wv data to override the wvexchange data
self.profiles = profiles
self.logger.debug("Using profiles: {}".format(self.profiles))
esn = self.config["androidEsn"]
if esn is None:
self.logger.error(
                '\nAndroid ESN not found; set an ESN matching your CDM systemID in config.py'
)
else:
self.esn = esn
self.logger.debug("Using esn: " + self.esn)
self.messageid = random.randint(0, 2 ** 52)
self.session_keys = {} #~
self.header = {
"sender": self.esn,
"handshake": True,
"nonreplayable": 2,
"capabilities": {"languages": [], "compressionalgos": []},
"recipient": "Netflix",
"renewable": True,
"messageid": self.messageid,
"timestamp": time.time(),
}
self.setRSA()
def get_header_extra(self):
if self.wv_keyexchange:
self.cdm_session = self.cdm.open_session(
None,
deviceconfig.DeviceConfig(self.device),
b"\x0A\x7A\x00\x6C\x38\x2B",
True,
)
wv_request = base64.b64encode(
self.cdm.get_license_request(self.cdm_session)
).decode("utf-8")
self.header["keyrequestdata"] = [
{
"scheme": "WIDEVINE",
"keydata": {
"keyrequest": wv_request
}
}
]
else:
self.header["keyrequestdata"] = [
{
"scheme": "ASYMMETRIC_WRAPPED",
"keydata": {
"publickey": base64.b64encode(
self.generatePrivateKey.publickey().exportKey("DER")
).decode("utf8"),
"mechanism": "JWK_RSA",
"keypairid": "rsaKeypairId",
},
}
]
return self.header
def setRSA(self):
if os.path.isfile(self.save_rsa_location):
master_token = self.load_tokens()
expires = master_token["expiration"]
valid_until = datetime.utcfromtimestamp(int(expires))
present_time = datetime.now()
difference = valid_until - present_time
difference = difference.total_seconds() / 60 / 60
if difference < 10:
self.logger.debug("rsa file found. expired soon")
self.session_keys["session_keys"] = self.generate_handshake()
else:
self.logger.debug("rsa file found")
self.session_keys["session_keys"] = {
"mastertoken": master_token["mastertoken"],
"sequence_number": master_token["sequence_number"],
"encryption_key": master_token["encryption_key"],
"sign_key": master_token["sign_key"],
}
else:
self.logger.debug("rsa file not found")
self.session_keys["session_keys"] = self.generate_handshake()
def load_playlist(self, viewable_id):
payload = {
"version": 2,
"url": "/manifest", #"/licensedManifest"
"id": int(time.time()),
"languages": self.languages,
"params": {
#"challenge": self.manifest_challenge,
"type": "standard",
"viewableId": viewable_id,
"profiles": self.profiles,
"flavor": "STANDARD", #'PRE_FETCH'
"drmType": "widevine",
"usePsshBox": True,
"useHttpsStreams": True,
"supportsPreReleasePin": True,
"supportsWatermark": True,
'supportsUnequalizedDownloadables': True,
'requestEligibleABTests': True,
"isBranching": False,
'isNonMember': False,
'isUIAutoPlay': False,
"imageSubtitleHeight": 1080,
"uiVersion": "shakti-v4bf615c3",
'uiPlatform': 'SHAKTI',
"clientVersion": "6.0026.291.011",
'desiredVmaf': 'plus_lts', # phone_plus_exp
"showAllSubDubTracks": True,
#"preferredTextLocale": "ar",
#"preferredAudioLocale": "ar",
#"maxSupportedLanguages": 2,
"preferAssistiveAudio": False,
"deviceSecurityLevel": "3000",
'licenseType': 'standard',
'titleSpecificData': {
str(viewable_id): {
'unletterboxed': True
}
},
"videoOutputInfo": [
{
"type": "DigitalVideoOutputDescriptor",
"outputType": "unknown",
"supportedHdcpVersions": ['2.2'],
"isHdcpEngaged": True,
}
],
},
}
request_data = self.msl_request(payload)
response = self.session.post(self.nf_endpoints["manifest"], data=request_data)
manifest = json.loads(json.dumps(self.decrypt_response(response.text)))
if manifest.get("result"):
#with open('videoTraks.json', 'w', encoding='utf-8') as d:
#["result"]["video_tracks"]
# d.write(json.dumps(manifest, indent=2))
self.license_path = manifest["result"]["links"]["license"]["href"]
return manifest
if manifest.get("errormsg"):
self.logger.info(manifest["errormsg"])
return None
else:
self.logger.info(manifest)
return None
def decrypt_response(self, payload):
errored = False
try:
p = json.loads(payload)
if p.get("errordata"):
return json.loads(base64.b64decode(p["errordata"]).decode())
except:
payloads = re.split(
r',"signature":"[0-9A-Za-z/+=]+"}', payload.split("}}")[1]
)
payloads = [x + "}" for x in payloads]
new_payload = payloads[:-1]
chunks = []
for chunk in new_payload:
try:
payloadchunk = json.loads(chunk)["payload"]
encryption_envelope = payloadchunk
cipher = AES.new(
self.session_keys["session_keys"]["encryption_key"],
AES.MODE_CBC,
base64.b64decode(
json.loads(
base64.b64decode(encryption_envelope).decode("utf8")
)["iv"]
),
)
plaintext = cipher.decrypt(
base64.b64decode(
json.loads(
base64.b64decode(encryption_envelope).decode("utf8")
)["ciphertext"]
)
)
plaintext = json.loads(Padding.unpad(plaintext, 16).decode("utf8"))
data = plaintext["data"]
data = base64.b64decode(data).decode("utf8")
chunks.append(data)
except:
continue
decrypted_payload = "".join(chunks)
try:
return json.loads(decrypted_payload)
except:
traceback.print_exc()
self.logger.info("Unable to decrypt payloads...exiting")
exit(-1)
def generate_handshake(self):
self.logger.debug("generate_handshake")
header = self.get_header_extra()
request = {
"entityauthdata": {
"scheme": "NONE",
"authdata": {"identity": self.esn,}
},
"signature": "",
"headerdata": base64.b64encode(json.dumps(header).encode("utf8")).decode("utf8"),
}
response = self.session.post(
url=self.nf_endpoints["manifest"],
json=request,
)
try:
if response.json().get("errordata"):
self.logger.info("ERROR")
self.logger.info(
base64.b64decode(response.json()["errordata"]).decode()
)
exit(-1)
handshake = self.parse_handshake(response=response.json())
return handshake
except:
traceback.print_exc()
self.logger.info(response.text)
exit(-1)
def load_tokens(self):
with open(self.save_rsa_location, "r", encoding='utf-8') as f:
tokens_data = json.loads(f.read())
data = {
"mastertoken": tokens_data["mastertoken"],
"sequence_number": tokens_data["sequence_number"],
"encryption_key": base64.standard_b64decode(tokens_data["encryption_key"]),
"sign_key": base64.standard_b64decode(tokens_data["sign_key"]),
"RSA_KEY": tokens_data["RSA_KEY"],
"expiration": tokens_data["expiration"],
}
return data
def save_tokens(self, tokens_data):
data = {
"mastertoken": tokens_data["mastertoken"],
"sequence_number": tokens_data["sequence_number"],
"encryption_key": base64.standard_b64encode(
tokens_data["encryption_key"]
).decode("utf-8"),
"sign_key": base64.standard_b64encode(tokens_data["sign_key"]).decode(
"utf-8"
),
"RSA_KEY": tokens_data["RSA_KEY"],
"expiration": tokens_data["expiration"],
}
with open(self.save_rsa_location, 'w', encoding='utf-8') as f:
f.write(json.dumps(data, indent=2))
def parse_handshake(self, response):
headerdata = json.loads(base64.b64decode(response["headerdata"]).decode("utf8"))
keyresponsedata = headerdata["keyresponsedata"]
mastertoken = headerdata["keyresponsedata"]["mastertoken"]
sequence_number = json.loads(
base64.b64decode(mastertoken["tokendata"]).decode("utf8")
)["sequencenumber"]
if self.wv_keyexchange:
expected_scheme = "WIDEVINE"
else:
expected_scheme = "ASYMMETRIC_WRAPPED"
scheme = keyresponsedata["scheme"]
if scheme != expected_scheme:
self.logger.info("Key Exchange failed:")
return False
keydata = keyresponsedata["keydata"]
if self.wv_keyexchange:
encryption_key, sign_key = self.__process_wv_keydata(keydata)
else:
encryption_key, sign_key = self.__parse_rsa_wrapped_crypto_keys(keydata)
tokens_data = {
"mastertoken": mastertoken,
"sequence_number": sequence_number,
"encryption_key": encryption_key,
"sign_key": sign_key,
}
tokens_data_save = tokens_data
tokens_data_save.update(
{"RSA_KEY": self.generatePrivateKey.exportKey().decode()}
)
tokens_data_save.update(
{
"expiration": json.loads(
base64.b64decode(
json.loads(base64.b64decode(response["headerdata"]))[
"keyresponsedata"
]["mastertoken"]["tokendata"]
)
)["expiration"]
}
)
self.save_tokens(tokens_data_save)
return tokens_data
def __process_wv_keydata(self, keydata):
wv_response_b64 = keydata["cdmkeyresponse"] # pass as b64
encryptionkeyid = base64.standard_b64decode(keydata["encryptionkeyid"])
hmackeyid = base64.standard_b64decode(keydata["hmackeyid"])
self.cdm.provide_license(self.cdm_session, wv_response_b64)
keys = self.cdm.get_keys(self.cdm_session)
self.logger.debug("wv key exchange: obtained wv key exchange keys %s" % keys)
return (
self.__find_wv_key(encryptionkeyid, keys, ["AllowEncrypt", "AllowDecrypt"]),
self.__find_wv_key(hmackeyid, keys, ["AllowSign", "AllowSignatureVerify"]),
)
def __find_wv_key(self, kid, keys, permissions):
for key in keys:
if key.kid != kid:
continue
if key.type != "OPERATOR_SESSION":
self.logger.debug(
"wv key exchange: Wrong key type (not operator session) key %s"
% key
)
continue
if not set(permissions) <= set(key.permissions):
self.logger.debug(
"wv key exchange: Incorrect permissions, key %s, needed perms %s"
% (key, permissions)
)
continue
return key.key
return None
def __parse_rsa_wrapped_crypto_keys(self, keydata):
# Init Decryption
encrypted_encryption_key = base64.b64decode(keydata["encryptionkey"])
encrypted_sign_key = base64.b64decode(keydata["hmackey"])
oaep_cipher = PKCS1_OAEP.new(self.generatePrivateKey)
encryption_key_data = json.loads(
oaep_cipher.decrypt(encrypted_encryption_key).decode("utf8")
)
encryption_key = self.base64_check(encryption_key_data["k"])
sign_key_data = json.loads(
oaep_cipher.decrypt(encrypted_sign_key).decode("utf8")
)
sign_key = self.base64_check(sign_key_data["k"])
return (encryption_key, sign_key)
def base64key_decode(self, payload):
l = len(payload) % 4
if l == 2:
payload += "=="
elif l == 3:
payload += "="
elif l != 0:
raise ValueError("Invalid base64 string")
return base64.urlsafe_b64decode(payload.encode("utf-8"))
def base64_check(self, string):
while len(string) % 4 != 0:
string = string + "="
return base64.urlsafe_b64decode(string.encode())
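    # Quick examples (illustrative): base64_check("qL7Y") is returned unchanged, while
    # base64_check("qL7") is padded to "qL7=" before urlsafe decoding; base64key_decode()
    # applies the same padding but rejects strings whose length % 4 == 1.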
def msl_request(self, data, is_handshake=False):
header = self.header.copy()
header["handshake"] = is_handshake
header["userauthdata"] = {
"scheme": "EMAIL_PASSWORD",
"authdata": {"email": self.email, "password": self.password},
}
header_envelope = self.msl_encrypt(self.session_keys, json.dumps(header))
header_signature = HMAC.new(
self.session_keys["session_keys"]["sign_key"], header_envelope, SHA256
).digest()
encrypted_header = {
"headerdata": base64.b64encode(header_envelope).decode("utf8"),
"signature": base64.b64encode(header_signature).decode("utf8"),
"mastertoken": self.session_keys["session_keys"]["mastertoken"],
}
payload = {
"messageid": self.messageid,
"data": base64.b64encode(json.dumps(data).encode()).decode("utf8"),
"sequencenumber": 1,
"endofmsg": True,
}
payload_envelope = self.msl_encrypt(self.session_keys, json.dumps(payload))
payload_signature = HMAC.new(
self.session_keys["session_keys"]["sign_key"], payload_envelope, SHA256
).digest()
payload_chunk = {
"payload": base64.b64encode(payload_envelope).decode("utf8"),
"signature": base64.b64encode(payload_signature).decode("utf8"),
}
return json.dumps(encrypted_header) + json.dumps(payload_chunk)
def msl_encrypt(self, msl_session, plaintext):
cbc_iv = os.urandom(16)
encryption_envelope = {
"keyid": "%s_%s"
% (self.esn, msl_session["session_keys"]["sequence_number"]),
"sha256": "AA==",
"iv": base64.b64encode(cbc_iv).decode("utf8"),
}
plaintext = Padding.pad(plaintext.encode("utf8"), 16)
cipher = AES.new(
msl_session["session_keys"]["encryption_key"], AES.MODE_CBC, cbc_iv
)
ciphertext = cipher.encrypt(plaintext)
encryption_envelope["ciphertext"] = base64.b64encode(ciphertext).decode("utf8")
return json.dumps(encryption_envelope).encode("utf8")
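    # The returned envelope is a JSON blob of the form (values illustrative):
    #   {"keyid": "<esn>_<sequence_number>", "sha256": "AA==",
    #    "iv": "<b64 IV>", "ciphertext": "<b64 AES-CBC ciphertext>"}
    # which msl_request() then base64-encodes and signs with the session HMAC key.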
def get_license(self, challenge, session_id):
if not isinstance(challenge, bytes):
raise TypeError("challenge must be of type bytes")
if not isinstance(session_id, str):
raise TypeError("session_id must be of type string")
timestamp = int(time.time() * 10000)
license_request_data = {
"version": 2,
"url": self.license_path,
"id": timestamp,
"languages": "en_US",
"echo": "drmsessionId",
"params": [
{
"drmSessionId": session_id,
"clientTime": int(timestamp / 10000),
"challengeBase64": base64.b64encode(challenge).decode("utf8"),
"xid": str(timestamp + 1610),
}
]
}
request_data = self.msl_request(license_request_data)
resp = self.session.post(url=self.nf_endpoints["license"],data=request_data)
try:
resp.json()
except ValueError:
msl_license_data = json.loads(json.dumps(self.decrypt_response(resp.text)))
if msl_license_data.get("result"):
return msl_license_data
if msl_license_data.get("errormsg"):
raise ValueError(msl_license_data["errormsg"])
raise ValueError(msl_license_data)


@@ -0,0 +1,159 @@
import time, os, json, logging, base64
from helpers.Parsers.Netflix.MSLClient import MSLClient
from configs.config import tool
from pywidevine.decrypt.wvdecryptcustom import WvDecrypt
logger = logging.getLogger(__name__)
''' "av1-main-L20-dash-cbcs-prk",
"av1-main-L21-dash-cbcs-prk",
"av1-main-L30-dash-cbcs-prk",
"av1-main-L31-dash-cbcs-prk",
"av1-main-L40-dash-cbcs-prk",
"av1-main-L41-dash-cbcs-prk",
"av1-main-L50-dash-cbcs-prk",
"av1-main-L51-dash-cbcs-prk",'''
''' "vp9-profile0-L21-dash-cenc",
"vp9-profile0-L30-dash-cenc",
"vp9-profile0-L31-dash-cenc",
"vp9-profile0-L40-dash-cenc",
"vp9-profile2-L30-dash-cenc-prk",
"vp9-profile2-L31-dash-cenc-prk",
"vp9-profile2-L40-dash-cenc-prk",
"vp9-profile2-L50-dash-cenc-prk",
"vp9-profile2-L51-dash-cenc-prk"'''
def from_kid(kid):
array_of_bytes = bytearray(b"\x00\x00\x002pssh\x00\x00\x00\x00")
array_of_bytes.extend(bytes.fromhex("edef8ba979d64acea3c827dcd51d21ed"))
array_of_bytes.extend(b"\x00\x00\x00\x12\x12\x10")
array_of_bytes.extend(bytes.fromhex(kid.replace("-", "")))
pssh = base64.b64encode(bytes.fromhex(array_of_bytes.hex()))
return pssh.decode()
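# Illustrative call (the KID below is the placeholder from the commented-out example
# further down in this module):
#
#   pssh_b64 = from_kid("0000000005edabd50000000000000000")
#
# The result is a base64 Widevine PSSH box carrying that KID, usable as CDM init data.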
def __profiles(profile, addHEVCDO=False):
profiles = [
"heaac-2-dash",
"dfxp-ls-sdh",
"webvtt-lssdh-ios8",
"BIF240",
"BIF320",
]
if profile == "High KEYS":
profiles += [
"playready-h264hpl22-dash",
"playready-h264hpl30-dash",
"playready-h264hpl31-dash",
#'playready-h264hpl40-dash'
]
elif profile == "Main KEYS":
profiles += [
"playready-h264mpl30-dash",
]
elif profile == "HEVC KEYS":
profiles += [
"hevc-main-L30-dash-cenc",
"hevc-main10-L30-dash-cenc",
"hevc-main10-L30-dash-cenc-prk",
"hevc-main-L31-dash-cenc"
"hevc-main10-L31-dash-cenc",
"hevc-main10-L31-dash-cenc-prk",
"hevc-main-L40-dash-cenc",
"hevc-main10-L40-dash-cenc",
"hevc-main10-L40-dash-cenc-prk",
"hevc-main-L41-dash-cenc",
"hevc-main10-L41-dash-cenc",
"hevc-main10-L41-dash-cenc-prk"
]
if addHEVCDO:
profiles += [
"hevc-main10-L31-dash-cenc-prk-do",
"hevc-main10-L31-dash-cenc-prk-do",
"hevc-main10-L40-dash-cenc-prk-do",
"hevc-main10-L41-dash-cenc-prk-do",
]
elif profile == 'HDR-10 KEYS':
profiles += [
"hevc-hdr-main10-L30-dash-cenc",
"hevc-hdr-main10-L30-dash-cenc-prk",
"hevc-hdr-main10-L31-dash-cenc",
"hevc-hdr-main10-L31-dash-cenc-prk",
"hevc-hdr-main10-L40-dash-cenc",
"hevc-hdr-main10-L41-dash-cenc",
"hevc-hdr-main10-L40-dash-cenc-prk",
"hevc-hdr-main10-L41-dash-cenc-prk"
]
else:
profiles += [
"playready-h264mpl30-dash",
]
return profiles
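# Illustrative example: __profiles("HDR-10 KEYS") returns the basic subtitle/audio profiles
# plus the hevc-hdr-main10-*-dash-cenc(-prk) entries; an unrecognized value falls back to
# "playready-h264mpl30-dash".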
def GettingKEYS_Netflixv2(nfID, profile): #
KEYS = []
available_profiles = [
"High KEYS",
"HEVC KEYS",
"HDR-10 KEYS",
"Main KEYS"
]
    if profile not in available_profiles:
logger.info("Error: Unknown profile: {}".format(profile))
exit(1)
logger.info(f"\nGetting {profile}...")
profiles = __profiles(profile)
try:
client = MSLClient(profiles=profiles)
resp = client.load_playlist(int(nfID))
if resp is None:
if profile == 'HEVC KEYS':
profiles = __profiles(profile, addHEVCDO=True)
client = MSLClient(profiles=profiles)
resp = client.load_playlist(int(nfID))
except Exception as e:
logger.error("Manifest Error: {}".format(e))
return KEYS
try:
#init_data_b64 = from_kid('0000000005edabd50000000000000000')
init_data_b64 = resp["result"]["video_tracks"][0]["drmHeader"]["bytes"]
except KeyError:
logger.error("cannot get pssh, {}".format(resp))
return KEYS
cert_data_b64 = "CAUSwwUKvQIIAxIQ5US6QAvBDzfTtjb4tU/7QxiH8c+TBSKOAjCCAQoCggEBAObzvlu2hZRsapAPx4Aa4GUZj4/GjxgXUtBH4THSkM40x63wQeyVxlEEo1D/T1FkVM/S+tiKbJiIGaT0Yb5LTAHcJEhODB40TXlwPfcxBjJLfOkF3jP6wIlqbb6OPVkDi6KMTZ3EYL6BEFGfD1ag/LDsPxG6EZIn3k4S3ODcej6YSzG4TnGD0szj5m6uj/2azPZsWAlSNBRUejmP6Tiota7g5u6AWZz0MsgCiEvnxRHmTRee+LO6U4dswzF3Odr2XBPD/hIAtp0RX8JlcGazBS0GABMMo2qNfCiSiGdyl2xZJq4fq99LoVfCLNChkn1N2NIYLrStQHa35pgObvhwi7ECAwEAAToQdGVzdC5uZXRmbGl4LmNvbRKAA4TTLzJbDZaKfozb9vDv5qpW5A/DNL9gbnJJi/AIZB3QOW2veGmKT3xaKNQ4NSvo/EyfVlhc4ujd4QPrFgYztGLNrxeyRF0J8XzGOPsvv9Mc9uLHKfiZQuy21KZYWF7HNedJ4qpAe6gqZ6uq7Se7f2JbelzENX8rsTpppKvkgPRIKLspFwv0EJQLPWD1zjew2PjoGEwJYlKbSbHVcUNygplaGmPkUCBThDh7p/5Lx5ff2d/oPpIlFvhqntmfOfumt4i+ZL3fFaObvkjpQFVAajqmfipY0KAtiUYYJAJSbm2DnrqP7+DmO9hmRMm9uJkXC2MxbmeNtJHAHdbgKsqjLHDiqwk1JplFMoC9KNMp2pUNdX9TkcrtJoEDqIn3zX9p+itdt3a9mVFc7/ZL4xpraYdQvOwP5LmXj9galK3s+eQJ7bkX6cCi+2X+iBmCMx4R0XJ3/1gxiM5LiStibCnfInub1nNgJDojxFA3jH/IuUcblEf/5Y0s1SzokBnR8V0KbA=="
device = tool().devices()["NETFLIX-LICENSE"]
wvdecrypt = WvDecrypt(
init_data_b64=init_data_b64, cert_data_b64=cert_data_b64, device=device
)
challenge = wvdecrypt.get_challenge()
current_sessionId = str(time.time()).replace(".", "")[0:-2]
data = client.get_license(challenge, current_sessionId)
try:
license_b64 = data["result"][0]["licenseResponseBase64"]
except Exception:
logger.error("MSL LICENSE Error Message: {}".format(data))
return KEYS
wvdecrypt.update_license(license_b64)
Correct, keyswvdecrypt = wvdecrypt.start_process()
KEYS = keyswvdecrypt
return KEYS
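# Usage sketch (the nfID below is a placeholder, not a real title):
#
#   keys = GettingKEYS_Netflixv2(80000000, "Main KEYS")
#   for key in keys:
#       print(key)
#
# On failure an empty list is returned; on success the list is whatever
# WvDecrypt.start_process() yields (typically KID:KEY strings).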


@@ -0,0 +1,736 @@
from helpers.ripprocess import ripprocess
from helpers.Parsers.Netflix.MSLClient import MSLClient
from configs.config import tool
import re, os, json, logging
def MSLprofiles():
PROFILES = {
"BASICS": ["BIF240", "BIF320", "webvtt-lssdh-ios8", "dfxp-ls-sdh"],
"MAIN": {
"SD": [
"playready-h264bpl30-dash",
"playready-h264mpl22-dash",
"playready-h264mpl30-dash",
],
"HD": [
"playready-h264bpl30-dash",
"playready-h264mpl22-dash",
"playready-h264mpl30-dash",
"playready-h264mpl31-dash",
],
"FHD": [
"playready-h264bpl30-dash",
"playready-h264mpl22-dash",
"playready-h264mpl30-dash",
"playready-h264mpl31-dash",
"playready-h264mpl40-dash",
],
"ALL": [
"playready-h264bpl30-dash",
"playready-h264mpl22-dash",
"playready-h264mpl30-dash",
"playready-h264mpl31-dash",
"playready-h264mpl40-dash",
],
},
"HIGH": {
"SD": [
"playready-h264hpl22-dash",
"playready-h264hpl30-dash",
],
"HD": [
"playready-h264hpl22-dash",
"playready-h264hpl30-dash",
"playready-h264hpl31-dash",
],
"FHD": [
"playready-h264hpl22-dash",
"playready-h264hpl30-dash",
"playready-h264hpl31-dash",
"playready-h264hpl40-dash",
],
"ALL": [
"playready-h264hpl22-dash",
"playready-h264hpl30-dash",
"playready-h264hpl31-dash",
"playready-h264hpl40-dash",
],
},
"HEVC": {
"SD": [
"hevc-main-L30-dash-cenc",
"hevc-main10-L30-dash-cenc",
"hevc-main10-L30-dash-cenc-prk",
],
"HD": [
"hevc-main-L30-dash-cenc",
"hevc-main10-L30-dash-cenc",
"hevc-main10-L30-dash-cenc-prk",
"hevc-main-L31-dash-cenc",
"hevc-main10-L31-dash-cenc",
"hevc-main10-L31-dash-cenc-prk",
],
"FHD": [
"hevc-main-L30-dash-cenc",
"hevc-main10-L30-dash-cenc",
"hevc-main10-L30-dash-cenc-prk",
"hevc-main-L31-dash-cenc"
"hevc-main10-L31-dash-cenc",
"hevc-main10-L31-dash-cenc-prk",
"hevc-main-L40-dash-cenc",
"hevc-main10-L40-dash-cenc",
"hevc-main10-L40-dash-cenc-prk",
"hevc-main-L41-dash-cenc",
"hevc-main10-L41-dash-cenc",
"hevc-main10-L41-dash-cenc-prk",
],
"ALL": [
"hevc-main-L30-dash-cenc",
"hevc-main10-L30-dash-cenc",
"hevc-main10-L30-dash-cenc-prk",
"hevc-main-L31-dash-cenc"
"hevc-main10-L31-dash-cenc",
"hevc-main10-L31-dash-cenc-prk",
"hevc-main-L40-dash-cenc",
"hevc-main10-L40-dash-cenc",
"hevc-main10-L40-dash-cenc-prk",
"hevc-main-L41-dash-cenc",
"hevc-main10-L41-dash-cenc",
"hevc-main10-L41-dash-cenc-prk",
],
},
"HEVCDO": {
"SD": [
"hevc-main10-L30-dash-cenc-prk-do",
],
"HD": [
"hevc-main10-L30-dash-cenc-prk-do",
"hevc-main10-L31-dash-cenc-prk-do"
],
"FHD": [
"hevc-main10-L31-dash-cenc-prk-do",
"hevc-main10-L31-dash-cenc-prk-do",
"hevc-main10-L40-dash-cenc-prk-do",
"hevc-main10-L41-dash-cenc-prk-do",
],
"ALL": [
"hevc-main10-L31-dash-cenc-prk-do",
"hevc-main10-L31-dash-cenc-prk-do",
"hevc-main10-L40-dash-cenc-prk-do",
"hevc-main10-L41-dash-cenc-prk-do",
],
},
"HDR": {
"SD": [
"hevc-hdr-main10-L30-dash-cenc",
"hevc-hdr-main10-L30-dash-cenc-prk",
],
"HD": [
"hevc-hdr-main10-L30-dash-cenc",
"hevc-hdr-main10-L30-dash-cenc-prk",
"hevc-hdr-main10-L31-dash-cenc",
"hevc-hdr-main10-L31-dash-cenc-prk",
],
"FHD": [
"hevc-hdr-main10-L30-dash-cenc",
"hevc-hdr-main10-L30-dash-cenc-prk",
"hevc-hdr-main10-L31-dash-cenc",
"hevc-hdr-main10-L31-dash-cenc-prk",
"hevc-hdr-main10-L40-dash-cenc",
"hevc-hdr-main10-L41-dash-cenc",
"hevc-hdr-main10-L40-dash-cenc-prk",
"hevc-hdr-main10-L41-dash-cenc-prk",
],
"ALL": [
"hevc-hdr-main10-L30-dash-cenc",
"hevc-hdr-main10-L30-dash-cenc-prk",
"hevc-hdr-main10-L31-dash-cenc",
"hevc-hdr-main10-L31-dash-cenc-prk",
"hevc-hdr-main10-L40-dash-cenc",
"hevc-hdr-main10-L41-dash-cenc",
"hevc-hdr-main10-L40-dash-cenc-prk",
"hevc-hdr-main10-L41-dash-cenc-prk",
],
},
}
return PROFILES
class get_manifest:
def __init__(self, args, nfid):
self.logger = logging.getLogger(__name__)
self.args = args
self.nfid = nfid
self.ripprocess = ripprocess()
self.profiles = MSLprofiles()
self.config = tool().config("NETFLIX")
def LoadProfies(self, addHEVCDO=False):
getHigh = False
profiles = self.profiles["BASICS"]
if self.args.video_main:
if self.args.customquality:
if int(self.args.customquality[0]) == 1080:
profiles += self.profiles["MAIN"]["FHD"]
elif (
int(self.args.customquality[0]) < 1080
and int(self.args.customquality[0]) >= 720
):
profiles += self.profiles["MAIN"]["HD"]
elif int(self.args.customquality[0]) < 720:
profiles += self.profiles["MAIN"]["SD"]
else:
profiles += self.profiles["MAIN"]["ALL"]
else:
if self.args.video_high:
if self.args.customquality:
if int(self.args.customquality[0]) == 1080:
profiles += self.profiles["HIGH"]["FHD"]
elif (
int(self.args.customquality[0]) < 1080
and int(self.args.customquality[0]) >= 720
):
profiles += self.profiles["HIGH"]["HD"]
elif int(self.args.customquality[0]) < 720:
profiles += self.profiles["HIGH"]["SD"]
else:
profiles += self.profiles["HIGH"]["ALL"]
else:
if self.args.hdr:
if self.args.customquality:
if int(self.args.customquality[0]) == 1080:
profiles += self.profiles["HDR"]["FHD"]
elif (
int(self.args.customquality[0]) < 1080
and int(self.args.customquality[0]) >= 720
):
profiles += self.profiles["HDR"]["HD"]
elif int(self.args.customquality[0]) < 720:
profiles += self.profiles["HDR"]["SD"]
else:
profiles += self.profiles["HDR"]["ALL"]
elif self.args.hevc:
if self.args.customquality:
if int(self.args.customquality[0]) == 1080:
profiles += self.profiles["HEVC"]["FHD"]
if addHEVCDO:
profiles += self.profiles['HEVCDO']['FHD']
elif (
int(self.args.customquality[0]) < 1080
and int(self.args.customquality[0]) >= 720
):
profiles += self.profiles["HEVC"]["HD"]
if addHEVCDO:
profiles += self.profiles['HEVCDO']['HD']
elif int(self.args.customquality[0]) < 720:
profiles += self.profiles["HEVC"]["SD"]
if addHEVCDO:
profiles += self.profiles['HEVCDO']['SD']
else:
profiles += self.profiles["HEVC"]["ALL"]
if addHEVCDO:
profiles += self.profiles['HEVCDO']['ALL']
else:
getHigh = True
if self.args.customquality:
if int(self.args.customquality[0]) == 1080:
profiles += self.profiles["MAIN"]["FHD"]
elif (
int(self.args.customquality[0]) < 1080
and int(self.args.customquality[0]) >= 720
):
profiles += self.profiles["MAIN"]["HD"]
elif int(self.args.customquality[0]) < 720:
profiles += self.profiles["MAIN"]["SD"]
else:
profiles += self.profiles["MAIN"]["ALL"]
if self.args.aformat_2ch:
if str(self.args.aformat_2ch[0]) == "aac":
profiles.append("heaac-2-dash")
profiles.append("heaac-2hq-dash")
elif str(self.args.aformat_2ch[0]) == "eac3":
profiles.append("ddplus-2.0-dash")
elif str(self.args.aformat_2ch[0]) == "ogg":
profiles.append("playready-oggvorbis-2-dash")
else:
if self.args.only_2ch_audio:
profiles.append("ddplus-2.0-dash")
else:
if self.args.aformat_51ch:
if str(self.args.aformat_51ch[0]) == "aac":
profiles.append("heaac-5.1-dash")
profiles.append("heaac-5.1hq-dash")
elif str(self.args.aformat_51ch[0]) == "eac3":
profiles.append("ddplus-5.1-dash")
profiles.append("ddplus-5.1hq-dash")
elif str(self.args.aformat_51ch[0]) == "ac3":
profiles.append("dd-5.1-dash")
elif str(self.args.aformat_51ch[0]) == "atmos":
profiles.append("dd-5.1-dash")
profiles.append("ddplus-atmos-dash")
else:
profiles.append("dd-5.1-dash")
profiles.append("ddplus-5.1-dash")
profiles.append("ddplus-5.1hq-dash")
else:
profiles.append("ddplus-2.0-dash")
profiles.append("dd-5.1-dash")
profiles.append("ddplus-5.1-dash")
profiles.append("ddplus-5.1hq-dash")
profiles.append("ddplus-atmos-dash")
return list(set(profiles)), getHigh
def PyMSL(self, profiles):
client = MSLClient(profiles=profiles)
try:
resp = client.load_playlist(int(self.nfid))
return resp
except Exception as e:
self.logger.error("Manifest Error: {}".format(e))
return None
def HighVideoMSL(self):
        # compares HIGH-profile bitrates against the MAIN profile manifest
self.logger.info("Getting High Profile Manifest...")
profiles = self.profiles["BASICS"]
if self.args.customquality:
if int(self.args.customquality[0]) == 1080:
profiles += self.profiles["HIGH"]["FHD"]
elif (
int(self.args.customquality[0]) < 1080
and int(self.args.customquality[0]) >= 720
):
profiles += self.profiles["HIGH"]["HD"]
elif int(self.args.customquality[0]) < 720:
profiles += self.profiles["HIGH"]["SD"]
else:
profiles += self.profiles["HIGH"]["ALL"]
resp = self.PyMSL(profiles=profiles)
VideoList = list()
manifest = resp["result"]
for video_track in manifest["video_tracks"]:
for downloadable in video_track["streams"]:
size_in_bytes = int(float(downloadable["size"]))
vid_size = (
f"{size_in_bytes/1048576:0.2f} MiB"
if size_in_bytes < 1073741824
else f"{size_in_bytes/1073741824:0.2f} GiB"
)
vid_url = downloadable["urls"][0]["url"]
L3 = 'L3' if 'SEGMENT_MAP_2KEY' in str(downloadable['tags']) else '' #
VideoList.append(
{
"Type": "video",
"Drm": downloadable["isDrm"],
"vmaf": downloadable["vmaf"],
"FrameRate": downloadable["framerate_value"],
"Height": downloadable["res_h"],
"Width": downloadable["res_w"],
"Size": vid_size,
"Url": vid_url,
"Bitrate": str(downloadable["bitrate"]),
"Profile": downloadable["content_profile"],
"L3": L3 #
}
)
VideoList = sorted(VideoList, key=lambda k: int(k["Bitrate"]))
if self.args.customquality:
inp_height = int(self.args.customquality[0])
top_height = sorted(VideoList, key=lambda k: int(k["Height"]))[-1]["Height"]
if top_height >= inp_height:
height = [x for x in VideoList if int(x["Height"]) >= inp_height]
if not height == []:
VideoList = height
return VideoList
def ParseVideo(self, resp, getHigh):
manifest = resp["result"]
VideoList = []
checkerinfo = ""
for video_track in manifest["video_tracks"]:
for downloadable in video_track["streams"]:
size_in_bytes = int(float(downloadable["size"]))
vid_size = (
f"{size_in_bytes/1048576:0.2f} MiB"
if size_in_bytes < 1073741824
else f"{size_in_bytes/1073741824:0.2f} GiB"
)
vid_url = downloadable["urls"][0]["url"]
VideoList.append(
{
"Type": "video",
"Drm": downloadable["isDrm"],
"vmaf": downloadable["vmaf"],
"FrameRate": downloadable["framerate_value"],
"Height": downloadable["res_h"],
"Width": downloadable["res_w"],
"Size": vid_size,
"Url": vid_url,
"Bitrate": str(downloadable["bitrate"]),
"Profile": downloadable["content_profile"],
}
)
VideoList = sorted(VideoList, key=lambda k: int(k["Bitrate"]))
self.logger.debug("VideoList: {}".format(VideoList))
if self.args.customquality:
inp_height = int(self.args.customquality[0])
top_height = sorted(VideoList, key=lambda k: int(k["Height"]))[-1]["Height"]
if top_height >= inp_height:
height = [x for x in VideoList if int(x["Height"]) >= inp_height]
if not height == []:
VideoList = height
if getHigh:
HighVideoList = self.HighVideoMSL()
if not HighVideoList == []:
checkerinfo = "\nNetflix Profile Checker v1.0\nMAIN: {}kbps | {}\nHIGH: {}kbps | {}\n\n{}\n"
checkerinfo = checkerinfo.format(
str(dict(VideoList[-1])["Bitrate"]),
str(dict(VideoList[-1])["Profile"]),
str(dict(HighVideoList[-1])["Bitrate"]),
str(dict(HighVideoList[-1])["Profile"]),
"result: MAIN is Better"
if int(dict(VideoList[-1])["Bitrate"])
>= int(dict(HighVideoList[-1])["Bitrate"])
else "result: HIGH is Better",
)
VideoList += HighVideoList
self.logger.debug("HighVideoList: {}".format(HighVideoList))
VideoList = sorted(VideoList, key=lambda k: int(k["Bitrate"]))
return VideoList, checkerinfo
def ParseAudioSubs(self, resp):
def remove_dups(List, keyword=""):
            # remove duplicate entries, keeping the first occurrence per `keyword` value
Added_ = set()
Proper_ = []
for L in List:
if L[keyword] not in Added_:
Proper_.append(L)
Added_.add(L[keyword])
return Proper_
def isOriginal(language_text):
# function to detect the original audio ~
if "Original" in language_text:
return True
brackets = re.search(r"\[(.*)\]", language_text)
if brackets:
return True
return False
def noOriginal(language_text):
            # strip the bracketed "[Original]" tag so the language matches --alang input
brackets = re.search(r"\[(.*)\]", language_text)
if brackets:
return language_text.replace(brackets[0], "").strip()
return language_text
# start audio, subs parsing ~
manifest = resp["result"]
AudioList, SubtitleList, ForcedList = list(), list(), list()
# parse audios and return all (AD, non AD) as a list
for audio_track in manifest["audio_tracks"]:
AudioDescription = 'Audio Description' if "audio description" in \
audio_track["languageDescription"].lower() else 'Audio'
Original = isOriginal(audio_track["languageDescription"])
LanguageName, LanguageCode = self.ripprocess.countrycode(
audio_track["language"]
)
LanguageName = noOriginal(audio_track["languageDescription"])
for downloadable in audio_track["streams"]:
aud_url = downloadable["urls"][0]["url"]
size = (
                    str(format(float(int(downloadable["size"])) / 1048576, ".2f"))
+ " MiB"
)
audioDict = {
"Type": AudioDescription,
"Drm": downloadable["isDrm"],
"Original": Original,
"Language": LanguageName,
"langAbbrev": LanguageCode,
"Size": size,
"Url": aud_url,
"channels": str(downloadable["channels"]),
"Bitrate": str(downloadable["bitrate"]),
"Profile": downloadable["content_profile"],
}
if self.args.custom_audio_bitrate:
# only append the audio langs with the given bitrate
if int(downloadable["bitrate"]) <= \
int(self.args.custom_audio_bitrate[0]):
AudioList.append(audioDict)
else:
AudioList.append(audioDict)
AudioList = sorted(AudioList, key=lambda k: int(k["Bitrate"]), reverse=True)
self.logger.debug("AudioList: {}".format(AudioList))
#################################################################################
AudioList = sorted( # keep only highest bitrate for every language
remove_dups(AudioList, keyword="Language"),
key=lambda k: int(k["Bitrate"]),
reverse=True,
)
OriginalAudioList = ( # for detect automatically forced subs ~
AudioList
if len(AudioList) == 1
else [x for x in AudioList if x["Original"]]
)
#################################################################################
# now parser AudioList based on user input to
# --alang X X --AD X X or original if none
if self.args.AD:
ADlist = list()
UserLanguagesLower = list(map(lambda x: x.lower(), self.args.AD))
for aud in AudioList:
if aud['Type'] == 'Audio':
if self.args.allaudios:
ADlist.append(aud)
else:
if aud["Original"]:
ADlist.append(aud)
if aud['Type'] == 'Audio Description':
if (
aud["Language"].lower() in UserLanguagesLower
or aud["langAbbrev"].lower() in UserLanguagesLower
):
ADlist.append(aud)
AudioList = ADlist
if self.args.audiolang:
NewAudioList = list()
UserLanguagesLower = list(map(lambda x: x.lower(), self.args.audiolang))
for aud in AudioList:
if self.args.AD:
# I already have AD langs parsed
if aud['Type'] == 'Audio Description':
NewAudioList.append(aud)
if aud['Type'] == 'Audio':
if (
aud["Language"].lower() in UserLanguagesLower
or aud["langAbbrev"].lower() in UserLanguagesLower
):
NewAudioList.append(aud)
AudioList = NewAudioList
else:
            # so I now have the complete AudioList
if self.args.allaudios: # remove AD tracks if not --AD X X
AllaudiosList = list()
if self.args.AD:
for aud in AudioList:
AllaudiosList.append(aud)
AudioList = AllaudiosList
else:
for aud in AudioList:
if aud['Type'] == 'Audio':
AllaudiosList.append(aud)
AudioList.clear()
AudioList = AllaudiosList
else:
            if self.args.AD:
                AudioList = AudioList  # AudioList already holds the AD selection built above
            else:
                # no audio options given, so fall back to the original audio
                AudioList = [x for x in AudioList if x["Original"] or len(AudioList) == 1]
#####################################(Subtitles)#####################################
for text_track in manifest["timedtexttracks"]:
if (
not text_track["languageDescription"] == "Off"
and text_track["language"] is not None
):
Language, langAbbrev = self.ripprocess.countrycode(
text_track["language"]
)
Language = text_track["languageDescription"]
Type = text_track["trackType"]
rawTrackType = (
text_track["rawTrackType"]
.replace("closedcaptions", "CC")
.replace("subtitles", "SUB")
)
isForced = "NO"
if (
"CC" in rawTrackType
and langAbbrev != "ara"
and "dfxp-ls-sdh" in str(text_track["ttDownloadables"])
):
Profile = "dfxp-ls-sdh"
Url = next(
iter(
text_track["ttDownloadables"]["dfxp-ls-sdh"][
"downloadUrls"
].values()
)
)
else:
Profile = "webvtt-lssdh-ios8"
Url = next(
iter(
text_track["ttDownloadables"]["webvtt-lssdh-ios8"][
"downloadUrls"
].values()
)
)
SubtitleList.append(
{
"Type": Type,
"rawTrackType": rawTrackType,
"Language": Language,
"isForced": isForced,
"langAbbrev": langAbbrev,
"Url": Url,
"Profile": Profile,
}
)
self.logger.debug("SubtitleList: {}".format(SubtitleList))
SubtitleList = remove_dups(SubtitleList, keyword="Language")
if self.args.sublang:
NewSubtitleList = list()
UserLanguagesLower = list(map(lambda x: x.lower(), self.args.sublang))
for sub in SubtitleList:
if (
sub["Language"].lower() in UserLanguagesLower
or sub["langAbbrev"].lower() in UserLanguagesLower
):
NewSubtitleList.append(sub)
SubtitleList = remove_dups(NewSubtitleList, keyword="Language")
#####################################(Forced Subtitles)###############################
for text_track in manifest["timedtexttracks"]:
if text_track["isForcedNarrative"] and text_track["language"] is not None:
LanguageName, LanguageCode = self.ripprocess.countrycode(
text_track["language"]
)
# LanguageName = text_track["languageDescription"] # no i will use pycountry instead bcs it's off dude.
ForcedList.append(
{
"Type": text_track["trackType"],
"rawTrackType": text_track["rawTrackType"]
.replace("closedcaptions", "CC ")
.replace("subtitles", "SUB"),
"Language": LanguageName,
"isForced": "YES",
"langAbbrev": LanguageCode,
"Url": next(
iter(
text_track["ttDownloadables"]["webvtt-lssdh-ios8"][
"downloadUrls"
].values()
)
),
"Profile": "webvtt-lssdh-ios8",
}
)
ForcedList = remove_dups(ForcedList, keyword="Language")
if self.args.forcedlang:
NewForcedList = []
UserLanguagesLower = list(map(lambda x: x.lower(), self.args.forcedlang))
for sub in ForcedList:
if (
sub["Language"].lower() in UserLanguagesLower
or sub["langAbbrev"].lower() in UserLanguagesLower
):
NewForcedList.append(sub)
ForcedList = remove_dups(NewForcedList, keyword="Language")
else:
if not self.args.allforcedlang:
if len(OriginalAudioList) != 0:
OriginalLanguage = OriginalAudioList[0]["langAbbrev"]
ForcedList = [
x for x in ForcedList if x["langAbbrev"] == OriginalLanguage
]
return AudioList, SubtitleList, ForcedList
def LoadManifest(self):
profiles, getHigh = self.LoadProfies()
if self.args.hevc:
self.logger.info("Getting HEVC Manifest...")
elif self.args.hdr:
self.logger.info("Getting HDR-10 Manifest...")
elif self.args.video_high:
self.logger.info("Getting High Profile Manifest...")
else:
self.logger.info("Getting Main Profile Manifest...")
resp = self.PyMSL(profiles=profiles)
if not resp:
if self.args.hevc:
profiles, getHigh = self.LoadProfies(addHEVCDO=True)
self.logger.info('\nGetting HEVC DO Manifest...')
resp = self.PyMSL(profiles=profiles)
if not resp:
self.logger.info("Failed getting Manifest")
exit(-1)
VideoList, checkerinfo = self.ParseVideo(resp, getHigh)
AudioList, SubtitleList, ForcedList = self.ParseAudioSubs(resp)
return VideoList, AudioList, SubtitleList, ForcedList, checkerinfo

0
helpers/__init__.py Normal file

Binary file not shown.

369
helpers/aria2.py Normal file

@@ -0,0 +1,369 @@
import os
import shutil
import subprocess
import sys
import re
import logging
from configs.config import tool
from helpers.ripprocess import ripprocess
class aria2Error(Exception):
pass
class aria2_moded:
def __init__(self, aria2_download_command):
self.logger = logging.getLogger(__name__)
self.aria2_download_command = aria2_download_command
self.env = self.aria2DisableProxies()
self.ripprocess = ripprocess()
self.tool = tool()
self.LOGA_PATH = self.tool.paths()["LOGA_PATH"]
self.bin = self.tool.bin()
self.aria2c_exe = self.bin["aria2c"]
self.last_message_printed = 0
self.speed_radar = "0kbps"
def aria2DisableProxies(self):
env = os.environ.copy()
if env.get("http_proxy"):
del env["http_proxy"]
if env.get("HTTP_PROXY"):
del env["HTTP_PROXY"]
if env.get("https_proxy"):
del env["https_proxy"]
if env.get("HTTPS_PROXY"):
del env["HTTPS_PROXY"]
return env
def read_stdout(self, line):
speed = re.search(r"DL:(.+?)ETA", line)
eta = re.search(r"ETA:(.+?)]", line)
connection = re.search(r"CN:(.+?)DL", line)
percent = re.search(r"\((.*?)\)", line)
size = re.search(r" (.*?)/(.*?)\(", line)
if speed and eta and connection and percent and size:
percent = percent.group().strip().replace(")", "").replace("(", "")
size = size.group().strip().replace(")", "").replace("(", "")
complete, total = size.split("/")
connection = connection.group(1).strip()
eta = eta.group(1).strip()
speed = speed.group(1).strip()
self.speed_radar = speed
stdout_data = {
"percent": str(percent),
"size": str(total),
"complete": str(complete),
"total": str(total),
"connection": str(connection),
"eta": str(eta),
"speed": str(speed),
}
return stdout_data
return None
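    # Illustrative input/output: an aria2c console progress line such as
    #   [#2089b0 400.0KiB/33.2MiB(1%) CN:16 DL:115.7KiB ETA:4m51s]
    # parses to {"percent": "1%", "complete": "400.0KiB", "total": "33.2MiB",
    # "connection": "16", "eta": "4m51s", "speed": "115.7KiB", ...} (sample values only;
    # exact formatting can vary between aria2c versions). Lines that do not match all
    # five patterns return None.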
def if_errors(self, line):
if "exception" in str(line).lower() or "errorcode" in str(line).lower():
return line
return None
def delete_last_message_printed(self):
print(" " * len(str(self.last_message_printed)), end="\r")
def get_status(self, stdout_data: dict):
return "Aria2c_Status; Size: {Size} | Speed: {Speed} | ETA: {ETA} | Progress: {Complete} -> {Total} ({Percent})".format(
Size=stdout_data.get("size"),
Speed=stdout_data.get("speed"),
ETA=stdout_data.get("eta"),
Complete=stdout_data.get("complete"),
Total=stdout_data.get("total"),
Percent=stdout_data.get("percent"),
)
def is_download_completed(self, line):
if "(ok):download completed." in str(line).lower():
return "Download completed: (OK) ({}\\s)".format(self.speed_radar)
return None
def start_download(self):
proc = subprocess.Popen(
self.aria2_download_command,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
bufsize=1,
universal_newlines=True,
env=self.env,
)
check_errors = True
for line in getattr(proc, "stdout"):
if check_errors:
if self.if_errors(line):
raise aria2Error("Aria2c Error {}".format(self.if_errors(line)))
check_errors = False
stdout_data = self.read_stdout(line)
if stdout_data:
status_text = self.get_status(stdout_data)
self.delete_last_message_printed()
print(status_text, end="\r", flush=True)
self.last_message_printed = status_text
else:
download_finished = self.is_download_completed(line)
if download_finished:
self.delete_last_message_printed()
print(download_finished, end="\r", flush=True)
self.last_message_printed = download_finished
self.logger.info("")
return
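# General-purpose aria2c wrapper: builds command-line options, downloads single
# URLs or lists of DASH segments, and joins the segments into one output file.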
class aria2:
def __init__(self,):
self.env = self.aria2DisableProxies()
self.ripprocess = ripprocess()
self.tool = tool()
self.bin = self.tool.bin()
self.LOGA_PATH = self.tool.paths()["LOGA_PATH"]
self.config = self.tool.aria2c()
self.aria2c_exe = self.bin["aria2c"]
self.logger = logging.getLogger(__name__)
def convert_args(self, arg):
if arg is True:
return "true"
elif arg is False:
return "false"
elif arg is None:
return "none"
else:
return str(arg)
def append_commands(self, command, option_define, option):
if option == "skip":
return []
return ["{}{}".format(option_define, option)]
def append_two_commands(self, command, cmd1, cmd2):
if cmd2 == "skip":
return []
return [cmd1] + [cmd2]
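    # Build the aria2c option list; any keyword set to "skip" is simply omitted
    # from the final command line.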
def aria2Options(
self,
allow_overwrite=True,
file_allocation=None,
auto_file_renaming=False,
async_dns=False,
retry_wait=5,
summary_interval=0,
enable_color=False,
connection=16,
concurrent_downloads=16,
split=16,
header="skip",
user_agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.61 Safari/537.36",
uri_selector="inorder",
console_log_level="skip",
download_result="hide",
quiet="false",
extra_commands=[],
):
options = [] + extra_commands
allow_overwrite = self.convert_args(allow_overwrite)
quiet = self.convert_args(quiet)
file_allocation = self.convert_args(file_allocation)
auto_file_renaming = self.convert_args(auto_file_renaming)
async_dns = self.convert_args(async_dns)
retry_wait = self.convert_args(retry_wait)
enable_color = self.convert_args(enable_color)
connection = self.convert_args(connection)
concurrent_downloads = self.convert_args(concurrent_downloads)
split = self.convert_args(split)
header = self.convert_args(header)
uri_selector = self.convert_args(uri_selector)
console_log_level = self.convert_args(console_log_level)
download_result = self.convert_args(download_result)
##############################################################################
options += self.append_commands(options, "--allow-overwrite=", allow_overwrite)
options += self.append_commands(options, "--quiet=", quiet)
options += self.append_commands(options, "--file-allocation=", file_allocation)
options += self.append_commands(
options, "--auto-file-renaming=", auto_file_renaming
)
options += self.append_commands(options, "--async-dns=", async_dns)
options += self.append_commands(options, "--retry-wait=", retry_wait)
options += self.append_commands(options, "--enable-color=", enable_color)
options += self.append_commands(
options, "--max-connection-per-server=", connection
)
options += self.append_commands(
options, "--max-concurrent-downloads=", concurrent_downloads
)
options += self.append_commands(options, "--split=", split)
options += self.append_commands(options, "--header=", header)
options += self.append_commands(options, "--uri-selector=", uri_selector)
options += self.append_commands(
options, "--console-log-level=", console_log_level
)
options += self.append_commands(options, "--download-result=", download_result)
return options
def aria2DisableProxies(self):
env = os.environ.copy()
if env.get("http_proxy"):
del env["http_proxy"]
if env.get("HTTP_PROXY"):
del env["HTTP_PROXY"]
if env.get("https_proxy"):
del env["https_proxy"]
if env.get("HTTPS_PROXY"):
del env["HTTPS_PROXY"]
return env
def aria2DownloadUrl(self, url, output, options, debug=False, moded=False):
self.debug = debug
aria2_download_command = [self.aria2c_exe] + options
if self.config["enable_logging"]:
LogFile = os.path.join(self.LOGA_PATH, output.replace(".mp4", ".log"))
if os.path.isfile(LogFile):
os.remove(LogFile)
aria2_download_command.append("--log={}".format(LogFile))
if not url.startswith("http"):
raise aria2Error("Url does not start with http/https: {}".format(url))
aria2_download_command.append(url)
aria2_download_command += self.append_two_commands(
aria2_download_command, "-o", output
)
self.aria2Debug("Sending Commands to aria2c...")
self.aria2Debug(aria2_download_command)
self.logger.debug("aria2_download_command: {}".format(aria2_download_command))
if moded:
aria2_moded_download = aria2_moded(aria2_download_command)
aria2_moded_download.start_download()
else:
try:
aria = subprocess.call(aria2_download_command, env=self.env)
except FileNotFoundError:
self.logger.info("UNABLE TO FIND {}".format(self.aria2c_exe))
exit(-1)
if aria != 0:
raise aria2Error("Aria2c exited with code {}".format(aria))
return
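    # Download a list of DASH segment URLs through an aria2c input file, then join
    # the numbered segments into one output file, optionally patching a flag byte
    # after each fragment's tfhd marker when fixbytes is set.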
def aria2DownloadDash(
self, segments, output, options, debug=False, moded=False, fixbytes=False
):
self.debug = debug
aria2_download_command = [self.aria2c_exe] + options
if self.config["enable_logging"]:
LogFile = os.path.join(self.LOGA_PATH, output.replace(".mp4", ".log"))
if os.path.isfile(LogFile):
os.remove(LogFile)
aria2_download_command.append("--log={}".format(LogFile))
if not isinstance(segments, list) or segments == []:
raise aria2Error("invalid list of urls: {}".format(segments))
if moded:
raise aria2Error("moded version not supported for dash downloads atm...")
txt = output.replace(".mp4", ".txt")
folder = output.replace(".mp4", "")
segments = list(dict.fromkeys(segments))
if os.path.exists(folder):
shutil.rmtree(folder)
if not os.path.exists(folder):
os.makedirs(folder)
segments_location = []
opened_txt = open(txt, "w+")
for num, url in enumerate(segments, start=1):
segment_name = str(num).zfill(5) + ".mp4"
segments_location.append(os.path.join(*[os.getcwd(), folder, segment_name]))
opened_txt.write(url + f"\n out={segment_name}" + f"\n dir={folder}" + "\n")
opened_txt.close()
aria2_download_command += self.append_commands(
aria2_download_command, "--input-file=", txt
)
try:
aria = subprocess.call(aria2_download_command, env=self.env)
except FileNotFoundError:
self.logger.info("UNABLE TO FIND {}".format(self.aria2c_exe))
exit(-1)
if aria != 0:
raise aria2Error("Aria2c exited with code {}".format(aria))
self.logger.info("\nJoining files...")
openfile = open(output, "wb")
total = int(len(segments_location))
for current, fragment in enumerate(segments_location):
if os.path.isfile(fragment):
if fixbytes:
with open(fragment, "rb") as f:
wvdll = f.read()
if (
re.search(
b"tfhd\x00\x02\x00\x1a\x00\x00\x00\x01\x00\x00\x00\x02",
wvdll,
re.MULTILINE | re.DOTALL,
)
is not None
):
fw = open(fragment, "wb")
m = re.search(
b"tfhd\x00\x02\x00\x1a\x00\x00\x00\x01\x00\x00\x00",
wvdll,
re.MULTILINE | re.DOTALL,
)
segment_fixed = (
wvdll[: m.end()] + b"\x01" + wvdll[m.end() + 1 :]
)
fw.write(segment_fixed)
fw.close()
shutil.copyfileobj(open(fragment, "rb"), openfile)
os.remove(fragment)
self.ripprocess.updt(total, current + 1)
openfile.close()
if os.path.isfile(txt):
os.remove(txt)
if os.path.exists(folder):
shutil.rmtree(folder)
def aria2Debug(self, txt):
if self.debug:
self.logger.info(txt)

116
helpers/dfxp_to_srt.py Normal file
View File

@@ -0,0 +1,116 @@
import codecs
import math
import os
import re
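# Converts DFXP/TTML timed-text XML into SubRip (.srt): times are rewritten as
# HH:MM:SS,mmm, <span> italics become <i> tags, and <br> becomes a line break.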
class dfxp_to_srt:
def __init__(self):
self.__replace__ = "empty_line"
def leading_zeros(self, value, digits=2):
value = "000000" + str(value)
return value[-digits:]
def convert_time(self, raw_time):
if int(raw_time) == 0:
return "{}:{}:{},{}".format(0, 0, 0, 0)
ms = "000"
if len(raw_time) > 4:
ms = self.leading_zeros(int(raw_time[:-4]) % 1000, 3)
time_in_seconds = int(raw_time[:-7]) if len(raw_time) > 7 else 0
second = self.leading_zeros(time_in_seconds % 60)
minute = self.leading_zeros(int(math.floor(time_in_seconds / 60)) % 60)
hour = self.leading_zeros(int(math.floor(time_in_seconds / 3600)))
return "{}:{}:{},{}".format(hour, minute, second, ms)
def xml_id_display_align_before(self, text):
align_before_re = re.compile(
u'<region.*tts:displayAlign="before".*xml:id="(.*)"/>'
)
has_align_before = re.search(align_before_re, text)
if has_align_before:
return has_align_before.group(1)
return u""
def xml_to_srt(self, text):
def append_subs(start, end, prev_content, format_time):
subs.append(
{
"start_time": self.convert_time(start) if format_time else start,
"end_time": self.convert_time(end) if format_time else end,
"content": u"\n".join(prev_content),
}
)
display_align_before = self.xml_id_display_align_before(text)
begin_re = re.compile(u"\s*<p begin=")
sub_lines = (l for l in text.split("\n") if re.search(begin_re, l))
subs = []
prev_time = {"start": 0, "end": 0}
prev_content = []
start = end = ""
start_re = re.compile(u'begin\="([0-9:\.]*)')
end_re = re.compile(u'end\="([0-9:\.]*)')
content_re = re.compile(u'">(.*)</p>')
# span tags are only used for italics, so we'll get rid of them
# and replace them by <i> and </i>, which is the standard for .srt files
span_start_re = re.compile(u'(<span style="[a-zA-Z0-9_.]+">)+')
span_end_re = re.compile(u"(</span>)+")
br_re = re.compile(u"(<br\s*\/?>)+")
fmt_t = True
for s in sub_lines:
span_start_tags = re.search(span_start_re, s)
if span_start_tags:
s = u"<i>".join(s.split(span_start_tags.group()))
string_region_re = (
r'<p(.*region="' + display_align_before + r'".*")>(.*)</p>'
)
s = re.sub(string_region_re, r"<p\1>{\\an8}\2</p>", s)
content = re.search(content_re, s).group(1)
br_tags = re.search(br_re, content)
if br_tags:
content = u"\n".join(content.split(br_tags.group()))
span_end_tags = re.search(span_end_re, content)
if span_end_tags:
content = u"</i>".join(content.split(span_end_tags.group()))
prev_start = prev_time["start"]
start = re.search(start_re, s).group(1)
end = re.search(end_re, s).group(1)
if len(start.split(":")) > 1:
fmt_t = False
start = start.replace(".", ",")
end = end.replace(".", ",")
if (prev_start == start and prev_time["end"] == end) or not prev_start:
# Fix for multiple lines starting at the same time
prev_time = {"start": start, "end": end}
prev_content.append(content)
continue
append_subs(prev_time["start"], prev_time["end"], prev_content, fmt_t)
prev_time = {"start": start, "end": end}
prev_content = [content]
append_subs(start, end, prev_content, fmt_t)
lines = (
u"{}\n{} --> {}\n{}\n".format(
s + 1, subs[s]["start_time"], subs[s]["end_time"], subs[s]["content"]
)
for s in range(len(subs))
)
return u"\n".join(lines)
def convert(self, Input, Output):
with codecs.open(Input, "rb", "utf-8") as f:
text = f.read()
with codecs.open(Output, "wb", "utf-8") as f:
f.write(self.xml_to_srt(text))
return

76
helpers/keyloader.py Normal file
View File

@@ -0,0 +1,76 @@
import os, json, sys
from helpers.ripprocess import ripprocess
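# Simple JSON-backed store for Widevine content keys; entries can be looked up
# by PSSH or KID and turned into --key command-line arguments.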
class keysaver:
def __init__(self, **kwargs):
self.keys_file = kwargs.get("keys_file", None)
self.stored = self.get_stored()
def read_(self):
with open(self.keys_file, "r") as fr:
return json.load(fr)
def write_(self, data):
with open(self.keys_file, "w") as fr:
fr.write(json.dumps(data, indent=4))
def get_stored(self):
stored = []
if os.path.isfile(self.keys_file):
return self.read_()
return stored
def formatting(self, keys_list, pssh, name):
return [
{
"NAME": name,
"PSSH": pssh,
"ID": idx,
"KID": key.split(":")[0],
"KEY": key.split(":")[1],
}
for idx, key in enumerate(keys_list, start=1)
]
def dump_keys(self, keys, pssh=None, name=None):
old_keys = list(self.stored)
new_keys = list(self.formatting(keys, pssh, name))
self.write_(old_keys + new_keys)
self.stored = self.get_stored() # to update stored keys
return new_keys
def get_key_by_pssh(self, pssh):
keys = []
added = set()
for key in self.get_stored(): # read file again...
if key["PSSH"]:
if not key["KEY"] in added and pssh in key["PSSH"]:
keys.append(key)
added.add(key["KEY"])
return keys
def get_key_by_kid(self, kid):
keys = []
added = set()
for key in self.get_stored(): # read file again...
if not key["KEY"] in added and key["KID"] == kid:
keys.append(key)
added.add(key["KEY"])
return keys
def generate_kid(self, encrypted_file):
return ripprocess().getKeyId(encrypted_file)
def set_keys(self, keys, no_kid=False):
command_keys = []
for key in keys:
command_keys.append("--key")
command_keys.append(
"{}:{}".format(key["ID"] if no_kid else key["KID"], key["KEY"])
)
return command_keys

112
helpers/proxy_environ.py Normal file
View File

@@ -0,0 +1,112 @@
import os
import requests
import sys, json
import random
from configs.config import tool
from helpers.vpn import connect
import logging
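# hold_proxy temporarily clears the proxy environment variables and restores them
# later; proxy_env resolves proxy/VPN settings from the config and CLI arguments
# and reports the resulting external IP.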
class hold_proxy(object):
def __init__(self):
self.proxy = os.environ.get("http_proxy")
self.logger = logging.getLogger(__name__)
def disable(self):
os.environ["http_proxy"] = ""
os.environ["HTTP_PROXY"] = ""
os.environ["https_proxy"] = ""
os.environ["HTTPS_PROXY"] = ""
def enable(self):
if self.proxy:
os.environ["http_proxy"] = self.proxy
os.environ["HTTP_PROXY"] = self.proxy
os.environ["https_proxy"] = self.proxy
os.environ["HTTPS_PROXY"] = self.proxy
class proxy_env(object):
def __init__(self, args):
self.logger = logging.getLogger(__name__)
self.args = args
self.vpn = tool().vpn()
def Load(self):
proxies = None
proxy = {}
aria2c_proxy = []
if self.vpn["proxies"]:
proxies = self.vpn["proxies"]
if not self.vpn["proxies"]:
if self.args.privtvpn:
self.logger.info("Proxy Status: Activated-PrivateVpn")
proxy.update({"port": self.vpn["private"]["port"]})
proxy.update({"user": self.vpn["private"]["email"]})
proxy.update({"pass": self.vpn["private"]["passwd"]})
if "pvdata.host" in self.args.privtvpn:
proxy.update({"host": self.args.privtvpn})
else:
proxy.update(
{"host": connect(code=self.args.privtvpn).privateVPN()}
)
proxies = self.vpn["private"]["http"].format(
email=proxy["user"],
passwd=proxy["pass"],
ip=proxy["host"],
port=proxy["port"],
)
else:
if self.args.nordvpn:
self.logger.info("Proxy Status: Activated-NordVpn")
proxy.update({"port": self.vpn["nordvpn"]["port"]})
proxy.update({"user": self.vpn["nordvpn"]["email"]})
proxy.update({"pass": self.vpn["nordvpn"]["passwd"]})
if "nordvpn.com" in self.args.nordvpn:
proxy.update({"host": self.args.nordvpn})
else:
proxy.update(
{"host": connect(code=self.args.nordvpn).nordVPN()}
)
proxies = self.vpn["nordvpn"]["http"].format(
email=proxy["user"],
passwd=proxy["pass"],
ip=proxy["host"],
port=proxy["port"],
)
else:
self.logger.info("Proxy Status: Off")
if proxy.get("host"):
aria2c_proxy.append(
"--https-proxy={}:{}".format(proxy.get("host"), proxy.get("port"))
)
if proxy.get("user"):
aria2c_proxy.append("--https-proxy-user={}".format(proxy.get("user")))
if proxy.get("pass"):
aria2c_proxy.append("--https-proxy-passwd={}".format(proxy.get("pass")))
if proxies:
os.environ["http_proxy"] = proxies
os.environ["HTTP_PROXY"] = proxies
os.environ["https_proxy"] = proxies
os.environ["HTTPS_PROXY"] = proxies
ip = None
try:
self.logger.info("Getting IP...")
r = requests.get("https://ipinfo.io/json", timeout=5)
data = r.json()
ip = f'{data["ip"]} ({data["country"]})'
except Exception as e:
self.logger.info(f"({e.__class__.__name__}: {e})")
sys.exit(1)
return aria2c_proxy, ip

132
helpers/pssh_generator.py Normal file
View File

@@ -0,0 +1,132 @@
from utils.modules.pymp4.parser import Box
from io import BytesIO
import base64
import requests
import uuid
import binascii
import subprocess
import logging
import json
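# Builds a Widevine PSSH box either from a raw KID or by locating the pssh atom
# inside an MP4 init segment (local file, URL, byte range, or in-memory bytes).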
class pssh_generator(object):
def __init__(self, init, **kwargs):
self.init = init
self.logger = logging.getLogger(__name__)
self.proxies = kwargs.get("proxies", None)
self.mp4dumpexe = kwargs.get("mp4dumpexe", None)
def from_kid(self):
array_of_bytes = bytearray(b"\x00\x00\x002pssh\x00\x00\x00\x00")
array_of_bytes.extend(bytes.fromhex("edef8ba979d64acea3c827dcd51d21ed"))
array_of_bytes.extend(b"\x00\x00\x00\x12\x12\x10")
array_of_bytes.extend(bytes.fromhex(self.init.replace("-", "")))
pssh = base64.b64encode(bytes.fromhex(array_of_bytes.hex()))
return pssh.decode()
def Get_PSSH(self):
WV_SYSTEM_ID = "[ed ef 8b a9 79 d6 4a ce a3 c8 27 dc d5 1d 21 ed]"
pssh = None
data = subprocess.check_output(
[self.mp4dumpexe, "--format", "json", "--verbosity", "1", self.init]
)
data = json.loads(data)
for atom in data:
if atom["name"] == "moov":
for child in atom["children"]:
if child["name"] == "pssh":
if child["system_id"] == WV_SYSTEM_ID:
pssh = child["data"][1:-1].replace(" ", "")
pssh = binascii.unhexlify(pssh)
if pssh.startswith(b"\x08\x01"):
pssh = pssh[0:]
pssh = base64.b64encode(pssh).decode("utf-8")
return pssh
if not pssh:
self.logger.error("Error while generate pssh from file.")
return pssh
def get_moov_pssh(self, moov):
while True:
x = Box.parse_stream(moov)
if x.type == b"moov":
for y in x.children:
if y.type == b"pssh" and y.system_ID == uuid.UUID(
"edef8ba9-79d6-4ace-a3c8-27dcd51d21ed"
):
data = base64.b64encode(y.init_data)
return data
def build_init_segment_mp4(self, bytes_):
moov = BytesIO(bytes_)
data = self.get_moov_pssh(moov)
pssh = data.decode("utf-8")
return pssh
def getInitWithRange2(self, headers):
initbytes = requests.get(url=self.init, proxies=self.proxies, headers=headers,)
try:
pssh = self.build_init_segment_mp4(initbytes.content)
return pssh
except Exception as e:
self.logger.info("Error: " + str(e))
return None
def getInitWithRange(self, start: int, end: int):
initbytes = requests.get(
url=self.init,
proxies=self.proxies,
headers={"Range": "bytes={}-{}".format(start, end)},
)
try:
pssh = self.build_init_segment_mp4(initbytes.content)
return pssh
except Exception as e:
self.logger.info("Error: " + str(e))
return None
def loads(self):
req = requests.get(url=self.init, proxies=self.proxies)
initbytes = req.content
try:
pssh = self.build_init_segment_mp4(initbytes)
return pssh
except Exception as e:
self.logger.error("Error: " + str(e))
return None
def load(self):
with open(self.init, "rb") as f:
initbytes = f.read()
try:
pssh = self.build_init_segment_mp4(initbytes)
return pssh
except Exception as e:
self.logger.error("Error: " + str(e))
return None
def from_str(self):
initbytes = self.init
try:
pssh = self.build_init_segment_mp4(initbytes)
return pssh
except Exception as e:
self.logger.info("Error: " + str(e))
return None

819
helpers/ripprocess.py Normal file
View File

@@ -0,0 +1,819 @@
import ffmpy, json, os, sys, unidecode, requests, subprocess, time, pycountry, html, tqdm, re, glob, base64, binascii
from titlecase import titlecase
from configs.config import tool
from helpers.proxy_environ import hold_proxy
import tldextract
from collections import namedtuple
from collections.abc import Sequence
from natsort import natsorted
import logging
import unicodedata, string
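# Expands episode selections such as "1-5", "3~", "~" or comma-separated lists
# into sorted, de-duplicated lists of episode numbers.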
class EpisodesNumbersHandler:
def __init__(self):
return
def numberRange(self, start: int, end: int):
if list(range(start, end + 1)) != []:
return list(range(start, end + 1))
if list(range(end, start + 1)) != []:
return list(range(end, start + 1))
return [start]
def ListNumber(self, Number: str):
if Number.isdigit():
return [int(Number)]
if Number.strip() == "~" or Number.strip() == "":
return self.numberRange(1, 999)
if "-" in Number:
start, end = Number.split("-")
if start.strip() == "" or end.strip() == "":
raise ValueError("wrong Number: {}".format(Number))
return self.numberRange(int(start), int(end))
if "~" in Number:
start, _ = Number.split("~")
if start.strip() == "":
raise ValueError("wrong Number: {}".format(Number))
return self.numberRange(int(start), 999)
return
def sortNumbers(self, Numbers):
SortedNumbers = []
for Number in Numbers.split(","):
SortedNumbers += self.ListNumber(Number.strip())
return natsorted(list(set(SortedNumbers)))
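# Grab-bag of ripping helpers: external-tool wrappers (aria2c, mp4decrypt,
# shaka-packager, SubtitleEdit, ffmpeg), filename cleanup, language-code mapping,
# PSSH/KID extraction, and console progress display.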
class ripprocess(object):
def __init__(self):
self.tool = tool()
self.logger = logging.getLogger(__name__)
self.bin = self.tool.bin()
def sort_list(self, media_list, keyword1=None, keyword2=None):
if keyword1:
if keyword2:
return sorted(
media_list, key=lambda k: (int(k[keyword1]), int(k[keyword2]))
)
            else:
                return sorted(media_list, key=lambda k: int(k[keyword1]))
return media_list
def yt2json(self, url, proxies=None):
jsonfile = "info.info.json"
yt_cmd = [
self.bin["youtube"],
"--skip-download",
"--write-info-json",
"--quiet",
"--no-warnings",
"-o",
"info",
url,
]
if proxies:
yt_cmd += ["--proxy", proxies.get("https")]
subprocess.call(yt_cmd)
while not os.path.isfile(jsonfile):
time.sleep(0.2)
with open(jsonfile) as js:
data = json.load(js)
if os.path.isfile(jsonfile):
os.remove(jsonfile)
return data
def getKeyId(self, mp4_file):
data = subprocess.check_output(
[self.bin["mp4dump"], "--format", "json", "--verbosity", "1", mp4_file]
)
try:
return re.sub(
" ",
"",
re.compile(r"default_KID.*\[(.*)\]").search(data.decode()).group(1),
)
except AttributeError:
return None
def flatten(self, l):
return list(self.flatten_g(l))
def flatten_g(self, l):
basestring = (str, bytes)
for el in l:
if isinstance(el, Sequence) and not isinstance(el, basestring):
for sub in self.flatten_g(el):
yield sub
else:
yield el
def removeExtentsion(self, string: str):
if "." in string:
return ".".join(string.split(".")[:-1])
else:
raise ValueError("string has no extentsion: {}".format(string))
def replaceExtentsion(self, string: str, ext: str):
if "." in string:
return ".".join(string.split(".")[:-1]) + f".{ext}"
else:
raise ValueError("string has no extentsion: {}".format(string))
def domain(self, url):
return "{0.domain}.{0.suffix}".format(tldextract.extract(url))
def remove_dups(self, List, keyword=""):
Added_ = set()
Proper_ = []
for L in List:
if L[keyword] not in Added_:
Proper_.append(L)
Added_.add(L[keyword])
return Proper_
def find_str(self, s, char):
index = 0
if char in s:
c = char[0]
for ch in s:
if ch == c:
if s[index : index + len(char)] == char:
return index
index += 1
return -1
def updt(self, total, progress):
barLength, status = 80, ""
progress = float(progress) / float(total)
if progress >= 1.0:
progress, status = 1, "\r\n"
block = int(round(barLength * progress))
text = "\rProgress: {} | {:.0f}% {}".format(
"" * block + "" * (barLength - block), round(progress * 100, 0), status,
)
sys.stdout.write(text)
sys.stdout.flush()
def Get_PSSH(self, mp4_file):
WV_SYSTEM_ID = "[ed ef 8b a9 79 d6 4a ce a3 c8 27 dc d5 1d 21 ed]"
pssh = None
data = subprocess.check_output(
[self.bin["mp4dump"], "--format", "json", "--verbosity", "1", mp4_file]
)
data = json.loads(data)
for atom in data:
if atom["name"] == "moov":
for child in atom["children"]:
if child["name"] == "pssh":
if child["system_id"] == WV_SYSTEM_ID:
pssh = child["data"][1:-1].replace(" ", "")
pssh = binascii.unhexlify(pssh)
if pssh.startswith(b"\x08\x01"):
pssh = pssh[0:]
pssh = base64.b64encode(pssh).decode("utf-8")
return pssh
return None
def SubtitleEdit(
self, contain=None, file=None, removeSDH=False, silent=True, extra_commands=[]
):
if file:
subtitle_command = [
self.bin["SubtitleEdit"],
"/convert",
file,
"srt",
"/overwrite",
"/multiplereplace:.",
"/MergeShortLines",
"/FixCommonErrors",
]
subtitle_command += extra_commands
if removeSDH:
subtitle_command.append("/RemoveTextForHI")
subprocess.call(
subtitle_command, stdout=open(os.devnull, "wb")
) if silent else subprocess.call(subtitle_command)
if contain:
subtitle_command = [
self.bin["SubtitleEdit"],
"/convert",
"{}*.srt".format(contain),
"srt",
"/overwrite",
"/multiplereplace:.",
"/MergeShortLines",
"/FixCommonErrors",
]
subtitle_command += extra_commands
if removeSDH:
subtitle_command.append("/removetextforhi")
subprocess.call(
subtitle_command, stdout=open(os.devnull, "wb")
) if silent else subprocess.call(subtitle_command)
return
def parseCookieFile(self, cookiesfile):
cookies = {}
with open(cookiesfile, "r") as fp:
for line in fp:
if not re.match(r"^\#", line):
lineFields = line.strip().split("\t")
try:
cookies[lineFields[5]] = lineFields[6]
except Exception:
pass
return cookies
def ReplaceCodeLanguages(self, X):
X = X.lower()
X = (
X.replace("_subtitle_dialog_0", "")
.replace("_narrative_dialog_0", "")
.replace("_caption_dialog_0", "")
.replace("_dialog_0", "")
.replace("_descriptive_0", "_descriptive")
.replace("_descriptive", "_descriptive")
.replace("_sdh", "-sdh")
.replace("es-es", "es")
.replace("en-es", "es")
.replace("kn-in", "kn")
.replace("gu-in", "gu")
.replace("ja-jp", "ja")
.replace("mni-in", "mni")
.replace("si-in", "si")
.replace("as-in", "as")
.replace("ml-in", "ml")
.replace("sv-se", "sv")
.replace("hy-hy", "hy")
.replace("sv-sv", "sv")
.replace("da-da", "da")
.replace("fi-fi", "fi")
.replace("nb-nb", "nb")
.replace("is-is", "is")
.replace("uk-uk", "uk")
.replace("hu-hu", "hu")
.replace("bg-bg", "bg")
.replace("hr-hr", "hr")
.replace("lt-lt", "lt")
.replace("et-et", "et")
.replace("el-el", "el")
.replace("he-he", "he")
.replace("ar-ar", "ar")
.replace("fa-fa", "fa")
.replace("ro-ro", "ro")
.replace("sr-sr", "sr")
.replace("cs-cs", "cs")
.replace("sk-sk", "sk")
.replace("mk-mk", "mk")
.replace("hi-hi", "hi")
.replace("bn-bn", "bn")
.replace("ur-ur", "ur")
.replace("pa-pa", "pa")
.replace("ta-ta", "ta")
.replace("te-te", "te")
.replace("mr-mr", "mr")
.replace("kn-kn", "kn")
.replace("gu-gu", "gu")
.replace("ml-ml", "ml")
.replace("si-si", "si")
.replace("as-as", "as")
.replace("mni-mni", "mni")
.replace("tl-tl", "tl")
.replace("id-id", "id")
.replace("ms-ms", "ms")
.replace("vi-vi", "vi")
.replace("th-th", "th")
.replace("km-km", "km")
.replace("ko-ko", "ko")
.replace("zh-zh", "zh")
.replace("ja-ja", "ja")
.replace("ru-ru", "ru")
.replace("tr-tr", "tr")
.replace("it-it", "it")
.replace("es-mx", "es-la")
.replace("ar-sa", "ar")
.replace("zh-cn", "zh")
.replace("nl-nl", "nl")
.replace("pl-pl", "pl")
.replace("pt-pt", "pt")
.replace("hi-in", "hi")
.replace("mr-in", "mr")
.replace("bn-in", "bn")
.replace("te-in", "te")
.replace("cmn-hans", "zh-hans")
.replace("cmn-hant", "zh-hant")
.replace("ko-kr", "ko")
.replace("en-au", "en")
.replace("es-419", "es-la")
.replace("es-us", "es-la")
.replace("en-us", "en")
.replace("en-gb", "en")
.replace("fr-fr", "fr")
.replace("de-de", "de")
.replace("las-419", "es-la")
.replace("ar-ae", "ar")
.replace("da-dk", "da")
.replace("yue-hant", "yue")
.replace("bn-in", "bn")
.replace("ur-in", "ur")
.replace("ta-in", "ta")
.replace("sl-si", "sl")
.replace("cs-cz", "cs")
.replace("hi-jp", "hi")
.replace("-001", "")
.replace("en-US", "en")
.replace("deu", "de")
.replace("eng", "en")
.replace("ca-es", "cat")
.replace("fil-ph", "fil")
.replace("en-ca", "en")
.replace("eu-es", "eu")
.replace("ar-eg", "ar")
.replace("he-il", "he")
.replace("el-gr", "he")
.replace("nb-no", "nb")
.replace("es-ar", "es-la")
.replace("en-ph", "en")
.replace("sq-al", "sq")
.replace("bs-ba", "bs")
)
return X
def countrycode(self, code, site_domain="None"):
languageCodes = {
"zh-Hans": "zhoS",
"zh-Hant": "zhoT",
"pt-BR": "brPor",
"es-ES": "euSpa",
"en-GB": "enGB",
"en-PH": "enPH",
"nl-BE": "nlBE",
"fil": "enPH",
"yue": "zhoS",
"fr-CA": "caFra",
}
if code == "cmn-Hans":
return "Mandarin Chinese (Simplified)", "zh-Hans"
elif code == "cmn-Hant":
return "Mandarin Chinese (Traditional)", "zh-Hant"
elif code == "es-419":
return "Spanish", "spa"
elif code == "es-ES":
return "European Spanish", "euSpa"
elif code == "pt-BR":
return "Brazilian Portuguese", "brPor"
elif code == "pt-PT":
return "Portuguese", "por"
elif code == "fr-CA":
return "French Canadian", "caFra"
elif code == "fr-FR":
return "French", "fra"
elif code == "iw":
return "Modern Hebrew", "heb"
elif code == "es" and site_domain == "google":
return "European Spanish", "euSpa"
lang_code = code[: code.index("-")] if "-" in code else code
lang = pycountry.languages.get(alpha_2=lang_code)
if lang is None:
lang = pycountry.languages.get(alpha_3=lang_code)
try:
languagecode = languageCodes[code]
except KeyError:
languagecode = lang.alpha_3
return lang.name, languagecode
def tqdm_downloader(self, url, file_name, proxies=None):
# self.logger.info(file_name)
r = requests.get(url, stream=True)
file_size = int(r.headers["Content-Length"])
chunk = 1
chunk_size = 1024
num_bars = int(file_size / chunk_size)
with open(file_name, "wb") as fp:
for chunk in tqdm.tqdm(
r.iter_content(chunk_size=chunk_size),
total=num_bars,
unit="KB",
desc=file_name,
leave=True, # progressbar stays
):
fp.write(chunk)
return
def silent_aria2c_download(self, url, file_name, disable_proxy=True):
holder = hold_proxy()
if disable_proxy:
holder.disable()
commands = [
self.bin["aria2c"],
url,
'--user-agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.132 Safari/537.36"',
"--allow-overwrite=true",
"--auto-file-renaming=false",
"--retry-wait=5",
"-x16",
"-j16",
"-s16",
"-o",
file_name,
]
try:
aria = subprocess.call(commands, stdout=open(os.devnull, "wb"),)
except FileNotFoundError:
self.logger.info("UNABLE TO FIND {}".format("aria2c.exe"))
exit(-1)
if aria != 0:
raise ValueError("Aria2c exited with code {}".format(aria))
if disable_proxy:
holder.enable()
def aria2c_download(self, commands, extra_commands, disable_proxy=False):
LogFile = self.bin["aria2c"].replace("exe", "log")
if os.path.isfile(LogFile):
os.remove(LogFile)
aria2_commands = []
aria2_commands.append(self.bin["aria2c"])
aria2_commands.append("--log={}".format(LogFile))
aria2_commands += commands + extra_commands
holder = hold_proxy()
if disable_proxy:
holder.disable()
try:
aria = subprocess.call(aria2_commands)
except FileNotFoundError:
self.logger.info("UNABLE TO FIND {}".format("aria2c.exe"))
exit(-1)
if aria != 0:
self.logger.info("Aria2c exited with code {}".format(aria))
exit(-1)
if disable_proxy:
holder.enable()
        self.logger.info("")
def isduplelist(self, a, b):
return set(a) == set(b) and len(a) == len(b)
def readfile(self, file, lines=False):
read = ""
if os.path.isfile(file):
with open(file, "r") as f:
if lines:
read = f.readlines()
return read
read = f.read()
else:
self.logger.info("File: %s, is not found" % file)
return None
return read
def strip(self, inputint, left=True, right=False):
if left:
return str(inputint.lstrip("0"))
if right:
return str(inputint.rstrip("0"))
return
def CleanMyFileNamePlease(self, filename):
# edit here...
filename = filename.replace("666", "666")
################################################################################################################################
# dont edit here...
filename = (
filename.replace(" ", ".")
.replace("'", "")
.replace(",", "")
.replace("-", "")
.replace("-.", ".")
.replace(".-.", ".")
)
filename = re.sub(" +", ".", filename)
for i in range(10):
filename = re.sub(r"(\.\.)", ".", filename)
return filename
def RemoveExtraWords(self, name):
if re.search("[eE]pisode [0-9]+", name):
name = name.replace((re.search("[eE]pisode [0-9]+", name)).group(0), "")
if re.search(r"(\(.+?)\)", name):
name = name.replace(re.search(r"(\(.+?)\)", name).group(), "")
name = re.sub(" +", " ", name)
name = name.strip()
name = (
name.replace(" : ", " - ")
.replace(": ", " - ")
.replace(":", " - ")
.replace("&", "and")
.replace("ó", "o")
.replace("*", "x")
)
return name
def DecodeString(self, text):
for encoding in ("utf-8-sig", "utf-8", "utf-16"):
try:
return text.decode(encoding)
except UnicodeDecodeError:
continue
return text.decode("latin-1")
def EncodeString(self, text):
for encoding in ("utf-8-sig", "utf-8", "utf-16"):
try:
return text.encode(encoding)
except UnicodeDecodeError:
continue
return text.encode("latin-1")
def clean_text(self, text):
whitelist = (
"-_.() %s%s" % (string.ascii_letters, string.digits) + "',&#$%@`~!^&+=[]{}"
)
cleaned_text = (
unicodedata.normalize("NFKD", text).encode("ASCII", "ignore").decode()
)
return "".join(c for c in cleaned_text if c in whitelist)
def RemoveCharcters(self, text):
text = self.EncodeString(text)
text = self.DecodeString(text)
text = self.RemoveExtraWords(text)
text = self.clean_text(text)
text = unidecode.unidecode(titlecase(text))
return text
def do_clean(self, contain, exclude=[], added=[]):
"""contain= string name in the file/files you want to delete.
exclude= the files that has a specified extension you do not want to delete. send by list like ['.sfv', '.whatever']
added= another extensions not in the default extension. send by list like ['.sfv', '.whatever']"""
error = []
extensions = [
".mp4",
".h265",
".h264",
".eac3",
".m4a",
".ac3",
".srt",
".vtt",
".txt",
".aac",
".m3u8",
".mpd",
]
extensions += added
erased_files = []
for ext in extensions:
if ext not in exclude:
erased_files += glob.glob(contain + f"*{ext}")
if not erased_files == []:
for files in erased_files:
try:
os.remove(files)
except Exception:
error.append(files)
if not error == []:
self.logger.info(
f"some files not deleted with extensions: "
+ ", ".join(str(x) for x in error)
+ "."
)
return
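    # e.g. ripprocess().do_clean("Show.S01E01.", exclude=[".srt"], added=[".log"])
    # would delete the matching temp files while leaving subtitles untouched.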
def mediainfo_(self, file):
mediainfo_output = subprocess.Popen(
[self.bin["MediaInfo"], "--Output=JSON", "-f", file],
stdout=subprocess.PIPE,
)
mediainfo_json = json.load(mediainfo_output.stdout)
return mediainfo_json
def DemuxAudio(self, inputName, replace_str):
if os.path.isfile(inputName):
self.logger.info("\nDemuxing audio...")
mediainfo = self.mediainfo_(inputName)
for m in mediainfo["media"]["track"]:
if m["@type"] == "Audio":
codec_name = m["Format"]
ext = ".ac3"
if codec_name == "AAC":
ext = ".m4a"
else:
if codec_name == "E-AC-3":
ext = ".eac3"
else:
if codec_name == "AC-3":
ext = ".ac3"
if codec_name == "DTS":
ext = ".dts"
outputName = inputName.replace(replace_str, ext)
self.logger.info(("{} -> {}").format(inputName, outputName))
ff = ffmpy.FFmpeg(
executable=self.bin["ffmpeg"],
inputs={inputName: None},
outputs={outputName: "-c:a copy"},
global_options="-vn -sn -y -hide_banner -loglevel panic",
)
ff.run()
time.sleep(0.05)
if os.path.isfile(outputName) and os.path.getsize(outputName) > 1024 * 1024:
os.remove(inputName)
self.logger.info("Done!")
return
def shaka_decrypt(self, encrypted, decrypted, keys, stream):
self.logger.info("\nDecrypting: {}".format(encrypted))
decrypt_command = [
self.bin["shaka-packager"],
"--enable_raw_key_decryption",
"-quiet",
"input={},stream={},output={}".format(encrypted, stream, decrypted),
]
for key in keys:
decrypt_command.append("--keys")
decrypt_command.append(
"key={}:key_id={}".format(
key["KEY"], "00000000000000000000000000000000"
)
)
self.logger.info("\nDecrypting KEYS: ")
for key in keys:
self.logger.info(("{}:{}".format(key["KID"], key["KEY"])))
wvdecrypt_process = subprocess.Popen(decrypt_command)
stdoutdata, stderrdata = wvdecrypt_process.communicate()
wvdecrypt_process.wait()
self.logger.info("Done!")
return True
def mp4_decrypt(
self,
encrypted,
decrypted,
keys,
moded_decrypter=True,
no_kid=True,
silent=False,
):
self.logger.info("\nDecrypting: {}".format(encrypted))
decrypt_command = [
self.bin["mp4decrypt"]
if not moded_decrypter
else self.bin["mp4decrypt_moded"]
]
decrypt_command += ["--show-progress", encrypted, decrypted]
for key in keys:
decrypt_command.append("--key")
decrypt_command.append(
"{}:{}".format(key["ID"] if no_kid else key["KID"], key["KEY"])
)
self.logger.info("\nDecrypting KEYS: ")
for key in keys:
self.logger.info(
("{}:{}".format(key["ID"] if no_kid else key["KID"], key["KEY"]))
)
if silent:
wvdecrypt_process = subprocess.Popen(
decrypt_command, stdout=open(os.devnull, "wb")
)
else:
wvdecrypt_process = subprocess.Popen(decrypt_command)
stdoutdata, stderrdata = wvdecrypt_process.communicate()
wvdecrypt_process.wait()
if wvdecrypt_process.returncode == 0:
self.logger.info("Done!")
return True
return False
def DemuxVideo(
self,
outputVideoTemp,
outputVideo,
ffmpeg=False,
mp4box=False,
ffmpeg_version="ffmpeg",
):
if ffmpeg:
self.logger.info("\nRemuxing video...")
# if not outputVideo.endswith(".h264"):
# os.rename(outputVideoTemp, outputVideo)
# self.logger.info("Done!")
# return True
ff = ffmpy.FFmpeg(
executable=self.bin[ffmpeg_version],
inputs={outputVideoTemp: None},
outputs={outputVideo: "-c copy"},
global_options="-y -hide_banner -loglevel panic",
).run()
time.sleep(0.05)
if (
os.path.isfile(outputVideo)
and os.path.getsize(outputVideo) > 1024 * 1024
):
os.remove(outputVideoTemp)
self.logger.info("Done!")
return True
if mp4box:
self.logger.info("\nRemuxing video...")
if not outputVideo.endswith(".h264"):
os.rename(outputVideoTemp, outputVideo)
self.logger.info("Done!")
return True
subprocess.call(
[
self.bin["mp4box"],
"-quiet",
"-raw",
"1",
"-out",
outputVideo,
outputVideoTemp,
]
)
if (
os.path.isfile(outputVideo)
and os.path.getsize(outputVideo) > 1024 * 1024
):
os.remove(outputVideoTemp)
self.logger.info("Done!")
return True
return False

90
helpers/sdh.py Normal file
View File

@@ -0,0 +1,90 @@
import codecs
import os
import re
import sys
import pysrt
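# Strips SDH/hearing-impaired cues (bracketed sounds, music-note lines, font tags)
# from an SRT file and rewrites it without the emptied lines.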
class sdh_remover:
def __init__(self,):
self.__replace__ = "empty_line"
self.content = []
def cleanLine(self, line, regex):
line = re.sub("</i>", "", line)
line = re.sub("<i>", "", line)
if re.search(r"\[(.*)?\n(.*)?\]", line):
line = re.sub(
re.search(r"\[(.*)?\n(.*)?\]", line).group(), self.__replace__, line
)
if re.search(r"\((.*)?\n(.*)?\)", line):
line = re.sub(
re.search(r"\((.*)?\n(.*)?\)", line).group(), self.__replace__, line
)
try:
# is it inside a markup tag?
match = regex.match(line).group(1)
            tag = re.compile("(<[A-Za-z]+[^>]*>)").match(match).group(1)
line = re.sub(match, tag + self.__replace__, line)
except:
try:
line = re.sub(regex, self.__replace__, line)
except:
pass
return line
def _save(self, Output):
file = codecs.open(Output, "w", encoding="utf-8")
for idx, text in enumerate(self.content, start=1):
file.write(
"{}\n{} --> {}\n{}\n\n".format(
str(idx), text["start"], text["end"], text["text"].strip(),
)
)
file.close()
def clean(self):
if not self.content == []:
temp = self.content
self.content = []
for text in temp:
if text["text"].strip() == self.__replace__:
continue
text.update({"text": re.sub(self.__replace__, "", text["text"])})
if not text["text"].strip() == "":
self.content.append(text)
return
def noHI(self, Input=None, Output=None, content=None):
srt = pysrt.open(Input, encoding="utf-8")
for idx, line in enumerate(srt, start=1):
number = str(idx)
start = line.start
end = line.end
text = line.text
text = self.cleanLine(text, re.compile(r"(\[(.+)?\]|\[(.+)?|^(.+)?\])"))
text = self.cleanLine(text, re.compile(r"(\((.+)?\)|\((.+)?|^(.+)?\))"))
text = self.cleanLine(text, re.compile(r"(\[(.+)?\]|\[(.+)?|^(.+)?\])"))
text = self.cleanLine(
text,
re.compile(r"([♩♪♫♭♮♯]+(.+)?[♩♪♫♭♮♯]+|[♩♪♫♭♮♯]+(.+)?|^(.+)?[♩♪♫♭♮♯]+)"),
)
text = self.cleanLine(text, re.compile(r"(<font[^>]*>)|(<\/font>)"))
self.content.append(
{"number": number, "start": start, "end": end, "text": text,}
)
self.clean()
self._save(Output)

135
helpers/vpn.py Normal file
View File

@@ -0,0 +1,135 @@
import os
import requests
import sys
import random
import logging
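# Resolves a two-letter country code to a usable proxy host, either through the
# NordVPN recommendation API or by scraping the PrivateVPN server list.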
class connect(object):
def __init__(self, code):
self.code = code.lower()
self.logger = logging.getLogger(__name__)
self.headers = {
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130 Safari/537.36"
}
def nordVPN(self):
nordvpn_codes = {
"al": "2",
"ar": "10",
"au": "13",
"at": "14",
"be": "21",
"ba": "27",
"br": "30",
"bg": "33",
"ca": "38",
"cl": "43",
"cr": "52",
"hr": "54",
"cy": "56",
"cz": "57",
"dk": "58",
"eg": "64",
"ee": "68",
"fi": "73",
"fr": "74",
"ge": "80",
"de": "81",
"gr": "84",
"hk": "97",
"hu": "98",
"is": "99",
"in": "100",
"id": "101",
"ie": "104",
"il": "105",
"it": "106",
"jp": "108",
"lv": "119",
"lu": "126",
"my": "131",
"mx": "140",
"md": "142",
"nl": "153",
"nz": "156",
"mk": "128",
"no": "163",
"ro": "179",
"pl": "174",
"si": "197",
"za": "200",
"kr": "114",
"rs": "192",
"sg": "195",
"sk": "196",
"es": "202",
"se": "208",
"ch": "209",
"tw": "211",
"th": "214",
"tr": "220",
"ua": "225",
"ae": "226",
"gb": "227",
"us": "228",
"vn": "234",
"uk": "227",
}
nord_proxy = {}
if nordvpn_codes.get(self.code):
resp = requests.get(
url="https://nordvpn.com/wp-admin/admin-ajax.php?action=servers_recommendations&filters={%22country_id%22:"
+ nordvpn_codes.get(self.code)
+ "}",
headers=self.headers,
)
nord_proxy = resp.json()[0]["hostname"]
else:
            self.logger.info(
                self.code
                + " is not listed in the country codes; read country.doc for more info"
            )
return nord_proxy
def load_privatevpn(self):
html_file = "html.html"
hosts = []
resp = requests.get(
"https://privatevpn.com/serverlist/", stream=True, headers=self.headers
)
resp = str(resp.text)
resp = resp.replace("<br>", "")
with open(html_file, "w", encoding="utf8") as file:
file.write(resp)
with open(html_file, "r") as file:
text = file.readlines()
if os.path.exists(html_file):
os.remove(html_file)
for p in text:
if ".pvdata.host" in p:
hosts.append(p.strip())
return hosts
def privateVPN(self):
private_proxy = {}
private_hosts = self.load_privatevpn()
self.logger.debug("private_hosts: {}".format(private_hosts))
search_host = [host for host in private_hosts if host[:2] == self.code]
if not search_host == []:
self.logger.info(f"Founded {str(len(search_host))} Proxies")
for n, p in enumerate(search_host):
self.logger.info(f"[{str(n+1)}] {p}")
inp = input("\nEnter Proxy Number, or Hit Enter for random one: ").strip()
if inp == "":
return random.choice(search_host)
private_proxy = search_host[int(inp) - 1]
else:
self.logger.info(f"no Proxies Found, you may entered wrong code, or search failed!...")
return private_proxy