initial commit
This commit is contained in:
commit
0007acad90
13 changed files with 1893 additions and 0 deletions
6
squid_dl/__init__.py
Normal file
6
squid_dl/__init__.py
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
import typing
|
||||
|
||||
from .downloader import main as real_main
|
||||
|
||||
def main(args: typing.List[str], name: str) -> int:
    """Package-level entry point: delegate to the downloader's main().

    :param args: command-line arguments (without the program name).
    :param name: program name used for argparse's ``prog``.
    :returns: process exit status from the real main.
    """
    # FIX: the annotation was ``[str]`` — a list literal, not a type.
    return real_main(args=args, name=name)
|
||||
551
squid_dl/downloader.py
Normal file
551
squid_dl/downloader.py
Normal file
|
|
@ -0,0 +1,551 @@
|
|||
"""
|
||||
Fancy parallel downloader for a pre-
|
||||
retrieved YoutubeDL() info_dict JSON.
|
||||
⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⣀⣀⣀⣠⣤⣤⣄⣀⣀⣀⡀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
|
||||
⠀⠀⠀⠀⠀⠀⠀⠀⠀⣀⠤⠖⠊⠉⠁⠀⠀⠀⠀⠀⠀⠀⠀⠈⠉⠙⠲⢤⡀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
|
||||
⠀⠀⠀⠀⠀⠀⠀⡤⠊⠁⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠈⢦⡀⠀⠀⠀⠀⠀⠀⠀⠀⠀
|
||||
⠀⠀⠀⠀⠀⠀⡜⠀⠀⠀⠀⠀⠀⢀⡀⠀⠀⠀⠀⠀⠀⠀⠀⠀⢢⠀⠀⠀⠀⠀⢳⠀⠀⠀⠀⠀⠀⠀⠀⠀
|
||||
⠀⠀⠀⠀⠀⣸⠁⠀⠀⠀⠀⠀⠀⠀⠱⡀⠀⠀⠀⠀⠀⠀⠀⡀⠈⠀⡀⠀⠀⠀⠈⡇⠀⠀⠀⠀⠀⠀⠀⠀
|
||||
⠀⠀⠀⠀⠀⡏⠀⠀⠀⠀⠀⠀⠀⠀⡰⠁⠀⠀⠀⠀⠀⠀⠀⠘⡆⡜⠁⠀⠀⠀⠀⢧⡀⠀⠀⠀⠀⠀⠀⠀
|
||||
⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠸⡀⠀⠀⠀⠀⠀⣀⣤⡂⠀⠇⠱⠀⡀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀
|
||||
⠀⠀⠀⠀⠀⢇⠀⠀⠀⠀⠀⠀⠀⠀⠈⢄⡀⢠⣟⢭⣥⣤⠽⡆⠀⡶⣊⣉⣲⣤⢀⡞⠀⠀⠀⠀⠀⠀⠀⠀
|
||||
⠀⠀⠀⠀⠀⠘⣆⠀⠀⠀⠀⠀⠀⡀⠀⠐⠂⠘⠄⣈⣙⡡⡴⠀⠀⠙⣄⠙⣛⠜⠘⣆⠀⠀⠀⠀⠀⠀⠀⠀
|
||||
⠀⠀⠀⠀⠀⠀⠈⢦⡀⠀⠀⠀⢸⠁⠀⠀⠀⠀⠀⠀⠄⠊⠀⠀⠀⠀⡸⠛⠀⠀⠀⢸⠆⠀⠀⠀⠀⠀⠀⠀
|
||||
⠀⠀⠀⠀⠀⠀⠀⠀⠈⠓⠦⢄⣘⣄⠀⠀⠀⠀⠀⠀⠀⡠⠀⠀⠀⠀⣇⡀⠀⠀⣠⠎⠀⠀⠀⠀⠀⠀⠀⠀
|
||||
⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⣸⠁⠈⡟⠒⠲⣄⠀⠀⡰⠇⠖⢄⠀⠀⡹⡇⢀⠎⠁⠀⠀⠀⠀⠀⠀⠀⠀⠀
|
||||
⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⢀⡇⠀⠀⡇⠀⠀⠹⠀⡞⠀⠀⢀⠤⣍⠭⡀⢱⢸⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
|
||||
⠀⠀⠀⠀⠀⠀⢀⣀⣀⣠⠞⠀⠀⢠⡇⠀⠀⠀⠀⠁⠀⢴⠥⠤⠦⠦⡼⠀⢸⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
|
||||
⣀⣤⣴⣶⣿⣿⡟⠁⠀⠋⠀⠀⠀⢸⠁⠀⠀⠀⠀⠀⠀⠀⠑⣠⢤⠐⠁⠀⢸⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
|
||||
⣿⣿⣿⣿⣿⡟⠀⠀⠀⠀⠀⠀⠀⢸⡀⠀⠀⠀⠀⠀⠀⠀⠀⠬⠥⣄⠀⠀⠈⠲⡄⠀⠀⠀⠀⠀⠀⠀⠀⠀
|
||||
⣿⣿⣿⣿⣿⡇⠀⠀⠀⠀⠀⠀⠀⠀⠙⠦⣄⠀⠀⠀⠀⠀⠀⠀⠀⠈⢳⠀⠀⢀⣿⡀⠀⠀⠀⠀⠀⠀⠀⠀
|
||||
⣿⣿⣿⣿⣿⣧⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠈⠙⠒⠦⠤⢤⣄⣀⣠⠤⢿⣶⣶⣿⣿⣿⣶⣤⡀⠀⠀⠀⠀⠀
|
||||
⣿⣿⣿⣿⣿⣿⣷⣄⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⢀⡼⠁⠀⠀⣠⣾⣿⣿⣿⣿⣿⣿⣿⣿⣄⠀⠀⠀⠀
|
||||
⣿⣿⣿⣿⣿⣿⣿⣿⣿⣶⣦⣤⣤⣀⣀⣀⣀⣀⣀⣀⣤⣤⣤⣶⣾⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⠀⠀⠀
|
||||
|
||||
NOTE: Not my fault if your IP gets rate-
|
||||
limited or throttled by YouTube. With
|
||||
great power comes great responsibility!
|
||||
|
||||
ALSO NOTE: Have yet to test on other
|
||||
video sites besides YouTube...
|
||||
"""
|
||||
import argparse
|
||||
import json as j
|
||||
from multiprocessing import cpu_count, Process, Queue
|
||||
from multiprocessing.queues import Empty, Full
|
||||
import os
|
||||
from random import randint
|
||||
from time import sleep
|
||||
import typing
|
||||
from yt_dlp import YoutubeDL
|
||||
from yt_dlp.utils import encodeFilename, sanitize_path
|
||||
from yt_dlp.extractor.common import InfoExtractor as IE
|
||||
|
||||
from .linode import LinodeProxy
|
||||
from .util import die, eprint, runcmd
|
||||
|
||||
|
||||
def do_download(
    entry_q: Queue,
    opts: argparse.Namespace,
    sub_langs: typing.List[str],
    proxy: LinodeProxy = None,
):
    """Download worker: pull playlist entries off entry_q until the queue
    stays empty for 0.5s, downloading each video (and optionally subtitles
    and thumbnails) into a per-video-ID directory.

    Runs as a multiprocessing.Process target; exits on queue exhaustion or
    KeyboardInterrupt. If given an exclusive LinodeProxy, tears it down on
    the way out.

    :param entry_q: queue of playlist-entry info dicts.
    :param opts: parsed CLI options (uses ``all_thumbnails``).
    :param sub_langs: subtitle languages; ``["all", ...]`` means everything.
    :param proxy: optional per-worker SOCKS proxy.
    """
    # Subtitle options are kept separate so we can build one YoutubeDL
    # that fetches subs and one that does not.
    sub_opts = {
        "writesubtitles": True,
        "writeautomaticsub": True,
    }
    if sub_langs[0] == "all":
        sub_opts["allsubtitles"] = True
    else:
        sub_opts["subtitleslangs"] = sub_langs

    yt_opts = {
        "noprogress": True,
        "http_chunk_size": 10485760,  # 10 MiB chunks
        "writethumbnail": True,
        "ignoreerrors": True,
        "format_sort": IE.FormatSort.ytdl_default,
        # Skip webpage fetches -- the info_dict was retrieved beforehand.
        "extractor_args": {
            "youtube": {"player_skip": ["webpage"]},
            "youtubetab": {"skip": ["webpage"]},
        },
    }
    if proxy is not None:
        yt_opts["proxy"] = proxy.proxy_url
    if opts.all_thumbnails:
        yt_opts["write_all_thumbnails"] = True

    y = YoutubeDL({**yt_opts, **sub_opts})
    y_nosubs = YoutubeDL(yt_opts)

    while True:
        try:
            try:
                entry = entry_q.get(block=True, timeout=0.5)
            except Empty:
                # Producer finished and the queue drained: we are done.
                break

            if entry["id"] is None:
                continue
            else:
                id_dir = entry["id"]

            try:
                os.mkdir(id_dir)
            except FileExistsError:
                pass

            try:
                os.chdir(id_dir)
            except OSError as oserr:
                eprint("[WARN]: Skipping {} due to {}".format(id_dir, oserr))
                continue

            # Persist the playlist entry JSON once per video (used when
            # resuming). FIX: files now closed via context managers.
            nfo_path = "playlist_entry.json"
            if not os.path.isfile(nfo_path):
                with open(nfo_path, mode="w") as nfo_file:
                    nfo_file.write(j.dumps(entry, sort_keys=True, indent=2))

            desc_path = "description"
            if not os.path.isfile(desc_path):
                with open(desc_path, mode="w") as desc_file:
                    desc_file.write(entry["description"])

            dl_url = "https://www.youtube.com/watch?v=" + entry["id"]
            try:
                # "tux_get_subs" is set by the resume pre-pass (check_dl);
                # entries that never went through it default to fetching
                # subtitles (the KeyError branch).
                if entry["tux_get_subs"] is True:
                    y.download([dl_url])
                else:
                    y_nosubs.download([dl_url])
            except KeyError:
                y.download([dl_url])

            os.chdir("..")
            sleep(2)  # be polite between videos
        except KeyboardInterrupt:
            break

    if proxy is not None:
        if proxy.exclusive:
            print(
                "[INFO]: Cleaning up worker {}'s exclusive proxy".format(
                    os.getpid()
                )
            )
            proxy.cleanup()

    print("[INFO]: Worker {} done...".format(os.getpid()))
|
||||
|
||||
|
||||
def get_entries(entries: dict, entry_q: Queue):
|
||||
try:
|
||||
for entry in entries:
|
||||
while True:
|
||||
try:
|
||||
entry_q.put(entry, block=True, timeout=0.2)
|
||||
break
|
||||
except Full:
|
||||
pass
|
||||
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
|
||||
entry_q.close()
|
||||
|
||||
|
||||
def check_subs_done(
    entry: dict, basename: str, langs: typing.List[str] = None
) -> bool:
    """Return True when a subtitle file already exists on disk for every
    requested language of *entry*.

    :param entry: playlist-entry info dict with an "automatic_captions" key.
    :param basename: prepared media filename without extension.
    :param langs: languages to check; defaults to every auto-caption lang.
    :returns: True iff every language has at least one subtitle file.
    :raises KeyError: if entry has no "automatic_captions" key.
    """
    if langs is None:
        langs = entry["automatic_captions"].keys()

    for lang in langs:
        subbase = basename + "." + lang

        # A language counts as done if ANY of its advertised formats
        # already exists on disk.
        lang_sub_exists = False
        for subentry in entry["automatic_captions"][lang]:
            sfname = subbase + "." + subentry["ext"]
            # isfile() already implies existence; the old
            # ``exists() and isfile()`` double-check was redundant.
            if os.path.isfile(sfname):
                lang_sub_exists = True
                break
        if not lang_sub_exists:
            return False

    return True
|
||||
|
||||
|
||||
def check_video_done(entry: dict, basename: str) -> bool:
    """Return True if a downloaded video with any known container
    extension already exists for *basename*."""
    known_exts = (".mp4", ".webm", ".mkv")
    return any(os.path.isfile(basename + ext) for ext in known_exts)
|
||||
|
||||
|
||||
def check_dl(in_q: Queue, out_q: Queue):
    """Resume pre-pass worker: inspect each entry from in_q and forward the
    ones that still need downloading to out_q.

    Side effects per entry directory: writes a missing "description" file
    and tags the entry with "tux_get_subs" so that download workers know
    whether subtitles are still wanted.

    :param in_q: queue of playlist-entry info dicts (producer: get_entries).
    :param out_q: queue of entries that are not fully downloaded yet.
    """
    while True:
        try:
            try:
                entry = in_q.get(block=True, timeout=0.5)
            except Empty:
                break

            if entry["id"] is None:
                continue
            else:
                id_dir = entry["id"]

            if os.path.isdir(id_dir):
                try:
                    os.chdir(id_dir)
                except OSError as oserr:
                    eprint(
                        "[WARN]: Skipping {} due to {}".format(id_dir, oserr)
                    )
                    continue
            elif os.path.exists(id_dir):
                # BUG FIX: the original never called .format() on this
                # message, so the "{}" placeholders printed verbatim.
                eprint(
                    "[WARN]: Not downloading https://youtube.com/watch?v={} "
                    "because {} exists and is not a directory!".format(
                        entry["id"], id_dir
                    )
                )
                continue
            else:
                # Directory missing entirely: nothing downloaded yet.
                out_q.put(entry)
                continue

            # No saved entry JSON means the download never started.
            nfo_path = "playlist_entry.json"
            if not os.path.isfile(nfo_path):
                os.chdir("..")
                out_q.put(entry)
                continue

            desc_path = "description"
            if not os.path.isfile(desc_path):
                with open(desc_path, mode="w") as desc_file:
                    desc_file.write(entry["description"])

            # Reconstruct the media basename exactly as yt-dlp would name it.
            y = YoutubeDL({"ignoreerrors": True})
            basename = os.path.splitext(
                sanitize_path(encodeFilename(y.prepare_filename(entry)))
            )[0]
            try:
                # Subtitles still wanted iff some language is missing.
                entry["tux_get_subs"] = not check_subs_done(entry, basename)
            except KeyError:
                eprint(
                    "[WARN]: Couldn't find auto subs for {} in info".format(
                        entry["id"]
                    )
                )
                entry["tux_get_subs"] = False

            if not check_video_done(entry, basename):
                out_q.put(entry)
                os.chdir("..")
                continue

            os.chdir("..")
        except KeyboardInterrupt:
            break
|
||||
|
||||
|
||||
def testworker(in_q: Queue):
|
||||
i = 0
|
||||
while not in_q.empty():
|
||||
try:
|
||||
entry = in_q.get(block=True, timeout=0.5)
|
||||
except Empty:
|
||||
break
|
||||
try:
|
||||
i += 1
|
||||
print("{}: ".format(i), end="")
|
||||
print(entry["id"])
|
||||
acs = entry["automatic_captions"]
|
||||
except KeyError:
|
||||
eprint("couldn't get caps on vid {}".format(entry["id"]))
|
||||
|
||||
|
||||
def workers_alive(workers: [Process]):
    """Return True while at least one worker process is still running."""
    return any(worker.is_alive() for worker in workers)
|
||||
|
||||
|
||||
def resume_cleanup(workers: typing.List[Process], q_worker: Process):
    """Terminate any still-running resume workers plus the queue feeder.

    BUG FIX: the queue-worker message used ``worker.pid`` (the stale loop
    variable -- a NameError when *workers* is empty) instead of
    ``q_worker.pid``.

    :param workers: check_dl worker processes.
    :param q_worker: the get_entries feeder process.
    """
    print("\n[CLEANUP]: Cleaning up...")

    for worker in workers:
        if worker.is_alive():
            print("[CLEANUP]: Terminating resume worker {}".format(worker.pid))
            worker.terminate()

    print("[CLEANUP]: Terminating queue worker {}".format(q_worker.pid))
    q_worker.terminate()
|
||||
|
||||
|
||||
def resume_preprocess(entries: [dict]) -> list:
    # Resume pre-pass: fan the playlist entries out to check_dl workers
    # and collect the subset that still needs downloading.
    # Returns [] if interrupted by the user.
    ncpus = cpu_count()
    # Never spawn more workers than there are entries to check.
    n_workers = ncpus if len(entries) >= ncpus else len(entries)

    # in_q is deliberately small (bounded by worker count); out_q must be
    # able to hold every entry since nothing drains it until workers run.
    in_q = Queue(n_workers)
    out_q = Queue(len(entries))
    iq_builder = Process(target=get_entries, args=(entries, in_q))
    workers = []

    try:
        iq_builder.start()

        for n in range(n_workers):
            workers.append(Process(target=check_dl, args=(in_q, out_q)))

        # Wait until the feeder has primed the queue so the workers don't
        # see an Empty queue and exit immediately on startup.
        while not in_q.full():
            sleep(0.2)

        for w in workers:
            w.start()

        # Drain out_q while workers run; the 2s timeout keeps this loop
        # re-checking liveness instead of blocking forever.
        unfinished_entries = []
        while workers_alive(workers):
            try:
                unfinished_entries.append(out_q.get(block=True, timeout=2))
            except Empty:
                continue
    except KeyboardInterrupt:
        resume_cleanup(workers, iq_builder)
        return []

    # If the feeder is still alive here, the workers died before consuming
    # the whole playlist -- treat it as a bug and bail out.
    if iq_builder.is_alive():
        iq_builder.terminate()
        die("[BUG]: Workers didn't verify whole list! Exiting...")

    return unfinished_entries
|
||||
|
||||
|
||||
def validate_proxy(proxy: LinodeProxy) -> LinodeProxy:
    """Start *proxy* and verify its SOCKS tunnel answers, tearing down and
    rebuilding the Linode (on the same port) until validation succeeds.

    Returns the (possibly replaced) working proxy object.
    """
    # Iterative form of the original tail recursion: keep replacing the
    # proxy until one validates.
    while not proxy.start():
        eprint(
            "[WARN]: "
            + "Proxy, validation failed, deleting and rebuilding Linode..."
        )
        port = proxy.proxy_port
        proxy.cleanup()
        proxy = LinodeProxy(proxy_port=port)

    print(
        "[INFO]: SOCKS validation succeeded on port {} from ID {}".format(
            proxy.proxy_port, proxy.info["id"]
        )
    )
    return proxy
|
||||
|
||||
|
||||
def cleanup(workers: [Process], proxies: [LinodeProxy]) -> None:
    """Terminate live download workers, then delete all Linode proxies."""
    for worker in [w for w in workers if w.is_alive()]:
        print(
            "[CLEANUP]: Terminating download worker {}".format(
                worker.pid
            )
        )
        worker.terminate()

    if len(proxies) > 0:
        print("[CLEANUP]: Deleting Linode proxies...")
        for proxy in proxies:
            proxy.cleanup()
|
||||
|
||||
|
||||
def parse_args(args: list, name: str):
    """Build the squid-dl command-line interface and parse *args*,
    using *name* as the program name."""
    parser = argparse.ArgumentParser(prog=name)

    proxy_group = parser.add_argument_group("Proxy settings")
    proxy_group.add_argument(
        "-L",
        "--linode-proxy",
        action="store_true",
        help=(
            "Give each worker a Linode SOCKS proxy. Assumes you have already "
            "setup the linode-cli with an API key and default settings. See "
            "https://www.linode.com/docs/guides/linode-cli/ "
            "for more information."
        ),
    )
    proxy_group.add_argument(
        "-p",
        "--proxy-base-port",
        type=int,
        default=1337,
        help=(
            "Port number proxy ports are derived from, does nothing without "
            "enabling a type of proxy (like --linode-proxy)."
        ),
    )

    parser.add_argument(
        "--resume-dump",
        action="store_true",
        help="Dump resume info_dict to JSON (for debugging).",
    )
    parser.add_argument(
        "-n",
        "--n-workers",
        type=int,
        default=8,
        help="Number of parallel download workers",
    )
    parser.add_argument(
        "-l",
        "--subtitle-langs",
        type=str,
        default="en",
        help=(
            "Comma-delimited list of subtitle languages to download; "
            'pass "all" to download all auto captions. '
            'Downloads "en" subtitles by default.'
        ),
    )
    parser.add_argument(
        "-T",
        "--all-thumbnails",
        action="store_true",
        help="Download all thumbnails instead of just the best one.",
    )
    parser.add_argument(
        "playlist_json",
        type=argparse.FileType("r"),
        help="JSON-ified playlist file to download",
    )

    return parser.parse_args(args=args)
|
||||
|
||||
|
||||
def main(args: typing.List[str], name: str) -> int:
    """squid-dl entry point: parse arguments, pre-process the playlist for
    resuming, then run parallel download workers (optionally each behind
    its own Linode SOCKS proxy).

    :param args: CLI arguments (without the program name).
    :param name: program name for usage output.
    :returns: 0 on success; 1 on interrupt or when nothing is left to do.
    """
    opts = parse_args(args=args, name=name)
    sub_langs = opts.subtitle_langs.split(",")
    n_workers = opts.n_workers

    # Make sure an SSH keypair exists for Linode proxying before we need it.
    key_path = os.path.abspath("./proxy_key")
    pubkey_path = os.path.abspath(key_path + ".pub")
    if not (
        os.path.isfile(pubkey_path)
        or os.path.isfile(os.path.splitext(pubkey_path)[0])
    ):
        print("[INFO]: Creating SSH key for Linode proxying...")
        print(runcmd('ssh-keygen -f "{}" -N ""'.format(key_path)).decode())

    info_dict = j.loads(opts.playlist_json.read())
    opts.playlist_json.close()

    print("[INFO]: Starting squid-dl...")

    # All downloads happen inside a directory named after the playlist.
    dirname = info_dict["title"]
    print('[INFO]: saving videos to "{}" directory'.format(dirname))
    if not (os.path.exists(dirname) and os.path.isdir(dirname)):
        os.mkdir(dirname)
        os.chdir(dirname)
    else:
        os.chdir(dirname)
    playlist_size = len(info_dict["entries"])

    # Drop entries that are already fully downloaded.
    info_dict["entries"] = resume_preprocess(info_dict["entries"])
    if len(info_dict["entries"]) == 0:
        print("[WARN]: Nothing left to download, exiting...")
        return 1

    print(
        "Resuming download of {}/{} videos...".format(
            len(info_dict["entries"]), playlist_size
        )
    )
    if opts.resume_dump:
        # FIX: close the dump file via a context manager.
        with open("resume.json", mode="w") as rdump:
            rdump.write(j.dumps(info_dict, sort_keys=True, indent=2))

    n_entries = len(info_dict["entries"])
    n_workers = n_workers if n_workers < n_entries else n_entries
    entry_q = Queue(n_workers)
    entry_getter = Process(
        target=get_entries, args=(info_dict["entries"], entry_q)
    )
    entry_getter.start()

    # BUG FIX: honor --proxy-base-port; it was hard-coded to 1337 before,
    # which made the CLI option a silent no-op.
    base_port = opts.proxy_base_port
    workers = []
    proxies = []
    try:
        for n in range(n_workers):
            port = base_port + n

            if opts.linode_proxy:
                proxies.append(
                    LinodeProxy(proxy_port=port, pubkey_path=pubkey_path)
                )
                worker_args = (entry_q, opts, sub_langs, proxies[n])
            else:
                worker_args = (entry_q, opts, sub_langs)

            workers.append(
                Process(
                    target=do_download,
                    args=worker_args,
                )
            )

        if len(proxies) > 0:
            if not (
                os.path.isfile(pubkey_path)
                or os.path.isfile(os.path.splitext(pubkey_path)[0])
            ):
                die(
                    '[ERROR]: SSH key file "{}" does not exist!'.format(
                        pubkey_path
                    )
                )
            # Poll every Linode until all of them report "running".
            print("[INFO]: Waiting for Linodes to come online", end="")
            nodes_to_ping = list(range(n_workers))
            while len(nodes_to_ping) > 0:
                print(".", end="")
                temp_list = []
                for proxy_idx in nodes_to_ping:
                    if proxies[proxy_idx].get_status() != "running":
                        temp_list.append(proxy_idx)
                    sleep(0.2)
                nodes_to_ping = temp_list
            print()

        # Let the feeder prime the queue before workers start pulling.
        while not entry_q.full():
            sleep(0.2)

        for i in range(n_workers):
            if len(proxies) > 0:
                proxies[i] = validate_proxy(proxies[i])
                seconds = randint(0, 1)
            else:
                # Without proxies, stagger worker start-up harder so the
                # burst of traffic looks less bot-like.
                seconds = randint(1, 6)
            workers[i].start()
            sleep(seconds)

        while workers_alive(workers):
            sleep(0.2)

    except KeyboardInterrupt:
        eprint("\n[CLEANUP]: Interrupted, cleaning up...")
        cleanup(workers, proxies)
        if entry_getter.is_alive():
            print(
                "[CLEANUP]: Terminating queue worker {}".format(
                    entry_getter.pid
                )
            )
            entry_getter.terminate()
        return 1

    print("[INFO]: All done!")
    cleanup(workers, proxies)

    return 0
|
||||
220
squid_dl/linode.py
Normal file
220
squid_dl/linode.py
Normal file
|
|
@ -0,0 +1,220 @@
|
|||
"""
|
||||
You ever wanted to spawn n proxies?
|
||||
⠀⠀⠀⠀⠀⠀⠀⠀⢀⣀⡀⠠⠤⠀⣀⣀⠀⠀⠀⠀⠀⠀⠀⠀⠀
|
||||
⠀⠀⠀⠀⣀⢤⡒⠉⠁⠀⠒⢂⡀⠀⠀⠀⠈⠉⣒⠤⣀⠀⠀⠀⠀
|
||||
⠀⠀⣠⠾⠅⠈⠀⠙⠀⠀⠀⠈⠀⠀⢀⣀⣓⡀⠉⠀⠬⠕⢄⠀⠀
|
||||
⠀⣰⠁⠀⠀⠀⠀⠀⠀⠀⠀⠀⢀⡤⠶⢦⡀⠑⠀⠀⠀⠀⠈⢧⠀
|
||||
⠀⡇⠀⠀⠀⠀⠀⢤⣀⣀⣀⣀⡀⢀⣀⣀⠙⠀⠀⠀⠀⠀⠀⢸⡄
|
||||
⠀⢹⡀⠀⠀⠀⠀⡜⠁⠀⠀⠙⡴⠁⠀⠀⠱⡄⠀⠀⠀⠀⠀⣸⠀
|
||||
⠀⠀⠱⢄⡀⠀⢰⣁⣒⣒⣂⣰⣃⣀⣒⣒⣂⢣⠀⠀⠀⢀⡴⠁⠀
|
||||
⠀⠀⠀⠀⠙⠲⢼⡀⠀⠙⠀⢠⡇⠀⠛⠀⠀⣌⣀⡤⠖⠉⠀⠀⠀
|
||||
⠀⠀⠀⠀⠀⠀⢸⡗⢄⣀⡠⠊⠈⢦⣀⣀⠔⡏⠀⠀⠀⠀⠀⠀⠀
|
||||
⠀⠀⠀⠀⠀⠀⠈⡇⠀⢰⠁⠀⠀⠀⢣⠀⠀⣷⠀⠀⠀⠀⠀⠀⠀
|
||||
⠀⠀⠀⠀⣠⠔⠊⠉⠁⡏⠀⠀⠀⠀⠘⡆⠤⠿⣄⣀⠀⠀⠀⠀⠀
|
||||
⠀⠀⠀⠀⣧⠸⠒⣚⡩⡇⠀⠀⠀⠀⠀⣏⣙⠒⢴⠈⡇⠀⠀⠀⠀
|
||||
⠀⠀⠀⠀⠈⠋⠉⠀⠀⢳⡀⠀⠀⠀⣸⠁⠈⠉⠓⠚⠁⠀⠀⠀⠀
|
||||
⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠉⠓⠛⠛
|
||||
Well, here you go.
|
||||
"""
|
||||
|
||||
import json as j
|
||||
from os.path import splitext
|
||||
import socket
|
||||
import struct
|
||||
import subprocess
|
||||
from time import sleep
|
||||
import typing
|
||||
|
||||
from .util import eprint, runcmd
|
||||
|
||||
|
||||
class LinodeProxy:
    # One disposable Linode instance running an SSH -D SOCKS5 tunnel.
    # Lifecycle: __init__ creates the Linode, start() opens the tunnel,
    # cleanup() kills the tunnel and deletes the instance.

    # Class-level default: whether the unprivileged proxy user has been
    # created on the remote host. Flipped per-instance by setup_user().
    user_made = False

    def __init__(
        self,
        pubkey_path: str = "proxy_key.pub",
        proxy_port: int = 1337,
        proxy_user: str = "boing",
        debug: bool = False,
        exclusive: bool = True,
    ):
        # NOTE(review): the constructor shells out to `linode-cli` and
        # actually creates (and bills for) a Linode instance -- it assumes
        # linode-cli is installed and configured with an API token.
        self.proxy_port = proxy_port
        self.proxy_user = proxy_user
        self.pubkey_path = pubkey_path
        self.debug = debug
        # exclusive: the owning worker is responsible for cleanup.
        self.exclusive = exclusive

        self.proxy_url = "socks5://127.0.0.1:" + str(self.proxy_port)

        # The private key is assumed to sit next to the pubkey (same path
        # minus ".pub"). Host-key checking is disabled because each host
        # is brand new and its key is unknown by definition.
        self.ssh_prefix = (
            'ssh -o "UserKnownHostsFile=/dev/null" '
            + '-o "StrictHostKeyChecking=no" -i '
            + splitext(self.pubkey_path)[0]
            + " "
        )
        pubfile = open(self.pubkey_path, mode="r")
        self.pubkey = pubfile.readline().rstrip()
        pubfile.close()

        # Random 32-char root password assembled from /dev/random output
        # via a shell pipeline.
        self.passwd = runcmd(
            "echo $(cat /dev/random | strings | head -c 512 | "
            + "grep -oE '[a-zA-Z0-9#%!]') | sed 's/\s//g' | head -c 32;"
        ).decode()

        create_cmd = (
            "linode-cli --json linodes create "
            + "--image linode/arch "
            + '--authorized_keys '
            + '"'
            + self.pubkey
            + '"'
            + ' --root_pass "'
            + self.passwd
            + '"'
        )
        # linode-cli --json returns a JSON list; the new instance is [0].
        self.info = j.loads(runcmd(create_cmd).decode())[0]
        print("[INFO]: Created Linode {}.".format(self.info["id"]))

    def find_linode(self) -> bool:
        # True if our Linode ID still exists in the account's listing.
        linodes = j.loads(runcmd("linode-cli --json linodes list").decode())

        for linode in linodes:
            if linode["id"] == self.info["id"]:
                return True
        return False

    def cleanup(self) -> None:
        # Kill the local SSH tunnel (if started) and delete the remote
        # Linode (if it still exists). Safe to call more than once:
        # deleting self.info makes subsequent calls a no-op.
        if hasattr(self, "proxy_proc"):
            self.proxy_proc.terminate()

        if hasattr(self, "info"):
            if self.find_linode():
                print(
                    runcmd(
                        "linode-cli --json linodes delete "
                        + str(self.info["id"])
                    ).decode(),
                    end="",
                )
                print("[CLEANUP]: Deleted Linode {}.".format(self.info["id"]))
            delattr(self, "info")

    def get_info(self) -> None:
        # Refresh self.info with the latest state from the Linode API.
        self.info = j.loads(
            runcmd(
                "linode-cli --json linodes view " + str(self.info["id"])
            ).decode()
        )[0]

    def get_status(self) -> str:
        # Current provisioning status, e.g. "running" once booted.
        self.get_info()
        return self.info["status"]

    def setup_user(self) -> None:
        """
        This will probably break on other distros that assign new accounts to
        the `users` primary group instead of one derived from their user name.
        (Patches welcome!)
        """
        # Shell script run as root over SSH: create the unprivileged proxy
        # user, give it an ~/.ssh with correct ownership/permissions, and
        # copy root's authorized_keys to it.
        user_cmd = (
            "useradd -m "
            + self.proxy_user
            + "; "
            + "mkdir /home/"
            + self.proxy_user
            + "/.ssh; "
            + "touch /home/"
            + self.proxy_user
            + "/.ssh/authorized_keys; "
            + "chown -R "
            + self.proxy_user
            + ":"
            + self.proxy_user  # change to "users" if using Red Hat/Fedora
            + " /home/"
            + self.proxy_user
            + "/.ssh; "
            + "chmod 700 /home/"
            + self.proxy_user
            + "/.ssh; "
            + "chmod 600 /home/"
            + self.proxy_user
            + "/.ssh/authorized_keys; "
            + "cat ~/.ssh/authorized_keys >> /home/"
            + self.proxy_user
            + "/.ssh/authorized_keys"
        )
        full_cmd = (
            self.ssh_prefix
            + "root@"
            + self.info["ipv4"][0]
            + " '"
            + user_cmd
            + "'"
        )
        if self.debug:
            print(full_cmd)

        print(runcmd(full_cmd).decode())
        self.user_made = True

    def test_proxy(self) -> bool:
        # Handshake-probe the local tunnel end: SOCKS5 greeting declaring
        # one auth method, "no authentication" (RFC 1928).
        sen = struct.pack("BBB", 0x05, 0x01, 0x00)
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            s.connect(("127.0.0.1", self.proxy_port))
        except ConnectionRefusedError as e:
            eprint(
                "[WARN]: Linode {} SOCKS proxy on {} got {}".format(
                    self.info["id"], self.proxy_port, e
                )
            )
            return False
        s.sendall(sen)
        data = s.recv(2)

        # Expect the server to answer version 5 and accept method 0.
        version, auth = struct.unpack("BB", data)
        if version == 5 and auth == 0:
            return True
        else:
            eprint("[WARN]: SOCKSv5 proxy anomaly!")
            return False

    def start(self, headless: bool = True) -> bool:
        # Open the ssh -D SOCKS tunnel and verify it answers.
        # Returns True when the tunnel passes test_proxy().
        if not self.user_made:
            self.setup_user()

        verbose_ssh = ""
        if self.debug:
            verbose_ssh = "-v "

        # -D <port>: dynamic SOCKS forward; -NT: no command, no TTY.
        proxy_cmd = (
            self.ssh_prefix
            + "-D "
            + str(self.proxy_port)
            + " -NT "
            + verbose_ssh
            + self.proxy_user
            + "@"
            + self.info["ipv4"][0]
        )
        if self.debug:
            print(proxy_cmd)

        print(
            "[INFO]: Starting proxy on port {} with Linode {}".format(
                self.proxy_port, self.info["id"]
            )
        )
        if headless is True:
            self.proxy_proc = subprocess.Popen(proxy_cmd, shell=True)
        else:
            # Non-headless mode captures the tunnel's output for debugging.
            self.proxy_proc = subprocess.Popen(
                proxy_cmd,
                shell=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
            )
        # Give ssh a moment to establish the tunnel before probing it.
        sleep(2)

        return self.test_proxy()
|
||||
42
squid_dl/util.py
Normal file
42
squid_dl/util.py
Normal file
|
|
@ -0,0 +1,42 @@
|
|||
#!/usr/bin/env python3
|
||||
import subprocess
|
||||
from sys import stderr as STDERR
|
||||
import typing
|
||||
|
||||
|
||||
def eprint(errmsg):
    """Write *errmsg* to the interpreter's standard error stream."""
    print(errmsg, file=STDERR)
|
||||
|
||||
|
||||
def die(errmsg, stat: int = 1):
    """Prints message and exits Python with a status of stat.

    FIX: raises SystemExit directly instead of calling the site-injected
    ``exit()`` helper, which does not exist under ``python -S`` or in
    frozen/embedded interpreters.
    """
    eprint(errmsg)
    raise SystemExit(stat)
|
||||
|
||||
|
||||
def runcmd(args):
    """
    Run a given program/shell command and return its STDOUT as bytes.

    Error Handling
    ==============
    If the spawned process returns a nonzero exit status, its STDOUT and
    STDERR are echoed to the running Python interpreter's streams.

    Fixes over the previous version:
    * ``communicate()`` replaces ``wait()`` + manual reads, avoiding the
      pipe-buffer deadlock ``wait()`` can cause with large output.
    * Output is captured exactly once, so the error path no longer
      exhausts the pipe and returns ``b""`` to the caller.
    * Any nonzero exit status is reported, not just ``1``.
    """
    proc = subprocess.Popen(
        args,
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )

    try:
        out, err = proc.communicate()
        if proc.returncode != 0:
            print(out.decode())
            eprint(err.decode())

        return out
    except KeyboardInterrupt:
        proc.terminate()
        return b""
|
||||
|
||||
Loading…
Add table
Add a link
Reference in a new issue