Archiving community contributions on YouTube: unpublished captions, title and description translations, and caption credits.
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
 
 

269 lines
13 KiB

  1. from threading import Thread
  2. import requests
  3. from time import sleep
  4. from os import mkdir, rmdir, listdir, system, environ
  5. from os.path import isdir, isfile, getsize
  6. from json import dumps, loads
  7. import signal
  8. import tracker
  9. from youtube_dl import YoutubeDL
  10. from shutil import rmtree, which
  11. from queue import Queue
  12. from gc import collect
  13. from datetime import timedelta, datetime
  14. from discovery import getmetadata
  15. from export import subprrun
  16. #useful Queue example: https://stackoverflow.com/a/54658363
  17. jobs = Queue()
  18. langcnt = {}
  19. lasttask = datetime.min
  20. try:
  21. mkdir("out")
  22. except:
  23. pass
  24. try:
  25. mkdir("directory")
  26. except:
  27. pass
  28. HEROKU = False
  29. if isfile("../Procfile"):
  30. HEROKU = True
  31. langs = ['ab', 'aa', 'af', 'sq', 'ase', 'am', 'ar', 'arc', 'hy', 'as', 'ay', 'az', 'bn', 'ba', 'eu', 'be', 'bh', 'bi', 'bs', 'br',
  32. 'bg', 'yue', 'yue-HK', 'ca', 'chr', 'zh-CN', 'zh-HK', 'zh-Hans', 'zh-SG', 'zh-TW', 'zh-Hant', 'cho', 'co', 'hr', 'cs', 'da', 'nl',
  33. 'nl-BE', 'nl-NL', 'dz', 'en', 'en-CA', 'en-IN', 'en-IE', 'en-GB', 'en-US', 'eo', 'et', 'fo', 'fj', 'fil', 'fi', 'fr', 'fr-BE',
  34. 'fr-CA', 'fr-FR', 'fr-CH', 'ff', 'gl', 'ka', 'de', 'de-AT', 'de-DE', 'de-CH', 'el', 'kl', 'gn', 'gu', 'ht', 'hak', 'hak-TW', 'ha',
  35. 'iw', 'hi', 'hi-Latn', 'ho', 'hu', 'is', 'ig', 'id', 'ia', 'ie', 'iu', 'ik', 'ga', 'it', 'ja', 'jv', 'kn', 'ks', 'kk', 'km', 'rw',
  36. 'tlh', 'ko', 'ku', 'ky', 'lo', 'la', 'lv', 'ln', 'lt', 'lb', 'mk', 'mg', 'ms', 'ml', 'mt', 'mni', 'mi', 'mr', 'mas', 'nan',
  37. 'nan-TW', 'lus', 'mo', 'mn', 'my', 'na', 'nv', 'ne', 'no', 'oc', 'or', 'om', 'ps', 'fa', 'fa-AF', 'fa-IR', 'pl', 'pt', 'pt-BR',
  38. 'pt-PT', 'pa', 'qu', 'ro', 'rm', 'rn', 'ru', 'ru-Latn', 'sm', 'sg', 'sa', 'sc', 'gd', 'sr', 'sr-Cyrl', 'sr-Latn', 'sh', 'sdp', 'sn',
  39. 'scn', 'sd', 'si', 'sk', 'sl', 'so', 'st', 'es', 'es-419', 'es-MX', 'es-ES', 'es-US', 'su', 'sw', 'ss', 'sv', 'tl', 'tg', 'ta',
  40. 'tt', 'te', 'th', 'bo', 'ti', 'tpi', 'to', 'ts', 'tn', 'tr', 'tk', 'tw', 'uk', 'ur', 'uz', 'vi', 'vo', 'vor', 'cy', 'fy', 'wo',
  41. 'xh', 'yi', 'yo', 'zu']
  42. assert which("zip") and which("rsync") and which("curl"), "Please ensure the zip, rsync, and curl commands are installed on your system."
  43. #HSID, SSID, SID cookies required
  44. if "HSID" in environ.keys() and "SSID" in environ.keys() and "SID" in environ.keys():
  45. cookies = {"HSID": environ["HSID"], "SSID": environ["SSID"], "SID": environ["SID"]}
  46. elif isfile("config.json"):
  47. cookies = loads(open("config.json").read())
  48. else:
  49. print("HSID, SSID, and SID cookies from youtube.com are required. Specify in config.json or as environment variables.")
  50. assert False
  51. if not (cookies["HSID"] and cookies["SSID"] and cookies["SID"]):
  52. print("HSID, SSID, and SID cookies from youtube.com are required. Specify in config.json or as environment variables.")
  53. assert False
  54. mysession = requests.session()
  55. mysession.headers.update({"cookie": "HSID="+cookies["HSID"]+"; SSID="+cookies["SSID"]+"; SID="+cookies["SID"], "Accept-Language": "en-US",})
  56. validationtest = mysession.get("https://www.youtube.com/timedtext_editor?action_mde_edit_form=1&v=1iNTtHUwvq4&lang=en&bl=vmp&ui=hd&ref=player&tab=captions&o=U")
  57. assert not "accounts.google.com" in validationtest.url, "Please ensure you have correctly specified account cookies."
  58. assert """<button class="yt-uix-button yt-uix-button-size-default yt-uix-button-default yt-uix-button-has-icon" type="button" onclick=";return false;" id="yt-picker-language-button" data-button-action="yt.www.picker.load" data-button-menu-id="arrow-display" data-picker-key="language" data-picker-position="footer" data-button-toggle="true"><span class="yt-uix-button-icon-wrapper"><span class="yt-uix-button-icon yt-uix-button-icon-footer-language yt-sprite"></span></span><span class="yt-uix-button-content"> <span class="yt-picker-button-label">
  59. Language:
  60. </span>
  61. English
  62. </span><span class="yt-uix-button-arrow yt-sprite"></span></button>""" in validationtest.text, "Please make sure your YouTube and Google account language is set to English (United States)"
  63. del validationtest
  64. open("cookies.txt", "w").write("""# HTTP Cookie File
  65. .youtube.com TRUE / FALSE 1663793455 SID [SID]
  66. .youtube.com TRUE / FALSE 1663793455 HSID [HSID]
  67. .youtube.com TRUE / TRUE 1663793455 SSID [SSID]""".replace("[SID]", cookies["SID"]).replace("[HSID]", cookies["HSID"]).replace("[SSID]", cookies["SSID"]))
  68. del cookies
  69. #Graceful Shutdown
  70. class GracefulKiller:
  71. kill_now = False
  72. def __init__(self):
  73. signal.signal(signal.SIGINT, self.exit_gracefully)
  74. signal.signal(signal.SIGTERM, self.exit_gracefully)
  75. def exit_gracefully(self, signum, frame):
  76. print("Graceful exit process initiated, stopping all tasks...")
  77. self.kill_now = True
  78. gkiller = GracefulKiller()
  79. #microtasks
  80. def threadrunner(jobs: Queue):
  81. global langcnt
  82. global lasttask
  83. ydl = YoutubeDL({"extract_flat": "in_playlist", "simulate": True, "skip_download": True, "quiet": True, "cookiefile": "cookies.txt", "source_address": "0.0.0.0", "call_home": False})
  84. while not gkiller.kill_now:
  85. if not jobs.empty():
  86. task, vid, args = jobs.get()
  87. if task == "submitdiscovery":
  88. tracker.add_item_to_tracker(args, vid)
  89. elif task == "discovery":
  90. while True:
  91. try:
  92. info = getmetadata(mysession, str(vid).strip())
  93. break
  94. except BaseException as e:
  95. print(e)
  96. print("Error in retrieving information, waiting 30 seconds")
  97. sleep(30)
  98. if info[0] or info[1]: # ccenabled or creditdata
  99. if not isdir("out/"+str(vid).strip()):
  100. mkdir("out/"+str(vid).strip())
  101. if info[1]:
  102. open("out/"+str(vid).strip()+"/"+str(vid).strip()+"_published_credits.json", "w").write(dumps(info[1]))
  103. if info[0]:
  104. langcnt[vid] = 0
  105. for langcode in langs:
  106. jobs.put(("subtitles", vid, langcode))
  107. else:
  108. jobs.put(("complete", None, "video:"+vid))
  109. for videodisc in info[2]:
  110. jobs.put(("submitdiscovery", videodisc, tracker.ItemType.Video))
  111. for channeldisc in info[3]:
  112. jobs.put(("submitdiscovery", channeldisc, tracker.ItemType.Channel))
  113. for mixdisc in info[4]:
  114. jobs.put(("submitdiscovery", mixdisc, tracker.ItemType.MixPlaylist))
  115. for playldisc in info[5]:
  116. jobs.put(("submitdiscovery", playldisc, tracker.ItemType.Playlist))
  117. elif task == "subtitles":
  118. retval = subprrun(jobs, mysession, args, vid, "default")
  119. langcnt[vid] += retval
  120. if langcnt[vid] >= 585:
  121. jobs.put(("complete", None, "video:"+vid))
  122. elif task == "subtitles-forceedit-captions":
  123. subprrun(jobs, mysession, args, vid, "forceedit-captions")
  124. elif task == "subtitles-forceedit-metadata":
  125. subprrun(jobs, mysession, args, vid, "forceedit-metadata")
  126. elif task == "channel":
  127. while True:
  128. try:
  129. y = ydl.extract_info("https://www.youtube.com/channel/"+desit.split(":", 1)[1], download=False)
  130. for itemyv in y["entries"]:
  131. jobs.put(("submitdiscovery", itemyv["id"], tracker.ItemType.Video))
  132. jobs.put(("complete", None, "channel:"+args))
  133. break
  134. except:
  135. print("YouTube-DL error, ignoring but not marking as complete...", "https://www.youtube.com/channel/"+desit.split(":", 1)[1])
  136. elif task == "playlist":
  137. while True:
  138. try:
  139. y = ydl.extract_info("https://www.youtube.com/playlist?list="+desit.split(":", 1)[1], download=False)
  140. for itemyvp in y["entries"]:
  141. jobs.put(("submitdiscovery", itemyvp["id"], tracker.ItemType.Video))
  142. jobs.put(("complete", None, "playlist:"+args))
  143. break
  144. except:
  145. print("YouTube-DL error, ignoring but not marking as complete...", "https://www.youtube.com/playlist?list="+desit.split(":", 1)[1])
  146. elif task == "complete":
  147. size = 0
  148. if ":" in args:
  149. if args.split(":", 1)[0] == "video":
  150. #check if dir is empty, make zip if needed
  151. if isdir("out/"+args.split(":", 1)[1]):
  152. if not listdir("out/"+args.split(":", 1)[1]):
  153. rmdir("out/"+args.split(":", 1)[1])
  154. else:
  155. #zip it up
  156. if not isdir("directory/"+args.split(":", 1)[1]):
  157. mkdir("directory/"+args.split(":", 1)[1])
  158. while not isfile("directory/"+args.split(":", 1)[1]+"/"+args.split(":", 1)[1]+".zip"):
  159. print("Attempting to zip item...")
  160. system("zip -9 -r -j directory/"+args.split(":", 1)[1]+"/"+args.split(":", 1)[1]+".zip out/"+args.split(":", 1)[1])
  161. #get a target
  162. targetloc = None
  163. while not targetloc:
  164. targetloc = tracker.request_upload_target()
  165. if targetloc:
  166. break
  167. else:
  168. print("Waiting 5 minutes...")
  169. sleep(300)
  170. if targetloc.startswith("rsync"):
  171. system("rsync -rltv --timeout=300 --contimeout=300 --progress --bwlimit 0 --recursive --partial --partial-dir .rsync-tmp --min-size 1 --no-compress --compress-level 0 --files-from=- directory/"+args.split(":", 1)[1]+"/ "+targetloc)
  172. elif targetloc.startswith("http"):
  173. system("curl -F "+args.split(":", 1)[1]+".zip=@directory/"+args.split(":", 1)[1]+"/"+args.split(":", 1)[1]+".zip "+targetloc)
  174. size = getsize("directory/"+args.split(":", 1)[1]+"/"+args.split(":", 1)[1]+".zip")
  175. #cleanup
  176. try:
  177. del langcnt[args.split(":", 1)[1]]
  178. rmtree("directory/"+args.split(":", 1)[1]+"/")
  179. rmdir("directory/"+args.split(":", 1)[1]+"/")
  180. rmtree("out/"+args.split(":", 1)[1]+"/")
  181. rmdir("out/"+args.split(":", 1)[1]+"/")
  182. except:
  183. pass
  184. tracker.mark_item_as_done(args, size)
  185. jobs.task_done()
  186. else:
  187. # get a new task from tracker
  188. if datetime.now() - lasttask > timedelta(seconds=15): #only retrieve a task every 15 seconds to allow queue to build up
  189. collect() #cleanup
  190. desit = tracker.request_item_from_tracker()
  191. print("New task:", desit)
  192. if desit:
  193. if desit.split(":", 1)[0] == "video":
  194. lasttask = datetime.now()
  195. jobs.put(("discovery", desit.split(":", 1)[1], None))
  196. elif desit.split(":", 1)[0] == "channel":
  197. lasttask = datetime.now()
  198. jobs.put(("channel", None, desit.split(":", 1)[1]))
  199. elif desit.split(":", 1)[0] == "playlist":
  200. lasttask = datetime.now()
  201. jobs.put(("playlist", None, desit.split(":", 1)[1]))
  202. else:
  203. print("Ignoring item for now", desit)
  204. else:
  205. print("Ignoring item for now", desit)
  206. else:
  207. sleep(1)
  208. threads = []
  209. #start with 1 thread, give it a 5 second head start
  210. runthread = Thread(target=threadrunner, args=(jobs,))
  211. runthread.start()
  212. threads.append(runthread)
  213. del runthread
  214. sleep(5)
  215. #now create the other 49 threads
  216. for i in range(49):
  217. runthread = Thread(target=threadrunner, args=(jobs,))
  218. runthread.start()
  219. threads.append(runthread)
  220. del runthread
  221. #https://stackoverflow.com/a/11968881
  222. for x in threads:
  223. x.join()
  224. threads.remove(x)
  225. del x
  226. print("Exiting...")