Archiving community contributions on YouTube: unpublished captions, title and description translations, and caption credits

# This function adapted from https://github.com/cdown/srt/blob/11089f1e021f2e074d04c33fc7ffc4b7b52e7045/srt.py, lines 69 and 189 (MIT License)
def timedelta_to_sbv_timestamp(timedelta_timestamp):
    r"""
    Convert a :py:class:`~datetime.timedelta` to an SBV timestamp.

    .. doctest::

        >>> import datetime
        >>> delta = datetime.timedelta(hours=1, minutes=23, seconds=4)
        >>> timedelta_to_sbv_timestamp(delta)
        '1:23:04.000'

    :param datetime.timedelta timedelta_timestamp: A datetime to convert to an
                                                   SBV timestamp
    :returns: The timestamp in SBV format
    :rtype: str
    """

    SECONDS_IN_HOUR = 3600
    SECONDS_IN_MINUTE = 60
    HOURS_IN_DAY = 24
    MICROSECONDS_IN_MILLISECOND = 1000

    hrs, secs_remainder = divmod(timedelta_timestamp.seconds, SECONDS_IN_HOUR)
    hrs += timedelta_timestamp.days * HOURS_IN_DAY
    mins, secs = divmod(secs_remainder, SECONDS_IN_MINUTE)
    msecs = timedelta_timestamp.microseconds // MICROSECONDS_IN_MILLISECOND
    return "%1d:%02d:%02d.%03d" % (hrs, mins, secs, msecs)
from bs4 import BeautifulSoup
import html.parser
from datetime import timedelta
from json import dumps
import requests

# https://docs.python.org/3/library/html.parser.html
from html.parser import HTMLParser
class MyHTMLParser(HTMLParser):
    def __init__(self):
        super().__init__()  # initialise HTMLParser internals before feeding data
        self.captions = []

    # Return True if the tag's attribute list contains the given (attr, value) pair.
    def check_attr(self, attrs, attr, value):
        for item in attrs:
            if item[0] == attr and item[1] == value:
                return True
        return False

    # Return the value of an attribute, or False if the tag does not have it.
    def get_attr(self, attrs, attr):
        for item in attrs:
            if item[0] == attr:
                return item[1]
        return False

    def handle_starttag(self, tag, attrs):
        if tag == "input" and self.check_attr(attrs, "class", "yt-uix-form-input-text event-time-field event-start-time"):
            self.captions.append({"startTime": self.get_attr(attrs, "data-start-ms")})
        elif tag == "input" and self.check_attr(attrs, "class", "yt-uix-form-input-text event-time-field event-end-time"):
            self.captions[len(self.captions)-1]["endTime"] = self.get_attr(attrs, "data-end-ms")
        elif tag == "textarea" and self.check_attr(attrs, "class", "yt-uix-form-input-textarea event-text goog-textarea"):
            pass #do this

    #def handle_endtag(self, tag):
    #    print("Encountered an end tag :", tag)

    def handle_data(self, data):
        print("Encountered some data :", data)
def subprrun(jobs, headers):
    while not jobs.empty():
        langcode, vid = jobs.get()
        print(langcode, vid)

        # Request the community caption editor page for this video and language.
        pparams = (
            ("v", vid),
            ("lang", langcode),
            ("action_mde_edit_form", 1),
            ("bl", "vmp"),
            ("ui", "hd"),
            ("tab", "captions"),
            ("o", "U")
        )

        page = requests.get("https://www.youtube.com/timedtext_editor", headers=headers, params=pparams)

        # A redirect to accounts.google.com means the session cookies are missing or invalid.
        assert "accounts.google.com" not in page.url, "Please supply authentication cookie information in config.json. See README.md for more information."

        parser = MyHTMLParser()
        parser.feed(page.text)

        # The HTMLParser-based extraction above is unfinished; BeautifulSoup still does the actual scraping.
        soup = BeautifulSoup(page.text, features="html5lib")
        del page

        divs = soup.find_all("div", class_="timed-event-line")

        myfs = open("out/"+vid+"/"+vid+"_"+langcode+".sbv", "w", encoding="utf-8")

        # Write each caption event as an SBV cue: "start,end" followed by the text.
        while divs:
            item = divs.pop(0)
            text = item.find("textarea").text
            startms = int(item.find("input", class_="event-start-time")["data-start-ms"])
            endms = int(item.find("input", class_="event-end-time")["data-end-ms"])

            myfs.write(timedelta_to_sbv_timestamp(timedelta(milliseconds=startms)) + "," + timedelta_to_sbv_timestamp(timedelta(milliseconds=endms)) + "\n" + text + "\n")

            #text.decompose()
            item.decompose()
            del item
            del text
            del startms
            del endms

            if divs:
                myfs.write("\n")

        del divs

        myfs.close()
        del myfs

        # If the metadata tab is not locked, also save the translated title and description.
        if soup.find("li", id="captions-editor-nav-metadata")["data-state"] != "locked":
            metadata = {}
            try:
                metadata["title"] = soup.find("input", id="metadata-title")["value"]
            except KeyError:
                metadata["title"] = ""

            metadata["description"] = soup.find("textarea", id="metadata-description").text

            open("out/"+vid+"/"+vid+"_"+langcode+".json", "w", encoding="utf-8").write(dumps(metadata))
            del metadata

        soup.decompose()
        del soup
        del langcode
        del vid
        del pparams

        jobs.task_done()

    return True
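
# ---------------------------------------------------------------------------
# Minimal usage sketch (not part of the original file): shows how subprrun()
# might be driven with a queue of (language code, video ID) jobs and cookie
# headers loaded from config.json. The config.json layout assumed here (a
# single "cookie" string) and the placeholder video ID are assumptions; adapt
# them to whatever README.md actually specifies.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    import json
    import os
    from queue import Queue

    with open("config.json", "r", encoding="utf-8") as conf:
        config = json.load(conf)

    # Session cookies are needed so timedtext_editor does not redirect to
    # accounts.google.com (see the assert in subprrun above).
    headers = {
        "cookie": config["cookie"],  # assumed key name
        "user-agent": "Mozilla/5.0",
    }

    jobs = Queue()
    vid = "dQw4w9WgXcQ"  # placeholder video ID
    os.makedirs("out/" + vid, exist_ok=True)  # subprrun expects out/<vid>/ to exist
    jobs.put(("de", vid))  # e.g. German community captions for this video

    subprrun(jobs, headers)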