
Initial threading implementation in worker, disable uploads temporarily

Branch: pull/3/head
tech234a, 3 years ago
commit d1d10c458d
1 changed file with 65 additions and 43 deletions:
  1. worker.py: +65 -43

worker.py

@@ -2,6 +2,7 @@ import requests
 from time import sleep
 from os import mkdir
 from json import dumps
+import threading

 from shutil import make_archive, rmtree

@@ -11,6 +12,51 @@ from export import getsubs
 WORKER_VERSION = 1
 SERVER_BASE_URL = "http://localhost:5000"

+class batchthread(threading.Thread):
+    def run(self):
+        item = self.getName()
+        global recvids
+        global recchans
+        global recmixes
+        global recplayl
+
+        print("Video ID:", str(item).strip())
+        while True:
+            try:
+                info = getmetadata(str(item).strip())
+                break
+            except BaseException as e:
+                print(e)
+                print("Error in retrieving information, waiting 10 minutes")
+                sleep(600)
+
+        # Add any discovered videos
+        recvids.update(info[2])
+        recchans.update(info[3])
+        recmixes.update(info[4])
+        recplayl.update(info[5])
+
+        if info[0] or info[1]: # ccenabled or creditdata
+            mkdir("out/"+str(item).strip())
+
+        if info[1]: # creditdata
+            open("out/"+str(item).strip()+"/"+str(item).strip()+"_published_credits.json", "w").write(dumps(info[1]))
+
+        if info[0]: #ccenabled
+            while True:
+                gsres = False
+                try:
+                    gsres = getsubs(str(item).strip())
+                except BaseException as e:
+                    print(e)
+                if gsres:
+                    break
+                else:
+                    print("Error in retrieving subtitles, waiting 10 minutes")
+                    sleep(600)
+
+        return True
+

 # Get a worker ID
 while True:
@@ -57,41 +103,17 @@ while True:
     # Process the batch
     batchcontent = requests.get(batchinfo["content"]).text.split("\n")

-    for item in batchcontent:
-        print("Video ID:", str(item).strip())
-        while True:
-            try:
-                info = getmetadata(str(item).strip())
-                break
-            except BaseException as e:
-                print(e)
-                print("Error in retrieving information, waiting 10 minutes")
-                sleep(600)
-
-        # Add any discovered videos
-        recvids.update(info[2])
-        recchans.update(info[3])
-        recmixes.update(info[4])
-        recplayl.update(info[5])
-
-        if info[0] or info[1]: # ccenabled or creditdata
-            mkdir("out/"+str(item).strip())
+    threads = []
+    for item in batchcontent:
+        runthread = batchthread(name = item)
+        runthread.start()
+        threads.append(runthread)

-        if info[1]: # creditdata
-            open("out/"+str(item).strip()+"/"+str(item).strip()+"_published_credits.json", "w").write(dumps(info[1]))
+    for x in threads:
+        x.join()

-        if info[0]: #ccenabled
-            while True:
-                gsres = False
-                try:
-                    gsres = getsubs(str(item).strip())
-                except BaseException as e:
-                    print(e)
-                if gsres:
-                    break
-                else:
-                    print("Error in retrieving subtitles, waiting 10 minutes")
-                    sleep(600)
+    #https://stackoverflow.com/a/11968881

     # TODO: put the data somewhere...
     # TODO: put the discoveries somewhere...
@@ -99,16 +121,16 @@ while True:

     make_archive("out.zip", "zip", "out") #check this

-    while True:
-        try:
-            uploadr = requests.post("https://transfersh.com/"+str(batchinfo["batchID"])+".zip", data=open("out.zip"))
-            if uploadr.status_code == 200:
-                resulturl = uploadr.text
-                break
-        except BaseException as e:
-            print(e)
-            print("Encountered error in uploading results... retrying in 10 minutes")
-            sleep(600)
+    # while True:
+    #     try:
+    #         uploadr = requests.post("https://transfersh.com/"+str(batchinfo["batchID"])+".zip", data=open("out.zip"))
+    #         if uploadr.status_code == 200:
+    #             resulturl = uploadr.text
+    #             break
+    #     except BaseException as e:
+    #         print(e)
+    #         print("Encountered error in uploading results... retrying in 10 minutes")
+    #         sleep(600)

     # Report the batch as complete (I can't think of a fail condition except for a worker exiting...)
     # TODO: handle worker exit
@@ -119,7 +141,7 @@ while True:
         ("batchID", batchinfo["batchID"]),
         ("randomKey", batchinfo["randomKey"]),
         ("status", "c"),
-        ("resulturl", resulturl),
+        #("resulturl", resulturl),
     )
     statusrequest = requests.get(SERVER_BASE_URL+"/worker/updateStatus", params=params)
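The new batchthread class carries each video ID in the thread's name: name=item is passed to the constructor and read back with self.getName() inside run(), and the main loop joins every thread before archiving the batch. A minimal, self-contained sketch of that same pattern, with a hypothetical process() standing in for the real getmetadata()/getsubs() work:

    import threading

    def process(item):
        # Hypothetical stand-in for the real per-video work.
        print("Processing", str(item).strip())

    class batchthread(threading.Thread):
        def run(self):
            # The work item travels in the thread's name, set via name= below.
            item = self.getName()
            process(item)

    batchcontent = ["videoid1", "videoid2", "videoid3"]  # example batch

    threads = []
    for item in batchcontent:
        runthread = batchthread(name=item)
        runthread.start()
        threads.append(runthread)

    # Wait for every worker thread before continuing (cf. https://stackoverflow.com/a/11968881).
    for x in threads:
        x.join()

Thread.getName() still works but is deprecated in newer Python releases; the same value is available as self.name, and per-thread data can also be passed through the constructor's args= keyword instead of the name.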



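Not part of this commit, but relevant to the threaded design: recvids, recchans, recmixes and recplayl are now update()d from several batchthread instances at once. One way to keep those shared sets consistent is to funnel the updates through a single lock; a rough sketch, assuming the globals keep their current names:

    import threading

    recvids, recchans, recmixes, recplayl = set(), set(), set(), set()
    reclock = threading.Lock()  # hypothetical lock; worker.py does not define one

    def record_discoveries(info):
        # Serialize updates to the shared sets so concurrent threads
        # cannot interleave their modifications.
        with reclock:
            recvids.update(info[2])
            recchans.update(info[3])
            recmixes.update(info[4])
            recplayl.update(info[5])

    # batchthread.run() would then call record_discoveries(info)
    # instead of touching the four globals directly.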