mirror of https://github.com/ekimekim/wubloader
Merge pull request #100 from ekimekim/mike/cutter/multiple-locations
cutter: Allow multiple upload locations (pull/102/head)
commit 1159a518f0
@@ -0,0 +1,165 @@
import errno
import json
import logging
import os
import re
import uuid

from common.googleapis import GoogleAPIClient


class UploadBackend(object):
	"""Represents a place a video can be uploaded,
	and maintains any state needed to perform uploads.

	Config args for the backend are passed into __init__ as kwargs,
	along with credentials as the first arg.

	Should have a method upload_video(title, description, tags, data).
	Title, description and tags may have backend-specific meaning.
	Tags is a list of strings.
	Data is an iterator of strings.
	It should return (video_id, video_link).

	If the video must undergo additional processing before it's available
	(ie. it should go into the TRANSCODING state), then the backend should
	define the 'needs_transcode' attribute as True.
	If it does, it should also have a method check_status(ids) which takes a
	list of video ids and returns a list of the ones that have finished processing.

	The upload backend also determines the encoding settings for the cutting
	process; these are given as a list of ffmpeg args
	under the 'encoding_settings' attribute.
	"""

	needs_transcode = False

	# reasonable default if settings don't otherwise matter
	encoding_settings = [] # TODO

	def upload_video(self, title, description, tags, data):
		raise NotImplementedError

	def check_status(self, ids):
		raise NotImplementedError


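# Illustrative sketch only, not part of this change: a minimal hypothetical backend
# showing what the interface documented above requires. The "Null" name and the
# null:// link format are made up for the example.
class Null(UploadBackend):
	"""Accepts any upload and discards it; interface demonstration only."""

	def __init__(self, credentials, **config):
		# Credentials and config are accepted for interface compatibility but unused.
		self.logger = logging.getLogger(type(self).__name__)

	def upload_video(self, title, description, tags, data):
		# Consume the data iterator without storing anything, then return a fake
		# (video_id, video_link) pair as the interface requires.
		video_id = str(uuid.uuid4())
		for chunk in data:
			pass
		return video_id, 'null://{}'.format(video_id)

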
class Youtube(UploadBackend):
	"""Represents a youtube channel to upload to, and settings for doing so.
	Config args besides credentials:
		hidden:
			If false, video is public. If true, video is unlisted. Default false.
	"""

	needs_transcode = True
	encoding_settings = [] # TODO youtube's recommended settings

	def __init__(self, credentials, hidden=False):
		self.logger = logging.getLogger(type(self).__name__)
		self.client = GoogleAPIClient(
			credentials['client_id'],
			credentials['client_secret'],
			credentials['refresh_token'],
		)
		self.hidden = hidden

	def upload_video(self, title, description, tags, data):
		json = {
			'snippet': {
				'title': title,
				'description': description,
				'tags': tags,
			},
		}
		if self.hidden:
			json['status'] = {
				'privacyStatus': 'unlisted',
			}
		# The first request creates a resumable upload session and returns the upload
		# URL in the Location header; the video data is then POSTed to that URL.
		resp = self.client.request('POST',
			'https://www.googleapis.com/upload/youtube/v3/videos',
			params={
				'part': 'snippet,status' if self.hidden else 'snippet',
				'uploadType': 'resumable',
			},
			json=json,
		)
		resp.raise_for_status()
		upload_url = resp.headers['Location']
		resp = self.client.request('POST', upload_url, data=data)
		resp.raise_for_status()
		id = resp.json()['id']
		return id, 'https://youtu.be/{}'.format(id)

	def check_status(self, ids):
		output = []
		# Break up into groups of 10 videos. I'm not sure what the limit is so this is reasonable.
		for i in range(0, len(ids), 10):
			group = ids[i:i+10]
			resp = self.client.request('GET',
				'https://www.googleapis.com/youtube/v3/videos',
				params={
					'part': 'id,status',
					'id': ','.join(group),
				},
			)
			resp.raise_for_status()
			for item in resp.json()['items']:
				if item['status']['uploadStatus'] == 'processed':
					output.append(item['id'])
		return output


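# Illustrative helper only, not from the original file: it shows the credentials
# dict shape that Youtube.__init__ above expects (client_id, client_secret,
# refresh_token); the values are placeholders.
def example_youtube_backend():
	credentials = {
		'client_id': 'PLACEHOLDER_CLIENT_ID',
		'client_secret': 'PLACEHOLDER_CLIENT_SECRET',
		'refresh_token': 'PLACEHOLDER_REFRESH_TOKEN',
	}
	# hidden=True makes uploads unlisted; the default (False) makes them public.
	return Youtube(credentials, hidden=True)

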
class Local(UploadBackend):
	"""An "upload" backend that just saves the file to local disk.
	Needs no credentials. Config args:
		path:
			Where to save the file.
		url_prefix:
			The leading part of the URL to return.
			The filename will be appended to this to form the full URL.
			So for example, if you set "http://example.com/videos/",
			then a returned video URL might look like:
				"http://example.com/videos/my-example-video-1ffd816b-6496-45d4-b8f5-5eb06ee532f9.ts"
			If not given, returns a file:// url with the full path.
		write_info:
			If true, writes a json file alongside the video file containing
			the video title, description and tags.
			This is intended primarily for testing purposes.
	Saves files under their title, plus a random video id to avoid conflicts.
	Ignores description and tags (except when writing the info file).
	"""

	def __init__(self, credentials, path, url_prefix=None, write_info=False):
		self.path = path
		self.url_prefix = url_prefix
		self.write_info = write_info
		# make path if it doesn't already exist
		try:
			os.makedirs(self.path)
		except OSError as e:
			if e.errno != errno.EEXIST:
				raise
			# ignore already-exists errors

	def upload_video(self, title, description, tags, data):
		video_id = uuid.uuid4()
		# make title safe by removing offending characters, replacing with '-'
		title = re.sub('[^A-Za-z0-9_]', '-', title)
		filename = '{}-{}.ts'.format(title, video_id) # TODO with re-encoding, this ext must change
		filepath = os.path.join(self.path, filename)
		if self.write_info:
			with open(os.path.join(self.path, '{}-{}.json'.format(title, video_id)), 'w') as f:
				f.write(json.dumps({
					'title': title,
					'description': description,
					'tags': tags,
				}) + '\n')
		with open(filepath, 'w') as f:
			for chunk in data:
				f.write(chunk)
		if self.url_prefix is not None:
			url = self.url_prefix + filename
		else:
			url = 'file://{}'.format(filepath)
		return video_id, url

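# Illustrative sketch of how these backends tie in to the point of this change
# (multiple upload locations): a config mapping location names to backend settings
# could be turned into backend instances roughly as below. The 'type' key and the
# overall config shape are assumptions for the example, not the actual cutter config.
def example_load_backends(config, credentials):
	backend_types = {
		'youtube': Youtube,
		'local': Local,
	}
	backends = {}
	for name, backend_config in config.items():
		backend_config = dict(backend_config)
		backend_type = backend_types[backend_config.pop('type')]
		# Credentials go first, remaining config as kwargs, per the UploadBackend contract.
		backends[name] = backend_type(credentials, **backend_config)
	return backends
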
@@ -1,61 +0,0 @@
import logging

from common.googleapis import GoogleAPIClient


class Youtube(object):
	"""Manages youtube API operations"""

	def __init__(self, client_id, client_secret, refresh_token):
		self.logger = logging.getLogger(type(self).__name__)
		self.client = GoogleAPIClient(client_id, client_secret, refresh_token)

	def upload_video(self, title, description, tags, data, hidden=False):
		"""Data may be a string, file-like object or iterator. Returns id."""
		json = {
			'snippet': {
				'title': title,
				'description': description,
				'tags': tags,
			},
		}
		if hidden:
			json['status'] = {
				'privacyStatus': 'unlisted',
			}
		resp = self.client.request('POST',
			'https://www.googleapis.com/upload/youtube/v3/videos',
			params={
				'part': 'snippet,status' if hidden else 'snippet',
				'uploadType': 'resumable',
			},
			json=json,
		)
		resp.raise_for_status()
		upload_url = resp.headers['Location']
		resp = self.client.request('POST', upload_url, data=data)
		resp.raise_for_status()
		return resp.json()['id']

	def get_video_status(self, ids):
		"""For a list of video ids, returns a dict {id: upload status}.
		A video is fully processed when upload status is 'processed'.
		NOTE: Video ids may be missing from the result; this probably indicates
		the video is errored.
		"""
		output = {}
		# Break up into groups of 10 videos. I'm not sure what the limit is so this is reasonable.
		for i in range(0, len(ids), 10):
			group = ids[i:i+10]
			resp = self.client.request('GET',
				'https://www.googleapis.com/youtube/v3/videos',
				params={
					'part': 'id,status',
					'id': ','.join(group),
				},
			)
			resp.raise_for_status()
			for item in resp.json()['items']:
				output[item['id']] = item['status']['uploadStatus']
		return output