Merge pull request #103 from ekimekim/mike/cutter/full-cut

Resurrect non-experimental cut, now dubbed "full" (vs "fast") cut
Mike Lang committed 5 years ago (via GitHub)
commit bf55b1c75e

@@ -9,6 +9,7 @@ import itertools
import json
import logging
import os
import shutil
import sys
from collections import namedtuple
from contextlib import closing
@@ -256,7 +257,9 @@ def streams_info(segment):
def ffmpeg_cut_segment(segment, cut_start=None, cut_end=None):
"""Return a Popen object which is ffmpeg cutting the given segment"""
"""Return a Popen object which is ffmpeg cutting the given single segment.
This is used when doing a fast cut.
"""
args = [
'ffmpeg',
'-hide_banner', '-loglevel', 'fatal', # suppress noisy output
@@ -283,6 +286,22 @@ def ffmpeg_cut_segment(segment, cut_start=None, cut_end=None):
return subprocess.Popen(args, stdout=subprocess.PIPE)
def ffmpeg_cut_stdin(cut_start, cut_end, encode_args):
"""Return a Popen object which is ffmpeg cutting from stdin.
This is used when doing a full cut."""
args = [
'ffmpeg',
'-hide_banner', '-loglevel', 'fatal', # suppress noisy output
'-i', '-', # read input from stdin
'-ss', str(cut_start), # seek to cut_start before writing output
'-to', str(cut_end), # stop writing output at cut_end
] + list(encode_args) + [
'-', # output to stdout
]
logging.info("Running full cut with args: {}".format(" ".join(args)))
return subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
def read_chunks(fileobj, chunk_size=16*1024):
"""Read fileobj until EOF, yielding chunk_size sized chunks of data."""
while True:
@@ -292,7 +311,7 @@ def read_chunks(fileobj, chunk_size=16*1024):
yield chunk
def cut_segments(segments, start, end):
def fast_cut_segments(segments, start, end):
"""Yields chunks of a MPEGTS video file covering the exact timestamp range.
segments should be a list of segments as returned by get_best_segments().
This method works by only cutting the first and last segments, and concatenating the rest.
@@ -362,3 +381,47 @@ def cut_segments(segments, start, end):
with open(segment.path) as f:
for chunk in read_chunks(f):
yield chunk
def feed_input(segments, pipe):
"""Write each segment's data into the given pipe in order.
This is used to provide input to ffmpeg in a full cut."""
for segment in segments:
with open(segment.path) as f:
try:
shutil.copyfileobj(f, pipe)
except (OSError, IOError) as e:
# ignore EPIPE, as this just means the end cut meant we didn't need all input
if e.errno != errno.EPIPE:
raise
pipe.close()
def full_cut_segments(segments, start, end, encode_args):
# how far into the first segment to begin
cut_start = max(0, (start - segments[0].start).total_seconds())
# the timestamp to stop at, measured from the start of the first segment (as -to expects)
cut_end = max(0, (end - segments[0].start).total_seconds())
ffmpeg = None
input_feeder = None
try:
ffmpeg = ffmpeg_cut_stdin(cut_start, cut_end, encode_args)
input_feeder = gevent.spawn(feed_input, segments, ffmpeg.stdin)
# stream the output until it is closed
for chunk in read_chunks(ffmpeg.stdout):
yield chunk
# check if any errors occurred in input writing, or if ffmpeg exited non-success.
if ffmpeg.wait() != 0:
raise Exception("Error while streaming cut: ffmpeg exited {}".format(ffmpeg.returncode))
input_feeder.get() # re-raise any errors from feed_input()
finally:
# if something goes wrong, try to clean up ignoring errors
if input_feeder is not None:
input_feeder.kill()
if ffmpeg is not None and ffmpeg.poll() is None:
for action in (ffmpeg.kill, ffmpeg.stdin.close, ffmpeg.stdout.close):
try:
action()
except (OSError, IOError):
pass
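
For context, a minimal sketch of how a caller might consume this new generator (the output filename and the ['-f', 'mp4'] encode args are illustrative; segments, start and end are assumed to come from get_best_segments() and the request, as in the callers further down):

    # Sketch only: stream a full (re-encoded) cut into a local file.
    with open('cut.mp4', 'wb') as out:
        for chunk in full_cut_segments(segments, start, end, ['-f', 'mp4']):
            out.write(chunk)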

@@ -16,7 +16,7 @@ from psycopg2 import sql
import common
from common.database import DBManager, query
from common.segments import get_best_segments, cut_segments, ContainsHoles
from common.segments import get_best_segments, fast_cut_segments, full_cut_segments, ContainsHoles
from .upload_backends import Youtube, Local
@@ -267,7 +267,13 @@ class Cutter(object):
upload_backend = self.upload_locations[job.upload_location]
self.logger.info("Cutting and uploading job {} to {}".format(format_job(job), upload_backend))
cut = cut_segments(job.segments, job.video_start, job.video_end)
if upload_backend.encoding_settings is None:
self.logger.debug("No encoding settings, using fast cut")
cut = fast_cut_segments(job.segments, job.video_start, job.video_end)
else:
self.logger.debug("Using encoding settings for cut: {}".format(upload_backend.encoding_settings))
cut = full_cut_segments(job.segments, job.video_start, job.video_end, upload_backend.encoding_settings)
# This flag tracks whether we've told requests to finalize the upload,
# and serves to detect whether errors from the request call are recoverable.
@@ -550,6 +556,9 @@ def main(
This is useful when multiple upload locations actually refer to the
same place just with different settings, and you only want one of them
to actually do the check.
cut_type:
One of 'fast' or 'full'. Default 'fast'. This indicates whether to use
fast_cut_segments() or full_cut_segments() for this location.
along with any additional config options defined for that backend type.
creds_file should contain any required credentials for the upload backends, as JSON.
@@ -608,6 +617,7 @@ def main(
for location, backend_config in config.items():
backend_type = backend_config.pop('type')
no_transcode_check = backend_config.pop('no_transcode_check', False)
cut_type = backend_config.pop('cut_type', 'fast')
if backend_type == 'youtube':
backend_type = Youtube
elif backend_type == 'local':
@@ -615,6 +625,11 @@ def main(
else:
raise ValueError("Unknown upload backend type: {!r}".format(type))
backend = backend_type(credentials, **backend_config)
if cut_type == 'fast':
# mark for fast cut by clearing encoding settings
backend.encoding_settings = None
elif cut_type != 'full':
raise ValueError("Unknown cut type: {!r}".format(cut_type))
upload_locations[location] = backend
if backend.needs_transcode and not no_transcode_check:
needs_transcode_check.append(backend)
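
To illustrate the new cut_type key, a hypothetical pair of upload location entries might look like this (the location names are made up and backend-specific options are omitted; 'type', 'cut_type' and 'no_transcode_check' are the keys handled above):

    # Hypothetical upload location config entries
    {
        'youtube-main': {
            'type': 'youtube',
            'cut_type': 'full',  # re-encode using the backend's encoding_settings
        },
        'local-backup': {
            'type': 'local',
            'cut_type': 'fast',  # concat-based cut, nothing transcoded
            'no_transcode_check': True,
        },
    }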

@@ -31,12 +31,14 @@ class UploadBackend(object):
The upload backend also determines the encoding settings for the cutting
process, this is given as a list of ffmpeg args
under the 'encoding_settings' attribute.
If this is None, the 'fast cut' strategy is used instead, in which
nothing is transcoded.
"""
needs_transcode = False
# reasonable default if settings don't otherwise matter
encoding_settings = [] # TODO
encoding_settings = ['-f', 'mp4']
def upload_video(self, title, description, tags, data):
raise NotImplementedError
@@ -59,7 +61,18 @@ class Youtube(UploadBackend):
"""
needs_transcode = True
encoding_settings = [] # TODO youtube's recommended settings
encoding_settings = [
# Youtube's recommended settings:
'-codec:v', 'libx264', # Make the video codec x264
'-crf', '21', # Set the video quality, this produces the bitrate range that YT likes
'-bf', '2', # Have 2 consecutive bframes, as requested
'-flags', '+cgop', # Use closed GOP, as requested
'-pix_fmt', 'yuv420p', # chroma subsampling 4:2:0, as requested
'-codec:a', 'aac', '-strict', '-2', # audio codec aac, as requested
'-b:a', '384k', # audio bitrate at 384k for 2 channel, use 512k if 5.1 audio
'-ar', '48000', # set audio sample rate to 48000Hz, as requested
'-movflags', 'faststart', # put MOOV atom at the front of the file, as requested
]
def __init__(self, credentials, hidden=False, category_id=23, language="en"):
self.logger = logging.getLogger(type(self).__name__)
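
Put together with ffmpeg_cut_stdin() above, a full cut for this backend invokes ffmpeg roughly as sketched below (the -ss/-to offsets are made-up example values, and Youtube is assumed to be in scope):

    # Approximate argv assembled by ffmpeg_cut_stdin() for a Youtube full cut
    args = (
        ['ffmpeg', '-hide_banner', '-loglevel', 'fatal', '-i', '-', '-ss', '12.3', '-to', '745.6']
        + Youtube.encoding_settings
        + ['-']  # encoded output is written to stdout
    )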

@@ -13,7 +13,7 @@ import prometheus_client as prom
from flask import Flask, url_for, request, abort, Response
from gevent.pywsgi import WSGIServer
from common import dateutil, get_best_segments, cut_segments, PromLogCountsHandler, install_stacksampler
from common import dateutil, get_best_segments, fast_cut_segments, full_cut_segments, PromLogCountsHandler, install_stacksampler
from common.flask_stats import request_stats, after_request
import generate_hls
@@ -235,6 +235,9 @@ def cut(channel, quality):
if any holes are detected, rather than producing a video with missing parts.
Set to true by passing "true" (case insensitive).
Even if holes are allowed, a 406 may result if the resulting video would be empty.
type: One of "fast" or "full". Defaults to "fast".
A fast cut is much faster but minor artifacting may be present near the start and end.
A fast cut is output as MPEG-TS, a full cut as MP4.
"""
start = dateutil.parse_utc_only(request.args['start'])
end = dateutil.parse_utc_only(request.args['end'])
@@ -257,7 +260,14 @@ def cut(channel, quality):
if not any(segment is not None for segment in segments):
return "We have no content available within the requested time range.", 406
return Response(cut_segments(segments, start, end), mimetype='video/MP2T')
type = request.args.get('type', 'fast')
if type == 'fast':
return Response(fast_cut_segments(segments, start, end), mimetype='video/MP2T')
elif type == 'full':
# output as mp4 with no more specific encoding args
return Response(full_cut_segments(segments, start, end, ['-f', 'mp4']), mimetype='video/mp4')
else:
return "Unknown type {!r}. Must be 'fast' or 'full'.".format(type), 400
def main(host='0.0.0.0', port=8000, base_dir='.', backdoor_port=0):

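Finally, a sketch of requesting a full cut from the restreamer over HTTP (the route, host, channel, quality and timestamps are assumptions for illustration; the start, end and type query parameters are the ones handled above):

    # Illustrative only; uses the python-requests library.
    import requests

    resp = requests.get(
        'http://localhost:8000/cut/mychannel/source',  # hypothetical route and host
        params={
            'start': '2019-11-09T00:00:00',
            'end': '2019-11-09T00:10:00',
            'type': 'full',  # 'fast' (default, MPEG-TS output) or 'full' (mp4 output)
        },
        stream=True,
    )
    resp.raise_for_status()
    with open('cut.mp4', 'wb') as f:
        for chunk in resp.iter_content(chunk_size=64 * 1024):
            f.write(chunk)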