restreamer: Have playlist cache stream results

pull/389/head
Mike Lang 1 year ago committed by Mike Lang
parent 8ede4622ca
commit 7e90346b87

@@ -20,6 +20,7 @@ from common.flask_stats import request_stats, after_request
 from common.images import compose_thumbnail_template
 from common.segments import smart_cut_segments, feed_input, render_segments_waveform, extract_frame, list_segment_files, get_best_segments_for_frame
 from common.chat import get_batch_file_range, merge_messages
+from common.cached_iterator import CachedIterator
 
 from . import generate_hls
@@ -275,32 +276,42 @@ def generate_media_playlist(channel, quality):
 	if end - start > datetime.timedelta(hours=12) and ('start' not in request.args or 'end' not in request.args):
 		return "Implicit range may not be longer than 12 hours", 400
 
-	cache_key = (hours_path, start, end)
-	if cache_key in _media_playlist_cache:
-		return _media_playlist_cache[cache_key].get()
-
-	# get_best_segments requires start be before end, special case that as no segments
-	# (not an error because someone might ask for a specific start, no end, but we ended up with
-	# end before start because that's the latest time we have)
-	if start < end:
-		segments = get_best_segments(hours_path, start, end)
-	else:
-		# Note the None to indicate there was a "hole" at both start and end
-		segments = [None]
-
-	result = gevent.event.AsyncResult()
-	try:
-		# Note we don't populate the cache until we're in the try block,
-		# so there is no point where an exception won't be transferred to the result.
-		_media_playlist_cache[cache_key] = result
-		playlist = "".join(generate_hls.generate_media(segments, os.path.join(app.static_url_path, channel, quality)))
-		result.set(playlist)
-	except BaseException as ex:
-		result.set_exception(ex)
-		raise
-
-	# Now we're done, remove the async result so a fresh request can start.
-	assert _media_playlist_cache.pop(cache_key) is result, "Got someone else's AsyncResult"
-	return playlist
+	def _generate_media_playlist():
+		cache_key = (hours_path, start, end)
+
+		# get_best_segments requires start be before end, special case that as no segments
+		# (not an error because someone might ask for a specific start, no end, but we ended up with
+		# end before start because that's the latest time we have)
+		if start < end:
+			segments = get_best_segments(hours_path, start, end)
+		else:
+			# Note the None to indicate there was a "hole" at both start and end
+			segments = [None]
+
+		if cache_key in _media_playlist_cache:
+			yield from _media_playlist_cache[cache_key].get()
+			return
+
+		result = gevent.event.AsyncResult()
+		try:
+			# Note we don't populate the cache until we're in the try block,
+			# so there is no point where an exception won't be transferred to the result.
+			_media_playlist_cache[cache_key] = result
+			iterator = CachedIterator(generate_hls.generate_media(segments, os.path.join(app.static_url_path, channel, quality)))
+			# We set the result immediately so that everyone can start returning it.
+			# Multiple readers from the CachedIterator is safe.
+			result.set(iterator)
+		except BaseException as ex:
+			result.set_exception(ex)
+			raise
+
+		# send the whole response
+		yield from iterator
+
+		# Now we're done, remove the async result so a fresh request can start.
+		assert _media_playlist_cache.pop(cache_key) is result, "Got someone else's AsyncResult"
+
+	return _generate_media_playlist()
 
 
 @app.route('/cut/<channel>/<quality>.ts')
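
Note on CachedIterator: the new code caches a CachedIterator (imported above from common.cached_iterator, which is added elsewhere in this PR rather than in this diff) instead of a fully joined playlist string. The intent is that the underlying generate_hls.generate_media() generator is consumed only once, with each produced item remembered, so any number of concurrent requests can replay the stream from the start while it is still being generated. The class itself is not shown here; the sketch below only illustrates that behaviour, assuming a gevent worker and using gevent.lock.RLock to serialize pulls from the source, which is not necessarily how common.cached_iterator implements it.

import gevent.lock

# Illustrative sketch only, not the code from common.cached_iterator.
class CachedIterator(object):
	"""Wrap an iterator so it can be iterated any number of times.

	Items are pulled from the underlying iterator on demand and remembered,
	so every reader sees the full sequence from the start, but the source
	is only ever consumed once.
	"""
	def __init__(self, iterator):
		self.iterator = iterator  # set to None once exhausted
		self.items = []           # items produced so far, shared by all readers
		self.lock = gevent.lock.RLock()  # serializes next() calls on the source

	def __iter__(self):
		i = 0
		while True:
			if i < len(self.items):
				# Already cached, serve it without touching the source.
				yield self.items[i]
				i += 1
				continue
			with self.lock:
				# Re-check under the lock: another reader may have advanced
				# (or exhausted) the source while we were waiting.
				if i >= len(self.items):
					if self.iterator is None:
						return
					try:
						self.items.append(next(self.iterator))
					except StopIteration:
						self.iterator = None
						return
			yield self.items[i]
			i += 1

Because result.set(iterator) runs before anything has been generated, a request that finds the cache entry can start yielding from the same iterator immediately and receives playlist lines as they are produced, instead of waiting for the whole string to be built as the old "".join(...) version did. The cache entry is only popped once the request that created it has streamed its entire response.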
