mirror of https://github.com/spl0k/supysonic.git

Finish transcoding and cache if close to the end

Ref #202
Author: Alban Féron
Date:   2020-11-15 16:22:24 +01:00
parent e29a09e2fa
commit dc5084ce47
3 changed files with 73 additions and 17 deletions
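The commit relies on a specific corner of the generator protocol: close() raises GeneratorExit inside a generator and forbids any further yield (that would be a RuntimeError), but throw(GeneratorExit) resumes the generator the same way while still allowing it to yield, with throw() returning the next yielded value. Below is a minimal, self-contained sketch of the handshake the two diffs below set up between the transcoder and the cache; producer, cache_and_forward and all the values are illustrative stand-ins, not supysonic's actual names. The 0.95 threshold mirrors the sent >= estimate * 0.95 check in the diff.

cached = []  # stands in for the cache file on disk

def producer(chunks, estimate):
    # Plays the role of handle_transcoding(): count what was sent and, if
    # closed once we are close to the end, keep yielding instead of aborting.
    # `chunks` must be a single iterator so the resumed loop continues
    # exactly where the first one stopped.
    sent = 0
    try:
        for chunk in chunks:
            sent += len(chunk)
            yield chunk
    except GeneratorExit:
        if estimate and sent >= estimate * 0.95:
            yield from chunks  # finish the remaining data
        else:
            raise

def cache_and_forward(gen):
    # Plays the role of Cache.set_generated(): on an early close, resume the
    # producer with throw(); if it yields more, cache it without forwarding.
    try:
        for chunk in gen:
            cached.append(chunk)
            yield chunk
    except GeneratorExit:
        try:
            cached.append(gen.throw(GeneratorExit))
            for chunk in gen:
                cached.append(chunk)
        except (StopIteration, GeneratorExit):
            pass  # the producer was done, or too far from the end

stream = cache_and_forward(producer(iter([b"aaaaa", b"b"]), estimate=5))
next(stream)    # the client reads one chunk (5 of ~5 estimated bytes)...
stream.close()  # ...then disconnects
assert b"".join(cached) == b"aaaaab"  # the cache still received everything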


@@ -157,25 +157,50 @@ def stream_media():
         except OSError:
             raise ServerError("Error while running the transcoding process")
 
+        if estimateContentLength == "true":
+            estimate = dst_bitrate * 1000 * res.duration // 8
+        else:
+            estimate = None
+
         def transcode():
+            while True:
+                data = proc.stdout.read(8192)
+                if not data:
+                    break
+                yield data
+
+        def kill_processes():
+            if dec_proc != None:
+                dec_proc.kill()
+            proc.kill()
+
+        def handle_transcoding():
             try:
-                while True:
-                    data = proc.stdout.read(8192)
-                    if not data:
-                        break
+                sent = 0
+                for data in transcode():
+                    sent += len(data)
                     yield data
-            except BaseException:
+            except (Exception, SystemExit, KeyboardInterrupt):
                 # Make sure child processes are always killed
-                if dec_proc != None:
-                    dec_proc.kill()
-                proc.kill()
+                kill_processes()
                 raise
+            except GeneratorExit:
+                # Try to transcode/send more data if we're close to the end.
+                # The calling code has to support this, as yielding more data
+                # after a GeneratorExit would normally raise a RuntimeError.
+                # Hopefully this generator is only used by the cache, which
+                # handles this.
+                if estimate and sent >= estimate * 0.95:
+                    yield from transcode()
+                else:
+                    kill_processes()
+                    raise
             finally:
                 if dec_proc != None:
                     dec_proc.wait()
                 proc.wait()
 
-        resp_content = cache.set_generated(cache_key, transcode)
+        resp_content = cache.set_generated(cache_key, handle_transcoding)
 
         logger.info(
             "Transcoding track {0.id} for user {1.id}. Source: {2} at {0.bitrate}kbps. Dest: {3} at {4}kbps".format(
@@ -183,10 +208,8 @@ def stream_media():
             )
         )
 
         response = Response(resp_content, mimetype=dst_mimetype)
-        if estimateContentLength == "true":
-            response.headers.add(
-                "Content-Length", dst_bitrate * 1000 * res.duration // 8
-            )
+        if estimate is not None:
+            response.headers.add("Content-Length", estimate)
     else:
         response = send_file(res.path, mimetype=dst_mimetype, conditional=True)
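As a sanity check on the estimate formula above (bitrate in kbps converted to bytes), here is the arithmetic with the values the tests below use; the 4-second track duration is an inference from those numbers, not something the diff states:

dst_bitrate, duration = 96, 4  # kbps and seconds; assumed, matching the tests' numbers
estimate = dst_bitrate * 1000 * duration // 8
assert estimate == 48000  # the byte count the first test reads up to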


@@ -159,7 +159,7 @@ class Cache(object):
             self._make_space(size, key=key)
             os.replace(f.name, self._filepath(key))
             self._record_file(key, size)
-        except Exception:
+        except BaseException:
             f.close()
             with contextlib.suppress(OSError):
                 os.remove(f.name)
@@ -183,9 +183,21 @@ class Cache(object):
         ...     print(x)
         """
         with self.set_fileobj(key) as f:
-            for data in gen_function():
-                f.write(data)
-                yield data
+            gen = gen_function()
+            try:
+                for data in gen:
+                    f.write(data)
+                    yield data
+            except GeneratorExit:
+                # Try to stop the generator, but check if it still wants to
+                # yield data; if it does, cache that data without forwarding it.
+                try:
+                    f.write(gen.throw(GeneratorExit))
+                    for data in gen:
+                        f.write(data)
+                except StopIteration:
+                    # We stopped just at the end of the generator
+                    pass
 
     def get(self, key):
         """Return the path to the file where the cached data is stored"""


@@ -79,6 +79,7 @@ class TranscodingTestCase(ApiTestBase):
         read = 0
         it = iter(rv.response)
+        # Read up to the estimated length
         while read < 48000:
             read += len(next(it))
         rv.response.close()
 
@@ -106,6 +107,26 @@ class TranscodingTestCase(ApiTestBase):
             self.assertFalse(current_app.transcode_cache.has(key))
             self.assertEqual(current_app.transcode_cache.size, 0)
 
+    @unittest.skipIf(
+        sys.platform == "win32",
+        "Can't test transcoding on Windows because of a lack of simple commandline tools",
+    )
+    def test_last_chunk_close_transcoded_cached(self):
+        rv = self._stream(maxBitRate=96, estimateContentLength="true", format="rnd")
+
+        read = 0
+        it = iter(rv.response)
+        # Read up to the last chunk of data but keep the generator "alive"
+        while read < 52000:
+            read += len(next(it))
+        rv.response.close()
+        rv.close()
+
+        key = "{}-96.rnd".format(self.trackid)
+        with self.app_context():
+            self.assertTrue(current_app.transcode_cache.has(key))
+            self.assertEqual(current_app.transcode_cache.size, 52000)
+
 
 if __name__ == "__main__":
     unittest.main()
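A note on the new test's numbers: the stream totals 52000 bytes (the size the cache ends up with) while the Content-Length estimate is 48000 (the figure the first test reads up to), so by the time the response is closed the generator has sent well over 95% of the estimate and takes the finish-and-cache path:

estimate = 48000                 # the estimated length the tests read against
assert 52000 >= estimate * 0.95  # 45600; the close lands in the finish window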