mirror of https://github.com/spl0k/supysonic.git synced 2024-12-22 17:06:17 +00:00

Fix issues with hanging transcoding processes

When a connection that is consuming a generated response is closed,
Flask closes the generator, making it raise the special `GeneratorExit`
exception the next time the program tries to yield from it. Because the
`transcode` function was called (returning a generator) before being
passed into `set_generated`, the exception was being handled in the
wrong order.

By passing the `transcode` function to `set_generated` and letting
`set_generated` call it to return a generator while generating the
response for the client, the exception properly bubbles up through
`transcode` into `set_generated`. This allows both of them to handle it
properly by stopping the subprocesses and not caching the incomplete
response data, respectively.
Carey Metcalfe 2019-02-03 19:02:56 -05:00
parent f106923f17
commit 0ac2376e07
3 changed files with 13 additions and 11 deletions
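
The fix is easiest to see outside of Flask. Below is a minimal, self-contained sketch (not supysonic code; the names and data are illustrative) of the pattern the message describes: because the cache helper receives the generator function and creates the generator itself, closing the response generator unwinds both generators, so the transcoder's cleanup runs and nothing incomplete is cached.

def transcode():
    # stand-in for the real transcoder, which stops its decoder/encoder
    # subprocesses when it is closed (see the media.py hunk below)
    try:
        for chunk in (b'chunk1', b'chunk2', b'chunk3'):
            yield chunk
    finally:
        print('transcode: stopping subprocesses')

def set_generated(key, gen_function):
    # simplified stand-in for Cache.set_generated() (see the cache.py hunk
    # below): the generator is created here, and the result is cached only
    # on normal completion
    buf = []
    for data in gen_function():
        buf.append(data)
        yield data
    print('cache: storing', len(b''.join(buf)), 'bytes under', key)

resp = set_generated('track-1', transcode)
print(next(resp))  # the client reads one chunk...
resp.close()       # ...then disconnects: GeneratorExit is raised at the yield
                   # above and (in CPython) the inner generator is closed as
                   # the wrapper is torn down, so its finally block runs and
                   # the caching line never executes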

View File

@@ -127,7 +127,7 @@ def stream_media():
if dec_proc != None:
dec_proc.wait()
proc.wait()
-resp_content = cache.set_generated(cache_key, transcode())
+resp_content = cache.set_generated(cache_key, transcode)
logger.info('Transcoding track {0.id} for user {1.id}. Source: {2} at {0.bitrate}kbps. Dest: {3} at {4}kbps'.format(res, request.user, src_suffix, dst_suffix, dst_bitrate))
response = Response(resp_content, mimetype=dst_mimetype)

View File

@@ -161,17 +161,19 @@ class Cache(object):
f.write(value)
return self._filepath(key)
-def set_generated(self, key, gen):
-"""Pass the generated values through and set the end result in the cache
+def set_generated(self, key, gen_function):
+"""Pass the values yielded from the generator function through and set
+the end result in the cache.
-The contents will be set into the cache when the generator completes.
+The contents will be set into the cache only if and when the generator
+completes.
Ex:
->>> for x in cache.set_generated(key, some_generator()):
+>>> for x in cache.set_generated(key, generator_function):
... print(x)
"""
with self.set_fileobj(key) as f:
-for data in gen:
+for data in gen_function():
f.write(data)
yield data
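
For callers like the `stream_media` change above, the practical upshot of the new signature is that the generator function can own its subprocess cleanup with an ordinary try/finally and rely on `set_generated` committing the cache entry only on normal completion. Below is a hedged sketch of such a caller; the factory, the 'cat' command and the variable names are illustrative, not supysonic's actual transcoding pipeline.

import subprocess

def make_transcoder(src_path):
    # builds a generator *function* that can be handed to Cache.set_generated()
    def transcode():
        # 'cat' stands in for the real decoder/encoder pipeline
        proc = subprocess.Popen(['cat', src_path], stdout=subprocess.PIPE)
        try:
            while True:
                data = proc.stdout.read(8192)
                if not data:
                    break
                yield data
        finally:
            # runs on normal completion and on GeneratorExit (client gone),
            # so the subprocess never outlives the request
            if proc.poll() is None:
                proc.terminate()
            proc.wait()
    return transcode

# usage, mirroring the call site above (cache_key and path are hypothetical):
# resp_content = cache.set_generated(cache_key, make_transcoder(path))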

View File

@@ -76,7 +76,7 @@ class CacheTestCase(unittest.TestCase):
yield b
t = []
-for x in cache.set_generated("key", gen()):
+for x in cache.set_generated("key", gen):
t.append(x)
self.assertEqual(cache.size, 0)
self.assertFalse(cache.has("key"))
@@ -103,7 +103,7 @@ class CacheTestCase(unittest.TestCase):
self.assertEqual(cache.get_value("key"), val)
with cache.get_fileobj("key") as f:
-self.assertEquals(f.read(), val)
+self.assertEqual(f.read(), val)
with open(cache.get("key"), 'rb') as f:
self.assertEqual(f.read(), val)
@@ -162,7 +162,7 @@ class CacheTestCase(unittest.TestCase):
yield b
with self.assertRaises(TypeError):
-for x in cache.set_generated("key", gen()):
+for x in cache.set_generated("key", gen):
pass
# Make sure no partial files are left after the error
@@ -174,8 +174,8 @@ class CacheTestCase(unittest.TestCase):
for b in [b'0', b'12', b'345', b'6789']:
yield b
-g1 = cache.set_generated("key", gen())
-g2 = cache.set_generated("key", gen())
+g1 = cache.set_generated("key", gen)
+g2 = cache.set_generated("key", gen)
next(g1)
files = os.listdir(self.__dir)