mirror of
https://github.com/spl0k/supysonic.git
synced 2024-11-09 19:52:16 +00:00
Small touches on logging
Most notably not relying on Flask's logger, as this causes issues at the end of tests on Python 2.7. Fixes #96
This commit is contained in:
parent
8132e38d46
commit
f5bab00a1c
@ -141,7 +141,7 @@ def scrobble():
|
||||
t, submission = map(request.values.get, [ 'time', 'submission' ])
|
||||
t = int(t) / 1000 if t else int(time.time())
|
||||
|
||||
lfm = LastFm(current_app.config['LASTFM'], request.user, current_app.logger)
|
||||
lfm = LastFm(current_app.config['LASTFM'], request.user)
|
||||
|
||||
if submission in (None, '', True, 'true', 'True', 1, '1'):
|
||||
lfm.scrobble(res, t)
|
||||
|
@ -8,6 +8,7 @@
|
||||
# Distributed under terms of the GNU AGPLv3 license.
|
||||
|
||||
import codecs
|
||||
import logging
|
||||
import mimetypes
|
||||
import os.path
|
||||
import requests
|
||||
@ -26,6 +27,8 @@ from ..py23 import dict
|
||||
from . import api, get_entity
|
||||
from .exceptions import GenericError, MissingParameter, NotFound, ServerError, UnsupportedParameter
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def prepare_transcoding_cmdline(base_cmdline, input_file, input_format, output_format, output_bitrate):
|
||||
if not base_cmdline:
|
||||
return None
|
||||
@ -79,7 +82,7 @@ def stream_media():
|
||||
transcoder = config.get('transcoder')
|
||||
if not transcoder:
|
||||
message = 'No way to transcode from {} to {}'.format(src_suffix, dst_suffix)
|
||||
current_app.logger.info(message)
|
||||
logger.info(message)
|
||||
raise GenericError(message)
|
||||
|
||||
transcoder, decoder, encoder = map(lambda x: prepare_transcoding_cmdline(x, res.path, src_suffix, dst_suffix, dst_bitrate), [ transcoder, decoder, encoder ])
|
||||
@ -109,7 +112,7 @@ def stream_media():
|
||||
dec_proc.wait()
|
||||
proc.wait()
|
||||
|
||||
current_app.logger.info('Transcoding track {0.id} for user {1.id}. Source: {2} at {0.bitrate}kbps. Dest: {3} at {4}kbps'.format(res, request.user, src_suffix, dst_suffix, dst_bitrate))
|
||||
logger.info('Transcoding track {0.id} for user {1.id}. Source: {2} at {0.bitrate}kbps. Dest: {3} at {4}kbps'.format(res, request.user, src_suffix, dst_suffix, dst_bitrate))
|
||||
response = Response(transcode(), mimetype = dst_mimetype)
|
||||
if estimateContentLength == 'true':
|
||||
response.headers.add('Content-Length', dst_bitrate * 1000 * res.duration // 8)
|
||||
@ -183,14 +186,14 @@ def lyrics():
|
||||
for track in query:
|
||||
lyrics_path = os.path.splitext(track.path)[0] + '.txt'
|
||||
if os.path.exists(lyrics_path):
|
||||
current_app.logger.debug('Found lyrics file: ' + lyrics_path)
|
||||
logger.debug('Found lyrics file: ' + lyrics_path)
|
||||
|
||||
try:
|
||||
lyrics = read_file_as_unicode(lyrics_path)
|
||||
except UnicodeError:
|
||||
# Lyrics file couldn't be decoded. Rather than displaying an error, try with the potential next files or
|
||||
# return no lyrics. Log it anyway.
|
||||
current_app.logger.warning('Unsupported encoding for lyrics file ' + lyrics_path)
|
||||
logger.warning('Unsupported encoding for lyrics file ' + lyrics_path)
|
||||
continue
|
||||
|
||||
return request.formatter('lyrics', dict(
|
||||
@ -211,7 +214,7 @@ def lyrics():
|
||||
_value_ = root.find('cl:Lyric', namespaces = ns).text
|
||||
))
|
||||
except requests.exceptions.RequestException as e: # pragma: nocover
|
||||
current_app.logger.warning('Error while requesting the ChartLyrics API: ' + str(e))
|
||||
logger.warning('Error while requesting the ChartLyrics API: ' + str(e))
|
||||
|
||||
return request.formatter('lyrics', dict()) # pragma: nocover
|
||||
|
||||
@ -223,12 +226,12 @@ def read_file_as_unicode(path):
|
||||
for enc in encodings:
|
||||
try:
|
||||
contents = codecs.open(path, 'r', encoding = enc).read()
|
||||
current_app.logger.debug('Read file {} with {} encoding'.format(path, enc))
|
||||
logger.debug('Read file {} with {} encoding'.format(path, enc))
|
||||
# Maybe save the encoding somewhere to prevent going through this loop each time for the same file
|
||||
return contents
|
||||
except UnicodeError:
|
||||
pass
|
||||
|
||||
# Fallback to ASCII
|
||||
current_app.logger.debug('Reading file {} with ascii encoding'.format(path))
|
||||
logger.debug('Reading file {} with ascii encoding'.format(path))
|
||||
return unicode(open(path, 'r').read())
|
||||
|
@ -7,6 +7,8 @@
|
||||
#
|
||||
# Distributed under terms of the GNU AGPLv3 license.
|
||||
|
||||
import logging
|
||||
|
||||
from flask import flash, redirect, render_template, request, session, url_for
|
||||
from flask import current_app
|
||||
from functools import wraps
|
||||
@ -19,6 +21,8 @@ from ..py23 import dict
|
||||
|
||||
from . import admin_only, frontend
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def me_or_uuid(f, arg = 'uid'):
|
||||
@wraps(f)
|
||||
def decorated_func(*args, **kwargs):
|
||||
@ -78,7 +82,7 @@ def update_clients(uid, user):
|
||||
clients_opts[client] = dict([ (opt, value) ])
|
||||
else:
|
||||
clients_opts[client][opt] = value
|
||||
current_app.logger.debug(clients_opts)
|
||||
logger.debug(clients_opts)
|
||||
|
||||
for client, opts in clients_opts.items():
|
||||
prefs = user.clients.select(lambda c: c.client_name == client).first()
|
||||
@ -249,7 +253,7 @@ def lastfm_reg(uid, user):
|
||||
flash('Missing LastFM auth token')
|
||||
return redirect(url_for('frontend.user_profile', uid = uid))
|
||||
|
||||
lfm = LastFm(current_app.config['LASTFM'], user, current_app.logger)
|
||||
lfm = LastFm(current_app.config['LASTFM'], user)
|
||||
status, error = lfm.link_account(token)
|
||||
flash(error if not status else 'Successfully linked LastFM account')
|
||||
|
||||
@ -258,7 +262,7 @@ def lastfm_reg(uid, user):
|
||||
@frontend.route('/user/<uid>/lastfm/unlink')
@me_or_uuid
def lastfm_unreg(uid, user):
    """Unlink the user's LastFM account and redirect to their profile page."""
    # LastFm logs through its own module-level logger; no logger argument.
    lfm = LastFm(current_app.config['LASTFM'], user)
    lfm.unlink_account()
    flash('Unlinked LastFM account')
    return redirect(url_for('frontend.user_profile', uid = uid))
|
||||
|
@ -7,12 +7,16 @@
|
||||
#
|
||||
# Distributed under terms of the GNU AGPLv3 license.
|
||||
|
||||
import requests, hashlib
|
||||
import hashlib
|
||||
import logging
|
||||
import requests
|
||||
|
||||
from .py23 import strtype
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class LastFm:
|
||||
def __init__(self, config, user, logger):
|
||||
def __init__(self, config, user):
|
||||
if config['api_key'] is not None and config['secret'] is not None:
|
||||
self.__api_key = config['api_key']
|
||||
self.__api_secret = config['secret'].encode('utf-8')
|
||||
@ -20,7 +24,6 @@ class LastFm:
|
||||
else:
|
||||
self.__enabled = False
|
||||
self.__user = user
|
||||
self.__logger = logger
|
||||
|
||||
def link_account(self, token):
|
||||
if not self.__enabled:
|
||||
@ -81,14 +84,14 @@ class LastFm:
|
||||
else:
|
||||
r = requests.get('http://ws.audioscrobbler.com/2.0/', params = kwargs, timeout = 5)
|
||||
except requests.exceptions.RequestException as e:
|
||||
self.__logger.warning('Error while connecting to LastFM: ' + str(e))
|
||||
logger.warning('Error while connecting to LastFM: ' + str(e))
|
||||
return None
|
||||
|
||||
json = r.json()
|
||||
if 'error' in json:
|
||||
if json['error'] in (9, '9'):
|
||||
self.__user.lastfm_status = False
|
||||
self.__logger.warning('LastFM error %i: %s' % (json['error'], json['message']))
|
||||
logger.warning('LastFM error %i: %s' % (json['error'], json['message']))
|
||||
|
||||
return json
|
||||
|
||||
|
@ -29,24 +29,25 @@ OP_MOVE = 4
|
||||
FLAG_CREATE = 8
|
||||
FLAG_COVER = 16
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class SupysonicWatcherEventHandler(PatternMatchingEventHandler):
|
||||
def __init__(self, extensions, queue, logger):
|
||||
def __init__(self, extensions, queue):
|
||||
patterns = None
|
||||
if extensions:
|
||||
patterns = list(map(lambda e: "*." + e.lower(), extensions.split())) + list(map(lambda e: "*" + e, covers.EXTENSIONS))
|
||||
super(SupysonicWatcherEventHandler, self).__init__(patterns = patterns, ignore_directories = True)
|
||||
|
||||
self.__queue = queue
|
||||
self.__logger = logger
|
||||
|
||||
def dispatch(self, event):
    """Dispatch a watchdog filesystem event to the on_* handlers.

    Wraps the base-class dispatch so that any unexpected exception is
    logged as critical instead of killing the observer thread.
    """
    try:
        super(SupysonicWatcherEventHandler, self).dispatch(event)
    except Exception as e: # pragma: nocover
        # Module-level logger: the handler no longer carries its own logger.
        logger.critical(e)
|
||||
|
||||
def on_created(self, event):
|
||||
self.__logger.debug("File created: '%s'", event.src_path)
|
||||
logger.debug("File created: '%s'", event.src_path)
|
||||
|
||||
op = OP_SCAN | FLAG_CREATE
|
||||
if not covers.is_valid_cover(event.src_path):
|
||||
@ -61,7 +62,7 @@ class SupysonicWatcherEventHandler(PatternMatchingEventHandler):
|
||||
self.__queue.put(event.src_path, op | FLAG_COVER)
|
||||
|
||||
def on_deleted(self, event):
|
||||
self.__logger.debug("File deleted: '%s'", event.src_path)
|
||||
logger.debug("File deleted: '%s'", event.src_path)
|
||||
|
||||
op = OP_REMOVE
|
||||
_, ext = os.path.splitext(event.src_path)
|
||||
@ -70,12 +71,12 @@ class SupysonicWatcherEventHandler(PatternMatchingEventHandler):
|
||||
self.__queue.put(event.src_path, op)
|
||||
|
||||
def on_modified(self, event):
    """Queue a rescan for a modified media file.

    Cover-art images are handled elsewhere, so modifications to valid
    cover files are ignored here.
    """
    logger.debug("File modified: '%s'", event.src_path)
    if not covers.is_valid_cover(event.src_path):
        self.__queue.put(event.src_path, OP_SCAN)
|
||||
|
||||
def on_moved(self, event):
|
||||
self.__logger.debug("File moved: '%s' -> '%s'", event.src_path, event.dest_path)
|
||||
logger.debug("File moved: '%s' -> '%s'", event.src_path, event.dest_path)
|
||||
|
||||
op = OP_MOVE
|
||||
_, ext = os.path.splitext(event.src_path)
|
||||
@ -127,10 +128,9 @@ class Event(object):
|
||||
return self.__src
|
||||
|
||||
class ScannerProcessingQueue(Thread):
|
||||
def __init__(self, delay, logger):
|
||||
def __init__(self, delay):
|
||||
super(ScannerProcessingQueue, self).__init__()
|
||||
|
||||
self.__logger = logger
|
||||
self.__timeout = delay
|
||||
self.__cond = Condition()
|
||||
self.__timer = None
|
||||
@ -141,7 +141,7 @@ class ScannerProcessingQueue(Thread):
|
||||
try:
|
||||
self.__run()
|
||||
except Exception as e: # pragma: nocover
|
||||
self.__logger.critical(e)
|
||||
logger.critical(e)
|
||||
raise e
|
||||
|
||||
def __run(self):
|
||||
@ -154,7 +154,7 @@ class ScannerProcessingQueue(Thread):
|
||||
if not self.__queue:
|
||||
continue
|
||||
|
||||
self.__logger.debug("Instantiating scanner")
|
||||
logger.debug("Instantiating scanner")
|
||||
scanner = Scanner()
|
||||
|
||||
item = self.__next_item()
|
||||
@ -167,37 +167,37 @@ class ScannerProcessingQueue(Thread):
|
||||
item = self.__next_item()
|
||||
|
||||
scanner.finish()
|
||||
self.__logger.debug("Freeing scanner")
|
||||
logger.debug("Freeing scanner")
|
||||
del scanner
|
||||
|
||||
def __process_regular_item(self, scanner, item):
    """Apply a queued operation for a regular (non-cover) media file.

    The operation is a bitmask, so a single item can trigger a move,
    a scan and a removal in that order.
    """
    if item.operation & OP_MOVE:
        logger.info("Moving: '%s' -> '%s'", item.src_path, item.path)
        scanner.move_file(item.src_path, item.path)

    if item.operation & OP_SCAN:
        logger.info("Scanning: '%s'", item.path)
        scanner.scan_file(item.path)

    if item.operation & OP_REMOVE:
        logger.info("Removing: '%s'", item.path)
        scanner.remove_file(item.path)
|
||||
|
||||
def __process_cover_item(self, scanner, item):
    """Apply a queued operation for a cover-art path.

    A directory scan looks for covers inside it; a file scan may add the
    file itself as a cover. Removal and move re-run cover detection on
    the affected folder(s). The operation is a bitmask.
    """
    if item.operation & OP_SCAN:
        if os.path.isdir(item.path):
            logger.info("Looking for covers: '%s'", item.path)
            scanner.find_cover(item.path)
        else:
            logger.info("Potentially adding cover: '%s'", item.path)
            scanner.add_cover(item.path)

    if item.operation & OP_REMOVE:
        logger.info("Removing cover: '%s'", item.path)
        # Re-scan the containing folder so another cover can take over.
        scanner.find_cover(os.path.dirname(item.path))

    if item.operation & OP_MOVE:
        logger.info("Moving cover: '%s' -> '%s'", item.src_path, item.path)
        scanner.find_cover(os.path.dirname(item.src_path))
        scanner.add_cover(item.path)
|
||||
|
||||
@ -252,22 +252,15 @@ class SupysonicWatcher(object):
|
||||
init_database(config.BASE['database_uri'])
|
||||
|
||||
def run(self):
|
||||
logger = logging.getLogger(__name__)
|
||||
if self.__config.DAEMON['log_file']:
|
||||
log_handler = TimedRotatingFileHandler(self.__config.DAEMON['log_file'], when = 'midnight')
|
||||
else:
|
||||
log_handler = logging.NullHandler()
|
||||
log_handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)s] %(message)s"))
|
||||
logger.addHandler(log_handler)
|
||||
if self.__config.DAEMON['log_level']:
|
||||
mapping = {
|
||||
'DEBUG': logging.DEBUG,
|
||||
'INFO': logging.INFO,
|
||||
'WARNING': logging.WARNING,
|
||||
'ERROR': logging.ERROR,
|
||||
'CRTICAL': logging.CRITICAL
|
||||
}
|
||||
logger.setLevel(mapping.get(self.__config.DAEMON['log_level'].upper(), logging.NOTSET))
|
||||
if 'log_level' in self.__config.DAEMON:
|
||||
level = getattr(logging, self.__config.DAEMON['log_level'].upper(), logging.NOTSET)
|
||||
logger.setLevel(level)
|
||||
|
||||
with db_session:
|
||||
folders = Folder.select(lambda f: f.root)
|
||||
@ -277,8 +270,8 @@ class SupysonicWatcher(object):
|
||||
release_database()
|
||||
return
|
||||
|
||||
queue = ScannerProcessingQueue(self.__config.DAEMON['wait_delay'], logger)
|
||||
handler = SupysonicWatcherEventHandler(self.__config.BASE['scanner_extensions'], queue, logger)
|
||||
queue = ScannerProcessingQueue(self.__config.DAEMON['wait_delay'])
|
||||
handler = SupysonicWatcherEventHandler(self.__config.BASE['scanner_extensions'], queue)
|
||||
observer = Observer()
|
||||
|
||||
with db_session:
|
||||
|
@ -9,6 +9,7 @@
|
||||
# Distributed under terms of the GNU AGPLv3 license.
|
||||
|
||||
import io
|
||||
import logging
|
||||
import mimetypes
|
||||
|
||||
from flask import Flask
|
||||
@ -18,6 +19,8 @@ from pony.orm import db_session
|
||||
from .config import IniConfig
|
||||
from .db import init_database
|
||||
|
||||
logger = logging.getLogger(__package__)
|
||||
|
||||
def create_application(config = None):
|
||||
global app
|
||||
|
||||
@ -32,20 +35,13 @@ def create_application(config = None):
|
||||
# Set loglevel
|
||||
logfile = app.config['WEBAPP']['log_file']
|
||||
if logfile: # pragma: nocover
|
||||
import logging
|
||||
from logging.handlers import TimedRotatingFileHandler
|
||||
handler = TimedRotatingFileHandler(logfile, when = 'midnight')
|
||||
handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)s] %(message)s"))
|
||||
logger.addHandler(handler)
|
||||
loglevel = app.config['WEBAPP']['log_level']
|
||||
if loglevel:
|
||||
mapping = {
|
||||
'DEBUG': logging.DEBUG,
|
||||
'INFO': logging.INFO,
|
||||
'WARNING': logging.WARNING,
|
||||
'ERROR': logging.ERROR,
|
||||
'CRTICAL': logging.CRITICAL
|
||||
}
|
||||
handler.setLevel(mapping.get(loglevel.upper(), logging.NOTSET))
|
||||
app.logger.addHandler(handler)
|
||||
logger.setLevel(getattr(logging, loglevel.upper(), logging.NOTSET))
|
||||
|
||||
# Initialize database
|
||||
init_database(app.config['BASE']['database_uri'])
|
||||
|
@ -17,9 +17,8 @@ class LastFmTestCase(unittest.TestCase):
|
||||
""" Designed only to have coverage on the most important method """
|
||||
|
||||
def test_request(self):
    """Exercise the private API-request helper with a dummy method call."""
    # Silence the module-level logger LastFm now uses, so warnings emitted
    # during the request do not pollute the test output.
    logging.getLogger('supysonic.lastfm').addHandler(logging.NullHandler())
    lastfm = LastFm({ 'api_key': 'key', 'secret': 'secret' }, None)

    rv = lastfm._LastFm__api_request(False, method = 'dummy', accents = u'àéèùö')
    self.assertIsInstance(rv, dict)
|
||||
|
Loading…
Reference in New Issue
Block a user