Merge remote-tracking branch 'basilfx/logfix' into develop

This commit is contained in:
rembo10
2014-04-04 14:00:33 -07:00
14 changed files with 297 additions and 273 deletions

View File

@@ -146,7 +146,7 @@ def main():
# Force the http port if neccessary
if args.port:
http_port = args.port
logger.info('Using forced port: %i' % http_port)
logger.info('Using forced port: %i', http_port)
else:
http_port = int(headphones.HTTP_PORT)
@@ -176,7 +176,7 @@ def main():
except KeyboardInterrupt:
headphones.SIGNAL = 'shutdown'
else:
logger.info('Received signal: ' + headphones.SIGNAL)
logger.info('Received signal: %s', headphones.SIGNAL)
if headphones.SIGNAL == 'shutdown':
headphones.shutdown()
elif headphones.SIGNAL == 'restart':

View File

@@ -306,7 +306,7 @@ def check_setting_int(config, cfg_name, item_name, def_val):
except:
config[cfg_name] = {}
config[cfg_name][item_name] = my_val
logger.debug(item_name + " -> " + str(my_val))
logger.debug("%s -> %s", item_name, my_val)
return my_val
################################################################################
@@ -323,10 +323,7 @@ def check_setting_str(config, cfg_name, item_name, def_val, log=True):
config[cfg_name] = {}
config[cfg_name][item_name] = my_val
if log:
logger.debug(item_name + " -> " + my_val)
else:
logger.debug(item_name + " -> ******")
logger.debug("%s -> %s", item_name, my_val if log else "******")
return my_val
def initialize():
@@ -690,10 +687,10 @@ def initialize():
os.makedirs(LOG_DIR)
except OSError:
if VERBOSE:
print 'Unable to create the log directory. Logging to screen only.'
sys.stderr.write('Unable to create the log directory. Logging to screen only.\n')
# Start the logger, silence console logging if we need to
logger.headphones_log.initLogger(verbose=VERBOSE)
logger.initLogger(verbose=VERBOSE)
if not CACHE_DIR:
# Put the cache dir in the data dir for now
@@ -702,7 +699,7 @@ def initialize():
try:
os.makedirs(CACHE_DIR)
except OSError:
logger.error('Could not create cache dir. Check permissions of datadir: ' + DATA_DIR)
logger.error('Could not create cache dir. Check permissions of datadir: %s', DATA_DIR)
# Sanity check for search interval. Set it to at least 6 hours
if SEARCH_INTERVAL < 360:
@@ -714,7 +711,7 @@ def initialize():
try:
dbcheck()
except Exception, e:
logger.error("Can't connect to the database: %s" % e)
logger.error("Can't connect to the database: %s", e)
# Get the currently installed version - returns None, 'win32' or the git hash
# Also sets INSTALL_TYPE variable to 'win', 'git' or 'source'
@@ -751,7 +748,7 @@ def daemonize():
if pid != 0:
sys.exit(0)
except OSError, e:
raise RuntimeError("1st fork failed: %s [%d]" % (e.strerror, e.errno))
raise RuntimeError("1st fork failed: %s [%d]" % (e.strerror, e.errno))
os.setsid()
@@ -765,7 +762,7 @@ def daemonize():
if pid != 0:
sys.exit(0)
except OSError, e:
raise RuntimeError("2nd fork failed: %s [%d]" % (e.strerror, e.errno))
raise RuntimeError("2nd fork failed: %s [%d]" % (e.strerror, e.errno))
dev_null = file('/dev/null', 'r')
os.dup2(dev_null.fileno(), sys.stdin.fileno())
@@ -778,12 +775,13 @@ def daemonize():
os.dup2(so.fileno(), sys.stdout.fileno())
os.dup2(se.fileno(), sys.stderr.fileno())
pid = str(os.getpid())
logger.info('Daemonized to PID: %s' % pid)
pid = os.getpid()
logger.info('Daemonized to PID: %d', pid)
if CREATEPID:
logger.info("Writing PID " + pid + " to " + str(PIDFILE))
file(PIDFILE, 'w').write("%s\n" % pid)
logger.info("Writing PID %d to %s", pid, PIDFILE)
with file(PIDFILE, 'w') as fp:
fp.write("%s\n" % pid)
def launch_browser(host, port, root):
@@ -798,7 +796,7 @@ def launch_browser(host, port, root):
try:
webbrowser.open('%s://%s:%i%s' % (protocol, host, port, root))
except Exception, e:
logger.error('Could not launch browser: %s' % e)
logger.error('Could not launch browser: %s', e)
def config_write():
@@ -1082,7 +1080,7 @@ def start():
def sig_handler(signum=None, frame=None):
if type(signum) != type(None):
logger.info("Signal %i caught, saving and exiting..." % int(signum))
logger.info("Signal %i caught, saving and exiting...", signum)
shutdown()
def dbcheck():
@@ -1301,10 +1299,10 @@ def shutdown(restart=False, update=False):
try:
versioncheck.update()
except Exception, e:
logger.warn('Headphones failed to update: %s. Restarting.' % e)
logger.warn('Headphones failed to update: %s. Restarting.', e)
if CREATEPID :
logger.info ('Removing pidfile %s' % PIDFILE)
logger.info ('Removing pidfile %s', PIDFILE)
os.remove(PIDFILE)
if restart:
@@ -1313,7 +1311,7 @@ def shutdown(restart=False, update=False):
popen_list += ARGS
if '--nolaunch' not in popen_list:
popen_list += ['--nolaunch']
logger.info('Restarting Headphones with ' + str(popen_list))
logger.info('Restarting Headphones with %s', popen_list)
subprocess.Popen(popen_list, cwd=os.getcwd())
os._exit(0)

View File

@@ -44,7 +44,7 @@ def getCachedArt(albumid):
artwork = urllib2.urlopen(artwork_path, timeout=20).read()
return artwork
except:
logger.warn("Unable to open url: " + artwork_path)
logger.warn("Unable to open url: %s", artwork_path)
return None
else:
artwork = open(artwork_path, "r").read()

View File

@@ -78,8 +78,8 @@ class Api(object):
def fetchData(self):
if self.data == 'OK':
logger.info('Recieved API command: ' + self.cmd)
if self.data == 'OK':
logger.info('Received API command: %s', self.cmd)
methodToCall = getattr(self, "_" + self.cmd)
result = methodToCall(**self.kwargs)
if 'callback' not in self.kwargs:

View File

@@ -218,12 +218,12 @@ class Cache(object):
}
url = "http://ws.audioscrobbler.com/2.0/?" + urllib.urlencode(params)
logger.debug('Retrieving artist information from: ' + url)
logger.debug('Retrieving artist information from: %s', url)
try:
result = urllib2.urlopen(url, timeout=20).read()
except:
logger.warn('Could not open url: ' + url)
logger.warn('Could not open url: %s', url)
return
if result:
@@ -231,18 +231,18 @@ class Cache(object):
try:
data = simplejson.JSONDecoder().decode(result)
except:
logger.warn('Could not parse data from url: ' + url)
logger.warn('Could not parse data from url: %s', url)
return
try:
image_url = data['artist']['image'][-1]['#text']
except Exception:
logger.debug('No artist image found on url: ' + url)
logger.debug('No artist image found on url: %s', url)
image_url = None
thumb_url = self._get_thumb_url(data)
if not thumb_url:
logger.debug('No artist thumbnail image found on url: ' + url)
logger.debug('No artist thumbnail image found on url: %s', url)
else:
@@ -255,12 +255,12 @@ class Cache(object):
}
url = "http://ws.audioscrobbler.com/2.0/?" + urllib.urlencode(params)
logger.debug('Retrieving album information from: ' + url)
logger.debug('Retrieving album information from: %s', url)
try:
result = urllib2.urlopen(url, timeout=20).read()
except:
logger.warn('Could not open url: ' + url)
logger.warn('Could not open url: %s', url)
return
if result:
@@ -268,19 +268,19 @@ class Cache(object):
try:
data = simplejson.JSONDecoder().decode(result)
except:
logger.warn('Could not parse data from url: ' + url)
logger.warn('Could not parse data from url: %s', url)
return
try:
image_url = data['artist']['image'][-1]['#text']
except Exception:
logger.debug('No artist image found on url: ' + url)
logger.debug('No artist image found on url: %s', url)
image_url = None
thumb_url = self._get_thumb_url(data)
if not thumb_url:
logger.debug('No artist thumbnail image found on url: ' + url)
logger.debug('No artist thumbnail image found on url: %s', url)
image_dict = {'artwork' : image_url, 'thumbnail' : thumb_url }
return image_dict
@@ -301,12 +301,12 @@ class Cache(object):
}
url = "http://ws.audioscrobbler.com/2.0/?" + urllib.urlencode(params)
logger.debug('Retrieving artist information from: ' + url)
logger.debug('Retrieving artist information from: %s', url)
try:
result = urllib2.urlopen(url, timeout=20).read()
except:
logger.warn('Could not open url: ' + url)
logger.warn('Could not open url: %s', url)
return
if result:
@@ -314,27 +314,27 @@ class Cache(object):
try:
data = simplejson.JSONDecoder().decode(result)
except:
logger.warn('Could not parse data from url: ' + url)
logger.warn('Could not parse data from url: %s', url)
return
try:
self.info_summary = data['artist']['bio']['summary']
except Exception:
logger.debug('No artist bio summary found on url: ' + url)
logger.debug('No artist bio summary found on url: %s', url)
self.info_summary = None
try:
self.info_content = data['artist']['bio']['content']
except Exception:
logger.debug('No artist bio found on url: ' + url)
logger.debug('No artist bio found on url: %s', url)
self.info_content = None
try:
image_url = data['artist']['image'][-1]['#text']
except Exception:
logger.debug('No artist image found on url: ' + url)
logger.debug('No artist image found on url: %s', url)
image_url = None
thumb_url = self._get_thumb_url(data)
if not thumb_url:
logger.debug('No artist thumbnail image found on url: ' + url)
logger.debug('No artist thumbnail image found on url: %s', url)
else:
@@ -349,39 +349,39 @@ class Cache(object):
url = "http://ws.audioscrobbler.com/2.0/?" + urllib.urlencode(params)
logger.debug('Retrieving artist information from: ' + url)
logger.debug('Retrieving artist information from: %s', url)
try:
result = urllib2.urlopen(url, timeout=20).read()
except:
logger.warn('Could not open url: ' + url)
logger.warn('Could not open url: %s', url)
return
if result:
try:
data = simplejson.JSONDecoder().decode(result)
except:
logger.warn('Could not parse data from url: ' + url)
logger.warn('Could not parse data from url: %s', url)
return
try:
self.info_summary = data['album']['wiki']['summary']
except Exception:
logger.debug('No album summary found from: ' + url)
logger.debug('No album summary found from: %s', url)
self.info_summary = None
try:
self.info_content = data['album']['wiki']['content']
except Exception:
logger.debug('No album infomation found from: ' + url)
logger.debug('No album information found from: %s', url)
self.info_content = None
try:
image_url = data['album']['image'][-1]['#text']
except Exception:
logger.debug('No album image link found on url: ' + url)
logger.debug('No album image link found on url: %s', url)
image_url = None
thumb_url = self._get_thumb_url(data)
if not thumb_url:
logger.debug('No album thumbnail image found on url: ' + url)
logger.debug('No album thumbnail image found on url: %s', url)
#Save the content & summary to the database no matter what if we've opened up the url
if self.id_type == 'artist':
@@ -414,7 +414,7 @@ class Cache(object):
try:
artwork = urllib2.urlopen(image_url, timeout=20).read()
except Exception, e:
logger.error('Unable to open url "' + image_url + '". Error: ' + str(e))
logger.error('Unable to open url "%s". Error: %s', image_url, e)
artwork = None
if artwork:
@@ -424,7 +424,7 @@ class Cache(object):
try:
os.makedirs(self.path_to_art_cache)
except Exception, e:
logger.error('Unable to create artwork cache dir. Error: ' + str(e))
logger.error('Unable to create artwork cache dir. Error: %s', e)
self.artwork_errors = True
self.artwork_url = image_url
@@ -433,7 +433,7 @@ class Cache(object):
try:
os.remove(artwork_file)
except:
logger.error('Error deleting file from the cache: ' + artwork_file)
logger.error('Error deleting file from the cache: %s', artwork_file)
ext = os.path.splitext(image_url)[1]
@@ -443,7 +443,7 @@ class Cache(object):
f.write(artwork)
f.close()
except Exception, e:
logger.error('Unable to write to the cache dir: ' + str(e))
logger.error('Unable to write to the cache dir: %s', e)
self.artwork_errors = True
self.artwork_url = image_url
@@ -453,7 +453,7 @@ class Cache(object):
try:
artwork = urllib2.urlopen(thumb_url, timeout=20).read()
except Exception, e:
logger.error('Unable to open url "' + thumb_url + '". Error: ' + str(e))
logger.error('Unable to open url "%s". Error: %s', thumb_url, e)
artwork = None
if artwork:
@@ -463,7 +463,7 @@ class Cache(object):
try:
os.makedirs(self.path_to_art_cache)
except Exception, e:
logger.error('Unable to create artwork cache dir. Error: ' + str(e))
logger.error('Unable to create artwork cache dir. Error: %s', e)
self.thumb_errors = True
self.thumb_url = thumb_url
@@ -472,7 +472,7 @@ class Cache(object):
try:
os.remove(thumb_file)
except:
logger.error('Error deleting file from the cache: ' + thumb_file)
logger.error('Error deleting file from the cache: %s', thumb_file)
ext = os.path.splitext(image_url)[1]
@@ -482,7 +482,7 @@ class Cache(object):
f.write(artwork)
f.close()
except Exception, e:
logger.error('Unable to write to the cache dir: ' + str(e))
logger.error('Unable to write to the cache dir: %s', e)
self.thumb_errors = True
self.thumb_url = image_url

View File

@@ -73,14 +73,14 @@ class DBConnection:
break
except sqlite3.OperationalError, e:
if "unable to open database file" in e.message or "database is locked" in e.message:
logger.warn('Database Error: %s' % e)
logger.warn('Database Error: %s', e)
attempt += 1
time.sleep(1)
else:
logger.error('Database error: %s' % e)
logger.error('Database error: %s', e)
raise
except sqlite3.DatabaseError, e:
logger.error('Fatal Error executing %s :: %s' % (query, e))
logger.error('Fatal Error executing %s :: %s', query, e)
raise
return sqlResult

View File

@@ -341,7 +341,7 @@ def extract_song_data(s):
year = match.group("year")
return (name, album, year)
else:
logger.info("Couldn't parse " + s + " into a valid default format")
logger.info("Couldn't parse %s into a valid default format", s)
#newzbin default format
pattern = re.compile(r'(?P<name>.*?)\s\-\s(?P<album>.*?)\s\((?P<year>\d+?\))', re.VERBOSE)
@@ -352,7 +352,7 @@ def extract_song_data(s):
year = match.group("year")
return (name, album, year)
else:
logger.info("Couldn't parse " + s + " into a valid Newbin format")
logger.info("Couldn't parse %s into a valid Newbin format", s)
return (name, album, year)
def smartMove(src, dest, delete=True):
@@ -363,7 +363,7 @@ def smartMove(src, dest, delete=True):
filename = os.path.basename(src)
if os.path.isfile(os.path.join(dest, filename)):
logger.info('Destination file exists: %s' % os.path.join(dest, filename).decode(headphones.SYS_ENCODING, 'replace'))
logger.info('Destination file exists: %s', os.path.join(dest, filename).decode(headphones.SYS_ENCODING, 'replace'))
title = os.path.splitext(filename)[0]
ext = os.path.splitext(filename)[1]
i = 1
@@ -372,12 +372,12 @@ def smartMove(src, dest, delete=True):
if os.path.isfile(os.path.join(dest, newfile)):
i += 1
else:
logger.info('Renaming to %s' % newfile)
try:
logger.info('Renaming to %s', newfile)
try:
os.rename(src, os.path.join(source_dir, newfile))
filename = newfile
except Exception, e:
logger.warn('Error renaming %s: %s' % (src.decode(headphones.SYS_ENCODING, 'replace'), str(e).decode(headphones.SYS_ENCODING, 'replace')))
logger.warn('Error renaming %s: %s', src.decode(headphones.SYS_ENCODING, 'replace'), e)
break
try:
@@ -387,7 +387,7 @@ def smartMove(src, dest, delete=True):
shutil.copy(os.path.join(source_dir, filename), os.path.join(dest, filename))
return True
except Exception, e:
logger.warn('Error moving file %s: %s' % (filename.decode(headphones.SYS_ENCODING, 'replace'), str(e).decode(headphones.SYS_ENCODING, 'replace')))
logger.warn('Error moving file %s: %s', filename.decode(headphones.SYS_ENCODING, 'replace'), e)
#########################
#Sab renaming functions #
@@ -453,12 +453,12 @@ def create_https_certificates(ssl_cert, ssl_key):
Create self-signed HTTPS certificates and store in paths 'ssl_cert' and 'ssl_key'
"""
from headphones import logger
try:
from OpenSSL import crypto #@UnresolvedImport
from lib.certgen import createKeyPair, createCertRequest, createCertificate, TYPE_RSA, serial #@UnresolvedImport
from OpenSSL import crypto
from lib.certgen import createKeyPair, createCertRequest, createCertificate, TYPE_RSA, serial
except:
logger.warn(u"pyopenssl module missing, please install for https access")
logger.warn("pyOpenSSL module missing, please install to enable HTTPS")
return False
# Create the CA Certificate
@@ -475,8 +475,8 @@ def create_https_certificates(ssl_cert, ssl_key):
try:
open(ssl_key, 'w').write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
open(ssl_cert, 'w').write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
except:
logger.error(u"Error creating SSL key and certificate")
except Exception, e:
logger.error("Error creating SSL key and certificate: %s", e)
return False
return True

View File

@@ -521,10 +521,10 @@ def addReleaseById(rid):
try:
release_dict = mb.getRelease(rid)
except Exception, e:
logger.info('Unable to get release information for Release: ' + str(rid) + " " + str(e))
logger.info('Unable to get release information for Release %s: %s', rid, e)
return
if not release_dict:
logger.info('Unable to get release information for Release: ' + str(rid) + " no dict")
logger.info('Unable to get release information for Release %s: no dict', rid)
return
rgid = release_dict['rgid']
@@ -664,7 +664,7 @@ def getHybridRelease(fullreleaselist):
'2xVinyl': '2',
'Vinyl': '2',
'CD': '0',
'Cassette': '3',
'Cassette': '3',
'2xCD': '1',
'Digital Media': '0'
}
@@ -681,7 +681,7 @@ def getHybridRelease(fullreleaselist):
format = 3
try:
country = int(countries[release['Country']])
country = int(countries[release['Country']])
except:
country = 3

View File

@@ -82,7 +82,7 @@ def getSimilar():
myDB.action('''DELETE from lastfmcloud''')
for tuple in top_list:
artist_name, artist_mbid = tuple[0]
count = tuple[1]
count = tuple[1]
myDB.action('INSERT INTO lastfmcloud VALUES( ?, ?, ?)', [artist_name, artist_mbid, count])
def getArtists():
@@ -95,7 +95,9 @@ def getArtists():
else:
username = headphones.LASTFM_USERNAME
logger.info("Starting Last.FM artists import with username '%s'", username)
url = 'http://ws.audioscrobbler.com/2.0/?method=library.getartists&limit=10000&api_key=%s&user=%s' % (api_key, username)
data = urllib2.urlopen(url, timeout=20).read()
@@ -106,6 +108,7 @@ def getArtists():
return
artists = d.getElementsByTagName("artist")
logger.info("Fetched %d artists from Last.FM", len(artists))
artistlist = []
@@ -125,6 +128,8 @@ def getArtists():
for artistid in artistlist:
importer.addArtisttoDB(artistid)
logger.info("Imported %d new artists from Last.FM", len(artistlist))
def getTagTopArtists(tag, limit=50):
myDB = db.DBConnection()
@@ -136,7 +141,7 @@ def getTagTopArtists(tag, limit=50):
try:
d = minidom.parseString(data)
except:
logger.error("Could not parse artist list from last.fm data")
logger.error("Could not parse artist list from Last.FM data")
return
artists = d.getElementsByTagName("artist")
@@ -200,6 +205,7 @@ def getAlbumDescription(rgid, artist, album):
myDB.upsert("descriptions", newValueDict, controlValueDict)
except:
logger.exception("Unhandled exception")
return
def getAlbumDescriptionOld(rgid, releaselist):
@@ -208,7 +214,7 @@ def getAlbumDescriptionOld(rgid, releaselist):
because I may use it to fetch and cache album art
"""
myDB = db.DBConnection()
myDB = db.DBConnection()
result = myDB.select('SELECT Summary from descriptions WHERE ReleaseGroupID=?', [rgid])
if result:
@@ -235,15 +241,13 @@ def getAlbumDescriptionOld(rgid, releaselist):
summary = node.data
for node in contentnode:
content = node.data
controlValueDict = {'ReleaseGroupID': rgid}
newValueDict = {'ReleaseID': mbid,
'Summary': summary,
'Content': content}
myDB.upsert("descriptions", newValueDict, controlValueDict)
myDB.upsert("descriptions", newValueDict, controlValueDict)
break
except:
continue
logger.exception("Unhandled exception")
continue

View File

@@ -14,83 +14,132 @@
# along with Headphones. If not, see <http://www.gnu.org/licenses/>.
import os
import threading
import sys
import logging
from logging import handlers
import traceback
import threading
import headphones
from logging import handlers
from headphones import helpers
MAX_SIZE = 1000000 # 1mb
# These settings are for file logging only
FILENAME = 'headphones.log'
MAX_SIZE = 1000000 # 1 MB
MAX_FILES = 5
# Headphones logger
logger = logging.getLogger('headphones')
# Simple rotating log handler that uses RotatingFileHandler
class RotatingLogger(object):
class LogListHandler(logging.Handler):
"""
Log handler for Web UI.
"""
def __init__(self, filename, max_size, max_files):
self.filename = filename
self.max_size = max_size
self.max_files = max_files
def initLogger(self, verbose=1):
l = logging.getLogger('headphones')
l.setLevel(logging.DEBUG)
self.filename = os.path.join(headphones.LOG_DIR, self.filename)
filehandler = handlers.RotatingFileHandler(self.filename, maxBytes=self.max_size, backupCount=self.max_files)
filehandler.setLevel(logging.DEBUG)
fileformatter = logging.Formatter('%(asctime)s - %(levelname)-7s :: %(message)s', '%d-%b-%Y %H:%M:%S')
filehandler.setFormatter(fileformatter)
l.addHandler(filehandler)
if verbose:
consolehandler = logging.StreamHandler()
if verbose == 1:
consolehandler.setLevel(logging.INFO)
if verbose == 2:
consolehandler.setLevel(logging.DEBUG)
consoleformatter = logging.Formatter('%(asctime)s - %(levelname)s :: %(message)s', '%d-%b-%Y %H:%M:%S')
consolehandler.setFormatter(consoleformatter)
l.addHandler(consolehandler)
def log(self, message, level):
def emit(self, record):
message = self.format(record)
message = message.replace("\n", "<br />")
logger = logging.getLogger('headphones')
threadname = threading.currentThread().getName()
if level != 'DEBUG':
headphones.LOG_LIST.insert(0, (helpers.now(), message, level, threadname))
message = threadname + ' : ' + message
headphones.LOG_LIST.insert(0, (helpers.now(), message, record.levelname, record.threadName))
if level == 'DEBUG':
logger.debug(message)
elif level == 'INFO':
logger.info(message)
elif level == 'WARNING':
logger.warn(message)
else:
logger.error(message)
def initLogger(verbose=1):
"""
Setup logging for Headphones. It uses the logger instance with the name
'headphones'. Three log handlers are added:
headphones_log = RotatingLogger('headphones.log', MAX_SIZE, MAX_FILES)
* RotatingFileHandler: for the file headphones.log
* LogListHandler: for Web UI
* StreamHandler: for console (if verbose > 0)
"""
def debug(message):
headphones_log.log(message, level='DEBUG')
# Configure the logger to accept all messages
logger.propagate = False
logger.setLevel(logging.DEBUG)
def info(message):
headphones_log.log(message, level='INFO')
def warn(message):
headphones_log.log(message, level='WARNING')
def error(message):
headphones_log.log(message, level='ERROR')
# Setup file logger
filename = os.path.join(headphones.LOG_DIR, FILENAME)
file_formatter = logging.Formatter('%(asctime)s - %(levelname)-7s :: %(threadName)s : %(message)s', '%d-%b-%Y %H:%M:%S')
file_handler = handlers.RotatingFileHandler(filename, maxBytes=MAX_SIZE, backupCount=MAX_FILES)
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(file_formatter)
logger.addHandler(file_handler)
# Add list logger
loglist_handler = LogListHandler()
loglist_handler.setLevel(logging.INFO)
logger.addHandler(loglist_handler)
# Setup console logger
if verbose:
console_formatter = logging.Formatter('%(asctime)s - %(levelname)s :: %(threadName)s : %(message)s', '%d-%b-%Y %H:%M:%S')
console_handler = logging.StreamHandler()
console_handler.setFormatter(console_formatter)
if verbose == 1:
console_handler.setLevel(logging.INFO)
elif verbose == 2:
console_handler.setLevel(logging.DEBUG)
logger.addHandler(console_handler)
# Install exception hooks
initHooks()
def initHooks(global_exceptions=True, thread_exceptions=True, pass_original=True):
"""
This method installs exception catching mechanisms. Any exception caught
will pass through the exception hook, and will be logged to the logger as
an error. Additionally, a traceback is provided.
This is very useful for crashing threads and any other bugs, that may not
be exposed when running as daemon.
The default exception hook is still considered, if pass_original is True.
"""
def excepthook(*exception_info):
# We should always catch this to prevent loops!
try:
message = "".join(traceback.format_exception(*exception_info))
logger.error("Uncaught exception: %s", message)
except:
pass
# Original excepthook
if pass_original:
sys.__excepthook__(*exception_info)
# Global exception hook
if global_exceptions:
sys.excepthook = excepthook
# Thread exception hook
if thread_exceptions:
old_init = threading.Thread.__init__
def new_init(self, *args, **kwargs):
old_init(self, *args, **kwargs)
old_run = self.run
def new_run(*args, **kwargs):
try:
old_run(*args, **kwargs)
except (KeyboardInterrupt, SystemExit):
raise
except:
excepthook(*sys.exc_info())
self.run = new_run
# Monkey patch the run() by monkey patching the __init__ method
threading.Thread.__init__ = new_init
# Expose logger methods
info = logger.info
warn = logger.warn
error = logger.error
debug = logger.debug
warning = logger.warning
exception = logger.exception

View File

@@ -23,11 +23,6 @@ import subprocess
from headphones import logger
from beets.mediafile import MediaFile
try:
import argparse
except ImportError:
import lib.argparse as argparse
# xld
if headphones.ENCODER == 'xld':
import getXldProfile
@@ -42,33 +37,36 @@ def encode(albumPath):
global xldProfile
(xldProfile, xldFormat, xldBitrate) = getXldProfile.getXldProfile(headphones.XLDPROFILE)
if not xldFormat:
logger.error(u'Details for xld profile %s not found, files will not be re-encoded' % (xldProfile))
logger.error('Details for xld profile \'%s\' not found, files will not be re-encoded', xldProfile)
return None
tempDirEncode=os.path.join(albumPath,"temp")
musicFiles=[]
musicFinalFiles=[]
musicTempFiles=[]
encoder =""
if not os.path.exists(tempDirEncode):
encoder = ""
# Create temporary directory, but remove the old one first.
try:
if os.path.exists(tempDirEncode):
shutil.rmtree(tempDirEncode)
time.sleep(1)
os.mkdir(tempDirEncode)
else:
shutil.rmtree(tempDirEncode)
time.sleep(1)
os.mkdir(tempDirEncode)
except Exception, e:
logger.exception("Unable to create temporary directory")
return None
for r,d,f in os.walk(albumPath):
for music in f:
if any(music.lower().endswith('.' + x.lower()) for x in headphones.MEDIA_FORMATS):
if not XLD:
encoderFormat = headphones.ENCODEROUTPUTFORMAT.encode(headphones.SYS_ENCODING)
else:
xldMusicFile = os.path.join(r, music)
xldInfoMusic = MediaFile(xldMusicFile)
encoderFormat = xldFormat
if (headphones.ENCODERLOSSLESS):
ext = os.path.normpath(os.path.splitext(music)[1].lstrip(".")).lower()
if not XLD and ext == 'flac' or XLD and (ext != xldFormat and (xldInfoMusic.bitrate / 1000 > 400)):
@@ -76,7 +74,7 @@ def encode(albumPath):
musicTemp = os.path.normpath(os.path.splitext(music)[0] + '.' + encoderFormat)
musicTempFiles.append(os.path.join(tempDirEncode, musicTemp))
else:
logger.debug('%s is already encoded' % (music))
logger.debug('%s is already encoded', music)
else:
musicFiles.append(os.path.join(r, music))
musicTemp = os.path.normpath(os.path.splitext(music)[0] + '.' + encoderFormat)
@@ -86,7 +84,7 @@ def encode(albumPath):
encoder = headphones.ENCODER_PATH.encode(headphones.SYS_ENCODING)
else:
if XLD:
encoder = os.path.join('/Applications', 'xld')
encoder = os.path.join('/Applications', 'xld')
elif headphones.ENCODER =='lame':
if headphones.SYS_PLATFORM == "win32":
## NEED THE DEFAULT LAME INSTALL ON WIN!
@@ -103,32 +101,32 @@ def encode(albumPath):
encoder_failed = False
jobs = []
for music in musicFiles:
for music in musicFiles:
infoMusic=MediaFile(music)
encode = False
if XLD:
if xldBitrate and (infoMusic.bitrate / 1000 <= xldBitrate):
logger.info('%s has bitrate <= %skb, will not be re-encoded' % (music.decode(headphones.SYS_ENCODING, 'replace'), xldBitrate))
logger.info('%s has bitrate <= %skb, will not be re-encoded', music.decode(headphones.SYS_ENCODING, 'replace'), xldBitrate)
else:
encode = True
elif headphones.ENCODER == 'lame':
if not any(music.decode(headphones.SYS_ENCODING, 'replace').lower().endswith('.' + x) for x in ["mp3", "wav"]):
logger.warn(u'Lame cannot encode %s format for %s, use ffmpeg' % (os.path.splitext(music)[1].decode(headphones.SYS_ENCODING, 'replace'),music.decode(headphones.SYS_ENCODING, 'replace')))
logger.warn('Lame cannot encode %s format for %s, use ffmpeg', os.path.splitext(music)[1], music)
else:
if (music.decode(headphones.SYS_ENCODING, 'replace').lower().endswith('.mp3') and (int(infoMusic.bitrate / 1000) <= headphones.BITRATE)):
logger.info('%s has bitrate <= %skb, will not be re-encoded' % (music.decode(headphones.SYS_ENCODING, 'replace'),headphones.BITRATE))
logger.info('%s has bitrate <= %skb, will not be re-encoded', music, headphones.BITRATE)
else:
encode = True
else:
if headphones.ENCODEROUTPUTFORMAT=='ogg':
if music.decode(headphones.SYS_ENCODING, 'replace').lower().endswith('.ogg'):
logger.warn('Cannot re-encode .ogg %s' % (music.decode(headphones.SYS_ENCODING, 'replace')))
logger.warn('Cannot re-encode .ogg %s', music.decode(headphones.SYS_ENCODING, 'replace'))
else:
encode = True
elif (headphones.ENCODEROUTPUTFORMAT=='mp3' or headphones.ENCODEROUTPUTFORMAT=='m4a'):
if (music.decode(headphones.SYS_ENCODING, 'replace').lower().endswith('.'+headphones.ENCODEROUTPUTFORMAT) and (int(infoMusic.bitrate / 1000 ) <= headphones.BITRATE)):
logger.info('%s has bitrate <= %skb, will not be re-encoded' % (music.decode(headphones.SYS_ENCODING, 'replace'),headphones.BITRATE))
logger.info('%s has bitrate <= %skb, will not be re-encoded', music, headphones.BITRATE)
else:
encode = True
# encode
@@ -149,7 +147,7 @@ def encode(albumPath):
else:
processes = headphones.ENCODER_MULTICORE_COUNT
logger.debug("Multi-core encoding enabled, %d processes" % processes)
logger.debug("Multi-core encoding enabled, %d processes", processes)
else:
processes = 1
@@ -180,7 +178,7 @@ def encode(albumPath):
for dest in musicTempFiles:
if not os.path.exists(dest):
encoder_failed = True
logger.error('Encoded file %s does not exist in the destination temp directory' % (dest.decode(headphones.SYS_ENCODING, 'replace')))
logger.error('Encoded file \'%s\' does not exist in the destination temp directory', dest)
# No errors, move from temp to parent
if not encoder_failed and musicTempFiles:
@@ -196,7 +194,7 @@ def encode(albumPath):
try:
shutil.move(dest, albumPath)
except Exception, e:
logger.error('Could not move %s to %s : %s' % (dest.decode(headphones.SYS_ENCODING, 'replace'), albumPath.decode(headphones.SYS_ENCODING, 'replace'), e))
logger.error('Could not move %s to %s: %s', dest, albumPath, e)
encoder_failed = True
break
i += 1
@@ -206,7 +204,7 @@ def encode(albumPath):
# Return with error if any encoding errors
if encoder_failed:
logger.error('One or more files failed to encode, check debuglog and ensure you have the latest version of %s installed' % (headphones.ENCODER))
logger.error('One or more files failed to encode, check debuglog and ensure you have the latest version of %s installed', headphones.ENCODER)
return None
time.sleep(1)
@@ -214,17 +212,24 @@ def encode(albumPath):
for music in f:
if any(music.lower().endswith('.' + x.lower()) for x in headphones.MEDIA_FORMATS):
musicFinalFiles.append(os.path.join(r, music))
if not musicTempFiles:
logger.info('Encoding for folder %s is not required' % (albumPath.decode(headphones.SYS_ENCODING, 'replace')))
logger.info('Encoding for folder \'%s\' is not required', albumPath)
return musicFinalFiles
def command_map(args):
return command(*args)
"""
This method is used for the multiprocessing.map() method as a wrapper.
"""
def command(encoder,musicSource,musicDest,albumPath):
try:
return command(*args)
except Exception, e:
logger.exception("Encoder exception, will return failed")
return False
def command(encoder, musicSource, musicDest, albumPath):
cmd=[]
startMusicTime=time.time()
@@ -303,7 +308,7 @@ def command(encoder,musicSource,musicDest,albumPath):
logger.debug(out)
encoded = False
else:
logger.info('%s encoded in %s' % (musicSource.decode(headphones.SYS_ENCODING, 'replace'),getTimeEncode(startMusicTime)))
logger.info('%s encoded in %s', musicSource, getTimeEncode(startMusicTime))
encoded = True
return encoded

View File

@@ -922,14 +922,14 @@ def renameFiles(albumpath, downloaded_track_list, release):
def updateFilePermissions(albumpaths):
for folder in albumpaths:
logger.info("Updating file permissions in " + folder.decode(headphones.SYS_ENCODING, 'replace'))
logger.info("Updating file permissions in %s", folder)
for r,d,f in os.walk(folder):
for files in f:
full_path = os.path.join(r, files)
try:
os.chmod(full_path, int(headphones.FILE_PERMISSIONS, 8))
except:
logger.error("Could not change permissions for file: " + full_path.decode(headphones.SYS_ENCODING, 'replace'))
logger.error("Could not change permissions for file: %s", full_path)
continue
def renameUnprocessedFolder(albumpath):
@@ -949,7 +949,6 @@ def renameUnprocessedFolder(albumpath):
return
def forcePostProcess(dir=None):
download_dirs = []
if dir:
download_dirs.append(dir.encode(headphones.SYS_ENCODING, 'replace'))
@@ -961,12 +960,12 @@ def forcePostProcess(dir=None):
# If DOWNLOAD_DIR and DOWNLOAD_TORRENT_DIR are the same, remove the duplicate to prevent us from trying to process the same folder twice.
download_dirs = list(set(download_dirs))
logger.info('Checking to see if there are any folders to process in download_dir(s): %s' % str(download_dirs).decode(headphones.SYS_ENCODING, 'replace'))
logger.info('Checking to see if there are any folders to process in download_dir(s): %s', download_dirs)
# Get a list of folders in the download_dir
folders = []
for download_dir in download_dirs:
if not os.path.isdir(download_dir):
logger.warn('Directory ' + download_dir.decode(headphones.SYS_ENCODING, 'replace') + ' does not exist. Skipping')
logger.warn('Directory %s does not exist. Skipping', download_dir)
continue
for folder in os.listdir(download_dir):
path_to_folder = os.path.join(download_dir, folder)
@@ -974,16 +973,16 @@ def forcePostProcess(dir=None):
folders.append(path_to_folder)
if len(folders):
logger.info('Found %i folders to process' % len(folders))
logger.info('Found %i folders to process', len(folders))
else:
logger.info('Found no folders to process in: %s' % str(download_dirs).decode(headphones.SYS_ENCODING, 'replace'))
logger.info('Found no folders to process in: %s', download_dirs)
# Parse the folder names to get artist album info
myDB = db.DBConnection()
for folder in folders:
folder_basename = os.path.basename(folder).decode(headphones.SYS_ENCODING, 'replace')
logger.info('Processing: %s' % folder_basename)
logger.info('Processing: %s', folder_basename)
# Attempt 1: First try to see if there's a match in the snatched table,
# then we'll try to parse the foldername.
@@ -995,10 +994,10 @@ def forcePostProcess(dir=None):
if snatched:
if headphones.KEEP_TORRENT_FILES and snatched['Kind'] == 'torrent' and snatched['Status'] == 'Processed':
logger.info(folder_basename + ' is a torrent folder being preserved for seeding and has already been processed. Skipping.')
logger.info('%s is a torrent folder being preserved for seeding and has already been processed. Skipping.', folder_basename)
continue
else:
logger.info('Found a match in the database: %s. Verifying to make sure it is the correct album' % snatched['Title'])
logger.info('Found a match in the database: %s. Verifying to make sure it is the correct album', snatched['Title'])
verify(snatched['AlbumID'], folder, snatched['Kind'])
continue
@@ -1020,11 +1019,11 @@ def forcePostProcess(dir=None):
if name and album and year:
release = myDB.action('SELECT AlbumID, ArtistName, AlbumTitle from albums WHERE ArtistName LIKE ? and AlbumTitle LIKE ?', [name, album]).fetchone()
if release:
logger.info('Found a match in the database: %s - %s. Verifying to make sure it is the correct album' % (release['ArtistName'], release['AlbumTitle']))
logger.info('Found a match in the database: %s - %s. Verifying to make sure it is the correct album', release['ArtistName'], release['AlbumTitle'])
verify(release['AlbumID'], folder)
continue
else:
logger.info('Querying MusicBrainz for the release group id for: %s - %s' % (name, album))
logger.info('Querying MusicBrainz for the release group id for: %s - %s', name, album)
from headphones import mb
try:
rgid = mb.findAlbumID(helpers.latinToAscii(name), helpers.latinToAscii(album))
@@ -1036,7 +1035,7 @@ def forcePostProcess(dir=None):
verify(rgid, folder)
continue
else:
logger.info('No match found on MusicBrainz for: %s - %s' % (name, album))
logger.info('No match found on MusicBrainz for: %s - %s', name, album)
# Attempt 3: strip release group id from filename
try:
@@ -1045,14 +1044,14 @@ def forcePostProcess(dir=None):
# re pattern match: [0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}
rgid = uuid.UUID(possible_rgid)
except:
logger.info("Couldn't parse " + folder_basename + " into any valid format. If adding albums from another source, they must be in an 'Artist - Album [Year]' format, or end with the musicbrainz release group id")
logger.info("Couldn't parse '%s' into any valid format. If adding albums from another source, they must be in an 'Artist - Album [Year]' format, or end with the musicbrainz release group id", folder_basename)
rgid = possible_rgid = None
if rgid:
rgid = possible_rgid
release = myDB.action('SELECT ArtistName, AlbumTitle, AlbumID from albums WHERE AlbumID=?', [rgid]).fetchone()
if release:
logger.info('Found a match in the database: %s - %s. Verifying to make sure it is the correct album' % (release['ArtistName'], release['AlbumTitle']))
logger.info('Found a match in the database: %s - %s. Verifying to make sure it is the correct album', release['ArtistName'], release['AlbumTitle'])
verify(release['AlbumID'], folder, forced=True)
else:
logger.info('Found a (possibly) valid Musicbrainz identifier in album folder name - continuing post-processing')

View File

@@ -238,7 +238,7 @@ def sort_search_results(resultlist, album, new):
if high_size_limit and (int(result[1]) > high_size_limit):
logger.info(result[0] + " is too large for this album - not considering it. (Size: " + helpers.bytes_to_mb(result[1]) + ", Maxsize: " + helpers.bytes_to_mb(high_size_limit) + ")")
logger.info("%s is too large for this album - not considering it. (Size: %s, Maxsize: %s)", result[0], helpers.bytes_to_mb(result[1]), helpers.bytes_to_mb(high_size_limit))
# Add lossless nzbs to the "flac list" which we can use if there are no good lossy matches
if 'flac' in result[0].lower():
@@ -247,7 +247,7 @@ def sort_search_results(resultlist, album, new):
continue
if low_size_limit and (int(result[1]) < low_size_limit):
logger.info(result[0] + " is too small for this album - not considering it. (Size: " + helpers.bytes_to_mb(result[1]) + ", Minsize: " + helpers.bytes_to_mb(low_size_limit) + ")")
logger.info("%s is too small for this album - not considering it. (Size: %s, Minsize: %s)", result[0], helpers.bytes_to_mb(result[1]), helpers.bytes_to_mb(low_size_limit))
continue
delta = abs(targetsize - int(result[1]))
@@ -258,11 +258,9 @@ def sort_search_results(resultlist, album, new):
if not len(finallist) and len(flac_list) and headphones.PREFERRED_BITRATE_ALLOW_LOSSLESS:
logger.info("Since there were no appropriate lossy matches (and at least one lossless match, going to use lossless instead")
finallist = sorted(flac_list, key=lambda title: (title[5], int(title[1])), reverse=True)
except Exception, e:
logger.debug('Error: %s' % str(e))
logger.info('No track information for %s - %s. Defaulting to highest quality' % (album['ArtistName'], album['AlbumTitle']))
logger.exception('Unhandled exception')
logger.info('No track information for %s - %s. Defaulting to highest quality', (album['ArtistName'], album['AlbumTitle']))
finallist = sorted(resultlist, key=lambda title: (title[5], int(title[1])), reverse=True)
@@ -289,7 +287,7 @@ def sort_search_results(resultlist, album, new):
return None
if not len(finallist):
logger.info('No appropriate matches found for %s - %s' % (album['ArtistName'], album['AlbumTitle']))
logger.info('No appropriate matches found for %s - %s', album['ArtistName'], album['AlbumTitle'])
return None
return finallist
@@ -307,7 +305,6 @@ def searchNZB(album, new=False, losslessOnly=False):
albumid = album['AlbumID']
reldate = album['ReleaseDate']
year = get_year_from_release_date(reldate)
dic = {'...':'', ' & ':' ', ' = ': ' ', '?':'', '$':'s', ' + ':' ', '"':'', ',':'', '*':'', '.':'', ':':''}
@@ -332,7 +329,6 @@ def searchNZB(album, new=False, losslessOnly=False):
# Replace bad characters in the term and unicode it
term = re.sub('[\.\-\/]', ' ', term).encode('utf-8')
artistterm = re.sub('[\.\-\/]', ' ', cleanartist).encode('utf-8')
logger.info("Searching for %s since it was marked as wanted" % term)
@@ -446,22 +442,19 @@ def searchNZB(album, new=False, losslessOnly=False):
request.add_header('User-Agent', USER_AGENT)
opener = urllib2.build_opener()
logger.info(u'Parsing results from <a href="%s">%s</a>' % (searchURL, newznab_host[0]))
logger.info(u'Parsing results from <a href="%s">%s</a>', searchURL, newznab_host[0])
try:
data = opener.open(request).read()
except Exception, e:
logger.warn('Error fetching data from %s: %s' % (newznab_host[0], e))
logger.warn('Error fetching data from %s: %s', newznab_host[0], e)
data = False
if data:
d = feedparser.parse(data)
if not len(d.entries):
logger.info(u"No results found from %s for %s" % (newznab_host[0], term))
pass
logger.info(u"No results found from %s for %s", newznab_host[0], term)
else:
for item in d.entries:
try:
@@ -506,7 +499,6 @@ def searchNZB(album, new=False, losslessOnly=False):
data = False
if data:
d = feedparser.parse(data)
if not len(d.entries):
@@ -522,12 +514,10 @@ def searchNZB(album, new=False, losslessOnly=False):
resultlist.append((title, size, url, provider, 'nzb'))
logger.info('Found %s. Size: %s' % (title, helpers.bytes_to_mb(size)))
except Exception, e:
logger.error(u"An unknown error occurred trying to parse the feed: %s" % e)
logger.exception("Unhandled exception while parsing feed")
if headphones.NZBSRUS:
provider = "nzbsrus"
categories = "54"
@@ -562,17 +552,15 @@ def searchNZB(album, new=False, losslessOnly=False):
try:
data = opener.open(request).read()
except Exception, e:
logger.warn('Error fetching data from NZBsRus: %s' % e)
logger.warn('Error fetching data from NZBsRus: %s', e)
data = False
if data:
d = json.loads(data)
if d['matches'] <= 0:
logger.info(u"No results found from NZBsRus for %s" % term)
pass
else:
for item in d['results']:
try:
@@ -581,14 +569,12 @@ def searchNZB(album, new=False, losslessOnly=False):
size = int(item['size'])
resultlist.append((title, size, url, provider, 'nzb'))
logger.info('Found %s. Size: %s' % (title, helpers.bytes_to_mb(size)))
logger.info('Found %s. Size: %s', title, helpers.bytes_to_mb(size))
except Exception, e:
logger.error(u"An unknown error occurred trying to parse the feed: %s" % e)
logger.exception("Unhandled exception")
if headphones.OMGWTFNZBS:
provider = "omgwtfnzbs"
if headphones.PREFERRED_QUALITY == 3 or losslessOnly:
@@ -621,15 +607,15 @@ def searchNZB(album, new=False, losslessOnly=False):
try:
data = opener.open(request).read()
except Exception, e:
logger.warn('Error fetching data from omgwtfnzbs: %s' % e)
logger.warn('Error fetching data from omgwtfnzbs: %s', e)
data = False
if data:
d = json.loads(data)
if 'notice' in data:
logger.info(u"No results returned from omgwtfnzbs: %s" % d['notice'])
if 'notice' in data:
logger.info(u"No results returned from omgwtfnzbs: %s", d['notice'])
pass
else:
@@ -640,10 +626,10 @@ def searchNZB(album, new=False, losslessOnly=False):
size = int(item['sizebytes'])
resultlist.append((title, size, url, provider, 'nzb'))
logger.info('Found %s. Size: %s' % (title, helpers.bytes_to_mb(size)))
logger.info('Found %s. Size: %s', title, helpers.bytes_to_mb(size))
except Exception, e:
logger.error(u"An unknown error occurred trying to parse the results: %s" % e)
logger.exception("Unhandled exception")
# attempt to verify that this isn't a substring result
# when looking for "Foo - Foo" we don't want "Foobar"
@@ -661,7 +647,7 @@ def searchNZB(album, new=False, losslessOnly=False):
def send_to_downloader(data, bestqual, album):
logger.info(u'Found best result from %s: <a href="%s">%s</a> - %s' % (bestqual[3], bestqual[2], bestqual[0], helpers.bytes_to_mb(bestqual[1])))
logger.info(u'Found best result from %s: <a href="%s">%s</a> - %s', bestqual[3], bestqual[2], bestqual[0], helpers.bytes_to_mb(bestqual[1]))
# Get rid of any dodgy chars here so we can prevent sab from renaming our downloads
kind = bestqual[4]
@@ -702,7 +688,7 @@ def send_to_downloader(data, bestqual, album):
os.umask(prev)
logger.info('File saved to: %s' % nzb_name)
except Exception, e:
logger.error('Couldn\'t write NZB file: %s' % e)
logger.error('Couldn\'t write NZB file: %s', e)
return
else:
folder_name = '%s - %s [%s]' % (helpers.latinToAscii(album['ArtistName']).encode('UTF-8').replace('/', '_'), helpers.latinToAscii(album['AlbumTitle']).encode('UTF-8').replace('/', '_'), get_year_from_release_date(album['ReleaseDate']))
@@ -795,24 +781,24 @@ def verifyresult(title, artistterm, term, lossless):
# Filter out remix search results (if we're not looking for it)
if 'remix' not in term.lower() and 'remix' in title.lower():
logger.info("Removed " + title + " from results because it's a remix album and we're not looking for a remix album right now")
logger.info("Removed %s from results because it's a remix album and we're not looking for a remix album right now.", title)
return False
# Filter out FLAC if we're not specifically looking for it
if headphones.PREFERRED_QUALITY == (0 or '0') and 'flac' in title.lower() and not lossless:
logger.info("Removed " + title + " from results because it's a lossless album and we're not looking for a lossless album right now")
logger.info("Removed %s from results because it's a lossless album and we're not looking for a lossless album right now.", title)
return False
if headphones.IGNORED_WORDS:
for each_word in helpers.split_string(headphones.IGNORED_WORDS):
if each_word.lower() in title.lower():
logger.info("Removed " + title + " from results because it contains ignored word: '" + each_word + "'")
logger.info("Removed '%s' from results because it contains ignored word: '%s'", title, each_word)
return False
if headphones.REQUIRED_WORDS:
for each_word in helpers.split_string(headphones.REQUIRED_WORDS):
if each_word.lower() not in title.lower():
logger.info("Removed " + title + " from results because it doesn't contain required word: '" + each_word + "'")
logger.info("Removed '%s' from results because it doesn't contain required word: '%s'", title, each_word)
return False
tokens = re.split('\W', term, re.IGNORECASE | re.UNICODE)
@@ -828,7 +814,7 @@ def verifyresult(title, artistterm, term, lossless):
dic = {'!':'i', '$':'s'}
dumbtoken = helpers.replace_all(token, dic)
if not not re.search('(?:\W|^)+' + dumbtoken + '(?:\W|$)+', title, re.IGNORECASE | re.UNICODE):
logger.info("Removed from results: " + title + " (missing tokens: " + token + " and " + cleantoken + ")")
logger.info("Removed from results: %s (missing tokens: %s and %s)", title, token, cleantoken)
return False
return True
@@ -862,7 +848,7 @@ def getresultNZB(result):
try:
nzb = opener.open(request).read()
except Exception, e:
logger.warn('Error fetching nzb from url: ' + result[2] + ' %s' % e)
logger.warn('Error fetching NZB from url: %s, %s', result[2], e)
else:
request = urllib2.Request(result[2])
request.add_header('User-Agent', USER_AGENT)
@@ -871,11 +857,10 @@ def getresultNZB(result):
try:
nzb = opener.open(request).read()
except Exception, e:
logger.warn('Error fetching nzb from url: ' + result[2] + ' %s' % e)
logger.warn('Error fetching NZB from url: %s, %s', result[2], e)
return nzb
def searchTorrent(album, new=False, losslessOnly=False):
global gazelle # persistent what.cd api object to reduce number of login attempts
# rutracker login
@@ -960,19 +945,16 @@ def searchTorrent(album, new=False, losslessOnly=False):
try:
data = urllib2.urlopen(searchURL, timeout=20)
except urllib2.URLError, e:
logger.warn('Error fetching data from %s: %s' % (provider, e))
logger.warn('Error fetching data from %s: %s', provider, e)
data = False
if data:
logger.info(u'Parsing results from <a href="%s">KAT</a>' % searchURL)
logger.info(u'Parsing results from <a href="%s">KAT</a>', searchURL)
d = feedparser.parse(data)
if not len(d.entries):
logger.info(u"No results found from %s for %s" % (provider, term))
pass
logger.info(u"No results found from %s for %s" % provider, term)
else:
for item in d.entries:
try:
@@ -1002,9 +984,8 @@ def searchTorrent(album, new=False, losslessOnly=False):
logger.info('Found %s. Size: %s' % (title, helpers.bytes_to_mb(size)))
else:
logger.info('%s is larger than the maxsize, the wrong format or has too little seeders for this category, skipping. (Size: %i bytes, Seeders: %i, Format: %s)' % (title, size, int(seeders), rightformat))
except Exception, e:
logger.error(u"An unknown error occurred in the KAT parser: %s" % e)
logger.exception("Unhandled exception in the KAT parser")
if headphones.WAFFLES:
provider = "Waffles.fm"
@@ -1231,7 +1212,7 @@ def searchTorrent(album, new=False, losslessOnly=False):
maxsize = 10000000000
else:
category = '101' #mp3
maxsize = 300000000
maxsize = 300000000
searchURL = providerurl + category
@@ -1253,8 +1234,6 @@ def searchTorrent(album, new=False, losslessOnly=False):
if not rows or len(rows) == '1':
logger.info(u"No results found from %s for %s" % (provider, term))
pass
else:
for item in rows[1:]:
try:
@@ -1304,18 +1283,15 @@ def searchTorrent(album, new=False, losslessOnly=False):
try:
data = urllib2.urlopen(searchURL, timeout=20).read()
except urllib2.URLError, e:
logger.warn('Error fetching data from %s: %s' % (provider, e))
logger.warn('Error fetching data from %s: %s', provider, e)
data = False
if data:
logger.info(u'Parsing results from <a href="%s">isoHunt</a>' % searchURL)
d = feedparser.parse(data)
if not len(d.entries):
logger.info(u"No results found from %s for %s" % (provider, term))
pass
logger.info(u"No results found from %s for %s", provider, term)
else:
for item in d.entries:
try:
@@ -1352,9 +1328,8 @@ def searchTorrent(album, new=False, losslessOnly=False):
logger.info('Found %s. Size: %s' % (title, helpers.bytes_to_mb(size)))
else:
logger.info('%s is larger than the maxsize, the wrong format or has too little seeders for this category, skipping. (Size: %i bytes, Seeders: %i, Format: %s)' % (title, size, int(seeds), rightformat))
except Exception, e:
logger.error(u"An unknown error occurred in the isoHunt parser: %s" % e)
logger.exception(u"Unhandled exception in isoHunt parser: %s")
if headphones.MININOVA:
provider = "Mininova"
@@ -1377,18 +1352,15 @@ def searchTorrent(album, new=False, losslessOnly=False):
try:
data = urllib2.urlopen(searchURL, timeout=20).read()
except urllib2.URLError, e:
logger.warn('Error fetching data from %s: %s' % (provider, e))
logger.warn('Error fetching data from %s: %s', provider, e)
data = False
if data:
logger.info(u'Parsing results from <a href="%s">Mininova</a>' % searchURL)
d = feedparser.parse(data)
if not len(d.entries):
logger.info(u"No results found from %s for %s" % (provider, term))
pass
else:
for item in d.entries:
try:
@@ -1436,7 +1408,6 @@ def searchTorrent(album, new=False, losslessOnly=False):
# THIS IS KIND OF A MESS AND PROBABLY NEEDS TO BE CLEANED UP
def preprocess(resultlist):
for result in resultlist:
if result[4] == 'torrent':
@@ -1493,14 +1464,12 @@ def preprocess(resultlist):
#TODO: Do we want rar checking in here to try to keep unknowns out?
#or at least the option to do so?
except Exception, e:
logger.error('Unable to parse the best result NZB. Error: ' + str(e) + '. (Make sure your username/password/API is correct for provider: ' + result[3])
logger.error('Unable to parse the best result NZB. Error: %s. (Make sure your username/password/API is correct for provider: %s', e ,result[3])
continue
return nzb, result
else:
logger.error("Couldn't retrieve the best nzb. Skipping.")
continue
return (None, None)
return (None, None)

View File

@@ -62,7 +62,7 @@ def initialize(options={}):
else:
protocol = "http"
logger.info(u"Starting Headphones on " + protocol + "://" + str(options['http_host']) + ":" + str(options['http_port']) + "/")
logger.info("Starting Headphones on %s://%s:%d/", protocol, options['http_host'], options['http_port'])
cherrypy.config.update(options_dict)
conf = {