Mirror of https://github.com/rembo10/headphones.git (synced 2026-01-10 15:28:11 -05:00)

autopep8 E231 whitespace after some punctuation
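E231 is the pycodestyle check for missing whitespace after ',', ';' or ':'. The hunks below are the kind of change autopep8 produces when limited to that check. A minimal sketch of the fix, assuming the autopep8 package is installed; the exact invocation used for this commit is not recorded here, and the sample input is just two lines taken from the first hunk for illustration:

    import autopep8

    # Two of the lines touched below, before the fix (names are from the diff;
    # the snippet is only a style sample, not runnable application code).
    before = (
        "def _findfilesstartingwith(self,pattern,folder):\n"
        "    files.append(os.path.join(folder,fname))\n"
    )

    # Restrict autopep8 to E231; the options dict mirrors the CLI's --select flag.
    after = autopep8.fix_code(before, options={"select": ["E231"]})
    print(after)
    # def _findfilesstartingwith(self, pattern, folder):
    #     files.append(os.path.join(folder, fname))

The reconstructed hunks follow.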
@@ -59,12 +59,12 @@ class Cache(object):
         self.info_summary = None
         self.info_content = None

-    def _findfilesstartingwith(self,pattern,folder):
+    def _findfilesstartingwith(self, pattern, folder):
         files = []
         if os.path.exists(folder):
             for fname in os.listdir(folder):
                 if fname.startswith(pattern):
-                    files.append(os.path.join(folder,fname))
+                    files.append(os.path.join(folder, fname))
         return files

     def _exists(self, type):
@@ -72,14 +72,14 @@ class Cache(object):
         self.thumb_files = []

         if type == 'artwork':
-            self.artwork_files = self._findfilesstartingwith(self.id,self.path_to_art_cache)
+            self.artwork_files = self._findfilesstartingwith(self.id, self.path_to_art_cache)
             if self.artwork_files:
                 return True
             else:
                 return False

         elif type == 'thumb':
-            self.thumb_files = self._findfilesstartingwith("T_" + self.id,self.path_to_art_cache)
+            self.thumb_files = self._findfilesstartingwith("T_" + self.id, self.path_to_art_cache)
             if self.thumb_files:
                 return True
             else:
@@ -400,7 +400,7 @@ class Cache(object):
         self.artwork_url = image_url

         # Grab the thumbnail as well if we're getting the full artwork (as long as it's missing/outdated
-        if thumb_url and self.query_type in ['thumb','artwork'] and not (self.thumb_files and self._is_current(self.thumb_files[0])):
+        if thumb_url and self.query_type in ['thumb', 'artwork'] and not (self.thumb_files and self._is_current(self.thumb_files[0])):
             artwork = request.request_content(thumb_url, timeout=20)

             if artwork:
@@ -106,7 +106,7 @@ class Quality:
             if x == Quality.UNKNOWN:
                 continue

-            regex = '\W'+Quality.qualityStrings[x].replace(' ','\W')+'\W'
+            regex = '\W'+Quality.qualityStrings[x].replace(' ', '\W')+'\W'
             regex_match = re.search(regex, name, re.I)
             if regex_match:
                 return x
@@ -219,7 +219,7 @@ class Directory:
             n = int(search.group(1))
             if n:
                 return n
-        for n in range(0,100):
+        for n in range(0, 100):
             search = re.search(int_to_str(n), filename)
             if search:
                 # TODO: not part of other value such as year
@@ -178,4 +178,4 @@ def getXldProfile(xldProfile):

             return(xldProfileForCmd, xldFormat, xldBitrate)

-    return(xldProfileNotFound, None, None)
+    return(xldProfileNotFound, None, None)
@@ -61,31 +61,31 @@ def latinToAscii(unicrap):
     """
     From couch potato
     """
-    xlate = {0xc0:'A', 0xc1:'A', 0xc2:'A', 0xc3:'A', 0xc4:'A', 0xc5:'A',
-             0xc6:'Ae', 0xc7:'C',
-             0xc8:'E', 0xc9:'E', 0xca:'E', 0xcb:'E', 0x86:'e',
-             0xcc:'I', 0xcd:'I', 0xce:'I', 0xcf:'I',
-             0xd0:'Th', 0xd1:'N',
-             0xd2:'O', 0xd3:'O', 0xd4:'O', 0xd5:'O', 0xd6:'O', 0xd8:'O',
-             0xd9:'U', 0xda:'U', 0xdb:'U', 0xdc:'U',
-             0xdd:'Y', 0xde:'th', 0xdf:'ss',
-             0xe0:'a', 0xe1:'a', 0xe2:'a', 0xe3:'a', 0xe4:'a', 0xe5:'a',
-             0xe6:'ae', 0xe7:'c',
-             0xe8:'e', 0xe9:'e', 0xea:'e', 0xeb:'e', 0x0259:'e',
-             0xec:'i', 0xed:'i', 0xee:'i', 0xef:'i',
-             0xf0:'th', 0xf1:'n',
-             0xf2:'o', 0xf3:'o', 0xf4:'o', 0xf5:'o', 0xf6:'o', 0xf8:'o',
-             0xf9:'u', 0xfa:'u', 0xfb:'u', 0xfc:'u',
-             0xfd:'y', 0xfe:'th', 0xff:'y',
-             0xa1:'!', 0xa2:'{cent}', 0xa3:'{pound}', 0xa4:'{currency}',
-             0xa5:'{yen}', 0xa6:'|', 0xa7:'{section}', 0xa8:'{umlaut}',
-             0xa9:'{C}', 0xaa:'{^a}', 0xab:'<<', 0xac:'{not}',
-             0xad:'-', 0xae:'{R}', 0xaf:'_', 0xb0:'{degrees}',
-             0xb1:'{+/-}', 0xb2:'{^2}', 0xb3:'{^3}', 0xb4:"'",
-             0xb5:'{micro}', 0xb6:'{paragraph}', 0xb7:'*', 0xb8:'{cedilla}',
-             0xb9:'{^1}', 0xba:'{^o}', 0xbb:'>>',
-             0xbc:'{1/4}', 0xbd:'{1/2}', 0xbe:'{3/4}', 0xbf:'?',
-             0xd7:'*', 0xf7:'/'
+    xlate = {0xc0: 'A', 0xc1: 'A', 0xc2: 'A', 0xc3: 'A', 0xc4: 'A', 0xc5: 'A',
+             0xc6: 'Ae', 0xc7: 'C',
+             0xc8: 'E', 0xc9: 'E', 0xca: 'E', 0xcb: 'E', 0x86: 'e',
+             0xcc: 'I', 0xcd: 'I', 0xce: 'I', 0xcf: 'I',
+             0xd0: 'Th', 0xd1: 'N',
+             0xd2: 'O', 0xd3: 'O', 0xd4: 'O', 0xd5: 'O', 0xd6: 'O', 0xd8: 'O',
+             0xd9: 'U', 0xda: 'U', 0xdb: 'U', 0xdc: 'U',
+             0xdd: 'Y', 0xde: 'th', 0xdf: 'ss',
+             0xe0: 'a', 0xe1: 'a', 0xe2: 'a', 0xe3: 'a', 0xe4: 'a', 0xe5: 'a',
+             0xe6: 'ae', 0xe7: 'c',
+             0xe8: 'e', 0xe9: 'e', 0xea: 'e', 0xeb: 'e', 0x0259: 'e',
+             0xec: 'i', 0xed: 'i', 0xee: 'i', 0xef: 'i',
+             0xf0: 'th', 0xf1: 'n',
+             0xf2: 'o', 0xf3: 'o', 0xf4: 'o', 0xf5: 'o', 0xf6: 'o', 0xf8: 'o',
+             0xf9: 'u', 0xfa: 'u', 0xfb: 'u', 0xfc: 'u',
+             0xfd: 'y', 0xfe: 'th', 0xff: 'y',
+             0xa1: '!', 0xa2: '{cent}', 0xa3: '{pound}', 0xa4: '{currency}',
+             0xa5: '{yen}', 0xa6: '|', 0xa7: '{section}', 0xa8: '{umlaut}',
+             0xa9: '{C}', 0xaa: '{^a}', 0xab: '<<', 0xac: '{not}',
+             0xad: '-', 0xae: '{R}', 0xaf: '_', 0xb0: '{degrees}',
+             0xb1: '{+/-}', 0xb2: '{^2}', 0xb3: '{^3}', 0xb4: "'",
+             0xb5: '{micro}', 0xb6: '{paragraph}', 0xb7: '*', 0xb8: '{cedilla}',
+             0xb9: '{^1}', 0xba: '{^o}', 0xbb: '>>',
+             0xbc: '{1/4}', 0xbd: '{1/2}', 0xbe: '{3/4}', 0xbf: '?',
+             0xd7: '*', 0xf7: '/'
              }

     r = ''
@@ -589,9 +589,9 @@ def smartMove(src, dest, delete=True):
 # TODO: Grab config values from sab to know when these options are checked. For now we'll just iterate through all combinations

 def sab_replace_dots(name):
-    return name.replace('.',' ')
+    return name.replace('.', ' ')
 def sab_replace_spaces(name):
-    return name.replace(' ','_')
+    return name.replace(' ', '_')

 def sab_sanitize_foldername(name):
     """ Return foldername with dodgy chars converted to safe ones
@@ -23,7 +23,7 @@ import threading
 import headphones

 blacklisted_special_artist_names = ['[anonymous]', '[data]', '[no artist]',
-                                    '[traditional]','[unknown]','Various Artists']
+                                    '[traditional]', '[unknown]', 'Various Artists']
 blacklisted_special_artists = ['f731ccc4-e22a-43af-a747-64213329e088',
                                '33cf029c-63b0-41a0-9855-be2a3665fb3b',
                                '314e1c25-dde7-4e4d-b2f4-0a7b9f7c56dc',
@@ -243,12 +243,12 @@ def addArtisttoDB(artistid, extrasonly=False, forcefull=False):

         if new_release_group:
             logger.info("[%s] Now adding: %s (New Release Group)" % (artist['artist_name'], rg['title']))
-            new_releases = mb.get_new_releases(rgid,includeExtras)
+            new_releases = mb.get_new_releases(rgid, includeExtras)

         else:
             if check_release_date is None or check_release_date == u"None":
                 logger.info("[%s] Now updating: %s (No Release Date)" % (artist['artist_name'], rg['title']))
-                new_releases = mb.get_new_releases(rgid,includeExtras,True)
+                new_releases = mb.get_new_releases(rgid, includeExtras, True)
             else:
                 if len(check_release_date) == 10:
                     release_date = check_release_date
@@ -260,7 +260,7 @@ def addArtisttoDB(artistid, extrasonly=False, forcefull=False):
                     release_date = today
                 if helpers.get_age(today) - helpers.get_age(release_date) < pause_delta:
                     logger.info("[%s] Now updating: %s (Release Date <%s Days)", artist['artist_name'], rg['title'], pause_delta)
-                    new_releases = mb.get_new_releases(rgid,includeExtras,True)
+                    new_releases = mb.get_new_releases(rgid, includeExtras, True)
                 else:
                     logger.info("[%s] Skipping: %s (Release Date >%s Days)", artist['artist_name'], rg['title'], pause_delta)
                     skip_log = 1
@@ -273,7 +273,7 @@ def addArtisttoDB(artistid, extrasonly=False, forcefull=False):
                 new_releases = new_releases
             else:
                 logger.info("[%s] Now adding/updating: %s (Comprehensive Force)", artist['artist_name'], rg['title'])
-                new_releases = mb.get_new_releases(rgid,includeExtras,forcefull)
+                new_releases = mb.get_new_releases(rgid, includeExtras, forcefull)

         if new_releases != 0:
             # Dump existing hybrid release since we're repackaging/replacing it
@@ -325,7 +325,7 @@ def addArtisttoDB(artistid, extrasonly=False, forcefull=False):
                 logger.info('[%s] Packaging %s releases into hybrid title' % (artist['artist_name'], rg['title']))
             except Exception as e:
                 errors = True
-                logger.warn('[%s] Unable to get hybrid release information for %s: %s' % (artist['artist_name'],rg['title'],e))
+                logger.warn('[%s] Unable to get hybrid release information for %s: %s' % (artist['artist_name'], rg['title'], e))
                 continue

             # Use the ReleaseGroupID as the ReleaseID for the hybrid release to differentiate it
@@ -786,7 +786,7 @@ def getHybridRelease(fullreleaselist):
         else:
             return releaseDate + '13-32'

-    sortable_release_list.sort(key=lambda x:getSortableReleaseDate(x['releasedate']))
+    sortable_release_list.sort(key=lambda x: getSortableReleaseDate(x['releasedate']))

     average_tracks = sum(x['trackscount'] for x in sortable_release_list) / float(len(sortable_release_list))
     for item in sortable_release_list:
@@ -159,4 +159,4 @@ def getTagTopArtists(tag, limit=50):
     for artistid in artistlist:
         importer.addArtisttoDB(artistid)

-    logger.debug("Added %d new artists from Last.FM", len(artistlist))
+    logger.debug("Added %d new artists from Last.FM", len(artistlist))
@@ -78,7 +78,7 @@ def libraryScan(dir=None, append=False, ArtistID=None, ArtistName=None, cron=Fal

     latest_subdirectory = []

-    for r,d,f in os.walk(dir, followlinks=True):
+    for r, d, f in os.walk(dir, followlinks=True):
         # Need to abuse slicing to get a copy of the list, doing it directly
         # will skip the element after a deleted one using a list comprehension
         # will not work correctly for nested subdirectories (os.walk keeps its
@@ -91,9 +91,9 @@ def libraryScan(dir=None, append=False, ArtistID=None, ArtistName=None, cron=Fal
             # MEDIA_FORMATS = music file extensions, e.g. mp3, flac, etc
             if any(files.lower().endswith('.' + x.lower()) for x in headphones.MEDIA_FORMATS):

-                subdirectory = r.replace(dir,'')
+                subdirectory = r.replace(dir, '')
                 latest_subdirectory.append(subdirectory)
-                if file_count == 0 and r.replace(dir,'') !='':
+                if file_count == 0 and r.replace(dir, '') !='':
                     logger.info("[%s] Now scanning subdirectory %s" % (dir.decode(headphones.SYS_ENCODING, 'replace'), subdirectory.decode(headphones.SYS_ENCODING, 'replace')))
                 elif latest_subdirectory[file_count] != latest_subdirectory[file_count-1] and file_count !=0:
                     logger.info("[%s] Now scanning subdirectory %s" % (dir.decode(headphones.SYS_ENCODING, 'replace'), subdirectory.decode(headphones.SYS_ENCODING, 'replace')))
@@ -217,4 +217,4 @@ warn = logger.warn
 error = logger.error
 debug = logger.debug
 warning = logger.warning
-exception = logger.exception
+exception = logger.exception
@@ -54,7 +54,7 @@ def startmb():
     else:
         return False

-    musicbrainzngs.set_useragent("headphones","0.0","https://github.com/rembo10/headphones")
+    musicbrainzngs.set_useragent("headphones", "0.0", "https://github.com/rembo10/headphones")
     musicbrainzngs.set_hostname(mbhost + ":" + str(mbport))
     if sleepytime == 0:
         musicbrainzngs.set_rate_limit(False)
@@ -67,7 +67,7 @@ def startmb():
         if not mbuser and mbpass:
             logger.warn("No username or password set for VIP server")
         else:
-            musicbrainzngs.hpauth(mbuser,mbpass)
+            musicbrainzngs.hpauth(mbuser, mbpass)

     logger.debug('Using the following server values: MBHost: %s, MBPort: %i, Sleep Interval: %i', mbhost, mbport, sleepytime)

@@ -131,7 +131,7 @@ def findRelease(name, limit=1, artist=None):

     # additional artist search
     if not artist and ':' in name:
-        name, artist = name.rsplit(":",1)
+        name, artist = name.rsplit(":", 1)

     chars = set('!?*-')
     if any((c in chars) for c in name):
@@ -140,7 +140,7 @@ def findRelease(name, limit=1, artist=None):
         artist = '"'+artist+'"'

     try:
-        releaseResults = musicbrainzngs.search_releases(query=name,limit=limit,artist=artist)['release-list']
+        releaseResults = musicbrainzngs.search_releases(query=name, limit=limit, artist=artist)['release-list']
     except musicbrainzngs.WebServiceError as e: #need to update exceptions
         logger.warn('Attempt to query MusicBrainz for "%s" failed: %s' % (name, str(e)))
         time.sleep(5)
@@ -214,12 +214,12 @@ def getArtist(artistid, extrasonly=False):
         newRgs = None
         artist['release-group-list'] = []
         while newRgs == None or len(newRgs) >= limit:
-            newRgs = musicbrainzngs.browse_release_groups(artistid,release_type="album",offset=len(artist['release-group-list']),limit=limit)['release-group-list']
+            newRgs = musicbrainzngs.browse_release_groups(artistid, release_type="album", offset=len(artist['release-group-list']), limit=limit)['release-group-list']
             artist['release-group-list'] += newRgs
     except musicbrainzngs.WebServiceError as e:
         logger.warn('Attempt to retrieve artist information from MusicBrainz failed for artistid: %s (%s)' % (artistid, str(e)))
         time.sleep(5)
-    except Exception,e:
+    except Exception, e:
         pass

     if not artist:
@@ -296,7 +296,7 @@ def getArtist(artistid, extrasonly=False):
             limit = 200
             newRgs = None
             while newRgs == None or len(newRgs) >= limit:
-                newRgs = musicbrainzngs.browse_release_groups(artistid,release_type=include,offset=len(mb_extras_list),limit=limit)['release-group-list']
+                newRgs = musicbrainzngs.browse_release_groups(artistid, release_type=include, offset=len(mb_extras_list), limit=limit)['release-group-list']
                 mb_extras_list += newRgs
         except musicbrainzngs.WebServiceError as e:
             logger.warn('Attempt to retrieve artist information from MusicBrainz failed for artistid: %s (%s)' % (artistid, str(e)))
@@ -332,7 +332,7 @@ def getReleaseGroup(rgid):
     releaseGroup = None

     try:
-        releaseGroup = musicbrainzngs.get_release_group_by_id(rgid,["artists","releases","media","discids",])['release-group']
+        releaseGroup = musicbrainzngs.get_release_group_by_id(rgid, ["artists", "releases", "media", "discids", ])['release-group']
     except musicbrainzngs.WebServiceError as e:
         logger.warn('Attempt to retrieve information from MusicBrainz for release group "%s" failed (%s)' % (rgid, str(e)))
         time.sleep(5)
@@ -353,9 +353,9 @@ def getRelease(releaseid, include_artist_info=True):

     try:
         if include_artist_info:
-            results = musicbrainzngs.get_release_by_id(releaseid,["artists","release-groups","media","recordings"]).get('release')
+            results = musicbrainzngs.get_release_by_id(releaseid, ["artists", "release-groups", "media", "recordings"]).get('release')
         else:
-            results = musicbrainzngs.get_release_by_id(releaseid,["media","recordings"]).get('release')
+            results = musicbrainzngs.get_release_by_id(releaseid, ["media", "recordings"]).get('release')
     except musicbrainzngs.WebServiceError as e:
         logger.warn('Attempt to retrieve information from MusicBrainz for release "%s" failed (%s)' % (releaseid, str(e)))
         time.sleep(5)
@@ -404,7 +404,7 @@ def getRelease(releaseid, include_artist_info=True):

     return release

-def get_new_releases(rgid,includeExtras=False,forcefull=False):
+def get_new_releases(rgid, includeExtras=False, forcefull=False):

     myDB = db.DBConnection()
     results = []
@@ -412,7 +412,7 @@ def get_new_releases(rgid,includeExtras=False,forcefull=False):
     limit = 100
     newResults = None
     while newResults == None or len(newResults) >= limit:
-        newResults = musicbrainzngs.browse_releases(release_group=rgid,includes=['artist-credits','labels','recordings','release-groups','media'],limit=limit,offset=len(results))
+        newResults = musicbrainzngs.browse_releases(release_group=rgid, includes=['artist-credits', 'labels', 'recordings', 'release-groups', 'media'], limit=limit, offset=len(results))
         if 'release-list' not in newResults:
             break #may want to raise an exception here instead ?
         newResults = newResults['release-list']
@@ -40,7 +40,7 @@ def encode(albumPath):
         logger.error('Details for xld profile \'%s\' not found, files will not be re-encoded', xldProfile)
         return None

-    tempDirEncode=os.path.join(albumPath,"temp")
+    tempDirEncode=os.path.join(albumPath, "temp")
     musicFiles=[]
     musicFinalFiles=[]
     musicTempFiles=[]
@@ -57,7 +57,7 @@ def encode(albumPath):
         logger.exception("Unable to create temporary directory")
         return None

-    for r,d,f in os.walk(albumPath):
+    for r, d, f in os.walk(albumPath):
         for music in f:
             if any(music.lower().endswith('.' + x.lower()) for x in headphones.MEDIA_FORMATS):
                 if not XLD:
@@ -216,7 +216,7 @@ def encode(albumPath):
             return None

     time.sleep(1)
-    for r,d,f in os.walk(albumPath):
+    for r, d, f in os.walk(albumPath):
         for music in f:
             if any(music.lower().endswith('.' + x.lower()) for x in headphones.MEDIA_FORMATS):
                 musicFinalFiles.append(os.path.join(r, music))
@@ -363,4 +363,4 @@ def getTimeEncode(start):
     seconds -= 3600*hours
     minutes = seconds / 60
     seconds -= 60*minutes
-    return "%02d:%02d:%02d" % (hours, minutes, seconds)
+    return "%02d:%02d:%02d" % (hours, minutes, seconds)
@@ -255,7 +255,7 @@ class XBMC(object):
                 request = self._sendhttp(host, notifycommand)

             else: #Frodo
-                params = {'title':header, 'message': message, 'displaytime': int(time), 'image': albumartpath}
+                params = {'title': header, 'message': message, 'displaytime': int(time), 'image': albumartpath}
                 request = self._sendjson(host, 'GUI.ShowNotification', params)

             if not request:
@@ -273,7 +273,7 @@ class LMS(object):
         self.hosts = headphones.CONFIG.LMS_HOST

     def _sendjson(self, host):
-        data = {'id': 1, 'method': 'slim.request', 'params': ["",["rescan"]]}
+        data = {'id': 1, 'method': 'slim.request', 'params': ["", ["rescan"]]}
         data = json.JSONEncoder().encode(data)

         content = {'Content-Type': 'application/json'}
@@ -815,4 +815,4 @@ class SubSonicNotifier(object):

         # Invoke request
         request.request_response(self.host + "musicFolderSettings.view?scanNow",
-                                 auth=(self.username, self.password))
+                                 auth=(self.username, self.password))
@@ -43,10 +43,10 @@ def sendNZB(nzb):

     if headphones.CONFIG.NZBGET_HOST.startswith('https://'):
         nzbgetXMLrpc = 'https://' + nzbgetXMLrpc
-        headphones.CONFIG.NZBGET_HOST.replace('https://','',1)
+        headphones.CONFIG.NZBGET_HOST.replace('https://', '', 1)
     else:
         nzbgetXMLrpc = 'http://' + nzbgetXMLrpc
-        headphones.CONFIG.NZBGET_HOST.replace('http://','',1)
+        headphones.CONFIG.NZBGET_HOST.replace('http://', '', 1)


     url = nzbgetXMLrpc % {"host": headphones.CONFIG.NZBGET_HOST, "username": headphones.CONFIG.NZBGET_USERNAME, "password": headphones.CONFIG.NZBGET_PASSWORD}
@@ -48,7 +48,7 @@ def checkFolder():
         else:
             download_dir = headphones.CONFIG.DOWNLOAD_TORRENT_DIR

-        album_path = os.path.join(download_dir, album['FolderName']).encode(headphones.SYS_ENCODING,'replace')
+        album_path = os.path.join(download_dir, album['FolderName']).encode(headphones.SYS_ENCODING, 'replace')
         logger.info("Checking if %s exists" % album_path)
         if os.path.exists(album_path):
             logger.info('Found "' + album['FolderName'] + '" in ' + album['Kind'] + ' download folder. Verifying....')
@@ -167,7 +167,7 @@ def verify(albumid, albumpath, Kind=None, forced=False):
     downloaded_track_list = []
     downloaded_cuecount = 0

-    for r,d,f in os.walk(albumpath):
+    for r, d, f in os.walk(albumpath):
         for files in f:
             if any(files.lower().endswith('.' + x.lower()) for x in headphones.MEDIA_FORMATS):
                 downloaded_track_list.append(os.path.join(r, files))
@@ -296,7 +296,7 @@ def doPostProcessing(albumid, albumpath, release, tracks, downloaded_track_list,
     # but this is good to make sure we're not counting files that may have failed to move
     downloaded_track_list = []

-    for r,d,f in os.walk(albumpath):
+    for r, d, f in os.walk(albumpath):
         for files in f:
             if any(files.lower().endswith('.' + x.lower()) for x in headphones.MEDIA_FORMATS):
                 downloaded_track_list.append(os.path.join(r, files))
@@ -555,7 +555,7 @@ def addAlbumArt(artwork, albumpath, release):
 def cleanupFiles(albumpath):
     logger.info('Cleaning up files')

-    for r,d,f in os.walk(albumpath):
+    for r, d, f in os.walk(albumpath):
         for files in f:
             if not any(files.lower().endswith('.' + x.lower()) for x in headphones.MEDIA_FORMATS):
                 logger.debug('Removing: %s' % files)
@@ -567,7 +567,7 @@ def cleanupFiles(albumpath):
 def renameNFO(albumpath):
     logger.info('Renaming NFO')

-    for r,d,f in os.walk(albumpath):
+    for r, d, f in os.walk(albumpath):
         for file in f:
             if file.lower().endswith('.nfo'):
                 logger.debug('Renaming: "%s" to "%s"' % (file.decode(headphones.SYS_ENCODING, 'replace'), file.decode(headphones.SYS_ENCODING, 'replace') + '-orig'))
@@ -602,7 +602,7 @@ def moveFiles(albumpath, release, tracks):
     else:
         firstchar = sortname[0]

-    for r,d,f in os.walk(albumpath):
+    for r, d, f in os.walk(albumpath):
         try:
             origfolder = os.path.basename(os.path.normpath(r).decode(headphones.SYS_ENCODING, 'replace'))
         except:
@@ -627,7 +627,7 @@ def moveFiles(albumpath, release, tracks):
     folder = helpers.replace_all(headphones.CONFIG.FOLDER_FORMAT.strip(), values, normalize=True)

     folder = helpers.replace_illegal_chars(folder, type="folder")
-    folder = folder.replace('./', '_/').replace('/.','/_')
+    folder = folder.replace('./', '_/').replace('/.', '/_')

     if folder.endswith('.'):
         folder = folder[:-1] + '_'
@@ -641,7 +641,7 @@ def moveFiles(albumpath, release, tracks):
     lossy_media = False
     lossless_media = False

-    for r,d,f in os.walk(albumpath):
+    for r, d, f in os.walk(albumpath):
         for files in f:
             files_to_move.append(os.path.join(r, files))
             if any(files.lower().endswith('.' + x.lower()) for x in headphones.LOSSY_MEDIA_FORMATS):
@@ -973,7 +973,7 @@ def renameFiles(albumpath, downloaded_track_list, release):

         ext = os.path.splitext(downloaded_track)[1]

-        new_file_name = helpers.replace_all(headphones.CONFIG.FILE_FORMAT.strip(), values).replace('/','_') + ext
+        new_file_name = helpers.replace_all(headphones.CONFIG.FILE_FORMAT.strip(), values).replace('/', '_') + ext


         new_file_name = helpers.replace_illegal_chars(new_file_name).encode(headphones.SYS_ENCODING, 'replace')
@@ -990,7 +990,7 @@ def renameFiles(albumpath, downloaded_track_list, release):
             logger.debug("Renaming for: " + downloaded_track.decode(headphones.SYS_ENCODING, 'replace') + " is not neccessary")
             continue

-        logger.debug('Renaming %s ---> %s', downloaded_track.decode(headphones.SYS_ENCODING,'replace'), new_file_name.decode(headphones.SYS_ENCODING,'replace'))
+        logger.debug('Renaming %s ---> %s', downloaded_track.decode(headphones.SYS_ENCODING, 'replace'), new_file_name.decode(headphones.SYS_ENCODING, 'replace'))
         try:
             os.rename(downloaded_track, new_file)
         except Exception, e:
@@ -1001,7 +1001,7 @@ def updateFilePermissions(albumpaths):

     for folder in albumpaths:
         logger.info("Updating file permissions in %s", folder)
-        for r,d,f in os.walk(folder):
+        for r, d, f in os.walk(folder):
             for files in f:
                 full_path = os.path.join(r, files)
                 try:
@@ -235,4 +235,4 @@ def server_message(response):
     if len(message) > 150:
         message = message[:150] + "..."

-    logger.debug("Server responded with message: %s", message)
+    logger.debug("Server responded with message: %s", message)
@@ -351,9 +351,9 @@ def sort_search_results(resultlist, album, new, albumlength):
         # add a search provider priority (weighted based on position)
         i = next((i for i, word in enumerate(preferred_words) if word in result[3].lower()), None)
         if i is not None:
-            priority += round((len(preferred_words) - i) / float(len(preferred_words)),2)
+            priority += round((len(preferred_words) - i) / float(len(preferred_words)), 2)

-        temp_list.append((result[0],result[1],result[2],result[3],result[4],priority))
+        temp_list.append((result[0], result[1], result[2], result[3], result[4], priority))

     resultlist = temp_list

@@ -416,7 +416,7 @@ def searchNZB(album, new=False, losslessOnly=False, albumlength=None):
     reldate = album['ReleaseDate']
     year = get_year_from_release_date(reldate)

-    dic = {'...':'', ' & ':' ', ' = ': ' ', '?':'', '$':'s', ' + ':' ', '"':'', ',':'', '*':'', '.':'', ':':''}
+    dic = {'...': '', ' & ': ' ', ' = ': ' ', '?': '', '$': 's', ' + ': ' ', '"': '', ',': '', '*': '', '.': '', ':': ''}

     cleanalbum = helpers.latinToAscii(helpers.replace_all(album['AlbumTitle'], dic)).strip()
     cleanartist = helpers.latinToAscii(helpers.replace_all(album['ArtistName'], dic)).strip()
@@ -885,15 +885,15 @@ def send_to_downloader(data, bestqual, album):
     if headphones.CONFIG.GROWL_ENABLED and headphones.CONFIG.GROWL_ONSNATCH:
         logger.info(u"Sending Growl notification")
         growl = notifiers.GROWL()
-        growl.notify(name,"Download started")
+        growl.notify(name, "Download started")
     if headphones.CONFIG.PROWL_ENABLED and headphones.CONFIG.PROWL_ONSNATCH:
         logger.info(u"Sending Prowl notification")
         prowl = notifiers.PROWL()
-        prowl.notify(name,"Download started")
+        prowl.notify(name, "Download started")
     if headphones.CONFIG.PUSHOVER_ENABLED and headphones.CONFIG.PUSHOVER_ONSNATCH:
         logger.info(u"Sending Pushover notification")
         prowl = notifiers.PUSHOVER()
-        prowl.notify(name,"Download started")
+        prowl.notify(name, "Download started")
     if headphones.CONFIG.PUSHBULLET_ENABLED and headphones.CONFIG.PUSHBULLET_ONSNATCH:
         logger.info(u"Sending PushBullet notification")
         pushbullet = notifiers.PUSHBULLET()
@@ -909,7 +909,7 @@ def send_to_downloader(data, bestqual, album):
     if headphones.CONFIG.PUSHALOT_ENABLED and headphones.CONFIG.PUSHALOT_ONSNATCH:
         logger.info(u"Sending Pushalot notification")
         pushalot = notifiers.PUSHALOT()
-        pushalot.notify(name,"Download started")
+        pushalot.notify(name, "Download started")
     if headphones.CONFIG.OSX_NOTIFY_ENABLED and headphones.CONFIG.OSX_NOTIFY_ONSNATCH:
         logger.info(u"Sending OS X notification")
         osx_notify = notifiers.OSX_NOTIFY()
@@ -977,7 +977,7 @@ def verifyresult(title, artistterm, term, lossless):
         if not re.search('(?:\W|^)+' + token + '(?:\W|$)+', title, re.IGNORECASE | re.UNICODE):
             cleantoken = ''.join(c for c in token if c not in string.punctuation)
             if not not re.search('(?:\W|^)+' + cleantoken + '(?:\W|$)+', title, re.IGNORECASE | re.UNICODE):
-                dic = {'!':'i', '$':'s'}
+                dic = {'!': 'i', '$': 's'}
                 dumbtoken = helpers.replace_all(token, dic)
                 if not not re.search('(?:\W|^)+' + dumbtoken + '(?:\W|$)+', title, re.IGNORECASE | re.UNICODE):
                     logger.info("Removed from results: %s (missing tokens: %s and %s)", title, token, cleantoken)
@@ -1001,7 +1001,7 @@ def searchTorrent(album, new=False, losslessOnly=False, albumlength=None):
     year = get_year_from_release_date(reldate)

     # MERGE THIS WITH THE TERM CLEANUP FROM searchNZB
-    dic = {'...':'', ' & ':' ', ' = ': ' ', '?':'', '$':'s', ' + ':' ', '"':'', ',':' ', '*':''}
+    dic = {'...': '', ' & ': ' ', ' = ': ' ', '?': '', '$': 's', ' + ': ' ', '"': '', ',': ' ', '*': ''}

     semi_cleanalbum = helpers.replace_all(album['AlbumTitle'], dic)
     cleanalbum = helpers.latinToAscii(semi_cleanalbum)
@@ -1355,7 +1355,7 @@ def searchTorrent(album, new=False, losslessOnly=False, albumlength=None):

             if headphones.CONFIG.TORRENT_DOWNLOADER == 0:
                 try:
-                    url = item.find("a", {"title":"Download this torrent"})['href']
+                    url = item.find("a", {"title": "Download this torrent"})['href']
                 except TypeError:
                     if headphones.MAGNET_LINKS != 0:
                         url = item.findAll("a")[3]['href']
@@ -33,11 +33,11 @@ def addTorrent(link):
     if link.endswith('.torrent'):
         with open(link, 'rb') as f:
             metainfo = str(base64.b64encode(f.read()))
-        arguments = {'metainfo': metainfo, 'download-dir':headphones.CONFIG.DOWNLOAD_TORRENT_DIR}
+        arguments = {'metainfo': metainfo, 'download-dir': headphones.CONFIG.DOWNLOAD_TORRENT_DIR}
     else:
         arguments = {'filename': link, 'download-dir': headphones.CONFIG.DOWNLOAD_TORRENT_DIR}

-    response = torrentAction(method,arguments)
+    response = torrentAction(method, arguments)

     if not response:
         return False
@@ -62,7 +62,7 @@ def addTorrent(link):

 def getTorrentFolder(torrentid):
     method = 'torrent-get'
-    arguments = { 'ids': torrentid, 'fields': ['name','percentDone']}
+    arguments = { 'ids': torrentid, 'fields': ['name', 'percentDone']}

     response = torrentAction(method, arguments)
     percentdone = response['arguments']['torrents'][0]['percentDone']
@@ -48,7 +48,7 @@ class utorrentclient(object):
     def _make_opener(self, realm, base_url, username, password):
         """uTorrent API need HTTP Basic Auth and cookie support for token verify."""
         auth = urllib2.HTTPBasicAuthHandler()
-        auth.add_password(realm=realm,uri=base_url,user=username,passwd=password)
+        auth.add_password(realm=realm, uri=base_url, user=username, passwd=password)
         opener = urllib2.build_opener(auth)
         urllib2.install_opener(opener)

@@ -160,7 +160,7 @@ def labelTorrent(hash):
     label = headphones.CONFIG.UTORRENT_LABEL
     uTorrentClient = utorrentclient()
     if label:
-        uTorrentClient.setprops(hash,'label',label)
+        uTorrentClient.setprops(hash, 'label', label)

 def removeTorrent(hash, remove_data = False):
     uTorrentClient = utorrentclient()
@@ -181,10 +181,10 @@ def setSeedRatio(hash, ratio):
     uTorrentClient = utorrentclient()
     uTorrentClient.setprops(hash, 'seed_override', '1')
     if ratio != 0:
-        uTorrentClient.setprops(hash,'seed_ratio', ratio * 10)
+        uTorrentClient.setprops(hash, 'seed_ratio', ratio * 10)
     else:
         # TODO passing -1 should be unlimited
-        uTorrentClient.setprops(hash,'seed_ratio', -10)
+        uTorrentClient.setprops(hash, 'seed_ratio', -10)

 def dirTorrent(hash, cacheid=None, return_name=None):

@@ -1 +1 @@
-HEADPHONES_VERSION = "master"
+HEADPHONES_VERSION = "master"
@@ -322,11 +322,11 @@ class WebInterface(object):
         for result in results:

             result_dict = {
-                'title':result[0],
-                'size':result[1],
-                'url':result[2],
-                'provider':result[3],
-                'kind':result[4]
+                'title': result[0],
+                'size': result[1],
+                'url': result[2],
+                'provider': result[3],
+                'kind': result[4]
             }
             results_as_dicts.append(result_dict)

@@ -344,9 +344,9 @@ class WebInterface(object):
         url = urllib2.quote(url, safe=":?/=&") + '&' + urllib.urlencode(kwargs)

         try:
-            result = [(title,int(size),url,provider,kind)]
+            result = [(title, int(size), url, provider, kind)]
         except ValueError:
-            result = [(title,float(size),url,provider,kind)]
+            result = [(title, float(size), url, provider, kind)]

         logger.info(u"Making sure we can download the chosen result")
         (data, bestqual) = searcher.preprocess(result)
@@ -713,7 +713,7 @@ class WebInterface(object):

     def forcePostProcess(self, dir=None, album_dir=None):
         from headphones import postprocessor
-        threading.Thread(target=postprocessor.forcePostProcess, kwargs={'dir':dir,'album_dir':album_dir}).start()
+        threading.Thread(target=postprocessor.forcePostProcess, kwargs={'dir': dir, 'album_dir': album_dir}).start()
         raise cherrypy.HTTPRedirect("home")
     forcePostProcess.exposed = True

@@ -747,7 +747,7 @@ class WebInterface(object):
         raise cherrypy.HTTPRedirect("logs")
     toggleVerbose.exposed = True

-    def getLog(self,iDisplayStart=0,iDisplayLength=100,iSortCol_0=0,sSortDir_0="desc",sSearch="",**kwargs):
+    def getLog(self, iDisplayStart=0, iDisplayLength=100, iSortCol_0=0, sSortDir_0="desc", sSearch="", **kwargs):

         iDisplayStart = int(iDisplayStart)
         iDisplayLength = int(iDisplayLength)
@@ -763,19 +763,19 @@ class WebInterface(object):
             sortcolumn = 2
         elif iSortCol_0 == '2':
             sortcolumn = 1
-        filtered.sort(key=lambda x:x[sortcolumn],reverse=sSortDir_0 == "desc")
+        filtered.sort(key=lambda x: x[sortcolumn], reverse=sSortDir_0 == "desc")

         rows = filtered[iDisplayStart:(iDisplayStart+iDisplayLength)]
-        rows = [[row[0],row[2],row[1]] for row in rows]
+        rows = [[row[0], row[2], row[1]] for row in rows]

         return json.dumps({
-            'iTotalDisplayRecords':len(filtered),
-            'iTotalRecords':len(headphones.LOG_LIST),
-            'aaData':rows,
+            'iTotalDisplayRecords': len(filtered),
+            'iTotalRecords': len(headphones.LOG_LIST),
+            'aaData': rows,
         })
     getLog.exposed = True

-    def getArtists_json(self,iDisplayStart=0,iDisplayLength=100,sSearch="",iSortCol_0='0',sSortDir_0='asc',**kwargs):
+    def getArtists_json(self, iDisplayStart=0, iDisplayLength=100, sSearch="", iSortCol_0='0', sSortDir_0='asc', **kwargs):
         iDisplayStart = int(iDisplayStart)
         iDisplayLength = int(iDisplayLength)
         filtered = []
@@ -793,16 +793,16 @@ class WebInterface(object):
             sortbyhavepercent = True

         if sSearch == "":
-            query = 'SELECT * from artists order by %s COLLATE NOCASE %s' % (sortcolumn,sSortDir_0)
+            query = 'SELECT * from artists order by %s COLLATE NOCASE %s' % (sortcolumn, sSortDir_0)
             filtered = myDB.select(query)
             totalcount = len(filtered)
         else:
-            query = 'SELECT * from artists WHERE ArtistSortName LIKE "%' + sSearch + '%" OR LatestAlbum LIKE "%' + sSearch +'%"' + 'ORDER BY %s COLLATE NOCASE %s' % (sortcolumn,sSortDir_0)
+            query = 'SELECT * from artists WHERE ArtistSortName LIKE "%' + sSearch + '%" OR LatestAlbum LIKE "%' + sSearch +'%"' + 'ORDER BY %s COLLATE NOCASE %s' % (sortcolumn, sSortDir_0)
             filtered = myDB.select(query)
             totalcount = myDB.select('SELECT COUNT(*) from artists')[0][0]

         if sortbyhavepercent:
-            filtered.sort(key=lambda x:(float(x['HaveTracks'])/x['TotalTracks'] if x['TotalTracks'] > 0 else 0.0,x['HaveTracks'] if x['HaveTracks'] else 0.0),reverse=sSortDir_0 == "asc")
+            filtered.sort(key=lambda x: (float(x['HaveTracks'])/x['TotalTracks'] if x['TotalTracks'] > 0 else 0.0, x['HaveTracks'] if x['HaveTracks'] else 0.0), reverse=sSortDir_0 == "asc")

         #can't figure out how to change the datatables default sorting order when its using an ajax datasource so ill
         #just reverse it here and the first click on the "Latest Album" header will sort by descending release date
@@ -813,16 +813,16 @@ class WebInterface(object):
         artists = filtered[iDisplayStart:(iDisplayStart+iDisplayLength)]
         rows = []
         for artist in artists:
-            row = {"ArtistID":artist['ArtistID'],
-                   "ArtistName":artist["ArtistName"],
-                   "ArtistSortName":artist["ArtistSortName"],
-                   "Status":artist["Status"],
-                   "TotalTracks":artist["TotalTracks"],
-                   "HaveTracks":artist["HaveTracks"],
-                   "LatestAlbum":"",
-                   "ReleaseDate":"",
-                   "ReleaseInFuture":"False",
-                   "AlbumID":"",
+            row = {"ArtistID": artist['ArtistID'],
+                   "ArtistName": artist["ArtistName"],
+                   "ArtistSortName": artist["ArtistSortName"],
+                   "Status": artist["Status"],
+                   "TotalTracks": artist["TotalTracks"],
+                   "HaveTracks": artist["HaveTracks"],
+                   "LatestAlbum": "",
+                   "ReleaseDate": "",
+                   "ReleaseInFuture": "False",
+                   "AlbumID": "",
                    }

             if not row['HaveTracks']:
@@ -841,9 +841,9 @@ class WebInterface(object):
             rows.append(row)


-        dict = {'iTotalDisplayRecords':len(filtered),
-                'iTotalRecords':totalcount,
-                'aaData':rows,
+        dict = {'iTotalDisplayRecords': len(filtered),
+                'iTotalRecords': totalcount,
+                'aaData': rows,
                 }
         s = json.dumps(dict)
         cherrypy.response.headers['Content-type'] = 'application/json'
@@ -1367,7 +1367,7 @@ class Artwork(object):
             return "Artwork"
         index.exposed = True

-        def default(self,ArtistOrAlbum="",ID=None):
+        def default(self, ArtistOrAlbum="", ID=None):
             from headphones import cache
             ArtistID = None
             AlbumID = None
@@ -1376,23 +1376,23 @@ class Artwork(object):
             elif ArtistOrAlbum == "album":
                 AlbumID = ID

-            relpath = cache.getArtwork(ArtistID,AlbumID)
+            relpath = cache.getArtwork(ArtistID, AlbumID)

             if not relpath:
                 relpath = "data/interfaces/default/images/no-cover-art.png"
                 basedir = os.path.dirname(sys.argv[0])
-                path = os.path.join(basedir,relpath)
+                path = os.path.join(basedir, relpath)
                 cherrypy.response.headers['Content-type'] = 'image/png'
                 cherrypy.response.headers['Cache-Control'] = 'no-cache'
             else:
-                relpath = relpath.replace('cache/','',1)
-                path = os.path.join(headphones.CONFIG.CACHE_DIR,relpath)
+                relpath = relpath.replace('cache/', '', 1)
+                path = os.path.join(headphones.CONFIG.CACHE_DIR, relpath)
                 fileext = os.path.splitext(relpath)[1][1::]
                 cherrypy.response.headers['Content-type'] = 'image/' + fileext
                 cherrypy.response.headers['Cache-Control'] = 'max-age=31556926'

             path = os.path.normpath(path)
-            f = open(path,'rb')
+            f = open(path, 'rb')
             return f.read()
         default.exposed = True

@@ -1400,7 +1400,7 @@ class Artwork(object):
         def index(self):
             return "Here be thumbs"
         index.exposed = True
-        def default(self,ArtistOrAlbum="",ID=None):
+        def default(self, ArtistOrAlbum="", ID=None):
             from headphones import cache
             ArtistID = None
             AlbumID = None
@@ -1409,23 +1409,23 @@ class Artwork(object):
             elif ArtistOrAlbum == "album":
                 AlbumID = ID

-            relpath = cache.getThumb(ArtistID,AlbumID)
+            relpath = cache.getThumb(ArtistID, AlbumID)

             if not relpath:
                 relpath = "data/interfaces/default/images/no-cover-artist.png"
                 basedir = os.path.dirname(sys.argv[0])
-                path = os.path.join(basedir,relpath)
+                path = os.path.join(basedir, relpath)
                 cherrypy.response.headers['Content-type'] = 'image/png'
                 cherrypy.response.headers['Cache-Control'] = 'no-cache'
             else:
-                relpath = relpath.replace('cache/','',1)
-                path = os.path.join(headphones.CONFIG.CACHE_DIR,relpath)
+                relpath = relpath.replace('cache/', '', 1)
+                path = os.path.join(headphones.CONFIG.CACHE_DIR, relpath)
                 fileext = os.path.splitext(relpath)[1][1::]
                 cherrypy.response.headers['Content-type'] = 'image/' + fileext
                 cherrypy.response.headers['Cache-Control'] = 'max-age=31556926'

             path = os.path.normpath(path)
-            f = open(path,'rb')
+            f = open(path, 'rb')
             return f.read()
         default.exposed = True

@@ -71,28 +71,28 @@ def initialize(options=None):
             'tools.staticdir.root': os.path.join(headphones.PROG_DIR, 'data'),
             'tools.proxy.on': options['http_proxy'] # pay attention to X-Forwarded-Proto header
         },
-        '/interfaces':{
+        '/interfaces': {
             'tools.staticdir.on': True,
             'tools.staticdir.dir': "interfaces"
         },
-        '/images':{
+        '/images': {
             'tools.staticdir.on': True,
             'tools.staticdir.dir': "images"
         },
-        '/css':{
+        '/css': {
             'tools.staticdir.on': True,
             'tools.staticdir.dir': "css"
         },
-        '/js':{
+        '/js': {
             'tools.staticdir.on': True,
             'tools.staticdir.dir': "js"
         },
-        '/favicon.ico':{
+        '/favicon.ico': {
             'tools.staticfile.on': True,
             'tools.staticfile.filename': os.path.join(os.path.abspath(
                 os.curdir), "images" + os.sep + "favicon.ico")
         },
-        '/cache':{
+        '/cache': {
             'tools.staticdir.on': True,
             'tools.staticdir.dir': headphones.CONFIG.CACHE_DIR
         }