Bump to version 4.0.0
* Update scrobble_cache to use Pickle instead of Shelve.
* encode values returned from ETree to 'utf-8' in fetch_metadata
* Add tests to mock responses from fetch_metadata
* More Python 3 clean-up
* Swap file open to io.open so we can force encoding='utf-8' on reads.
* Adding mutex locks on scrobble_cache access (see the sketch after the file summary below).
jesseward committed Feb 13, 2017
1 parent a9fe322 commit 84c3e60
Showing 8 changed files with 139 additions and 55 deletions.
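
The first and last bullets in the commit message replace the shelve-backed cache with a plain dict that is pickled to disk and guarded by a module-level threading.Lock. Below is a minimal, self-contained sketch of that pattern; the names are illustrative, not the actual ScrobbleCache class shown in the diff, and the "with lock:" form used here is simply an equivalent of the explicit acquire()/release() calls in the change that also releases the mutex if pickle raises.

import os
import pickle
import threading

lock = threading.Lock()  # module-level mutex serializing cache file access


class PickleCache(object):
    """Illustrative stand-in for the dict-backed, pickle-persisted cache."""

    def __init__(self, cache_location):
        self.cache_location = cache_location
        self.cache = {}
        if os.path.exists(cache_location):
            self._load()

    def _load(self):
        # the context manager releases the lock even if pickle.load() raises
        with lock:
            with open(self.cache_location, 'rb') as fh:
                self.cache = pickle.load(fh)

    def sync(self):
        # write the whole dict back to disk after every mutation
        with lock:
            with open(self.cache_location, 'wb') as fh:
                pickle.dump(self.cache, fh)
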
3 changes: 1 addition & 2 deletions plex_scrobble/__main__.py
@@ -51,14 +51,13 @@ def cache_retry(config, logger):
cache = ScrobbleCache(api_key, api_secret, user_name, password,
cache_location=cache_location)
except Exception as e:
logger.warn('ERROR: {0}, retrying in {1} seconds'.format(e, retry))
logger.warning('ERROR: {0}, retrying in {1} seconds'.format(e, retry))
time.sleep(retry)
continue
# do not retry if cache is empty.
if cache.length() > 0:
cache.retry_queue()

cache.close()
time.sleep(retry)


30 changes: 15 additions & 15 deletions plex_scrobble/plex_monitor.py
@@ -1,4 +1,5 @@
# -*- coding: utf-8 -*-
import io
import logging
import os
import re
@@ -33,7 +34,7 @@ def parse_line(log_line):
m = regex.match(log_line)

if m:
logger.info('Found played song and extracted library id \'{l_id}\' from plex log '.format(l_id=m.group(1)))
logger.info('Found played song and extracted library id "{l_id}" from plex log '.format(l_id=m.group(1)))
return m.group(1)


@@ -51,13 +52,12 @@ def fetch_metadata(l_id, config):

# fail if request is greater than 2 seconds.
try:
metadata = requests.get(url, headers=headers)
metadata = requests.get(url, headers=headers).text
except requests.exceptions.RequestException as e:
logger.error('urllib2 error reading from {url} \'{error}\''.format(url=url,
error=e))
logger.error('error reading from {url} "{error}"'.format(url=url, error=e))
return False

tree = ET.fromstring(metadata.text)
tree = ET.fromstring(metadata)
track = tree.find('Track')

# BUGFIX: https://github.com/jesseward/plex-lastfm-scrobbler/issues/7
@@ -78,16 +78,16 @@ def fetch_metadata(l_id, config):
# add support for fetching album metadata from the track object.
album = track.get('parentTitle')
if not album:
logger.warn('unable to locate album name for ibary-id={l_id}'.format(
logger.warning('unable to locate album name for ibary-id={l_id}'.format(
l_id=l_id))
album = None

if not all((artist, song)):
logger.warn('unable to retrieve meatadata keys for libary-id={l_id}'.
format(l_id=l_id))
logger.warning('unable to retrieve meatadata keys for libary-id={l_id}'.
format(l_id=l_id))
return False

return {'title': song, 'artist': artist, 'album': album}
return {'title': song.encode('utf-8'), 'artist': artist.encode('utf-8'), 'album': album.encode('utf-8')}


def monitor_log(config):
@@ -103,7 +103,7 @@ def monitor_log(config):
cache_location = config['plex-scrobble']['cache_location']

try:
f = open(config['plex-scrobble']['mediaserver_log_location'])
f = io.open(config['plex-scrobble']['mediaserver_log_location'], 'r', encoding='utf-8')
except IOError:
logger.error('Unable to read log-file {0}. Shutting down.'.format(config[
'plex-scrobble']['mediaserver_log_location']))
@@ -112,10 +112,10 @@

try:
lastfm = pylast.LastFMNetwork(
api_key=config['lastfm']['api_key'],
api_secret=config['lastfm']['api_secret'],
username=config['lastfm']['user_name'],
password_hash=pylast.md5(config['lastfm']['password']))
api_key=api_key,
api_secret=api_secret,
username=user_name,
password_hash=pylast.md5(password))
except Exception as e:
logger.error('FATAL {0}. Aborting execution'.format(e))
os._exit(1)
@@ -160,7 +160,7 @@ def monitor_log(config):
# when playing via a client, log lines are duplicated (seen via iOS)
# this skips dupes. Note: will also miss songs that have been repeated
if played == last_played:
logger.warn('Dupe detection : {0}, not submitting'.format(last_played))
logger.warning('Dupe detection : {0}, not submitting'.format(last_played))
continue

metadata = fetch_metadata(played, config)
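
To make the fetch_metadata changes above concrete: the function now hands requests.get(url, headers=headers).text straight to ElementTree, reads the Track attributes, and returns them UTF-8 encoded. The payload below is a trimmed, hypothetical example and the attribute selection is simplified; the full response shape lives in tests/data/unicode_audio_payload_fetch_metadata.xml further down.

# -*- coding: utf-8 -*-
import xml.etree.ElementTree as ET

# trimmed, hypothetical PMS metadata payload
payload = u'''<MediaContainer size="1">
  <Track title="daze" originalTitle="じん Feat. メイリア" parentTitle="daze / days" />
</MediaContainer>'''

track = ET.fromstring(payload).find('Track')

# mirrors the new return value: UTF-8 encoded byte strings
print({'title': track.get('title').encode('utf-8'),
       'artist': track.get('originalTitle').encode('utf-8'),
       'album': track.get('parentTitle').encode('utf-8')})
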
82 changes: 59 additions & 23 deletions plex_scrobble/scrobble_cache.py
@@ -1,9 +1,16 @@
# -*- coding: utf-8 -*-
import logging
import shelve
import os
import pickle
import time
import threading

from uuid import uuid1

import pylast

lock = threading.Lock()


class ScrobbleCache(object):
"""
@@ -28,51 +35,80 @@ def __init__(self, api_key, api_secret, user_name, password,
self.api_secret = api_secret
self.user_name = user_name
self.password = password
self.cache = shelve.open(cache_location, writeback=True)
self.logger = logging.getLogger(__name__)
self.cache_location = cache_location
self.cache = {}

try:
self._load()
except IOError as e:
self.logger.warning('Unable to open cache file. resetting cache. error={0}'.format(e))
self.sync()
except (EOFError, KeyError) as e:
self.logger.error('Unable to read cache file type. possibly corrupted or not of Python Pickle type, renaming to .old. error={0}'.format(e))
os.rename(self.cache_location, self.cache_location + '.old')
self.sync()


def length(self):
return len(self.cache)

def add(self, key, value, album, cache_hit=1):
def _load(self):
lock.acquire()
try:
with open(self.cache_location, 'rb') as fh:
self.cache = pickle.load(fh)
except Exception as e:
lock.release()
raise e
lock.release()

def sync(self):
lock.acquire()
with open(self.cache_location, 'wb') as fh:
pickle.dump(self.cache, fh)
lock.release()
self._load()
return True

def add(self, artist, title, album, cache_hit=1):
"""
Add missed scrobble to the retry cache.
:param key: a time - timestamp
:param value: a str representing an artist name
:param artist: a str representing the artist name
:param title: a str representing the song title
:param album: a str representing an album name
:param cache_hit: number of times the item has been retried.
"""

self.logger.info(u'adding "{key}" "{value}" ({album}) to retry cache.'.format(
key=key, value=value, album=album))
self.logger.info('adding "{artist}" "{title}" ({album}) to retry cache.'.format(
artist=artist, title=title, album=album))

self.cache[str(time.time())] = [key, value, cache_hit, album]
self.cache.sync()
self.cache[str(uuid1())] = [artist, title, cache_hit, album]
return self.sync()

def remove(self, key):
"""
remove an existing entry from cache file.
:param key: a timestamp.
:param key: a string representing a uuid1 value.
"""

self.logger.info(u'removing "{key}": "{artist}" - "{title}" ({album}) from retry cache.'.format(
key=key, artist=self.cache[key][0], title=self.cache[key][1],
album=self.cache[key][3]))
del self.cache[key]
self.cache.sync()
self.logger.info('removing "{key}" from retry cache.'.format(key=key))

def close(self):
""" cleans up cache and flushes to disk. """
try:
del self.cache[key]
except KeyError:
self.logger.warning('Unable to remove, {0} not found in cache.'.format(key))
return

self.cache.close()
return self.sync()

def cache_items(self):
""" debug method to dump cache to stdout. """

for key in self.cache:
print('time={key}, artist={artist}, title={title}, album={album}age={age}'.format(
print('time={key}, artist={artist}, title={title}, album={album}, age={age}'.format(
key=key, artist=self.cache[key][0],
title=self.cache[key][1],
album=self.cache[key][3],
@@ -82,7 +118,7 @@ def retry_queue(self):

self.logger.info('Retrying scrobble cache.')

for key in self.cache:
for key in self.cache.keys():
# do submissions retry
try:
self.cache[key][2] += 1
@@ -96,14 +132,13 @@ def retry_queue(self):
timestamp=int(time.time()),
album=self.cache[key][3])
except:
self.logger.warn('Failed to resubmit artist={artist}, title={title}, album={album}age={age}'.format(
self.logger.warning('Failed to resubmit artist={artist}, title={title}, album={album}, age={age}'.format(
artist=self.cache[key][0],
title=self.cache[key][1],
album=self.cache[key][3],
age=self.cache[key][2]))
if self.cache[key][2] >= ScrobbleCache.MAX_CACHE_AGE:
self.logger.info(u'MAX_CACHE_AGE for {key} : {artist} - {title}'.format(
key, self.cache[key][0], self.key[1]))
self.logger.info('MAX_CACHE_AGE for {key} : {artist} - {title}'.format(key=key, artist=self.cache[key][0], title=self.cache[key][1]))
self.remove(key)
continue

@@ -114,3 +149,4 @@ def retry_queue(self):
album=self.cache[key][3],
age=self.cache[key][2]))
self.remove(key)
self.sync()
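
One caveat about the dict-backed cache, offered as a general Python note rather than as part of the change: retry_queue() removes entries while looping over self.cache.keys(), and on Python 3 deleting from a dict while iterating it (or its keys view) can raise RuntimeError, so iterating a snapshot of the keys is the usual safeguard. A standalone illustration (not the project's code):

# iterate a snapshot of the keys so entries can be deleted safely;
# looping over cache.keys() directly while deleting raises
# "RuntimeError: dictionary changed size during iteration" on Python 3
cache = {'a': 1, 'b': 2, 'c': 3}
for key in list(cache.keys()):
    if cache[key] >= 2:
        del cache[key]

print(cache)  # {'a': 1}
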
23 changes: 12 additions & 11 deletions setup.py
@@ -2,26 +2,27 @@
from setuptools import setup

NAME = 'plex-lastfm-scrobbler'
VERSION = '3.0.0'
VERSION = '4.0.0'

setup(
name = 'plex_scrobble',
version = VERSION,
author = 'Jesse Ward',
author_email = '[email protected]',
description = ('Scrobble audio tracks played via Plex Media Center'),
license = 'MIT',
url = 'https://github.com/jesseward/plex-lastfm-scrobbler',
name='plex_scrobble',
version=VERSION,
author='Jesse Ward',
author_email='[email protected]',
description=('Scrobble audio tracks played via Plex Media Center'),
license='MIT',
url='https://github.com/jesseward/plex-lastfm-scrobbler',
packages=['plex_scrobble'],
entry_points={
'console_scripts': [
'plex-scrobble = plex_scrobble.__main__:main'
]
'console_scripts': [
'plex-scrobble = plex_scrobble.__main__:main'
]
},
install_requires=[
'click>=6.2',
'pylast>=1.6.0',
'toml>=0.9.1',
'requests>=2.12.0',
'mock>=2.0.0',
]
)
10 changes: 10 additions & 0 deletions tests/data/unicode_audio_payload_fetch_metadata.xml
@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<MediaContainer size="1" allowSync="1" identifier="com.plexapp.plugins.library" librarySectionID="2" librarySectionTitle="Music" librarySectionUUID="aaaaaaaa-1234-aaaa-aaaa-123451234512" mediaTagPrefix="/system/bundle/media/flags/" mediaTagVersion="1486068602">
<Track ratingKey="538" key="/library/metadata/538" parentRatingKey="537" grandparentRatingKey="524" guid="com.plexapp.agents.plexmusic://gracenote/track/431995094-D73A524D5EE92E857CCCDB331E0911D0/444891092-70D20C5F59D82102B7B91FCD4A41E1C6?lang=en" librarySectionID="2" type="track" title="daze" grandparentKey="/library/metadata/524" parentKey="/library/metadata/537" grandparentTitle="&#12376;&#12435;(&#33258;&#28982;&#12398;&#25973;P)" parentTitle="daze / days" originalTitle="&#12376;&#12435; Feat. &#12513;&#12452;&#12522;&#12450;" summary="" index="1" parentIndex="1" viewCount="1" lastViewedAt="1486331020" year="2014" thumb="/library/metadata/537/thumb/1486266973" parentThumb="/library/metadata/537/thumb/1486266973" grandparentThumb="/library/metadata/524/thumb/1486266954" duration="235502" addedAt="1486266669" updatedAt="1486266973">
<Media id="438" duration="235502" bitrate="4621" audioChannels="2" audioCodec="flac" container="flac">
<Part id="438" key="/library/parts/438/1486266413/file.flac" duration="235502" file="/path/to/file/01. daze.flac" size="136024317" container="flac">
<Stream id="766" streamType="2" selected="1" codec="flac" index="0" channels="2" bitrate="4621" audioChannelLayout="stereo" bitDepth="24" samplingRate="96000" />
</Part>
</Media>
</Track>
</MediaContainer>
14 changes: 12 additions & 2 deletions tests/test_cache.py
@@ -1,12 +1,19 @@
# -*- coding: utf-8 -*-
import logging
import os.path
import six
import sys
import unittest
from os import remove

from config import config
from plex_scrobble.scrobble_cache import ScrobbleCache

logging.disable(logging.CRITICAL)
# forcing to ASCII in Python 2 to ensure clients running in something
# other than a UTF8 enabled shell are working.
if sys.version[0] == '2':
reload(sys)
sys.setdefaultencoding("ascii")


class TestScrobbleCache(unittest.TestCase):
@@ -21,6 +28,9 @@ def setUp(self):
user_name = password = api_key = api_secret = config['lastfm']['user_name']
self.sc = ScrobbleCache(api_key, api_secret, user_name, password,
cache_location=config['plex-scrobble']['cache_location'])
self.album = six.u('Björk').encode('utf-8')
self.artist = six.u('CR∑∑KS').encode('utf-8')
self.title = six.u('deep burnt').encode('utf-8')
self._clean_file()

def tearDown(self):
@@ -29,7 +39,7 @@ def tearDown(self):

def test_add_record_to_cache(self):
""" tests the addition of a test item to the cache. """
self.sc.add('artist', 'title', 'album')
self.sc.add(self.artist, self.title, self.album)

self.assertTrue(self.sc.length() == 1)

27 changes: 27 additions & 0 deletions tests/test_fetch_metadata.py
@@ -0,0 +1,27 @@
# -*- coding: utf-8 -*-
import io
import unittest

from mock import patch

from config import config
from plex_scrobble.plex_monitor import fetch_metadata


class TestFetchMetaData(unittest.TestCase):

def test_fetch_metadata_unicode(self):
""" Validates parsing of response data from the PMS metadata API. """

with patch('plex_scrobble.plex_monitor.requests.get') as mock_get:
with io.open('data/unicode_audio_payload_fetch_metadata.xml', 'r', encoding='utf-8') as fh:
mock_get.return_value.text = fh.read()
metadata = fetch_metadata(64738, config)

self.assertEqual(metadata['artist'], b'\xe3\x81\x98\xe3\x82\x93 Feat. \xe3\x83\xa1\xe3\x82\xa4\xe3\x83\xaa\xe3\x82\xa2')
self.assertEqual(metadata['album'], b'daze / days')
self.assertEqual(metadata['title'], b'daze')


if __name__ == '__main__':
unittest.main()
5 changes: 3 additions & 2 deletions tests/test_log_parser.py
@@ -1,3 +1,4 @@
import io
import logging
import unittest

@@ -9,7 +10,7 @@
class TestUnicodeLogParser(unittest.TestCase):

def setUp(self):
with open('data/unicode_pms.log', 'r') as fh:
with io.open('data/unicode_pms.log', 'r', encoding='utf-8') as fh:
self.found = [parse_line(line) for line in fh if parse_line(line)]

def test_unicode_logparser_5_ids(self):
@@ -20,7 +21,7 @@ def test_unicode_logparser_5_ids(self):
class TestUniversalLogParser(unittest.TestCase):

def setUp(self):
with open('data/universal_transcode.log', 'r') as fh:
with io.open('data/universal_transcode.log', 'r') as fh:
self.found = [parse_line(line) for line in fh if parse_line(line)]

def test_universal_logparser_2_ids(self):