Backwards compatible support for python 3 #37

Open
wants to merge 2 commits into base: master
11 changes: 5 additions & 6 deletions pyechonest/artist.py
@@ -8,9 +8,9 @@
The Artist module loosely covers http://developer.echonest.com/docs/v4/artist.html
Refer to the official api documentation if you are unsure about something.
"""
import util
from proxies import ArtistProxy, ResultList
from song import Song
from . import util
from .proxies import ArtistProxy, ResultList
from .song import Song


class Artist(ArtistProxy):
@@ -248,12 +248,11 @@ def get_foreign_id(self, idspace='musicbrainz', cache=True):
u'7digital:artist:186042'
>>>
"""
if not (cache and ('foreign_ids' in self.cache) and filter(lambda d: d.get('catalog') == idspace, self.cache['foreign_ids'])):
if not (cache and ('foreign_ids' in self.cache) and [d for d in self.cache['foreign_ids'] if d.get('catalog') == idspace]):
response = self.get_attribute('profile', bucket=['id:'+idspace])
foreign_ids = response['artist'].get("foreign_ids", [])
self.cache['foreign_ids'] = self.cache.get('foreign_ids', []) + foreign_ids
cval = filter(lambda d: d.get('catalog') == util.map_idspace(idspace),
self.cache.get('foreign_ids'))
cval = [d for d in self.cache.get('foreign_ids') if d.get('catalog') == util.map_idspace(idspace)]
return cval[0].get('foreign_id') if cval else None

def get_twitter_id(self, cache=True):
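
For context on the filter-to-list-comprehension changes in this file: Python 2's filter() returns a list, but Python 3's returns a lazy iterator, so an indexing expression like cval[0] would raise TypeError under Python 3. A list comprehension yields a real list on both versions. A minimal illustrative sketch (sample data only, not part of the diff):

foreign_ids = [{'catalog': 'musicbrainz', 'foreign_id': 'musicbrainz:artist:abc'},
               {'catalog': '7digital', 'foreign_id': '7digital:artist:186042'}]

# Python 2: filter() returns a list; Python 3: a lazy filter object,
# so indexing it with [0] raises TypeError.
lazy = filter(lambda d: d.get('catalog') == '7digital', foreign_ids)

# The list comprehension behaves identically on both versions and supports indexing.
cval = [d for d in foreign_ids if d.get('catalog') == '7digital']
print(cval[0]['foreign_id'])   # 7digital:artist:186042
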
6 changes: 3 additions & 3 deletions pyechonest/catalog.py
@@ -15,9 +15,9 @@
import datetime

import warnings
import util
from proxies import CatalogProxy, ResultList
import artist, song
from . import util
from .proxies import CatalogProxy, ResultList
from . import artist, song

# deal with datetime in json
dthandler = lambda obj: obj.isoformat() if isinstance(obj, datetime.datetime) else None
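
The import rewrites here and in artist.py, playlist.py, proxies.py, sandbox.py, song.py and track.py all follow one pattern: Python 2's implicit relative imports (import util picking up the sibling module) are gone in Python 3, while explicit relative imports work on Python 2.5+ and Python 3 alike. A sketch of the idea, assuming the pyechonest package layout with util.py and proxies.py as sibling modules (module-level code, not a standalone script):

# inside pyechonest/catalog.py
# Python 2 only: silently resolves to the sibling pyechonest/util.py.
# Python 3 raises ImportError because 'util' is looked up as a top-level module.
#import util

# Explicit relative imports work under both interpreters.
from . import util
from .proxies import CatalogProxy, ResultList
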
8 changes: 4 additions & 4 deletions pyechonest/playlist.py
@@ -9,10 +9,10 @@
Refer to the official api documentation if you are unsure about something.
"""

import util
from proxies import PlaylistProxy
from song import Song
import catalog
from . import util
from .proxies import PlaylistProxy
from .song import Song
from . import catalog
import logging
logger = logging.getLogger(__name__)

22 changes: 11 additions & 11 deletions pyechonest/proxies.py
@@ -5,7 +5,7 @@
Copyright (c) 2010 The Echo Nest. All rights reserved.
Created by Tyler Williams on 2010-04-25.
"""
import util
from . import util

class ResultList(list):
def __init__(self, li, start=0, total=0):
@@ -35,7 +35,7 @@ def __init__(self, identifier, buckets = None, **kwargs):
buckets = buckets or []
self.id = identifier
self._object_type = 'artist'
kwargs = dict((str(k), v) for (k,v) in kwargs.iteritems())
kwargs = dict((str(k), v) for (k,v) in kwargs.items())
# the following are integral to all artist objects... the rest is up to you!
core_attrs = ['name']

@@ -59,7 +59,7 @@ def __init__(self, identifier, type, buckets = None, **kwargs):
buckets = buckets or []
self.id = identifier
self._object_type = 'catalog'
kwargs = dict((str(k), v) for (k,v) in kwargs.iteritems())
kwargs = dict((str(k), v) for (k,v) in kwargs.items())
# the following are integral to all catalog objects... the rest is up to you!
core_attrs = ['name']
if not all(ca in kwargs for ca in core_attrs):
@@ -113,10 +113,10 @@ def __init__(self, session_id = None, buckets = None, **kwargs):
kwargs['bucket'] = buckets
kwargs['genre'] = kwargs['genres']
del kwargs['genres']
kwargs = dict((str(k), v) for (k,v) in kwargs.iteritems())
kwargs = dict((str(k), v) for (k,v) in kwargs.items())

if not all(ca in kwargs for ca in core_attrs):
kwargs = dict((str(k), v) for (k,v) in kwargs.iteritems())
kwargs = dict((str(k), v) for (k,v) in kwargs.items())
profile = self.get_attribute('create', **kwargs)
kwargs.update(profile)
[self.__dict__.update({ca:kwargs.pop(ca)}) for ca in core_attrs if ca in kwargs]
@@ -131,18 +131,18 @@ def __init__(self, identifier, buckets = None, **kwargs):
buckets = buckets or []
self.id = identifier
self._object_type = 'song'
kwargs = dict((str(k), v) for (k,v) in kwargs.iteritems())
kwargs = dict((str(k), v) for (k,v) in kwargs.items())

# BAW -- this is debug output from identify that returns a track_id. i am not sure where else to access this..
if kwargs.has_key("track_id"):
if "track_id" in kwargs:
self.track_id = kwargs["track_id"]
if kwargs.has_key("tag"):
if "tag" in kwargs:
self.tag = kwargs["tag"]
if kwargs.has_key("score"):
if "score" in kwargs:
self.score = kwargs["score"]
if kwargs.has_key('audio'):
if 'audio' in kwargs:
self.audio = kwargs['audio']
if kwargs.has_key('release_image'):
if 'release_image' in kwargs:
self.release_image = kwargs['release_image']

# the following are integral to all song objects... the rest is up to you!
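
The dictionary changes throughout proxies.py are two separate Python 3 removals: dict.iteritems() no longer exists (items() does, on both versions), and dict.has_key(k) is replaced by the `k in d` test. A small illustrative sketch (the kwargs values are made up):

kwargs = {u'track_id': 'TRABC123', u'score': 87}

# iteritems() -> items(): valid on Python 2 and 3.
kwargs = dict((str(k), v) for (k, v) in kwargs.items())

# has_key('score') -> 'score' in kwargs
if 'score' in kwargs:
    print(kwargs['score'])

On Python 2, items() builds a list where iteritems() was lazy; for these small kwargs dicts the extra copy is negligible, and six.iteritems() would preserve laziness if it ever mattered.
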
4 changes: 2 additions & 2 deletions pyechonest/sandbox.py
@@ -14,8 +14,8 @@
import simplejson as json
import datetime

import util
from proxies import ResultList
from . import util
from .proxies import ResultList

def list(sandbox_name, results=15, start=0):
"""
14 changes: 7 additions & 7 deletions pyechonest/song.py
@@ -9,8 +9,8 @@
Refer to the official api documentation if you are unsure about something.
"""
import os
import util
from proxies import SongProxy
from . import util
from .proxies import SongProxy

try:
import json
@@ -163,7 +163,7 @@ def get_song_type(self, cache=True):
"""
if not (cache and ('song_type' in self.cache)):
response = self.get_attribute('profile', bucket='song_type')
if response['songs'][0].has_key('song_type'):
if 'song_type' in response['songs'][0]:
self.cache['song_type'] = response['songs'][0]['song_type']
else:
self.cache['song_type'] = []
@@ -264,14 +264,14 @@ def get_foreign_id(self, idspace='', cache=True):
>>>
"""
idspace = util.map_idspace(idspace)
if not (cache and ('foreign_ids' in self.cache) and filter(lambda d: d.get('catalog') == idspace, self.cache['foreign_ids'])):
if not (cache and ('foreign_ids' in self.cache) and [d for d in self.cache['foreign_ids'] if d.get('catalog') == idspace]):
response = self.get_attribute('profile', bucket=['id:'+idspace])
rsongs = response['songs']
if len(rsongs) == 0:
return None
foreign_ids = rsongs[0].get("foreign_ids", [])
self.cache['foreign_ids'] = self.cache.get('foreign_ids', []) + foreign_ids
cval = filter(lambda d: d.get('catalog') == idspace, self.cache.get('foreign_ids'))
cval = [d for d in self.cache.get('foreign_ids') if d.get('catalog') == idspace]
return cval[0].get('foreign_id') if cval else None

def get_song_discovery(self, cache=True):
@@ -351,9 +351,9 @@ def get_tracks(self, catalog, cache=True):
# don't blow away the cache for other catalogs
potential_tracks = response['songs'][0].get('tracks', [])
existing_track_ids = [tr['foreign_id'] for tr in self.cache['tracks']]
new_tds = filter(lambda tr: tr['foreign_id'] not in existing_track_ids, potential_tracks)
new_tds = [tr for tr in potential_tracks if tr['foreign_id'] not in existing_track_ids]
self.cache['tracks'].extend(new_tds)
return filter(lambda tr: tr['catalog']==util.map_idspace(catalog), self.cache['tracks'])
return [tr for tr in self.cache['tracks'] if tr['catalog']==util.map_idspace(catalog)]


def search(title=None, artist=None, artist_id=None, combined=None, description=None, style=None, mood=None,
14 changes: 8 additions & 6 deletions pyechonest/track.py
@@ -1,14 +1,16 @@
import urllib2
try:
import json
except ImportError:
import simplejson as json

import hashlib
from proxies import TrackProxy
import util
import time

from six.moves import urllib

from .proxies import TrackProxy
from . import util

# Seconds to wait for asynchronous track/upload or track/analyze jobs to complete.
DEFAULT_ASYNC_TIMEOUT = 60

@@ -122,14 +124,14 @@ def get_analysis(self):
# Try the existing analysis_url first. This expires shortly
# after creation.
try:
json_string = urllib2.urlopen(self.analysis_url).read()
except urllib2.HTTPError:
json_string = urllib.request.urlopen(self.analysis_url).read()
except urllib.error.HTTPError:
# Probably the analysis_url link has expired. Refresh it.
param_dict = dict(id = self.id)
new_track = _profile(param_dict, DEFAULT_ASYNC_TIMEOUT)
if new_track and new_track.analysis_url:
self.analysis_url = new_track.analysis_url
json_string = urllib2.urlopen(self.analysis_url).read()
json_string = urllib.request.urlopen(self.analysis_url).read()
else:
raise Exception("Failed to create track analysis.")

45 changes: 23 additions & 22 deletions pyechonest/util.py
@@ -7,24 +7,25 @@

Utility functions to support the Echo Nest web API interface.
"""
import urllib
import urllib2
import httplib
import config
import logging
import socket
import re
import time
import os
import subprocess
import traceback
from types import StringType, UnicodeType

try:
import json
except ImportError:
import simplejson as json

import six
from six.moves import urllib
from six.moves import http_client

from . import config

logger = logging.getLogger(__name__)
TYPENAMES = (
('AR', 'artist'),
@@ -43,24 +44,24 @@
long_regex = re.compile(r'music://id.echonest.com/.+?/(%s)/(%s)[0-9A-Z]{16}\^?([0-9\.]+)?' % (r'|'.join(n[0] for n in TYPENAMES), r'|'.join(n[0] for n in TYPENAMES)))
headers = [('User-Agent', 'Pyechonest %s' % (config.__version__,))]

class MyBaseHandler(urllib2.BaseHandler):
class MyBaseHandler(urllib.request.BaseHandler):
def default_open(self, request):
if config.TRACE_API_CALLS:
logger.info("%s" % (request.get_full_url(),))
request.start_time = time.time()
return None

class MyErrorProcessor(urllib2.HTTPErrorProcessor):
class MyErrorProcessor(urllib.request.HTTPErrorProcessor):
def http_response(self, request, response):
code = response.code
if config.TRACE_API_CALLS:
logger.info("took %2.2fs: (%i)" % (time.time()-request.start_time,code))
if code/100 in (2, 4, 5):
return response
else:
urllib2.HTTPErrorProcessor.http_response(self, request, response)
urllib.request.HTTPErrorProcessor.http_response(self, request, response)

opener = urllib2.build_opener(MyBaseHandler(), MyErrorProcessor())
opener = urllib.request.build_opener(MyBaseHandler(), MyErrorProcessor())
opener.addheaders = headers

class EchoNestException(Exception):
@@ -113,7 +114,7 @@ def get_successful_response(raw_json):
http_status = None
raw_json = raw_json.read()
try:
response_dict = json.loads(raw_json)
response_dict = json.loads(raw_json.decode('utf-8'))
status_dict = response_dict['response']['status']
code = int(status_dict['code'])
message = status_dict['message']
@@ -141,15 +142,15 @@ def callm(method, param_dict, POST=False, socket_timeout=None, data=None):
if not socket_timeout:
socket_timeout = config.CALL_TIMEOUT

for key,val in param_dict.iteritems():
for key,val in param_dict.items():
if isinstance(val, list):
param_list.extend( [(key,subval) for subval in val] )
elif val is not None:
if isinstance(val, unicode):
if isinstance(val, six.text_type):
val = val.encode('utf-8')
param_list.append( (key,val) )

params = urllib.urlencode(param_list)
params = urllib.parse.urlencode(param_list)

orig_timeout = socket.getdefaulttimeout()
socket.setdefaulttimeout(socket_timeout)
@@ -164,7 +165,7 @@ def callm(method, param_dict, POST=False, socket_timeout=None, data=None):

if data is None:
data = ''
data = urllib.urlencode(data)
data = urllib.parse.urlencode(data)
data = "&".join([data, params])

f = opener.open(url, data=data)
@@ -184,7 +185,7 @@ def callm(method, param_dict, POST=False, socket_timeout=None, data=None):

if config.TRACE_API_CALLS:
logger.info("%s/%s" % (host+':'+str(port), url,))
conn = httplib.HTTPConnection(host, port = port)
conn = http_client.HTTPConnection(host, port = port)
conn.request('POST', url, body = data, headers = dict([('Content-Type', 'application/octet-stream')]+headers))
f = conn.getresponse()

@@ -203,7 +204,7 @@ def callm(method, param_dict, POST=False, socket_timeout=None, data=None):
response_dict = get_successful_response(f)
return response_dict

except IOError, e:
except IOError as e:
if hasattr(e, 'reason'):
raise EchoNestIOError(error=e.reason)
elif hasattr(e, 'code'):
@@ -243,15 +244,15 @@ def build_request(url):
if not socket_timeout:
socket_timeout = config.CALL_TIMEOUT

for key,val in param_dict.iteritems():
for key,val in param_dict.items():
if isinstance(val, list):
param_list.extend( [(key,subval) for subval in val] )
elif val is not None:
if isinstance(val, unicode):
if isinstance(val, six.text_type):
val = val.encode('utf-8')
param_list.append( (key,val) )

params = urllib.urlencode(param_list)
params = urllib.parse.urlencode(param_list)

orig_timeout = socket.getdefaulttimeout()
socket.setdefaulttimeout(socket_timeout)
@@ -284,9 +285,9 @@ def postChunked(host, selector, fields, files):
memory) and the ability to work from behind a proxy (due to its
basis on urllib2).
"""
params = urllib.urlencode(fields)
params = urllib.parse.urlencode(fields)
url = 'http://%s%s?%s' % (host, selector, params)
u = urllib2.urlopen(url, files)
u = urllib.request.urlopen(url, files)
result = u.read()
[fp.close() for (key, fp) in files]
return result
@@ -295,7 +296,7 @@ def postChunked(host, selector, fields, files):
def fix(x):
# we need this to fix up all the dict keys to be strings, not unicode objects
assert(isinstance(x,dict))
return dict((str(k), v) for (k,v) in x.iteritems())
return dict((str(k), v) for (k,v) in x.items())


def map_idspace(input_idspace):
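
Two smaller points in util.py round out the compatibility story: the unicode builtin is gone in Python 3, so six.text_type (unicode on 2, str on 3) stands in for it, and urlopen(...).read() returns bytes on Python 3, so the payload is decoded before json.loads (older Python 3 releases reject bytes there). The `except IOError, e` syntax likewise becomes `except IOError as e`, which Python 2.6+ and 3 both accept. An illustrative sketch with made-up values:

import json
import six

val = u'Radiohead'
if isinstance(val, six.text_type):   # replaces isinstance(val, unicode)
    val = val.encode('utf-8')        # bytes, ready for urlencode

raw_json = b'{"response": {"status": {"code": 0, "message": "Success"}}}'
response_dict = json.loads(raw_json.decode('utf-8'))  # decode bytes -> str first
print(response_dict['response']['status']['message'])
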