
fix issues when running with python 2.7
timotheus committed Oct 29, 2013
1 parent 5b20db2 commit f1adcfa
Showing 4 changed files with 31 additions and 12 deletions.
20 changes: 12 additions & 8 deletions ebaysdk/__init__.py
@@ -9,10 +9,14 @@
 import sys
 import re
 import traceback
-import io
 import yaml
 import pycurl
-import urllib.request, urllib.parse, urllib.error
+from io import BytesIO as StringIO
+
+try:
+    from urllib.parse import urlencode
+except ImportError:
+    from urllib import urlencode
 
 try:
     import simplejson as json
@@ -309,8 +313,8 @@ def _execute_http_request(self):
         self._curl.setopt(pycurl.URL, str(request_url))
         self._curl.setopt(pycurl.SSL_VERIFYPEER, 0)
 
-        self._response_header = io.StringIO()
-        self._response_body = io.StringIO()
+        self._response_header = StringIO()
+        self._response_body = StringIO()
 
         self._curl.setopt(pycurl.CONNECTTIMEOUT, self.timeout)
         self._curl.setopt(pycurl.TIMEOUT, self.timeout)
@@ -672,7 +676,7 @@ def _execute_http_request(self):
 
         request_url = self.url
         if self.call_data and self.method == 'GET':
-            request_url = request_url + '?' + urllib.parse.urlencode(self.call_data)
+            request_url = request_url + '?' + urlencode(self.call_data)
 
         elif self.method == 'POST':
             request_xml = self._build_request_xml()
@@ -683,8 +687,8 @@
         self._curl.setopt(pycurl.URL, str(request_url))
         self._curl.setopt(pycurl.SSL_VERIFYPEER, 0)
 
-        self._response_header = io.StringIO()
-        self._response_body = io.StringIO()
+        self._response_header = StringIO()
+        self._response_body = StringIO()
 
         self._curl.setopt(pycurl.CONNECTTIMEOUT, self.timeout)
         self._curl.setopt(pycurl.TIMEOUT, self.timeout)
@@ -748,7 +752,7 @@ def _build_request_xml(self):
         if type(self.call_data) is str:
             self.call_xml = self.call_data
         else:
-            self.call_xml = urllib.parse.urlencode(self.call_data)
+            self.call_xml = urlencode(self.call_data)
 
         return self.call_xml
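Aside: the __init__.py changes come down to two portability moves, importing urlencode from whichever module provides it and giving pycurl byte-oriented buffers instead of text ones. Below is a minimal standalone sketch of that pattern, not part of the commit; the fetch() helper and its parameters are invented for illustration, while the SDK's own _execute_http_request sets many more options.

# Sketch only: try/except urlencode import plus a BytesIO response buffer,
# so the same code runs on Python 2.7 and Python 3. fetch() is hypothetical.
import pycurl
from io import BytesIO

try:
    from urllib.parse import urlencode   # Python 3
except ImportError:
    from urllib import urlencode         # Python 2.7

def fetch(url, params, timeout=20):
    body = BytesIO()                      # pycurl writes raw bytes on both versions
    curl = pycurl.Curl()
    curl.setopt(pycurl.URL, str(url + '?' + urlencode(params)))
    curl.setopt(pycurl.WRITEFUNCTION, body.write)
    curl.setopt(pycurl.TIMEOUT, timeout)
    curl.perform()
    curl.close()
    return body.getvalue()                # serialized response as bytes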
9 changes: 6 additions & 3 deletions ebaysdk/utils.py
@@ -8,7 +8,7 @@
 
 import xml.etree.ElementTree as ET
 import re
-from io import StringIO
+from io import BytesIO as StringIO
 
 class object_dict(dict):
     """object view of dict, you can
@@ -423,10 +423,13 @@ def to_string(root, pretty=False):
 
     tree = ET.ElementTree(root)
     fileobj = StringIO()
-    # fileobj.write('<?xml version="1.0" encoding="%s"?>' % encoding)
+
+    # asdf fileobj.write('<?xml version="1.0" encoding="%s"?>' % encoding)
+
     if pretty:
         fileobj.write('\n')
-    tree.write(fileobj, 'unicode')
+
+    tree.write(fileobj, 'utf-8')
     return fileobj.getvalue()
 
 
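Aside: the utils.py change pairs a BytesIO buffer with a byte encoding when serializing the tree, since ElementTree's 'unicode' text-output mode is the Python 3 spelling while 'utf-8' yields bytes on both interpreters. A small sketch of the version-agnostic call follows; it is not part of the commit and the element names are made up.

# Sketch only: serialize an ElementTree into an in-memory bytes buffer in a
# way that works on Python 2.7 and Python 3.
import xml.etree.ElementTree as ET
from io import BytesIO

root = ET.Element('findItemsRequest')
ET.SubElement(root, 'keywords').text = 'shoes'

buf = BytesIO()
ET.ElementTree(root).write(buf, 'utf-8')   # byte encoding; 'unicode' is the Python 3 text-mode spelling
xml_bytes = buf.getvalue()                 # serialized document as bytes on both versions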
6 changes: 6 additions & 0 deletions samples/shopping.py
@@ -10,6 +10,11 @@
 import json
 from optparse import OptionParser
 
+try:
+    input = raw_input
+except NameError:
+    pass
+
 sys.path.insert(0, '%s/../' % os.path.dirname(__file__))
 
 import ebaysdk
@@ -86,6 +91,7 @@ def popularSearches(opts):
     choice = True
 
     while choice:
+
         choice = input('Search: ')
 
         if choice == 'quit':
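Aside: the shim added at the top of shopping.py exists because Python 2.7's input() evaluates the typed text as an expression, while raw_input() returns it verbatim; aliasing the name lets the Python 3 spelling used later in the sample work unchanged on 2.7. In isolation the pattern looks like this (illustrative sketch, not part of the commit):

# Sketch only: make input() return a plain string on Python 2.7 and Python 3.
try:
    input = raw_input       # Python 2.7: use the string-returning builtin
except NameError:
    pass                    # Python 3: raw_input is gone; input() already returns a string

choice = input('Search: ')  # plain string on both interpreters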
8 changes: 7 additions & 1 deletion samples/trading.py
@@ -9,6 +9,7 @@
 import sys
 import datetime
 import json
+import types
 from optparse import OptionParser
 
 sys.path.insert(0, '%s/../' % os.path.dirname(__file__))
@@ -256,7 +257,12 @@ def memberMessages(opts):
     dump(api)
 
     if api.response_dict().MemberMessage:
-        for m in api.response_dict().MemberMessage.MemberMessageExchange:
+        messages = api.response_dict().MemberMessage.MemberMessageExchange
+
+        if type(messages) != types.ListType:
+            messages = [ messages ]
+
+        for m in messages:
             print("%s: %s" % (m.CreationDate, m.Question.Subject[:50]))
 
 def getUser(opts):
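Aside: the trading.py change handles the fact that the parsed response yields a bare node when only one MemberMessageExchange is present and a list when there are several, so a lone node is wrapped before iterating. A generic sketch of that normalization follows, not part of the commit; isinstance(value, list) is a version-neutral stand-in for the types.ListType comparison, as_list() is a hypothetical helper, and the sample data is made up.

# Sketch only: normalize a parsed value that may be a single node or a list.
def as_list(value):
    """Wrap a lone item in a list so callers can always iterate."""
    return value if isinstance(value, list) else [value]

single = {'CreationDate': '2013-10-29', 'Subject': 'Shipping question'}
many = [single, {'CreationDate': '2013-10-30', 'Subject': 'Combined postage?'}]

for m in as_list(single):   # one message -> wrapped into a one-element list
    print(m['Subject'])
for m in as_list(many):     # already a list -> left as-is
    print(m['Subject'])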
