diff --git a/scrapy_jsonrpc/jsonrpc.py b/scrapy_jsonrpc/jsonrpc.py
index 7710bde..295cc67 100644
--- a/scrapy_jsonrpc/jsonrpc.py
+++ b/scrapy_jsonrpc/jsonrpc.py
@@ -7,7 +7,12 @@ import traceback
 
 from six.moves import urllib
 
-from scrapy.utils.python import unicode_to_str
+# Scrapy >= 1.0
+try:
+    from scrapy.utils.python import to_bytes
+# Scrapy 0.24 and below
+except ImportError:
+    from scrapy.utils.python import unicode_to_str as to_bytes
 
 from scrapy_jsonrpc.serialize import ScrapyJSONDecoder
 
@@ -37,7 +42,7 @@ def jsonrpc_client_call(url, method, *args, **kwargs):
     if args and kwargs:
         raise ValueError("Pass *args or **kwargs but not both to jsonrpc_client_call")
     req = {'jsonrpc': '2.0', 'method': method, 'params': args or kwargs, 'id': 1}
-    data = unicode_to_str(json.dumps(req))
+    data = to_bytes(json.dumps(req))
     body = urllib.request.urlopen(url, data).read()
     res = json.loads(body.decode('utf-8'))
     if 'result' in res:
diff --git a/scrapy_jsonrpc/serialize.py b/scrapy_jsonrpc/serialize.py
index 6bf0890..2902526 100644
--- a/scrapy_jsonrpc/serialize.py
+++ b/scrapy_jsonrpc/serialize.py
@@ -6,7 +6,12 @@ import six
 
 from twisted.internet import defer
 
-from scrapy.spiders import Spider
+# Scrapy >= 1.0
+try:
+    from scrapy.spiders import Spider
+# Scrapy 0.24 and below
+except ImportError:
+    from scrapy.spider import Spider
 from scrapy.http import Request, Response
 from scrapy.item import BaseItem
 
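
The change in both files is the same try/except import fallback: prefer the Scrapy >= 1.0 module path, and on ImportError fall back to the Scrapy 0.24 name, aliased so callers use a single identifier. Below is a minimal standalone sketch of that pattern outside the diff; it assumes a Scrapy installation exposing one of the two import paths, and the 'ping' method name is a placeholder, not part of this change.

```python
import json

# Compatibility import, mirroring the diff: try the Scrapy >= 1.0 helper first,
# then fall back to the Scrapy 0.24-and-below name and alias it to to_bytes.
try:
    from scrapy.utils.python import to_bytes
except ImportError:
    from scrapy.utils.python import unicode_to_str as to_bytes

# Callers then encode a JSON-RPC request body with one name, regardless of
# which Scrapy version is installed ('ping' is an illustrative method name).
req = {'jsonrpc': '2.0', 'method': 'ping', 'params': [], 'id': 1}
data = to_bytes(json.dumps(req))
print(type(data))  # bytes on Python 3, str on Python 2
```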