
Commit 9f6ddbb

Python 3.10 compatibility (#166)
1 parent 34ecb80

File tree

17 files changed: +67 −49 lines

.github/workflows/main.yml

Lines changed: 18 additions & 6 deletions
@@ -12,20 +12,26 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        include:
+        include:
+          - python-version: "pypy3"
+            env:
+              TOXENV: "msgpack"
+          - python-version: "pypy3"
+            env:
+              TOXENV: "json"
+
           - python-version: "2.7"
             env:
               TOXENV: "msgpack"
           - python-version: "2.7"
             env:
               TOXENV: "json"
-          - python-version: "pypy3"
+          - python-version: "3.5"
             env:
               TOXENV: "msgpack"
-          - python-version: "pypy3"
+          - python-version: "3.5"
             env:
               TOXENV: "json"
-
           - python-version: "3.6"
             env:
               TOXENV: "msgpack"

@@ -50,7 +56,13 @@ jobs:
           - python-version: "3.9"
             env:
               TOXENV: "json"
-
+          - python-version: "3.10"
+            env:
+              TOXENV: "msgpack"
+          - python-version: "3.10"
+            env:
+              TOXENV: "json"
+
     steps:
     - uses: actions/checkout@v2
     - name: Set up Python ${{ matrix.python-version }}

@@ -65,4 +77,4 @@ jobs:
         tox

     - name: Upload to codecov
-      uses: codecov/codecov-action@v2
+      uses: codecov/codecov-action@v2
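
Net effect on the matrix: the pypy3 entries move to the top of the include list, CPython 3.5 entries are added, and new 3.10 jobs cover both the msgpack and json TOXENVs.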

.gitignore

Lines changed: 1 addition & 0 deletions
@@ -15,3 +15,4 @@ docs/_build
 .DS_Store
 pytestdebug.log
 .idea
+coverage.xml
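
`coverage.xml` is the report written by the `--cov-report=xml` flag in the tox commands and uploaded by the codecov step, so it should stay out of version control.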

README.rst

Lines changed: 10 additions & 1 deletion
@@ -2,16 +2,25 @@
 Client interface for Scrapinghub API
 ====================================

+.. image:: https://img.shields.io/pypi/v/scrapinghub.svg
+   :target: https://pypi.org/project/scrapinghub
+
+.. image:: https://img.shields.io/pypi/pyversions/scrapinghub.svg
+   :target: https://pypi.org/project/scrapinghub
+
 .. image:: https://github.com/scrapinghub/python-scrapinghub/actions/workflows/main.yml/badge.svg
    :target: https://github.com/scrapinghub/python-scrapinghub/actions/workflows/main.yml

+.. image:: https://codecov.io/gh/scrapinghub/python-scrapinghub/branch/master/graph/badge.svg
+   :target: https://app.codecov.io/gh/scrapinghub/python-scrapinghub
+
 The ``scrapinghub`` is a Python library for communicating with the `Scrapinghub API`_.


 Requirements
 ============

-* Python 2.7 or above
+* Python 2.7 or Python 3.5+


 Installation

requirements-test.txt

Lines changed: 4 additions & 7 deletions
@@ -1,8 +1,5 @@
 mock
-vcrpy==1.10.3
-# FIXME remove the constraint after resolving
-# https://github.com/pytest-dev/pytest/issues/2966
-pytest<3.3.0
-pytest-cov<2.6.0
-pytest-catchlog
-responses==0.10.6
+vcrpy
+pytest
+pytest-cov
+responses
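
All version pins are dropped: the `pytest<3.3.0` and `pytest-cov<2.6.0` constraints only worked around pytest-dev/pytest#2966, and `pytest-catchlog` goes away because its log-capture support was merged into pytest core in 3.3.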

scrapinghub/client/collections.py

Lines changed: 6 additions & 6 deletions
@@ -1,7 +1,7 @@
 from __future__ import absolute_import
-import collections

 from six import string_types
+from six.moves import collections_abc

 from ..hubstorage.collectionsrt import Collection as _Collection

@@ -82,7 +82,7 @@ def iter(self):

         :return: an iterator over collections list where each collection is
             represented by a dictionary with ('name','type') fields.
-        :rtype: :class:`collections.Iterable[dict]`
+        :rtype: :class:`collections.abc.Iterable[dict]`
         """
         return self._origin.apiget('list')

@@ -130,9 +130,9 @@ class Collection(object):
         >>> for elem in foo_store.iter(count=1)):
         ...     print(elem)
         [{'_key': '002d050ee3ff6192dcbecc4e4b4457d7', 'value': '1447221694537'}]
-
+
         - get generator over item keys::
-
+
         >>> keys = foo_store.iter(nodata=True, meta=["_key"]))
         >>> next(keys)
         {'_key': '002d050ee3ff6192dcbecc4e4b4457d7'}

@@ -185,7 +185,7 @@ def delete(self, keys):
         The method returns ``None`` (original method returns an empty generator).
         """
         if (not isinstance(keys, string_types) and
-                not isinstance(keys, collections.Iterable)):
+                not isinstance(keys, collections_abc.Iterable)):
             raise ValueError("You should provide string key or iterable "
                              "object providing string keys")
         self._origin.delete(keys)

@@ -219,7 +219,7 @@ def iter(self, key=None, prefix=None, prefixcount=None, startts=None,
         :param requests_params: (optional) a dict with optional requests params.
         :param \*\*params: (optional) additional query params for the request.
         :return: an iterator over items list.
-        :rtype: :class:`collections.Iterable[dict]`
+        :rtype: :class:`collections.abc.Iterable[dict]`
         """
         update_kwargs(params, key=key, prefix=prefix, prefixcount=prefixcount,
                       startts=startts, endts=endts,
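
This import swap is the heart of the commit: Python 3.3 moved the container ABCs into `collections.abc`, keeping deprecated aliases in `collections`, and Python 3.10 removed those aliases, so `collections.Iterable` no longer exists there. `six.moves.collections_abc` resolves to `collections` on Python 2 and to `collections.abc` on Python 3, which preserves 2.7 support. A minimal sketch of the pattern, with a hypothetical `normalize_keys` helper mirroring the `delete()` check above:

    from six import string_types
    from six.moves import collections_abc  # `collections` on py2, `collections.abc` on py3


    def normalize_keys(keys):
        # Accept a single string key or any iterable of string keys,
        # matching the validation Collection.delete() performs.
        if isinstance(keys, string_types):
            return [keys]
        if isinstance(keys, collections_abc.Iterable):
            return list(keys)
        raise ValueError("You should provide string key or iterable "
                         "object providing string keys")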

scrapinghub/client/frontiers.py

Lines changed: 4 additions & 4 deletions
@@ -99,7 +99,7 @@ def iter(self):
         """Iterate through frontiers.

         :return: an iterator over frontiers names.
-        :rtype: :class:`collections.Iterable[str]`
+        :rtype: :class:`collections.abc.Iterable[str]`
         """
         return iter(self.list())

@@ -174,7 +174,7 @@ def iter(self):
         """Iterate through slots.

         :return: an iterator over frontier slots names.
-        :rtype: :class:`collections.Iterable[str]`
+        :rtype: :class:`collections.abc.Iterable[str]`
         """
         return iter(self.list())

@@ -321,7 +321,7 @@ def iter(self, **params):

         :param \*\*params: (optional) additional query params for the request.
         :return: an iterator over fingerprints.
-        :rtype: :class:`collections.Iterable[str]`
+        :rtype: :class:`collections.abc.Iterable[str]`
         """
         origin = self._frontier._frontiers._origin
         path = (self._frontier.key, 's', self.key, 'f')

@@ -358,7 +358,7 @@ def iter(self, mincount=None, **params):
         :param \*\*params: (optional) additional query params for the request.
         :return: an iterator over request batches in the queue where each
             batch is represented with a dict with ('id', 'requests') field.
-        :rtype: :class:`collections.Iterable[dict]`
+        :rtype: :class:`collections.abc.Iterable[dict]`
         """
         origin = self._frontier._frontiers._origin
         path = (self._frontier.key, 's', self.key, 'q')

scrapinghub/client/items.py

Lines changed: 1 addition & 1 deletion
@@ -107,7 +107,7 @@ def list_iter(self, chunksize=1000, *args, **kwargs):
            down by `chunksize`.

        :return: an iterator over items, yielding lists of items.
-       :rtype: :class:`collections.Iterable`
+       :rtype: :class:`collections.abc.Iterable`
        """

        start = kwargs.pop("start", 0)

scrapinghub/client/projects.py

Lines changed: 1 addition & 1 deletion
@@ -63,7 +63,7 @@ def iter(self):
         Provided for the sake of API consistency.

         :return: an iterator over project ids list.
-        :rtype: :class:`collections.Iterable[int]`
+        :rtype: :class:`collections.abc.Iterable[int]`
         """
         return iter(self.list())

scrapinghub/client/proxy.py

Lines changed: 2 additions & 2 deletions
@@ -106,7 +106,7 @@ def iter(self, _path=None, count=None, requests_params=None, **apiparams):

         :param count: limit amount of elements.
         :return: an iterator over elements list.
-        :rtype: :class:`collections.Iterable`
+        :rtype: :class:`collections.abc.Iterable`
         """
         update_kwargs(apiparams, count=count)
         apiparams = self._modify_iter_params(apiparams)

@@ -165,7 +165,7 @@ def iter(self):
         """Iterate through key/value pairs.

         :return: an iterator over key/value pairs.
-        :rtype: :class:`collections.Iterable`
+        :rtype: :class:`collections.abc.Iterable`
         """
         return six.iteritems(next(self._origin.apiget()))

scrapinghub/hubstorage/resourcetype.py

Lines changed: 4 additions & 5 deletions
@@ -1,12 +1,11 @@
-import time
 import json
-import socket
 import logging
-from collections import MutableMapping
+import socket
+import time

 import six
-from six.moves import range
 import requests.exceptions as rexc
+from six.moves import range, collections_abc

 from .utils import urlpathjoin, xauth
 from .serialization import jlencode, jldecode, mpdecode

@@ -230,7 +229,7 @@ def stats(self):
         return next(self.apiget('stats', chunk_size=STATS_CHUNK_SIZE))


-class MappingResourceType(ResourceType, MutableMapping):
+class MappingResourceType(ResourceType, collections_abc.MutableMapping):

     _cached = None
     ignore_fields = ()
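
Unlike the docstring edits elsewhere, this change affects runtime behavior: `MappingResourceType` really inherits from `MutableMapping`, which is gone from `collections` on 3.10. A subclass only has to supply five methods (`__getitem__`, `__setitem__`, `__delitem__`, `__iter__`, `__len__`) and inherits the rest of the dict-like API (`get`, `update`, `pop`, ...). A minimal sketch, assuming a plain dict cache like the `_cached` attribute above (the class name is illustrative):

    from six.moves import collections_abc


    class DictBackedResource(collections_abc.MutableMapping):
        """Illustrative only: a mapping backed by a plain dict cache."""

        def __init__(self):
            self._cached = {}

        def __getitem__(self, key):
            return self._cached[key]

        def __setitem__(self, key, value):
            self._cached[key] = value

        def __delitem__(self, key):
            del self._cached[key]

        def __iter__(self):
            return iter(self._cached)

        def __len__(self):
            return len(self._cached)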

setup.py

Lines changed: 4 additions & 3 deletions
@@ -28,18 +28,19 @@
     package_data={'scrapinghub': ['VERSION']},
     install_requires=['requests>=1.0', 'retrying>=1.3.3', 'six>=1.10.0'],
     extras_require={'msgpack': mpack_required},
+    python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*',
     classifiers=[
         'Development Status :: 5 - Production/Stable',
         'License :: OSI Approved :: BSD License',
         'Operating System :: OS Independent',
         'Programming Language :: Python',
-        'Programming Language :: Python :: 2',
         'Programming Language :: Python :: 2.7',
-        'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.3',
         'Programming Language :: Python :: 3.5',
         'Programming Language :: Python :: 3.6',
         'Programming Language :: Python :: 3.7',
+        'Programming Language :: Python :: 3.8',
+        'Programming Language :: Python :: 3.9',
+        'Programming Language :: Python :: 3.10',
         'Programming Language :: Python :: Implementation :: CPython',
         'Programming Language :: Python :: Implementation :: PyPy',
         'Topic :: Internet :: WWW/HTTP',
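
The new `python_requires` is a PEP 440 specifier that pip checks against the running interpreter before selecting a release, so unsupported interpreters (3.0 through 3.4 here) are refused at install time instead of failing with an `ImportError` later. A quick way to sanity-check the specifier, assuming the `packaging` library is available:

    from packaging.specifiers import SpecifierSet

    spec = SpecifierSet('>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*')

    assert spec.contains('2.7.18')      # oldest supported interpreter
    assert spec.contains('3.10.0')      # newly supported
    assert not spec.contains('3.4.10')  # excluded by !=3.4.*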

tests/client/conftest.py

Lines changed: 1 addition & 1 deletion
@@ -66,8 +66,8 @@ def project(client):
     return client.get_project(TEST_PROJECT_ID)


-@my_vcr.use_cassette()
 @pytest.fixture(scope='session')
+@my_vcr.use_cassette()
 def spider(project, request):
     # on normal conditions you can't create a new spider this way:
     # it can only be created on project deploy as usual
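
The swap matters because decorators apply bottom-up: in the old order, `pytest.fixture` marked the raw function first and `my_vcr.use_cassette()` then wrapped the object pytest had already registered. With `@pytest.fixture` on top, VCR wraps the plain function and pytest registers the cassette-wrapped callable. A sketch of the ordering rule, with illustrative decorator names:

    def trace(label):
        def deco(func):
            def wrapper(*args, **kwargs):
                print('%s: before' % label)
                result = func(*args, **kwargs)
                print('%s: after' % label)
                return result
            return wrapper
        return deco


    @trace('outer')  # applied second: wraps the 'inner'-wrapped function
    @trace('inner')  # applied first: wraps f directly
    def f():
        print('body')


    f()  # outer: before / inner: before / body / inner: after / outer: after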

tests/client/test_frontiers.py

Lines changed: 3 additions & 3 deletions
@@ -1,8 +1,8 @@
 import time
 from types import GeneratorType
-from collections import Iterable

 from six import string_types
+from six.moves import collections_abc

 from scrapinghub.client.frontiers import Frontiers, Frontier, FrontierSlot
 from ..conftest import TEST_FRONTIER_SLOT

@@ -36,7 +36,7 @@ def test_frontiers(project, frontier, frontier_name):

     # test for iter() method
     frontiers_names = frontiers.iter()
-    assert isinstance(frontiers_names, Iterable)
+    assert isinstance(frontiers_names, collections_abc.Iterable)
     assert frontier_name in list(frontiers_names)

     # test for list() method

@@ -58,7 +58,7 @@ def test_frontier(project, frontier):
     _add_test_requests_to_frontier(frontier)

     slots = frontier.iter()
-    assert isinstance(slots, Iterable)
+    assert isinstance(slots, collections_abc.Iterable)
     assert TEST_FRONTIER_SLOT in list(slots)

     slots = frontier.list()

tests/client/test_job.py

Lines changed: 2 additions & 3 deletions
@@ -1,6 +1,5 @@
-from collections import Iterator
-
 import pytest
+from six.moves import collections_abc

 from scrapinghub.client.items import Items
 from scrapinghub.client.jobs import Job

@@ -224,7 +223,7 @@ def test_metadata_delete(spider):
 def test_metadata_iter_list(spider):
     job = spider.jobs.run(meta={'meta1': 'data1', 'meta2': 'data2'})
     meta_iter = job.metadata.iter()
-    assert isinstance(meta_iter, Iterator)
+    assert isinstance(meta_iter, collections_abc.Iterator)
     meta_list = job.metadata.list()
     assert ('meta1', 'data1') in meta_list
     assert ('meta2', 'data2') in meta_list

tests/client/test_projects.py

Lines changed: 3 additions & 3 deletions
@@ -1,10 +1,10 @@
 import types
-from collections import defaultdict, Iterator
+from collections import defaultdict

 import pytest
 import responses
-from six.moves import range
 from requests.compat import urljoin
+from six.moves import range, collections_abc

 from scrapinghub import ScrapinghubClient
 from scrapinghub.client.activity import Activity

@@ -288,7 +288,7 @@ def test_settings_delete(project):
 def test_settings_iter_list(project):
     project.settings.set('job_runtime_limit', 24)
     settings_iter = project.settings.iter()
-    assert isinstance(settings_iter, Iterator)
+    assert isinstance(settings_iter, collections_abc.Iterator)
     settings_list = project.settings.list()
     assert ('job_runtime_limit', 24) in settings_list
     assert settings_list == list(settings_iter)

tests/hubstorage/conftest.py

Lines changed: 1 addition & 1 deletion
@@ -62,8 +62,8 @@ def hsproject(hsclient):
     return hsclient.get_project(TEST_PROJECT_ID)


-@my_vcr.use_cassette()
 @pytest.fixture(scope='session')
+@my_vcr.use_cassette()
 def hsspiderid(hsproject):
     return str(hsproject.ids.spider(TEST_SPIDER_NAME, create=1))

tox.ini

Lines changed: 2 additions & 2 deletions
@@ -4,12 +4,12 @@
 # and then run "tox" from this directory.

 [tox]
-envlist = py{36,py3,37,38,39}-{json,msgpack}
+envlist = py{27,35,36,py3,37,38,39}-{json,msgpack}

 [testenv]
 deps =
     -r{toxinidir}/requirements-base.txt
     -r{toxinidir}/requirements-test.txt
     msgpack: -r{toxinidir}/requirements.txt
     pypy-msgpack: -r{toxinidir}/requirements-pypy.txt
-commands = py.test --cov=scrapinghub --cov-report=xml
+commands = py.test --cov=scrapinghub --cov-report=xml {posargs: scrapinghub tests}
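
`{posargs: scrapinghub tests}` expands to whatever follows `--` on the tox command line, defaulting to the `scrapinghub` and `tests` directories; for example, `tox -e py36-json -- tests/client/test_job.py` restricts pytest to a single module.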
