Skip to content

Commit

Permalink
Merge pull request #309 from Gallaecio/master
Browse files Browse the repository at this point in the history
Prepare scrapy-splash 0.9.0
  • Loading branch information
kmike authored Feb 3, 2023
2 parents f5273b3 + cfb1ded commit 3c6a39a
Show file tree
Hide file tree
Showing 10 changed files with 47 additions and 56 deletions.
29 changes: 7 additions & 22 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,27 +9,11 @@ jobs:
fail-fast: false
matrix:
include:
- os: ubuntu-18.04
python-version: 2.7
env:
TOXENV: py27
- os: ubuntu-18.04
python-version: 3.4
env:
TOXENV: py
- os: ubuntu-18.04
python-version: 3.5
env:
TOXENV: py35
- python-version: 3.6
env:
TOXENV: py
- python-version: 3.7
env:
TOXENV: py
- python-version: 3.8
env:
TOXENV: py
- python-version: '3.7'
- python-version: '3.8'
- python-version: '3.9'
- python-version: '3.10'
- python-version: '3.11'

steps:
- uses: actions/checkout@v2
Expand All @@ -44,7 +28,8 @@ jobs:
python-version: ${{ matrix.python-version }}

- name: Run tests
env: ${{ matrix.env }}
env:
TOXENV: py
run: |
pip install -U tox
SPLASH_URL=http://127.0.0.1:8050 tox
Expand Down
11 changes: 11 additions & 0 deletions CHANGES.rst
Original file line number Diff line number Diff line change
@@ -1,6 +1,17 @@
Changes
=======

0.9.0 (to be released)
----------------------

* Removed official support for Python 2.7, 3.4, 3.5 and 3.6, and added official
support for Python 3.9, 3.10 and 3.11.

* Deprecated ``SplashJsonResponse.body_as_unicode()``, to be replaced by
``SplashJsonResponse.text``.

* Removed calls to the obsolete ``to_native_str`` function, which was removed
  in Scrapy 2.8.

0.8.0 (2021-10-05)
------------------

Expand Down
2 changes: 1 addition & 1 deletion README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -460,7 +460,7 @@ Run a simple `Splash Lua Script`_::

# ...
def parse_result(self, response):
doc_title = response.body_as_unicode()
doc_title = response.text
# ...


Expand Down
6 changes: 2 additions & 4 deletions scrapy_splash/request.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,13 +90,11 @@ def _original_url(self):
def _original_method(self):
return self._splash_args.get('http_method', 'GET')

def __str__(self):
def __repr__(self):
if not self._processed:
return super(SplashRequest, self).__str__()
return super().__repr__()
return "<%s %s via %s>" % (self._original_method, self._original_url, self.url)

__repr__ = __str__


class SplashFormRequest(SplashRequest, FormRequest):
"""
Expand Down
9 changes: 9 additions & 0 deletions scrapy_splash/response.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import json
import base64
import re
from warnings import warn

from scrapy.http import Response, TextResponse
from scrapy import Selector
Expand Down Expand Up @@ -129,6 +130,14 @@ def text(self):
return self._ubody

def body_as_unicode(self):
warn(
(
"The body_as_unicode() method is deprecated, use the text "
"property instead."
),
DeprecationWarning,
stacklevel=2,
)
return self._ubody

@property
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@

setup(
name='scrapy-splash',
version='0.7.2',
version='0.8.0',
url='https://github.com/scrapy-plugins/scrapy-splash',
description='JavaScript support for Scrapy using Splash',
long_description=open('README.rst').read() + "\n\n" + open("CHANGES.rst").read(),
Expand Down
2 changes: 1 addition & 1 deletion tests/test_fingerprints.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

import pytest
import scrapy
from scrapy.dupefilters import request_fingerprint
from scrapy.utils.request import request_fingerprint

from scrapy_splash import SplashRequest
from scrapy_splash.dupefilter import splash_request_fingerprint
Expand Down
26 changes: 13 additions & 13 deletions tests/test_integration.py
Original file line number Diff line number Diff line change
Expand Up @@ -340,7 +340,7 @@ def test_access_http_auth(settings):
items, url, crawler = yield crawl_items(LuaSpider, HelloWorldProtected,
settings, kwargs)
response = assert_single_response(items)
assert 'hello' in response.body_as_unicode()
assert 'hello' in response.text
assert response.status == 200
assert response.splash_response_status == 200

Expand All @@ -351,8 +351,8 @@ def test_protected_splash_no_auth(settings_auth):
items, url, crawler = yield crawl_items(LuaSpider, HelloWorld,
settings_auth)
response = assert_single_response(items)
assert 'Unauthorized' in response.body_as_unicode()
assert 'hello' not in response.body_as_unicode()
assert 'Unauthorized' in response.text
assert 'hello' not in response.text
assert response.status == 401
assert response.splash_response_status == 401

Expand All @@ -367,15 +367,15 @@ def test_protected_splash_manual_headers_auth(settings_auth):
items, url, crawler = yield crawl_items(LuaSpider, HelloWorld,
settings_auth, kwargs)
response = assert_single_response(items)
assert 'hello' in response.body_as_unicode()
assert 'hello' in response.text
assert response.status == 200
assert response.splash_response_status == 200

# but only for Splash, not for a remote website
items, url, crawler = yield crawl_items(LuaSpider, HelloWorldProtected,
settings_auth, kwargs)
response = assert_single_response(items)
assert 'hello' not in response.body_as_unicode()
assert 'hello' not in response.text
assert response.status == 401
assert response.splash_response_status == 200

Expand All @@ -390,8 +390,8 @@ def test_protected_splash_settings_auth(settings_auth):
items, url, crawler = yield crawl_items(LuaSpider, HelloWorld,
settings_auth)
response = assert_single_response(items)
assert 'Unauthorized' not in response.body_as_unicode()
assert 'hello' in response.body_as_unicode()
assert 'Unauthorized' not in response.text
assert 'hello' in response.text
assert response.status == 200
assert response.splash_response_status == 200

Expand All @@ -418,7 +418,7 @@ def test_protected_splash_settings_auth(settings_auth):
response = assert_single_response(items)
assert response.status == 200
assert response.splash_response_status == 200
assert 'hello' in response.body_as_unicode()
assert 'hello' in response.text

# enable remote auth, but not splash auth - request should fail
del settings_auth['SPLASH_USER']
Expand All @@ -439,8 +439,8 @@ def test_protected_splash_httpauth_middleware(settings_auth):
items, url, crawler = yield crawl_items(ScrapyAuthSpider, HelloWorld,
settings_auth)
response = assert_single_response(items)
assert 'Unauthorized' not in response.body_as_unicode()
assert 'hello' in response.body_as_unicode()
assert 'Unauthorized' not in response.text
assert 'hello' in response.text
assert response.status == 200
assert response.splash_response_status == 200

Expand All @@ -449,7 +449,7 @@ def test_protected_splash_httpauth_middleware(settings_auth):
HelloWorldProtected,
settings_auth)
response = assert_single_response(items)
assert 'hello' not in response.body_as_unicode()
assert 'hello' not in response.text
assert response.status == 401
assert response.splash_response_status == 200

Expand All @@ -458,7 +458,7 @@ def test_protected_splash_httpauth_middleware(settings_auth):
HelloWorldDisallowAuth,
settings_auth)
response = assert_single_response(items)
assert 'hello' in response.body_as_unicode()
assert 'hello' in response.text
assert response.status == 200
assert response.splash_response_status == 200

Expand All @@ -467,7 +467,7 @@ def test_protected_splash_httpauth_middleware(settings_auth):
HelloWorldProtected,
settings_auth)
response = assert_single_response(items)
assert 'hello' in response.body_as_unicode()
assert 'hello' in response.text
assert response.status == 200
assert not hasattr(response, 'splash_response_status')

Expand Down
2 changes: 1 addition & 1 deletion tests/test_middleware.py
Original file line number Diff line number Diff line change
Expand Up @@ -188,7 +188,7 @@ def cb():
assert response2.url == req.meta['splash']['args']['url']
assert response2.data == res
assert response2.body == res_body.encode('utf8')
assert response2.text == response2.body_as_unicode() == res_body
assert response2.text == response2.text == res_body
assert response2.encoding == 'utf8'
assert response2.headers == {b'Content-Type': [b'application/json']}
assert response2.splash_response_headers == response2.headers
Expand Down
14 changes: 1 addition & 13 deletions tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
# and then run "tox" from this directory.

[tox]
envlist = py27,py34,py35,py36,py37,py38
envlist = py37,py38,py39,py310,py311

[common]
deps =
Expand All @@ -24,15 +24,3 @@ deps =
commands =
pip install -e .
py.test --doctest-modules --cov=scrapy_splash {posargs:scrapy_splash tests}

[testenv:py27]
deps =
{[common]deps}
queuelib < 1.6.0
scrapy < 2

[testenv:py35]
deps =
{[common]deps}
# https://github.com/scrapy/scrapy/pull/4094#issuecomment-704092404
scrapy < 2

0 comments on commit 3c6a39a

Please sign in to comment.