Skip to content

Commit

Permalink
Merge pull request scrapy#827 from dangra/tests-on-root
Browse files Browse the repository at this point in the history
Move Test cases under project root dir
  • Loading branch information
dangra committed Jul 31, 2014
2 parents 51feb4b + 99fb4eb commit 8d1f267
Show file tree
Hide file tree
Showing 141 changed files with 160 additions and 134 deletions.
2 changes: 0 additions & 2 deletions MANIFEST.in
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,7 @@ include MANIFEST.in
include scrapy/VERSION
include scrapy/mime.types
recursive-include scrapy/templates *
recursive-include scrapy/tests/sample_data *
recursive-include scrapy license.txt
recursive-include scrapy/tests *.egg
recursive-include docs *
prune docs/build
recursive-include extras *
Expand Down
6 changes: 0 additions & 6 deletions bin/runtests.bat

This file was deleted.

50 changes: 0 additions & 50 deletions bin/runtests.sh

This file was deleted.

2 changes: 1 addition & 1 deletion scrapy/conftest.py → conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

from scrapy import optional_features

collect_ignore = ["stats.py"]
collect_ignore = ["scrapy/stats.py"]
if 'django' not in optional_features:
collect_ignore.append("tests/test_djangoitem/models.py")

Expand Down
24 changes: 11 additions & 13 deletions docs/contributing.rst
Original file line number Diff line number Diff line change
Expand Up @@ -135,23 +135,20 @@ Documentation policies
Tests
=====

Tests are implemented using the `Twisted unit-testing framework`_ called
``trial``.
Tests are implemented using the `Twisted unit-testing framework`_, running
tests requires `tox`_.

Running tests
-------------

To run all tests go to the root directory of Scrapy source code and run:

``bin/runtests.sh`` (on unix)
``tox``

``bin\runtests.bat`` (on windows)
To run a specific test (say ``tests/test_contrib_loader.py``) use:

To run a specific test (say ``scrapy.tests.test_contrib_loader``) use:
``tox -- tests/test_contrib_loader.py``

``bin/runtests.sh scrapy.tests.test_contrib_loader`` (on unix)

``bin\runtests.bat scrapy.tests.test_contrib_loader`` (on windows)

Writing tests
-------------
Expand All @@ -160,20 +157,21 @@ All functionality (including new features and bug fixes) must include a test
case to check that it works as expected, so please include tests for your
patches if you want them to get accepted sooner.

Scrapy uses unit-tests, which are located in the ``scrapy.tests`` package
(`scrapy/tests`_ directory). Their module name typically resembles the full
path of the module they're testing. For example, the item loaders code is in::
Scrapy uses unit-tests, which are located in the `tests/`_ directory.
Their module name typically resembles the full path of the module they're
testing. For example, the item loaders code is in::

scrapy.contrib.loader

And their unit-tests are in::

scrapy.tests.test_contrib_loader
tests/test_contrib_loader.py

.. _issue tracker: https://github.com/scrapy/scrapy/issues
.. _scrapy-users: http://groups.google.com/group/scrapy-users
.. _Twisted unit-testing framework: http://twistedmatrix.com/documents/current/core/development/policy/test-standard.html
.. _AUTHORS: https://github.com/scrapy/scrapy/blob/master/AUTHORS
.. _scrapy/tests: https://github.com/scrapy/scrapy/tree/master/scrapy/tests
.. _tests/: https://github.com/scrapy/scrapy/tree/master/tests
.. _open issues: https://github.com/scrapy/scrapy/issues
.. _pull request: http://help.github.com/send-pull-requests/
.. _tox: https://pypi.python.org/pypi/tox
2 changes: 1 addition & 1 deletion extras/coverage-report.sh
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,6 @@
#
# Requires: coverage 3.3 or above from http://pypi.python.org/pypi/coverage

coverage run --branch $(which trial) --reporter=text scrapy.tests
coverage run --branch $(which trial) --reporter=text tests
coverage html -i
python -m webbrowser htmlcov/index.html
2 changes: 2 additions & 0 deletions pytest.ini
Original file line number Diff line number Diff line change
Expand Up @@ -3,3 +3,5 @@
usefixtures = chdir setlog
python_files=test_*.py __init__.py
addopts = --doctest-modules --assert=plain
twisted = 1

48 changes: 44 additions & 4 deletions scrapy/commands/bench.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,13 @@
import sys
import time
import subprocess

from six.moves.urllib.parse import urlencode

import scrapy
from scrapy.command import ScrapyCommand
from scrapy.tests.spiders import FollowAllSpider
from scrapy.tests.mockserver import MockServer
from scrapy.contrib.linkextractors import LinkExtractor


class Command(ScrapyCommand):

Expand All @@ -14,8 +21,41 @@ def short_desc(self):
return "Run quick benchmark test"

def run(self, args, opts):
with MockServer():
spider = FollowAllSpider(total=100000)
with _BenchServer():
spider = _BenchSpider(total=100000)
crawler = self.crawler_process.create_crawler()
crawler.crawl(spider)
self.crawler_process.start()


class _BenchServer(object):
    """Context manager that runs the benchmark HTTP server in a child process."""

    def __enter__(self):
        from scrapy.utils.test import get_testenv
        # '-u' keeps the child's stdout unbuffered so the startup banner
        # arrives immediately.
        cmd = [sys.executable, '-u', '-m', 'scrapy.utils.benchserver']
        self.proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                     env=get_testenv())
        # Block until the server prints its "listening" line, i.e. it is ready.
        self.proc.stdout.readline()

    def __exit__(self, exc_type, exc_value, traceback):
        # Terminate the child, reap it, then pause briefly so the TCP port
        # is released before anything else tries to bind it.
        self.proc.kill()
        self.proc.wait()
        time.sleep(0.2)


class _BenchSpider(scrapy.Spider):
    """A spider that follows every link served by the local benchmark server."""
    name = 'follow'
    total = 10000
    show = 20
    baseurl = 'http://localhost:8998'
    link_extractor = LinkExtractor()

    def start_requests(self):
        query = urlencode({'total': self.total, 'show': self.show}, doseq=1)
        start_url = '{}?{}'.format(self.baseurl, query)
        # dont_filter: the start URL must always be fetched, even if seen.
        return [scrapy.Request(start_url, dont_filter=True)]

    def parse(self, response):
        # Re-schedule every extracted link back through this same callback.
        for link in self.link_extractor.extract_links(response):
            yield scrapy.Request(link.url, callback=self.parse)
5 changes: 0 additions & 5 deletions scrapy/tests/test_cmdline/settings.py

This file was deleted.

44 changes: 44 additions & 0 deletions scrapy/utils/benchserver.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
import random
from six.moves.urllib.parse import urlencode
from twisted.web.server import Site
from twisted.web.resource import Resource
from twisted.internet import reactor


class Root(Resource):
    """Twisted resource that serves pages of random links for crawl benchmarks."""

    isLeaf = True

    def getChild(self, name, request):
        # Every path is handled by this single resource.
        return self

    def render(self, request):
        total = _getarg(request, 'total', 100, int)
        show = _getarg(request, 'show', 10, int)
        request.write("<html><head></head><body>")
        # Preserve the incoming query args, overriding only 'n' per link.
        link_args = request.args.copy()
        for _ in range(show):
            n = random.randint(1, total)
            link_args['n'] = n
            query = urlencode(link_args, doseq=True)
            request.write("<a href='/follow?{0}'>follow {1}</a><br>"
                          .format(query, n))
        request.write("</body></html>")
        return ''


def _getarg(request, name, default=None, type=str):
return type(request.args[name][0]) \
if name in request.args else default


if __name__ == '__main__':
    # Stand-alone entry point: serve the benchmark pages on port 8998.
    root = Root()
    factory = Site(root)
    # Bug fix: the original built `factory` and then ignored it, constructing
    # a second Site(root) inline — reuse the factory instead.
    httpPort = reactor.listenTCP(8998, factory)

    def _print_listening():
        # Announce the actual bound address once the reactor is running.
        httpHost = httpPort.getHost()
        print("Bench server at http://{}:{}".format(httpHost.host,
                                                    httpHost.port))

    reactor.callWhenRunning(_print_listening)
    reactor.run()
2 changes: 1 addition & 1 deletion scrapy/utils/url.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ def canonicalize_url(url, keep_blank_values=True, keep_fragments=False,
The url passed can be a str or unicode, while the url returned is always a
str.
For examples see the tests in scrapy.tests.test_utils_url
For examples see the tests in tests/test_utils_url.py
"""

scheme, netloc, path, params, query, fragment = parse_url(url)
Expand Down
2 changes: 1 addition & 1 deletion scrapy/tests/__init__.py → tests/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
"""
scrapy.tests: this package contains all Scrapy unittests
tests: this package contains all Scrapy unittests
To run all Scrapy unittests go to Scrapy main dir and type:
Expand Down
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
2 changes: 1 addition & 1 deletion scrapy/tests/mockserver.py → tests/mockserver.py
Original file line number Diff line number Diff line change
Expand Up @@ -179,7 +179,7 @@ class MockServer():

def __enter__(self):
from scrapy.utils.test import get_testenv
self.proc = Popen([sys.executable, '-u', '-m', 'scrapy.tests.mockserver'],
self.proc = Popen([sys.executable, '-u', '-m', 'tests.mockserver'],
stdout=PIPE, env=get_testenv())
self.proc.stdout.readline()

Expand Down
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
from twisted.internet import defer
from twisted.trial.unittest import TestCase
from scrapy.utils.test import docrawl
from scrapy.tests.spiders import FollowAllSpider, ItemSpider, ErrorSpider
from scrapy.tests.mockserver import MockServer
from tests.spiders import FollowAllSpider, ItemSpider, ErrorSpider
from tests.mockserver import MockServer


class TestCloseSpider(TestCase):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ class CmdlineTest(unittest.TestCase):

def setUp(self):
self.env = get_testenv()
self.env['SCRAPY_SETTINGS_MODULE'] = 'scrapy.tests.test_cmdline.settings'
self.env['SCRAPY_SETTINGS_MODULE'] = 'tests.test_cmdline.settings'

def _execute(self, *new_args, **kwargs):
args = (sys.executable, '-m', 'scrapy.cmdline') + new_args
Expand Down
File renamed without changes.
5 changes: 5 additions & 0 deletions tests/test_cmdline/settings.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
EXTENSIONS = [
'tests.test_cmdline.extensions.TestExtension'
]

TEST1 = 'default'
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
from scrapy.contrib.linkextractors.htmlparser import HtmlParserLinkExtractor
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor, BaseSgmlLinkExtractor
from scrapy.contrib.linkextractors.lxmlhtml import LxmlLinkExtractor
from scrapy.tests import get_testdata
from tests import get_testdata


class LinkExtractorTestCase(unittest.TestCase):
Expand Down
File renamed without changes.
File renamed without changes.
4 changes: 2 additions & 2 deletions scrapy/tests/test_crawl.py → tests/test_crawl.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,9 @@
from twisted.internet import defer
from twisted.trial.unittest import TestCase
from scrapy.utils.test import docrawl, get_testlog
from scrapy.tests.spiders import FollowAllSpider, DelaySpider, SimpleSpider, \
from tests.spiders import FollowAllSpider, DelaySpider, SimpleSpider, \
BrokenStartRequestsSpider, SingleRequestSpider, DuplicateStartRequestsSpider
from scrapy.tests.mockserver import MockServer
from tests.mockserver import MockServer
from scrapy.http import Request


Expand Down
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from scrapy.contrib.djangoitem import DjangoItem, Field
from scrapy import optional_features

os.environ['DJANGO_SETTINGS_MODULE'] = 'scrapy.tests.test_djangoitem.settings'
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.test_djangoitem.settings'

if 'django' in optional_features:
from .models import Person, IdentifiedPerson
Expand Down
File renamed without changes.
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -46,13 +46,13 @@ def __init__(self, crawler):
class LoadTestCase(unittest.TestCase):

def test_enabled_handler(self):
handlers = {'scheme': 'scrapy.tests.test_downloader_handlers.DummyDH'}
handlers = {'scheme': 'tests.test_downloader_handlers.DummyDH'}
dh = DownloadHandlers(get_crawler({'DOWNLOAD_HANDLERS': handlers}))
self.assertIn('scheme', dh._handlers)
self.assertNotIn('scheme', dh._notconfigured)

def test_not_configured_handler(self):
handlers = {'scheme': 'scrapy.tests.test_downloader_handlers.OffDH'}
handlers = {'scheme': 'tests.test_downloader_handlers.OffDH'}
dh = DownloadHandlers(get_crawler({'DOWNLOAD_HANDLERS': handlers}))
self.assertNotIn('scheme', dh._handlers)
self.assertIn('scheme', dh._notconfigured)
Expand Down
File renamed without changes.
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
from scrapy.http import Response, XmlResponse
from scrapy.contrib_exp.downloadermiddleware.decompression import DecompressionMiddleware
from scrapy.spider import Spider
from scrapy.tests import get_testdata
from tests import get_testdata
from scrapy.utils.test import assert_samelines


Expand Down
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,7 @@ class DbmStorageTest(DefaultStorageTest):

class DbmStorageWithCustomDbmModuleTest(DbmStorageTest):

dbm_module = 'scrapy.tests.mocks.dummydbm'
dbm_module = 'tests.mocks.dummydbm'

def _get_settings(self, **new_settings):
new_settings.setdefault('HTTPCACHE_DBM_MODULE', self.dbm_module)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
from scrapy.spider import Spider
from scrapy.http import Response, Request, HtmlResponse
from scrapy.contrib.downloadermiddleware.httpcompression import HttpCompressionMiddleware
from scrapy.tests import tests_datadir
from tests import tests_datadir
from w3lib.encoding import resolve_encoding


Expand Down
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
2 changes: 1 addition & 1 deletion scrapy/tests/test_engine.py → tests/test_engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
from scrapy import signals
from scrapy.utils.test import get_crawler
from scrapy.xlib.pydispatch import dispatcher
from scrapy.tests import tests_datadir
from tests import tests_datadir
from scrapy.spider import Spider
from scrapy.item import Item, Field
from scrapy.contrib.linkextractors import LinkExtractor
Expand Down
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ class TestMiddlewareManager(MiddlewareManager):

@classmethod
def _get_mwlist_from_settings(cls, settings):
return ['scrapy.tests.test_middleware.%s' % x for x in ['M1', 'MOff', 'M3']]
return ['tests.test_middleware.%s' % x for x in ['M1', 'MOff', 'M3']]

def _add_middleware(self, mw):
super(TestMiddlewareManager, self)._add_middleware(mw)
Expand Down
File renamed without changes.
File renamed without changes.
File renamed without changes.
Loading

0 comments on commit 8d1f267

Please sign in to comment.