Skip to content

Commit

Permalink
Merge pull request #334 from Digenis/1.2
Browse files Browse the repository at this point in the history
1.2.1
  • Loading branch information
Digenis authored Jun 17, 2019
2 parents 891b50c + 5a9c9eb commit b4e7d5b
Show file tree
Hide file tree
Showing 11 changed files with 29 additions and 15 deletions.
2 changes: 0 additions & 2 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,6 @@ matrix:
env: TOXENV=py27
- python: 2.7
env: TOXENV=pypy
- python: 3.3
env: TOXENV=py33
- python: 3.4
env: TOXENV=py34
- python: 3.5
Expand Down
11 changes: 11 additions & 0 deletions docs/news.rst
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,17 @@
Release notes
=============

1.2.1
-----
*Release date: 2019-06-17*

Fixed
~~~~~
- HTTP header types were breaking newer Twisted versions
- DeferredQueue was hiding a pending job when reaching max_proc
- AddVersion's arguments' string types were breaking the environment on Windows
- Tests: Updated binary eggs to be scrapy-1.x compatible

1.2.0
-----
*Release date: 2017-04-12*
Expand Down
2 changes: 1 addition & 1 deletion scrapyd/VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
1.2.0
1.2.1
14 changes: 9 additions & 5 deletions scrapyd/config.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import glob
from io import StringIO
import io
from pkgutil import get_data
from six.moves.configparser import SafeConfigParser, NoSectionError, NoOptionError
from os.path import expanduser
Expand All @@ -17,10 +17,14 @@ def __init__(self, values=None, extra_sources=()):
sources = self._getsources()
default_config = get_data(__package__, 'default_scrapyd.conf').decode('utf8')
self.cp = SafeConfigParser()
self.cp.readfp(StringIO(default_config))
self.cp.read(sources)
for fp in extra_sources:
self.cp.readfp(fp)
self.cp.readfp(io.StringIO(default_config))
sources.extend(extra_sources)
for fname in sources:
try:
with io.open(fname) as fp:
self.cp.readfp(fp)
except (IOError, OSError):
pass
else:
self.cp = SafeConfigParser(values)
self.cp.add_section(self.SECTION)
Expand Down
4 changes: 2 additions & 2 deletions scrapyd/poller.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,11 @@ class QueuePoller(object):
def __init__(self, config):
self.config = config
self.update_projects()
self.dq = DeferredQueue(size=1)
self.dq = DeferredQueue()

@inlineCallbacks
def poll(self):
if self.dq.pending:
if not self.dq.waiting:
return
for p, q in iteritems(self.queues):
c = yield maybeDeferred(q.count)
Expand Down
Binary file modified scrapyd/tests/mybot.egg
Binary file not shown.
Binary file modified scrapyd/tests/mybot2.egg
Binary file not shown.
Binary file modified scrapyd/tests/mybotunicode.egg
Binary file not shown.
2 changes: 1 addition & 1 deletion scrapyd/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def render_object(self, obj, txrequest):
txrequest.setHeader('Access-Control-Allow-Origin', '*')
txrequest.setHeader('Access-Control-Allow-Methods', 'GET, POST, PATCH, PUT, DELETE')
txrequest.setHeader('Access-Control-Allow-Headers',' X-Requested-With')
txrequest.setHeader('Content-Length', len(r))
txrequest.setHeader('Content-Length', str(len(r)))
return r

class UtilsCache:
Expand Down
7 changes: 4 additions & 3 deletions scrapyd/webservice.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,9 +79,10 @@ def render_POST(self, txrequest):
class AddVersion(WsResource):

def render_POST(self, txrequest):
project = txrequest.args[b'project'][0].decode('utf-8')
version = txrequest.args[b'version'][0].decode('utf-8')
eggf = BytesIO(txrequest.args[b'egg'][0])
eggf = BytesIO(txrequest.args.pop(b'egg')[0])
args = native_stringify_dict(copy(txrequest.args), keys_only=False)
project = args['project'][0]
version = args['version'][0]
self.root.eggstorage.put(eggf, project, version)
spiders = get_spider_list(project, version=version)
self.root.update_projects()
Expand Down
2 changes: 1 addition & 1 deletion scrapyd/website.py
Original file line number Diff line number Diff line change
Expand Up @@ -154,6 +154,6 @@ def render(self, txrequest):
s += "</html>"

txrequest.setHeader('Content-Type', 'text/html; charset=utf-8')
txrequest.setHeader('Content-Length', len(s))
txrequest.setHeader('Content-Length', str(len(s)))

return s.encode('utf-8')

0 comments on commit b4e7d5b

Please sign in to comment.