diff --git a/.travis.yml b/.travis.yml
index 2907e30e3..18bb51e75 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -2,9 +2,7 @@ language: python
sudo: false
-cache:
- directories:
- - $HOME/.pip-cache/
+cache: pip
python:
- '2.7'
@@ -12,20 +10,7 @@ python:
- '3.5'
install:
- - |
- if [ "$TRAVIS_PYTHON_VERSION" = "pypy" ]; then
- export PYENV_ROOT="$HOME/.pyenv"
- if [ -f "$PYENV_ROOT/bin/pyenv" ]; then
- pushd "$PYENV_ROOT" && git pull && popd
- else
- rm -rf "$PYENV_ROOT" && git clone --depth 1 https://github.com/yyuu/pyenv.git "$PYENV_ROOT"
- fi
- export PYPY_VERSION="5.0.1"
- "$PYENV_ROOT/bin/pyenv" install --skip-existing "pypy-$PYPY_VERSION"
- virtualenv --python="$PYENV_ROOT/versions/pypy-$PYPY_VERSION/bin/python" "$HOME/virtualenvs/pypy-$PYPY_VERSION"
- source "$HOME/virtualenvs/pypy-$PYPY_VERSION/bin/activate"
- fi
- - if [[ $TRAVIS_PYTHON_VERSION == '3.5' ]]; then pip install pycrypto pg8000; fi;
+ - if [[ $TRAVIS_PYTHON_VERSION == '3.5' ]]; then pip install pycrypto; fi;
- if [[ $TRAVIS_PYTHON_VERSION != '3.5' ]]; then pip install -e .; fi;
before_script:
diff --git a/gluon/contrib/pg8000/__init__.py b/gluon/contrib/pg8000/__init__.py
index c30a8cb6a..4a997174c 100644
--- a/gluon/contrib/pg8000/__init__.py
+++ b/gluon/contrib/pg8000/__init__.py
@@ -1,3 +1,14 @@
+from .core import (
+ Warning, Bytea, DataError, DatabaseError, InterfaceError, ProgrammingError,
+ Error, OperationalError, IntegrityError, InternalError, NotSupportedError,
+ ArrayContentNotHomogenousError, ArrayContentEmptyError,
+ ArrayDimensionsNotConsistentError, ArrayContentNotSupportedError, utc,
+ Connection, Cursor, Binary, Date, DateFromTicks, Time, TimeFromTicks,
+ Timestamp, TimestampFromTicks, BINARY, Interval)
+from ._version import get_versions
+__version__ = get_versions()['version']
+del get_versions
+
# Copyright (c) 2007-2009, Mathieu Fenniak
# All rights reserved.
#
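
The package `__init__` now only re-exports the public DB-API surface from `.core` and takes its version string from versioneer. A quick sanity-check sketch of the resulting surface (import path assumes web2py's source tree is on `sys.path`; nothing here talks to a database):

```python
# Sanity-check sketch for the re-exported surface of the vendored package.
from gluon.contrib import pg8000

print(pg8000.__version__)                 # versioneer-derived, e.g. "1.10.6"
assert issubclass(pg8000.ProgrammingError, pg8000.DatabaseError)
assert issubclass(pg8000.DatabaseError, pg8000.Error)
assert callable(pg8000.connect)
```
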
@@ -28,258 +39,9 @@
__author__ = "Mathieu Fenniak"
-exec("from struct import Struct")
-for fmt in (
- "i", "h", "q", "d", "f", "iii", "ii", "qii", "dii", "ihihih", "ci",
- "bh", "cccc"):
- exec(fmt + "_struct = Struct('!" + fmt + "')")
- exec(fmt + "_unpack = " + fmt + "_struct.unpack_from")
- exec(fmt + "_pack = " + fmt + "_struct.pack")
-
-import datetime
-import time
-from .six import binary_type, integer_types, PY2
-
-min_int2, max_int2 = -2 ** 15, 2 ** 15
-min_int4, max_int4 = -2 ** 31, 2 ** 31
-min_int8, max_int8 = -2 ** 63, 2 ** 63
-
-
-class Warning(Exception):
- """Generic exception raised for important database warnings like data
- truncations. This exception is not currently used by pg8000.
-
- This exception is part of the `DBAPI 2.0 specification
- `_.
- """
- pass
-
-
-class Error(Exception):
- """Generic exception that is the base exception of all other error
- exceptions.
-
- This exception is part of the `DBAPI 2.0 specification
- `_.
- """
- pass
-
-
-class InterfaceError(Error):
- """Generic exception raised for errors that are related to the database
- interface rather than the database itself. For example, if the interface
- attempts to use an SSL connection but the server refuses, an InterfaceError
- will be raised.
-
- This exception is part of the `DBAPI 2.0 specification
- `_.
- """
- pass
-
-
-class DatabaseError(Error):
- """Generic exception raised for errors that are related to the database.
- This exception is currently never raised by pg8000.
-
- This exception is part of the `DBAPI 2.0 specification
- `_.
- """
- pass
-
-
-class DataError(DatabaseError):
- """Generic exception raised for errors that are due to problems with the
- processed data. This exception is not currently raised by pg8000.
-
- This exception is part of the `DBAPI 2.0 specification
- `_.
- """
- pass
-
-
-class OperationalError(DatabaseError):
- """
- Generic exception raised for errors that are related to the database's
- operation and not necessarily under the control of the programmer. This
- exception is currently never raised by pg8000.
-
- This exception is part of the `DBAPI 2.0 specification
- `_.
- """
- pass
-
-
-class IntegrityError(DatabaseError):
- """
- Generic exception raised when the relational integrity of the database is
- affected. This exception is not currently raised by pg8000.
-
- This exception is part of the `DBAPI 2.0 specification
- `_.
- """
- pass
-
-
-class InternalError(DatabaseError):
- """Generic exception raised when the database encounters an internal error.
- This is currently only raised when unexpected state occurs in the pg8000
- interface itself, and is typically the result of a interface bug.
-
- This exception is part of the `DBAPI 2.0 specification
- `_.
- """
- pass
-
-
-class ProgrammingError(DatabaseError):
- """Generic exception raised for programming errors. For example, this
- exception is raised if more parameter fields are in a query string than
- there are available parameters.
-
- This exception is part of the `DBAPI 2.0 specification
- `_.
- """
- pass
-
-
-class NotSupportedError(DatabaseError):
- """Generic exception raised in case a method or database API was used which
- is not supported by the database.
-
- This exception is part of the `DBAPI 2.0 specification
- `_.
- """
- pass
-
-
-class ArrayContentNotSupportedError(NotSupportedError):
- """
- Raised when attempting to transmit an array where the base type is not
- supported for binary data transfer by the interface.
- """
- pass
-
-
-class ArrayContentNotHomogenousError(ProgrammingError):
- """
- Raised when attempting to transmit an array that doesn't contain only a
- single type of object.
- """
- pass
-
-
-class ArrayContentEmptyError(ProgrammingError):
- """Raised when attempting to transmit an empty array. The type oid of an
- empty array cannot be determined, and so sending them is not permitted.
- """
- pass
-
-
-class ArrayDimensionsNotConsistentError(ProgrammingError):
- """
- Raised when attempting to transmit an array that has inconsistent
- multi-dimension sizes.
- """
- pass
-
-
-class Bytea(binary_type):
- """Bytea is a str-derived class that is mapped to a PostgreSQL byte array.
- This class is only used in Python 2, the built-in ``bytes`` type is used in
- Python 3.
- """
- pass
-
-
-class Interval(object):
- """An Interval represents a measurement of time. In PostgreSQL, an interval
- is defined in the measure of months, days, and microseconds; as such, the
- pg8000 interval type represents the same information.
-
- Note that values of the :attr:`microseconds`, :attr:`days` and
- :attr:`months` properties are independently measured and cannot be
- converted to each other. A month may be 28, 29, 30, or 31 days, and a day
- may occasionally be lengthened slightly by a leap second.
-
- .. attribute:: microseconds
-
- Measure of microseconds in the interval.
-
- The microseconds value is constrained to fit into a signed 64-bit
- integer. Any attempt to set a value too large or too small will result
- in an OverflowError being raised.
-
- .. attribute:: days
-
- Measure of days in the interval.
-
- The days value is constrained to fit into a signed 32-bit integer.
- Any attempt to set a value too large or too small will result in an
- OverflowError being raised.
-
- .. attribute:: months
-
- Measure of months in the interval.
-
- The months value is constrained to fit into a signed 32-bit integer.
- Any attempt to set a value too large or too small will result in an
- OverflowError being raised.
- """
-
- def __init__(self, microseconds=0, days=0, months=0):
- self.microseconds = microseconds
- self.days = days
- self.months = months
-
- def _setMicroseconds(self, value):
- if not isinstance(value, integer_types):
- raise TypeError("microseconds must be an integer type")
- elif not (min_int8 < value < max_int8):
- raise OverflowError(
- "microseconds must be representable as a 64-bit integer")
- else:
- self._microseconds = value
-
- def _setDays(self, value):
- if not isinstance(value, integer_types):
- raise TypeError("days must be an integer type")
- elif not (min_int4 < value < max_int4):
- raise OverflowError(
- "days must be representable as a 32-bit integer")
- else:
- self._days = value
-
- def _setMonths(self, value):
- if not isinstance(value, integer_types):
- raise TypeError("months must be an integer type")
- elif not (min_int4 < value < max_int4):
- raise OverflowError(
- "months must be representable as a 32-bit integer")
- else:
- self._months = value
-
- microseconds = property(lambda self: self._microseconds, _setMicroseconds)
- days = property(lambda self: self._days, _setDays)
- months = property(lambda self: self._months, _setMonths)
-
- def __repr__(self):
- return "" % (
- self.months, self.days, self.microseconds)
-
- def __eq__(self, other):
- return other is not None and isinstance(other, Interval) and \
- self.months == other.months and self.days == other.days and \
- self.microseconds == other.microseconds
-
- def __neq__(self, other):
- return not self.__eq__(other)
-
-from .core import Connection
-
-
def connect(
user=None, host='localhost', unix_sock=None, port=5432, database=None,
- password=None, ssl=False, **kwargs):
+ password=None, ssl=False, timeout=None, **kwargs):
"""Creates a connection to a PostgreSQL database.
This function is part of the `DBAPI 2.0 specification
@@ -287,9 +49,7 @@ def connect(
function are not defined by the specification.
:param user:
- The username to connect to the PostgreSQL server with. If this is not
- provided, pg8000 looks first for the PGUSER then the USER environment
- variables.
+ The username to connect to the PostgreSQL server with.
If your server character encoding is not ``ascii`` or ``utf8``, then
you need to provide ``user`` as bytes, eg.
@@ -325,15 +85,24 @@ def connect(
authentication, the connection will fail to open. If this parameter
is provided but not requested by the server, no error will occur.
+ If your server character encoding is not ``ascii`` or ``utf8``, then
+ you need to provide ``password`` as bytes, e.g.
+ ``"my_password".encode('EUC-JP')``.
+
:keyword ssl:
Use SSL encryption for TCP/IP sockets if ``True``. Defaults to
``False``.
+ :keyword timeout:
+ Only used with Python 3, this is the time in seconds before the
+ connection to the database will time out. The default is ``None`` which
+ means no timeout.
+
:rtype:
A :class:`Connection` object.
"""
return Connection(
- user, host, unix_sock, port, database, password, ssl)
+ user, host, unix_sock, port, database, password, ssl, timeout)
apilevel = "2.0"
"""The DBAPI level supported, currently "2.0".
@@ -382,10 +151,6 @@ def connect(
STRING = 1043
"""String type oid."""
-if PY2:
- BINARY = Bytea
-else:
- BINARY = bytes
NUMBER = 1700
"""Numeric type oid"""
@@ -396,104 +161,15 @@ def connect(
ROWID = 26
"""ROWID type oid"""
-
-def Date(year, month, day):
- """Constuct an object holding a date value.
-
- This function is part of the `DBAPI 2.0 specification
- `_.
-
- :rtype: :class:`datetime.date`
- """
- return datetime.date(year, month, day)
-
-
-def Time(hour, minute, second):
- """Construct an object holding a time value.
-
- This function is part of the `DBAPI 2.0 specification
- `_.
-
- :rtype: :class:`datetime.time`
- """
- return datetime.time(hour, minute, second)
-
-
-def Timestamp(year, month, day, hour, minute, second):
- """Construct an object holding a timestamp value.
-
- This function is part of the `DBAPI 2.0 specification
- `_.
-
- :rtype: :class:`datetime.datetime`
- """
- return datetime.datetime(year, month, day, hour, minute, second)
-
-
-def DateFromTicks(ticks):
- """Construct an object holding a date value from the given ticks value
- (number of seconds since the epoch).
-
- This function is part of the `DBAPI 2.0 specification
- `_.
-
- :rtype: :class:`datetime.date`
- """
- return Date(*time.localtime(ticks)[:3])
-
-
-def TimeFromTicks(ticks):
- """Construct an objet holding a time value from the given ticks value
- (number of seconds since the epoch).
-
- This function is part of the `DBAPI 2.0 specification
- `_.
-
- :rtype: :class:`datetime.time`
- """
- return Time(*time.localtime(ticks)[3:6])
-
-
-def TimestampFromTicks(ticks):
- """Construct an object holding a timestamp value from the given ticks value
- (number of seconds since the epoch).
-
- This function is part of the `DBAPI 2.0 specification
- `_.
-
- :rtype: :class:`datetime.datetime`
- """
- return Timestamp(*time.localtime(ticks)[:6])
-
-
-def Binary(value):
- """Construct an object holding binary data.
-
- This function is part of the `DBAPI 2.0 specification
- `_.
-
- :rtype: :class:`pg8000.types.Bytea` for Python 2, otherwise :class:`bytes`
- """
- if PY2:
- return Bytea(value)
- else:
- return value
-
-
-from .core import utc, Cursor
-
__all__ = [
Warning, Bytea, DataError, DatabaseError, connect, InterfaceError,
ProgrammingError, Error, OperationalError, IntegrityError, InternalError,
NotSupportedError, ArrayContentNotHomogenousError, ArrayContentEmptyError,
ArrayDimensionsNotConsistentError, ArrayContentNotSupportedError, utc,
- Connection, Cursor]
+ Connection, Cursor, Binary, Date, DateFromTicks, Time, TimeFromTicks,
+ Timestamp, TimestampFromTicks, BINARY, Interval]
"""Version string for pg8000.
.. versionadded:: 1.9.11
"""
-
-from ._version import get_versions
-__version__ = get_versions()['version']
-del get_versions
diff --git a/gluon/contrib/pg8000/_version.py b/gluon/contrib/pg8000/_version.py
index 982cf246c..5677d1e06 100644
--- a/gluon/contrib/pg8000/_version.py
+++ b/gluon/contrib/pg8000/_version.py
@@ -6,22 +6,58 @@
# that just contains the computed version number.
# This file is released into the public domain. Generated by
-# versioneer-0.12 (https://github.com/warner/python-versioneer)
-
-# these strings will be replaced by git during git-archive
-git_refnames = "$Format:%d$"
-git_full = "$Format:%H$"
-
-# these strings are filled in when 'setup.py versioneer' creates _version.py
-tag_prefix = ""
-parentdir_prefix = "pg8000-"
-versionfile_source = "pg8000/_version.py"
+# versioneer-0.15 (https://github.com/warner/python-versioneer)
+import errno
import os
-import sys
import re
import subprocess
-import errno
+import sys
+
+
+def get_keywords():
+ # these strings will be replaced by git during git-archive.
+ # setup.py/versioneer.py will grep for the variable names, so they must
+ # each be defined on a line of their own. _version.py will just call
+ # get_keywords().
+ git_refnames = " (tag: 1.10.6)"
+ git_full = "4098abf6be90683ab10b7b080983ed6f08476485"
+ keywords = {"refnames": git_refnames, "full": git_full}
+ return keywords
+
+
+class VersioneerConfig:
+ pass
+
+
+def get_config():
+ # these strings are filled in when 'setup.py versioneer' creates
+ # _version.py
+ cfg = VersioneerConfig()
+ cfg.VCS = "git"
+ cfg.style = "pep440"
+ cfg.tag_prefix = ""
+ cfg.parentdir_prefix = "pg8000-"
+ cfg.versionfile_source = "pg8000/_version.py"
+ cfg.verbose = False
+ return cfg
+
+
+class NotThisMethod(Exception):
+ pass
+
+
+LONG_VERSION_PY = {}
+HANDLERS = {}
+
+
+def register_vcs_handler(vcs, method): # decorator
+ def decorate(f):
+ if vcs not in HANDLERS:
+ HANDLERS[vcs] = {}
+ HANDLERS[vcs][method] = f
+ return f
+ return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
@@ -29,6 +65,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
p = None
for c in commands:
try:
+ dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
@@ -39,7 +76,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
if e.errno == errno.ENOENT:
continue
if verbose:
- print("unable to run %s" % args[0])
+ print("unable to run %s" % dispcmd)
print(e)
return None
else:
@@ -47,28 +84,30 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
print("unable to find command, tried %s" % (commands,))
return None
stdout = p.communicate()[0].strip()
- if sys.version >= '3':
+ if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
- print("unable to run %s (error)" % args[0])
+ print("unable to run %s (error)" % dispcmd)
return None
return stdout
-def versions_from_parentdir(parentdir_prefix, root, verbose=False):
+def versions_from_parentdir(parentdir_prefix, root, verbose):
# Source tarballs conventionally unpack into a directory that includes
# both the project name and a version string.
dirname = os.path.basename(root)
if not dirname.startswith(parentdir_prefix):
if verbose:
- print(
- "guessing rootdir is '%s', but '%s' doesn't start with "
- "prefix '%s'" % (root, dirname, parentdir_prefix))
- return None
- return {"version": dirname[len(parentdir_prefix):], "full": ""}
+ print("guessing rootdir is '%s', but '%s' doesn't start with "
+ "prefix '%s'" % (root, dirname, parentdir_prefix))
+ raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
+ return {"version": dirname[len(parentdir_prefix):],
+ "full-revisionid": None,
+ "dirty": False, "error": None}
+@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
@@ -92,14 +131,15 @@ def git_get_keywords(versionfile_abs):
return keywords
-def git_versions_from_keywords(keywords, tag_prefix, verbose=False):
+@register_vcs_handler("git", "keywords")
+def git_versions_from_keywords(keywords, tag_prefix, verbose):
if not keywords:
- return {} # keyword-finding function failed to find keywords
+ raise NotThisMethod("no keywords at all, weird")
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
- return {} # unexpanded, so not in an unpacked git-archive tarball
+ raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
@@ -124,18 +164,20 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose=False):
r = ref[len(tag_prefix):]
if verbose:
print("picking %s" % r)
- return {
- "version": r,
- "full": keywords["full"].strip()}
- # no suitable tags, so we use the full revision id
+ return {"version": r,
+ "full-revisionid": keywords["full"].strip(),
+ "dirty": False, "error": None
+ }
+ # no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
- print("no suitable tags, using full revision id")
- return {
- "version": keywords["full"].strip(),
- "full": keywords["full"].strip()}
+ print("no suitable tags, using unknown + full revision id")
+ return {"version": "0+unknown",
+ "full-revisionid": keywords["full"].strip(),
+ "dirty": False, "error": "no suitable tags"}
-def git_versions_from_vcs(tag_prefix, root, verbose=False):
+@register_vcs_handler("git", "pieces_from_vcs")
+def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
# this runs 'git' from the root of the source tree. This only gets called
# if the git-archive 'subst' keywords were *not* expanded, and
# _version.py hasn't already been rewritten with a short version string,
@@ -144,52 +186,275 @@ def git_versions_from_vcs(tag_prefix, root, verbose=False):
if not os.path.exists(os.path.join(root, ".git")):
if verbose:
print("no .git in %s" % root)
- return {}
+ raise NotThisMethod("no .git directory")
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
- stdout = run_command(GITS, ["describe", "--tags", "--dirty", "--always"],
- cwd=root)
- if stdout is None:
- return {}
- if not stdout.startswith(tag_prefix):
- if verbose:
- print(
- "tag '%s' doesn't start with prefix '%s'" %
- (stdout, tag_prefix))
- return {}
- tag = stdout[len(tag_prefix):]
- stdout = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
- if stdout is None:
- return {}
- full = stdout.strip()
- if tag.endswith("-dirty"):
- full += "-dirty"
- return {"version": tag, "full": full}
-
-
-def get_versions(default={"version": "unknown", "full": ""}, verbose=False):
+ # if there is a tag, this yields TAG-NUM-gHEX[-dirty]
+ # if there are no tags, this yields HEX[-dirty] (no NUM)
+ describe_out = run_command(GITS, ["describe", "--tags", "--dirty",
+ "--always", "--long"],
+ cwd=root)
+ # --long was added in git-1.5.5
+ if describe_out is None:
+ raise NotThisMethod("'git describe' failed")
+ describe_out = describe_out.strip()
+ full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
+ if full_out is None:
+ raise NotThisMethod("'git rev-parse' failed")
+ full_out = full_out.strip()
+
+ pieces = {}
+ pieces["long"] = full_out
+ pieces["short"] = full_out[:7] # maybe improved later
+ pieces["error"] = None
+
+ # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
+ # TAG might have hyphens.
+ git_describe = describe_out
+
+ # look for -dirty suffix
+ dirty = git_describe.endswith("-dirty")
+ pieces["dirty"] = dirty
+ if dirty:
+ git_describe = git_describe[:git_describe.rindex("-dirty")]
+
+ # now we have TAG-NUM-gHEX or HEX
+
+ if "-" in git_describe:
+ # TAG-NUM-gHEX
+ mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
+ if not mo:
+ # unparseable. Maybe git-describe is misbehaving?
+ pieces["error"] = ("unable to parse git-describe output: '%s'"
+ % describe_out)
+ return pieces
+
+ # tag
+ full_tag = mo.group(1)
+ if not full_tag.startswith(tag_prefix):
+ if verbose:
+ fmt = "tag '%s' doesn't start with prefix '%s'"
+ print(fmt % (full_tag, tag_prefix))
+ pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
+ % (full_tag, tag_prefix))
+ return pieces
+ pieces["closest-tag"] = full_tag[len(tag_prefix):]
+
+ # distance: number of commits since tag
+ pieces["distance"] = int(mo.group(2))
+
+ # commit: short hex revision ID
+ pieces["short"] = mo.group(3)
+
+ else:
+ # HEX: no tags
+ pieces["closest-tag"] = None
+ count_out = run_command(GITS, ["rev-list", "HEAD", "--count"],
+ cwd=root)
+ pieces["distance"] = int(count_out) # total number of commits
+
+ return pieces
+
+
+def plus_or_dot(pieces):
+ if "+" in pieces.get("closest-tag", ""):
+ return "."
+ return "+"
+
+
+def render_pep440(pieces):
+ # now build up version string, with post-release "local version
+ # identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
+ # get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
+
+ # exceptions:
+ # 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
+
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ rendered += plus_or_dot(pieces)
+ rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ else:
+ # exception #1
+ rendered = "0+untagged.%d.g%s" % (pieces["distance"],
+ pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ return rendered
+
+
+def render_pep440_pre(pieces):
+ # TAG[.post.devDISTANCE] . No -dirty
+
+ # exceptions:
+ # 1: no tags. 0.post.devDISTANCE
+
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"]:
+ rendered += ".post.dev%d" % pieces["distance"]
+ else:
+ # exception #1
+ rendered = "0.post.dev%d" % pieces["distance"]
+ return rendered
+
+
+def render_pep440_post(pieces):
+ # TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that
+ # .dev0 sorts backwards (a dirty tree will appear "older" than the
+ # corresponding clean one), but you shouldn't be releasing software with
+ # -dirty anyways.
+
+ # exceptions:
+ # 1: no tags. 0.postDISTANCE[.dev0]
+
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ rendered += ".post%d" % pieces["distance"]
+ if pieces["dirty"]:
+ rendered += ".dev0"
+ rendered += plus_or_dot(pieces)
+ rendered += "g%s" % pieces["short"]
+ else:
+ # exception #1
+ rendered = "0.post%d" % pieces["distance"]
+ if pieces["dirty"]:
+ rendered += ".dev0"
+ rendered += "+g%s" % pieces["short"]
+ return rendered
+
+
+def render_pep440_old(pieces):
+ # TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty.
+
+ # exceptions:
+ # 1: no tags. 0.postDISTANCE[.dev0]
+
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ rendered += ".post%d" % pieces["distance"]
+ if pieces["dirty"]:
+ rendered += ".dev0"
+ else:
+ # exception #1
+ rendered = "0.post%d" % pieces["distance"]
+ if pieces["dirty"]:
+ rendered += ".dev0"
+ return rendered
+
+
+def render_git_describe(pieces):
+ # TAG[-DISTANCE-gHEX][-dirty], like 'git describe --tags --dirty
+ # --always'
+
+ # exceptions:
+ # 1: no tags. HEX[-dirty] (note: no 'g' prefix)
+
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"]:
+ rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
+ else:
+ # exception #1
+ rendered = pieces["short"]
+ if pieces["dirty"]:
+ rendered += "-dirty"
+ return rendered
+
+
+def render_git_describe_long(pieces):
+ # TAG-DISTANCE-gHEX[-dirty], like 'git describe --tags --dirty
+ # --always -long'. The distance/hash is unconditional.
+
+ # exceptions:
+ # 1: no tags. HEX[-dirty] (note: no 'g' prefix)
+
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
+ else:
+ # exception #1
+ rendered = pieces["short"]
+ if pieces["dirty"]:
+ rendered += "-dirty"
+ return rendered
+
+
+def render(pieces, style):
+ if pieces["error"]:
+ return {"version": "unknown",
+ "full-revisionid": pieces.get("long"),
+ "dirty": None,
+ "error": pieces["error"]}
+
+ if not style or style == "default":
+ style = "pep440" # the default
+
+ if style == "pep440":
+ rendered = render_pep440(pieces)
+ elif style == "pep440-pre":
+ rendered = render_pep440_pre(pieces)
+ elif style == "pep440-post":
+ rendered = render_pep440_post(pieces)
+ elif style == "pep440-old":
+ rendered = render_pep440_old(pieces)
+ elif style == "git-describe":
+ rendered = render_git_describe(pieces)
+ elif style == "git-describe-long":
+ rendered = render_git_describe_long(pieces)
+ else:
+ raise ValueError("unknown style '%s'" % style)
+
+ return {"version": rendered, "full-revisionid": pieces["long"],
+ "dirty": pieces["dirty"], "error": None}
+
+
+def get_versions():
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
- keywords = {"refnames": git_refnames, "full": git_full}
- ver = git_versions_from_keywords(keywords, tag_prefix, verbose)
- if ver:
- return ver
+ cfg = get_config()
+ verbose = cfg.verbose
try:
- root = os.path.abspath(__file__)
+ return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
+ verbose)
+ except NotThisMethod:
+ pass
+
+ try:
+ root = os.path.realpath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
- for i in range(len(versionfile_source.split(os.sep))):
+ for i in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
- return default
+ return {"version": "0+unknown", "full-revisionid": None,
+ "dirty": None,
+ "error": "unable to find root of source tree"}
+
+ try:
+ pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
+ return render(pieces, cfg.style)
+ except NotThisMethod:
+ pass
+
+ try:
+ if cfg.parentdir_prefix:
+ return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
+ except NotThisMethod:
+ pass
- return (git_versions_from_vcs(tag_prefix, root, verbose)
- or versions_from_parentdir(parentdir_prefix, root, verbose)
- or default)
+ return {"version": "0+unknown", "full-revisionid": None,
+ "dirty": None,
+ "error": "unable to compute version"}
diff --git a/gluon/contrib/pg8000/core.py b/gluon/contrib/pg8000/core.py
index f19481bd8..29f88d9aa 100644
--- a/gluon/contrib/pg8000/core.py
+++ b/gluon/contrib/pg8000/core.py
@@ -1,3 +1,23 @@
+import datetime
+from datetime import timedelta
+from warnings import warn
+import socket
+import threading
+from struct import pack
+from hashlib import md5
+from decimal import Decimal
+from collections import deque, defaultdict
+from itertools import count, islice
+from .six.moves import map
+from .six import b, PY2, integer_types, next, text_type, u, binary_type
+from uuid import UUID
+from copy import deepcopy
+from calendar import timegm
+from distutils.version import LooseVersion
+from struct import Struct
+import time
+
+
# Copyright (c) 2007-2009, Mathieu Fenniak
# All rights reserved.
#
@@ -27,34 +47,6 @@
__author__ = "Mathieu Fenniak"
-import datetime
-from datetime import timedelta
-from . import (
- Interval, min_int2, max_int2, min_int4, max_int4, min_int8, max_int8,
- Bytea, NotSupportedError, ProgrammingError, InternalError, IntegrityError,
- OperationalError, DatabaseError, InterfaceError, Error,
- ArrayContentNotHomogenousError, ArrayContentEmptyError,
- ArrayDimensionsNotConsistentError, ArrayContentNotSupportedError, Warning,
- i_unpack, ii_unpack, iii_unpack, h_pack, d_unpack, q_unpack, d_pack,
- f_unpack, q_pack, i_pack, h_unpack, dii_unpack, qii_unpack, ci_unpack,
- bh_unpack, ihihih_unpack, cccc_unpack, ii_pack, iii_pack, dii_pack,
- qii_pack)
-from warnings import warn
-import socket
-import threading
-from struct import pack
-from hashlib import md5
-from decimal import Decimal
-from collections import deque, defaultdict
-from itertools import count, islice
-from .six.moves import map
-from .six import b, PY2, integer_types, next, PRE_26, text_type, u
-from sys import exc_info
-from uuid import UUID
-from copy import deepcopy
-from calendar import timegm
-import os
-from distutils.version import LooseVersion
try:
from json import loads
@@ -78,10 +70,351 @@ def dst(self, dt):
utc = UTC()
-if PRE_26:
- bytearray = list
+
+class Interval(object):
+ """An Interval represents a measurement of time. In PostgreSQL, an
+ interval is defined in the measure of months, days, and microseconds; as
+ such, the pg8000 interval type represents the same information.
+
+ Note that values of the :attr:`microseconds`, :attr:`days` and
+ :attr:`months` properties are independently measured and cannot be
+ converted to each other. A month may be 28, 29, 30, or 31 days, and a day
+ may occasionally be lengthened slightly by a leap second.
+
+ .. attribute:: microseconds
+
+ Measure of microseconds in the interval.
+
+ The microseconds value is constrained to fit into a signed 64-bit
+ integer. Any attempt to set a value too large or too small will result
+ in an OverflowError being raised.
+
+ .. attribute:: days
+
+ Measure of days in the interval.
+
+ The days value is constrained to fit into a signed 32-bit integer.
+ Any attempt to set a value too large or too small will result in an
+ OverflowError being raised.
+
+ .. attribute:: months
+
+ Measure of months in the interval.
+
+ The months value is constrained to fit into a signed 32-bit integer.
+ Any attempt to set a value too large or too small will result in an
+ OverflowError being raised.
+ """
+
+ def __init__(self, microseconds=0, days=0, months=0):
+ self.microseconds = microseconds
+ self.days = days
+ self.months = months
+
+ def _setMicroseconds(self, value):
+ if not isinstance(value, integer_types):
+ raise TypeError("microseconds must be an integer type")
+ elif not (min_int8 < value < max_int8):
+ raise OverflowError(
+ "microseconds must be representable as a 64-bit integer")
+ else:
+ self._microseconds = value
+
+ def _setDays(self, value):
+ if not isinstance(value, integer_types):
+ raise TypeError("days must be an integer type")
+ elif not (min_int4 < value < max_int4):
+ raise OverflowError(
+ "days must be representable as a 32-bit integer")
+ else:
+ self._days = value
+
+ def _setMonths(self, value):
+ if not isinstance(value, integer_types):
+ raise TypeError("months must be an integer type")
+ elif not (min_int4 < value < max_int4):
+ raise OverflowError(
+ "months must be representable as a 32-bit integer")
+ else:
+ self._months = value
+
+ microseconds = property(lambda self: self._microseconds, _setMicroseconds)
+ days = property(lambda self: self._days, _setDays)
+ months = property(lambda self: self._months, _setMonths)
+
+ def __repr__(self):
+ return "" % (
+ self.months, self.days, self.microseconds)
+
+ def __eq__(self, other):
+ return other is not None and isinstance(other, Interval) and \
+ self.months == other.months and self.days == other.days and \
+ self.microseconds == other.microseconds
+
+ def __neq__(self, other):
+ return not self.__eq__(other)
+
+
+def pack_funcs(fmt):
+ struc = Struct('!' + fmt)
+ return struc.pack, struc.unpack_from
+
+i_pack, i_unpack = pack_funcs('i')
+h_pack, h_unpack = pack_funcs('h')
+q_pack, q_unpack = pack_funcs('q')
+d_pack, d_unpack = pack_funcs('d')
+f_pack, f_unpack = pack_funcs('f')
+iii_pack, iii_unpack = pack_funcs('iii')
+ii_pack, ii_unpack = pack_funcs('ii')
+qii_pack, qii_unpack = pack_funcs('qii')
+dii_pack, dii_unpack = pack_funcs('dii')
+ihihih_pack, ihihih_unpack = pack_funcs('ihihih')
+ci_pack, ci_unpack = pack_funcs('ci')
+bh_pack, bh_unpack = pack_funcs('bh')
+cccc_pack, cccc_unpack = pack_funcs('cccc')
+
+
+Struct('!i')
+
+
+min_int2, max_int2 = -2 ** 15, 2 ** 15
+min_int4, max_int4 = -2 ** 31, 2 ** 31
+min_int8, max_int8 = -2 ** 63, 2 ** 63
+
+
+class Warning(Exception):
+ """Generic exception raised for important database warnings like data
+ truncations. This exception is not currently used by pg8000.
+
+ This exception is part of the `DBAPI 2.0 specification
+ `_.
+ """
+ pass
+
+
+class Error(Exception):
+ """Generic exception that is the base exception of all other error
+ exceptions.
+
+ This exception is part of the `DBAPI 2.0 specification
+ `_.
+ """
+ pass
+
+
+class InterfaceError(Error):
+ """Generic exception raised for errors that are related to the database
+ interface rather than the database itself. For example, if the interface
+ attempts to use an SSL connection but the server refuses, an InterfaceError
+ will be raised.
+
+ This exception is part of the `DBAPI 2.0 specification
+ `_.
+ """
+ pass
+
+
+class DatabaseError(Error):
+ """Generic exception raised for errors that are related to the database.
+ This exception is currently never raised by pg8000.
+
+ This exception is part of the `DBAPI 2.0 specification
+ `_.
+ """
+ pass
+
+
+class DataError(DatabaseError):
+ """Generic exception raised for errors that are due to problems with the
+ processed data. This exception is not currently raised by pg8000.
+
+ This exception is part of the `DBAPI 2.0 specification
+ `_.
+ """
+ pass
+
+
+class OperationalError(DatabaseError):
+ """
+ Generic exception raised for errors that are related to the database's
+ operation and not necessarily under the control of the programmer. This
+ exception is currently never raised by pg8000.
+
+ This exception is part of the `DBAPI 2.0 specification
+ `_.
+ """
+ pass
+
+
+class IntegrityError(DatabaseError):
+ """
+ Generic exception raised when the relational integrity of the database is
+ affected. This exception is not currently raised by pg8000.
+
+ This exception is part of the `DBAPI 2.0 specification
+ `_.
+ """
+ pass
+
+
+class InternalError(DatabaseError):
+ """Generic exception raised when the database encounters an internal error.
+ This is currently only raised when unexpected state occurs in the pg8000
+ interface itself, and is typically the result of an interface bug.
+
+ This exception is part of the `DBAPI 2.0 specification
+ `_.
+ """
+ pass
+
+
+class ProgrammingError(DatabaseError):
+ """Generic exception raised for programming errors. For example, this
+ exception is raised if more parameter fields are in a query string than
+ there are available parameters.
+
+ This exception is part of the `DBAPI 2.0 specification
+ `_.
+ """
+ pass
+
+
+class NotSupportedError(DatabaseError):
+ """Generic exception raised in case a method or database API was used which
+ is not supported by the database.
+
+ This exception is part of the `DBAPI 2.0 specification
+ `_.
+ """
+ pass
+
+
+class ArrayContentNotSupportedError(NotSupportedError):
+ """
+ Raised when attempting to transmit an array where the base type is not
+ supported for binary data transfer by the interface.
+ """
+ pass
+
+
+class ArrayContentNotHomogenousError(ProgrammingError):
+ """
+ Raised when attempting to transmit an array that doesn't contain only a
+ single type of object.
+ """
+ pass
+
+
+class ArrayContentEmptyError(ProgrammingError):
+ """Raised when attempting to transmit an empty array. The type oid of an
+ empty array cannot be determined, and so sending them is not permitted.
+ """
+ pass
+
+
+class ArrayDimensionsNotConsistentError(ProgrammingError):
+ """
+ Raised when attempting to transmit an array that has inconsistent
+ multi-dimension sizes.
+ """
+ pass
+
+
+class Bytea(binary_type):
+ """Bytea is a str-derived class that is mapped to a PostgreSQL byte array.
+ This class is only used in Python 2, the built-in ``bytes`` type is used in
+ Python 3.
+ """
+ pass
+
+
+def Date(year, month, day):
+ """Constuct an object holding a date value.
+
+ This function is part of the `DBAPI 2.0 specification
+ `_.
+
+ :rtype: :class:`datetime.date`
+ """
+ return datetime.date(year, month, day)
+
+
+def Time(hour, minute, second):
+ """Construct an object holding a time value.
+
+ This function is part of the `DBAPI 2.0 specification
+ `_.
+
+ :rtype: :class:`datetime.time`
+ """
+ return datetime.time(hour, minute, second)
+
+
+def Timestamp(year, month, day, hour, minute, second):
+ """Construct an object holding a timestamp value.
+
+ This function is part of the `DBAPI 2.0 specification
+ `_.
+
+ :rtype: :class:`datetime.datetime`
+ """
+ return datetime.datetime(year, month, day, hour, minute, second)
+
+
+def DateFromTicks(ticks):
+ """Construct an object holding a date value from the given ticks value
+ (number of seconds since the epoch).
+
+ This function is part of the `DBAPI 2.0 specification
+ `_.
+
+ :rtype: :class:`datetime.date`
+ """
+ return Date(*time.localtime(ticks)[:3])
+
+
+def TimeFromTicks(ticks):
+ """Construct an objet holding a time value from the given ticks value
+ (number of seconds since the epoch).
+
+ This function is part of the `DBAPI 2.0 specification
+ `_.
+
+ :rtype: :class:`datetime.time`
+ """
+ return Time(*time.localtime(ticks)[3:6])
+
+
+def TimestampFromTicks(ticks):
+ """Construct an object holding a timestamp value from the given ticks value
+ (number of seconds since the epoch).
+
+ This function is part of the `DBAPI 2.0 specification
+ `_.
+
+ :rtype: :class:`datetime.datetime`
+ """
+ return Timestamp(*time.localtime(ticks)[:6])
+
+
+def Binary(value):
+ """Construct an object holding binary data.
+
+ This function is part of the `DBAPI 2.0 specification
+ `_.
+
+ :rtype: :class:`pg8000.types.Bytea` for Python 2, otherwise :class:`bytes`
+ """
+ if PY2:
+ return Bytea(value)
+ else:
+ return value
+
+if PY2:
+ BINARY = Bytea
+else:
+ BINARY = bytes
+
FC_TEXT = 0
FC_BINARY = 1
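
The `exec()`-generated struct helpers that used to live in `__init__.py` are replaced by the plain `pack_funcs()` factory above. A short sketch of what the generated pairs do ('!' selects network byte order, i.e. big-endian):

```python
# Sketch of the struct helper pairs produced by pack_funcs().
from struct import Struct

def pack_funcs(fmt):
    struc = Struct('!' + fmt)
    return struc.pack, struc.unpack_from

i_pack, i_unpack = pack_funcs('i')
print(i_pack(5432))                  # b'\x00\x00\x158'
print(i_unpack(b'\x00\x00\x158'))    # (5432,)
```
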
@@ -271,13 +604,13 @@ def timestamp_recv_integer(data, offset, length):
micros = q_unpack(data, offset)[0]
try:
return EPOCH + timedelta(microseconds=micros)
- except OverflowError:
+ except OverflowError as e:
if micros == INFINITY_MICROSECONDS:
return datetime.datetime.max
elif micros == MINUS_INFINITY_MICROSECONDS:
return datetime.datetime.min
else:
- raise exc_info()[1]
+ raise e
# data is double-precision float representing seconds since 2000-01-01
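
For context on the `OverflowError` handling above: with integer datetimes the server sends microseconds relative to 2000-01-01, and only the infinity sentinels overflow a `datetime`. A decoding sketch (`EPOCH` is assumed to be that datetime, as used elsewhere in core.py):

```python
# Decoding sketch for timestamp_recv_integer: wire value is microseconds
# since 2000-01-01 (EPOCH value assumed here).
import datetime
from datetime import timedelta

EPOCH = datetime.datetime(2000, 1, 1)
micros = 86400 * 10 ** 6                        # exactly one day
print(EPOCH + timedelta(microseconds=micros))   # 2000-01-02 00:00:00
```
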
@@ -299,7 +632,7 @@ def timestamp_send_integer(v):
# data is double-precision float representing seconds since 2000-01-01
def timestamp_send_float(v):
- return d_pack(timegm(v.timetuple) + v.microsecond / 1e6 - EPOCH_SECONDS)
+ return d_pack(timegm(v.timetuple()) + v.microsecond / 1e6 - EPOCH_SECONDS)
def timestamptz_send_integer(v):
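
The one-character fix above matters because `timetuple` is a method; a quick check of the corrected expression:

```python
# The corrected call: timegm() needs the struct_time that timetuple() returns.
from calendar import timegm
import datetime

v = datetime.datetime(2000, 1, 2, 0, 0, 0)
print(timegm(v.timetuple()))   # 946771200 (Unix seconds, UTC)
```
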
@@ -325,13 +658,13 @@ def timestamptz_recv_integer(data, offset, length):
micros = q_unpack(data, offset)[0]
try:
return EPOCH_TZ + timedelta(microseconds=micros)
- except OverflowError:
+ except OverflowError as e:
if micros == INFINITY_MICROSECONDS:
return DATETIME_MAX_TZ
elif micros == MINUS_INFINITY_MICROSECONDS:
return DATETIME_MIN_TZ
else:
- raise exc_info()[1]
+ raise e
def timestamptz_recv_float(data, offset, length):
@@ -565,21 +898,19 @@ def execute(self, operation, args=None, stream=None):
.. versionadded:: 1.9.11
"""
try:
- self._c._lock.acquire()
- self.stream = stream
+ with self._c._lock:
+ self.stream = stream
- if not self._c.in_transaction and not self._c.autocommit:
- self._c.execute(self, "begin transaction", None)
- self._c.execute(self, operation, args)
- except AttributeError:
+ if not self._c.in_transaction and not self._c.autocommit:
+ self._c.execute(self, "begin transaction", None)
+ self._c.execute(self, operation, args)
+ except AttributeError as e:
if self._c is None:
raise InterfaceError("Cursor closed")
elif self._c._sock is None:
raise InterfaceError("connection is closed")
else:
- raise exc_info()[1]
- finally:
- self._c._lock.release()
+ raise e
def executemany(self, operation, param_sets):
"""Prepare a database operation, and then execute it against all
@@ -690,28 +1021,26 @@ def setoutputsize(self, size, column=None):
pass
def __next__(self):
- try:
- self._c._lock.acquire()
- return self._cached_rows.popleft()
- except IndexError:
- if self.portal_suspended:
- self._c.send_EXECUTE(self)
- self._c._write(SYNC_MSG)
- self._c._flush()
- self._c.handle_messages(self)
- if not self.portal_suspended:
- self._c.close_portal(self)
+ with self._c._lock:
try:
return self._cached_rows.popleft()
except IndexError:
- if self.ps is None:
- raise ProgrammingError("A query hasn't been issued.")
- elif len(self.ps['row_desc']) == 0:
- raise ProgrammingError("no result set")
- else:
- raise StopIteration()
- finally:
- self._c._lock.release()
+ if self.portal_suspended:
+ self._c.send_EXECUTE(self)
+ self._c._write(SYNC_MSG)
+ self._c._flush()
+ self._c.handle_messages(self)
+ if not self.portal_suspended:
+ self._c.close_portal(self)
+ try:
+ return self._cached_rows.popleft()
+ except IndexError:
+ if self.ps is None:
+ raise ProgrammingError("A query hasn't been issued.")
+ elif len(self.ps['row_desc']) == 0:
+ raise ProgrammingError("no result set")
+ else:
+ raise StopIteration()
if PY2:
Cursor.next = Cursor.__next__
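
Since `__next__` drives lazy row fetching (refilling the cache and resuming a suspended portal as needed), iterating a cursor looks like this in practice (credentials illustrative; requires a reachable server):

```python
# Iteration sketch driven by Cursor.__next__ above.
from gluon.contrib import pg8000

conn = pg8000.connect(user="web2py", password="secret", database="test")
cur = conn.cursor()
cur.execute("SELECT * FROM generate_series(1, 5)")
for row in cur:          # each step may refill the internal row cache
    print(row[0])
conn.close()
```
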
@@ -737,6 +1066,7 @@ def __next__(self):
COPY_DATA = b("d")
COPY_IN_RESPONSE = b("G")
COPY_OUT_RESPONSE = b("H")
+EMPTY_QUERY_RESPONSE = b("I")
BIND = b("B")
PARSE = b("P")
@@ -776,14 +1106,6 @@ def __next__(self):
IDLE_IN_FAILED_TRANSACTION = b("E")
-# Byte1('N') - Identifier
-# Int32 - Message length
-# Any number of these, followed by a zero byte:
-# Byte1 - code identifying the field type (see responseKeys)
-# String - field value
-def data_into_dict(data):
- return dict((s[0:1], s[1:]) for s in data.split(NULL_BYTE))
-
arr_trans = dict(zip(map(ord, u("[] 'u")), list(u('{}')) + [None] * 3))
@@ -895,7 +1217,9 @@ def _getError(self, error):
error.__name__, stacklevel=3)
return error
- def __init__(self, user, host, unix_sock, port, database, password, ssl):
+ def __init__(
+ self, user, host, unix_sock, port, database, password, ssl,
+ timeout):
self._client_encoding = "utf8"
self._commands_with_count = (
b("INSERT"), b("DELETE"), b("UPDATE"), b("MOVE"),
@@ -903,23 +1227,19 @@ def __init__(self, user, host, unix_sock, port, database, password, ssl):
self._lock = threading.Lock()
if user is None:
- try:
- self.user = os.environ['PGUSER']
- except KeyError:
- try:
- self.user = os.environ['USER']
- except KeyError:
- raise InterfaceError(
- "The 'user' connection parameter was omitted, and "
- "neither the PGUSER or USER environment variables "
- "were set.")
+ raise InterfaceError(
+ "The 'user' connection parameter cannot be None")
+
+ if isinstance(user, text_type):
+ self.user = user.encode('utf8')
else:
self.user = user
- if isinstance(self.user, text_type):
- self.user = self.user.encode('utf8')
+ if isinstance(password, text_type):
+ self.password = password.encode('utf8')
+ else:
+ self.password = password
- self.password = password
self.autocommit = False
self._xid = None
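
With the PGUSER/USER environment fallback removed, omitting `user` now fails fast, before any socket is opened. A sketch of the new behaviour:

```python
# The environment-variable fallback is gone: user must be passed explicitly.
from gluon.contrib import pg8000

try:
    pg8000.connect(user=None, database="test")
except pg8000.InterfaceError as e:
    print(e)    # The 'user' connection parameter cannot be None
```
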
@@ -939,41 +1259,38 @@ def __init__(self, user, host, unix_sock, port, database, password, ssl):
else:
raise ProgrammingError(
"one of host or unix_sock must be provided")
+ if not PY2 and timeout is not None:
+ self._usock.settimeout(timeout)
+
if unix_sock is None and host is not None:
self._usock.connect((host, port))
elif unix_sock is not None:
self._usock.connect(unix_sock)
if ssl:
- try:
- self._lock.acquire()
- import ssl as sslmodule
- # Int32(8) - Message length, including self.
- # Int32(80877103) - The SSL request code.
- self._usock.sendall(ii_pack(8, 80877103))
- resp = self._usock.recv(1)
- if resp == b('S'):
- self._usock = sslmodule.wrap_socket(self._usock)
- else:
- raise InterfaceError("Server refuses SSL")
- except ImportError:
- raise InterfaceError(
- "SSL required but ssl module not available in "
- "this python installation")
- finally:
- self._lock.release()
+ with self._lock:
+ try:
+ import ssl as sslmodule
+ # Int32(8) - Message length, including self.
+ # Int32(80877103) - The SSL request code.
+ self._usock.sendall(ii_pack(8, 80877103))
+ resp = self._usock.recv(1)
+ if resp == b('S'):
+ self._usock = sslmodule.wrap_socket(self._usock)
+ else:
+ raise InterfaceError("Server refuses SSL")
+ except ImportError:
+ raise InterfaceError(
+ "SSL required but ssl module not available in "
+ "this python installation")
self._sock = self._usock.makefile(mode="rwb")
- except socket.error:
+ except socket.error as e:
self._usock.close()
- raise InterfaceError("communication error", exc_info()[1])
+ raise InterfaceError("communication error", e)
self._flush = self._sock.flush
self._read = self._sock.read
-
- if PRE_26:
- self._write = self._sock.writelines
- else:
- self._write = self._sock.write
+ self._write = self._sock.write
self._backend_key_data = None
##
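
The SSL negotiation block now runs under the connection lock, but the request it sends is unchanged: just two big-endian int32s, a length of 8 and the SSLRequest magic code:

```python
# The SSLRequest message sent above, packed as two big-endian int32s.
from struct import Struct

ii_pack = Struct('!ii').pack
print(ii_pack(8, 80877103))    # b'\x00\x00\x00\x08\x04\xd2\x16/'
```
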
@@ -1182,7 +1499,6 @@ def numeric_out(d):
bool: (16, FC_BINARY, bool_send),
int: (705, FC_TEXT, unknown_out),
float: (701, FC_BINARY, d_pack), # float8
- str: (705, FC_TEXT, text_out), # unknown
datetime.date: (1082, FC_TEXT, date_out), # date
datetime.time: (1083, FC_TEXT, time_out), # time
1114: (1114, FC_BINARY, timestamp_send_integer), # timestamp
@@ -1203,10 +1519,12 @@ def numeric_out(d):
if PY2:
self.py_types[Bytea] = (17, FC_BINARY, bytea_send) # bytea
self.py_types[text_type] = (705, FC_TEXT, text_out) # unknown
+ self.py_types[str] = (705, FC_TEXT, bytea_send) # unknown
self.py_types[long] = (705, FC_TEXT, unknown_out) # noqa
else:
self.py_types[bytes] = (17, FC_BINARY, bytea_send) # bytea
+ self.py_types[str] = (705, FC_TEXT, text_out) # unknown
try:
from ipaddress import (
@@ -1240,6 +1558,7 @@ def inet_in(data, offset, length):
READY_FOR_QUERY: self.handle_READY_FOR_QUERY,
ROW_DESCRIPTION: self.handle_ROW_DESCRIPTION,
ERROR_RESPONSE: self.handle_ERROR_RESPONSE,
+ EMPTY_QUERY_RESPONSE: self.handle_EMPTY_QUERY_RESPONSE,
DATA_ROW: self.handle_DATA_ROW,
COMMAND_COMPLETE: self.handle_COMMAND_COMPLETE,
PARSE_COMPLETE: self.handle_PARSE_COMPLETE,
@@ -1272,32 +1591,37 @@ def inet_in(data, offset, length):
self._flush()
self._cursor = self.cursor()
- try:
- self._lock.acquire()
- code = self.error = None
- while code not in (READY_FOR_QUERY, ERROR_RESPONSE):
- code, data_len = ci_unpack(self._read(5))
- self.message_types[code](self._read(data_len - 4), None)
- if self.error is not None:
- raise self.error
- except:
- self._close()
- raise
- finally:
- self._lock.release()
+ with self._lock:
+ try:
+ code = self.error = None
+ while code not in (READY_FOR_QUERY, ERROR_RESPONSE):
+ code, data_len = ci_unpack(self._read(5))
+ self.message_types[code](self._read(data_len - 4), None)
+ if self.error is not None:
+ raise self.error
+ except Exception as e:
+ try:
+ self._close()
+ except Exception:
+ pass
+ raise e
self.in_transaction = False
self.notifies = []
self.notifies_lock = threading.Lock()
def handle_ERROR_RESPONSE(self, data, ps):
- msg_dict = data_into_dict(data)
+ responses = tuple(
+ (s[0:1], s[1:].decode(self._client_encoding)) for s in
+ data.split(NULL_BYTE))
+ msg_dict = dict(responses)
if msg_dict[RESPONSE_CODE] == "28000":
self.error = InterfaceError("md5 password authentication failed")
else:
- self.error = ProgrammingError(
- msg_dict[RESPONSE_SEVERITY], msg_dict[RESPONSE_CODE],
- msg_dict[RESPONSE_MSG])
+ self.error = ProgrammingError(*tuple(v for k, v in responses))
+
+ def handle_EMPTY_QUERY_RESPONSE(self, data, ps):
+ self.error = ProgrammingError("query was empty")
def handle_CLOSE_COMPLETE(self, data, ps):
pass
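
A parsing sketch for the rewritten `handle_ERROR_RESPONSE` above. The payload below is a hand-made example; field codes follow the PostgreSQL protocol (S = severity, C = SQLSTATE code, M = message):

```python
# How the new tuple/dict parse splits an ErrorResponse body.
NULL_BYTE = b"\x00"
data = b"SERROR\x00C42703\x00Mcolumn does not exist\x00"

responses = tuple((s[0:1], s[1:].decode("utf8"))
                  for s in data.split(NULL_BYTE))
msg_dict = dict(responses)
print(msg_dict[b"C"])    # 42703
print(msg_dict[b"M"])    # column does not exist
```
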
@@ -1391,11 +1715,8 @@ def handle_NOTIFICATION_RESPONSE(self, data, ps):
# additional_info = data[idx:idx + null]
# psycopg2 compatible notification interface
- try:
- self.notifies_lock.acquire()
+ with self.notifies_lock:
self.notifies.append((backend_pid, condition))
- finally:
- self.notifies_lock.release()
def cursor(self):
"""Creates a :class:`Cursor` object bound to this
@@ -1412,11 +1733,8 @@ def commit(self):
This function is part of the `DBAPI 2.0 specification
`_.
"""
- try:
- self._lock.acquire()
+ with self._lock:
self.execute(self._cursor, "commit", None)
- finally:
- self._lock.release()
def rollback(self):
"""Rolls back the current database transaction.
@@ -1424,11 +1742,8 @@ def rollback(self):
This function is part of the `DBAPI 2.0 specification
`_.
"""
- try:
- self._lock.acquire()
+ with self._lock:
self.execute(self._cursor, "rollback", None)
- finally:
- self._lock.release()
def _close(self):
try:
@@ -1437,12 +1752,15 @@ def _close(self):
self._write(TERMINATE_MSG)
self._flush()
self._sock.close()
- self._usock.close()
- self._sock = None
except AttributeError:
raise InterfaceError("connection is closed")
except ValueError:
raise InterfaceError("connection is closed")
+ except socket.error as e:
+ raise OperationalError(str(e))
+ finally:
+ self._usock.close()
+ self._sock = None
def close(self):
"""Closes the database connection.
@@ -1450,11 +1768,8 @@ def close(self):
This function is part of the `DBAPI 2.0 specification
`_.
"""
- try:
- self._lock.acquire()
+ with self._lock:
self._close()
- finally:
- self._lock.release()
def handle_AUTHENTICATION_REQUEST(self, data, cursor):
assert self._lock.locked()
@@ -1463,7 +1778,7 @@ def handle_AUTHENTICATION_REQUEST(self, data, cursor):
# 0 = AuthenticationOk
# 5 = MD5 pwd
# 2 = Kerberos v5 (not supported by pg8000)
- # 3 = Cleartext pwd (not supported by pg8000)
+ # 3 = Cleartext pwd
# 4 = crypt() pwd (not supported by pg8000)
# 6 = SCM credential (not supported by pg8000)
# 7 = GSSAPI (not supported by pg8000)
@@ -1480,8 +1795,7 @@ def handle_AUTHENTICATION_REQUEST(self, data, cursor):
raise InterfaceError(
"server requesting password authentication, but no "
"password was provided")
- self._send_message(
- PASSWORD, self.password.encode("ascii") + NULL_BYTE)
+ self._send_message(PASSWORD, self.password + NULL_BYTE)
self._flush()
elif auth_code == 5:
##
@@ -1497,8 +1811,8 @@ def handle_AUTHENTICATION_REQUEST(self, data, cursor):
"server requesting MD5 password authentication, but no "
"password was provided")
pwd = b("md5") + md5(
- md5(self.password.encode("ascii") + self.user).
- hexdigest().encode("ascii") + salt).hexdigest().encode("ascii")
+ md5(self.password + self.user).hexdigest().encode("ascii") +
+ salt).hexdigest().encode("ascii")
# Byte1('p') - Identifies the message as a password message.
# Int32 - Message length including self.
# String - The password. Password may be encrypted.
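
For reference, the hash computed above follows PostgreSQL's MD5 scheme, md5(md5(password + user) + salt) prefixed with the literal "md5". A standalone sketch with made-up inputs (user and password are already bytes at this point in connection setup):

```python
# Worked sketch of the MD5 password hash built above.
from hashlib import md5

user, password, salt = b"web2py", b"secret", b"\x01\x02\x03\x04"
inner = md5(password + user).hexdigest().encode("ascii")
pwd = b"md5" + md5(inner + salt).hexdigest().encode("ascii")
print(pwd)    # b'md5...' with 32 hex digits after the prefix
```
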
@@ -1536,11 +1850,10 @@ def make_params(self, values):
except KeyError:
try:
params.append(self.inspect_funcs[typ](value))
- except KeyError:
+ except KeyError as e:
raise NotSupportedError(
- "type " + str(exc_info()[1]) +
- "not mapped to pg type")
- return params
+ "type " + str(e) + "not mapped to pg type")
+ return tuple(params)
def handle_ROW_DESCRIPTION(self, data, cursor):
count = h_unpack(data)[0]
@@ -1572,8 +1885,7 @@ def execute(self, cursor, operation, vals):
args = make_args(vals)
params = self.make_params(args)
-
- key = tuple(oid for oid, x, y in params), operation
+ key = operation, params
try:
ps = cache['ps'][key]
@@ -1617,11 +1929,13 @@ def execute(self, cursor, operation, vals):
try:
self._flush()
- except AttributeError:
+ except AttributeError as e:
if self._sock is None:
raise InterfaceError("connection is closed")
else:
- raise exc_info()[1]
+ raise e
+ except socket.error as e:
+ raise OperationalError(str(e))
self.handle_messages(cursor)
@@ -1711,11 +2025,11 @@ def _send_message(self, code, data):
self._write(i_pack(len(data) + 4))
self._write(data)
self._write(FLUSH_MSG)
- except ValueError:
- if str(exc_info()[1]) == "write to closed file":
+ except ValueError as e:
+ if str(e) == "write to closed file":
raise InterfaceError("connection is closed")
else:
- raise exc_info()[1]
+ raise e
except AttributeError:
raise InterfaceError("connection is closed")
@@ -1783,8 +2097,13 @@ def close_portal(self, cursor):
self._flush()
self.handle_messages(cursor)
+ # Byte1('N') - Identifier
+ # Int32 - Message length
+ # Any number of these, followed by a zero byte:
+ # Byte1 - code identifying the field type (see responseKeys)
+ # String - field value
def handle_NOTICE_RESPONSE(self, data, ps):
- resp = data_into_dict(data)
+ resp = dict((s[0:1], s[1:]) for s in data.split(NULL_BYTE))
self.NoticeReceived(resp)
def handle_PARAMETER_STATUS(self, data, ps):
diff --git a/gluon/contrib/pg8000/six.py b/gluon/contrib/pg8000/six.py
index 6ec96ed87..190c0239c 100644
--- a/gluon/contrib/pg8000/six.py
+++ b/gluon/contrib/pg8000/six.py
@@ -1,6 +1,6 @@
"""Utilities for writing code that runs on Python 2 and 3"""
-# Copyright (c) 2010-2013 Benjamin Peterson
+# Copyright (c) 2010-2015 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
@@ -9,8 +9,8 @@
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
-# The above copyright notice and this permission notice shall be included in
-# all copies or substantial portions of the Software.
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
@@ -21,22 +21,21 @@
# SOFTWARE.
from __future__ import absolute_import
+
+import functools
+import itertools
import operator
import sys
import types
__author__ = "Benjamin Peterson "
-__version__ = "1.4.1"
+__version__ = "1.10.0"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
-
-PRE_26 = PY2 and sys.version_info[1] < 6
-
-IS_JYTHON = sys.platform.lower().count('java') > 0
-
+PY34 = sys.version_info[0:2] >= (3, 4)
if PY3:
string_types = str,
@@ -47,10 +46,10 @@
MAXSIZE = sys.maxsize
else:
- string_types = basestring, # noqa
- integer_types = (int, long) # noqa
+ string_types = basestring,
+ integer_types = (int, long)
class_types = (type, types.ClassType)
- text_type = unicode # noqa
+ text_type = unicode
binary_type = str
if sys.platform.startswith("java"):
@@ -59,6 +58,7 @@
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
+
def __len__(self):
return 1 << 31
try:
@@ -90,9 +90,13 @@ def __init__(self, name):
def __get__(self, obj, tp):
result = self._resolve()
- setattr(obj, self.name, result)
- # This is a bit ugly, but it avoids running this again.
- delattr(tp, self.name)
+ setattr(obj, self.name, result) # Invokes __set__.
+ try:
+ # This is a bit ugly, but it avoids running this again by
+ # removing this descriptor.
+ delattr(obj.__class__, self.name)
+ except AttributeError:
+ pass
return result
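
core.py above imports `map` from `.six.moves`; those names are served by the lazy-import machinery updated in this file. A quick illustration, assuming the vendored copy inside web2py:

```python
# six.moves resolves to itertools.imap on Python 2 and the builtin map on
# Python 3; the import path assumes the vendored location.
from gluon.contrib.pg8000.six.moves import map

print(list(map(str, range(3))))    # ['0', '1', '2']
```
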
@@ -110,6 +114,27 @@ def __init__(self, name, old, new=None):
def _resolve(self):
return _import_module(self.mod)
+ def __getattr__(self, attr):
+ _module = self._resolve()
+ value = getattr(_module, attr)
+ setattr(self, attr, value)
+ return value
+
+
+class _LazyModule(types.ModuleType):
+
+ def __init__(self, name):
+ super(_LazyModule, self).__init__(name)
+ self.__doc__ = self.__class__.__doc__
+
+ def __dir__(self):
+ attrs = ["__doc__", "__name__"]
+ attrs += [attr.name for attr in self._moved_attributes]
+ return attrs
+
+ # Subclasses should override this
+ _moved_attributes = []
+
class MovedAttribute(_LazyDescr):
@@ -136,38 +161,109 @@ def _resolve(self):
return getattr(module, self.attr)
-class _MovedItems(types.ModuleType):
+class _SixMetaPathImporter(object):
+
+ """
+ A meta path importer to import six.moves and its submodules.
+
+ This class implements a PEP302 finder and loader. It should be compatible
+ with Python 2.5 and all existing versions of Python3
+ """
+
+ def __init__(self, six_module_name):
+ self.name = six_module_name
+ self.known_modules = {}
+
+ def _add_module(self, mod, *fullnames):
+ for fullname in fullnames:
+ self.known_modules[self.name + "." + fullname] = mod
+
+ def _get_module(self, fullname):
+ return self.known_modules[self.name + "." + fullname]
+
+ def find_module(self, fullname, path=None):
+ if fullname in self.known_modules:
+ return self
+ return None
+
+ def __get_module(self, fullname):
+ try:
+ return self.known_modules[fullname]
+ except KeyError:
+ raise ImportError("This loader does not know module " + fullname)
+
+ def load_module(self, fullname):
+ try:
+ # in case of a reload
+ return sys.modules[fullname]
+ except KeyError:
+ pass
+ mod = self.__get_module(fullname)
+ if isinstance(mod, MovedModule):
+ mod = mod._resolve()
+ else:
+ mod.__loader__ = self
+ sys.modules[fullname] = mod
+ return mod
+
+ def is_package(self, fullname):
+ """
+ Return true, if the named module is a package.
+
+ We need this method to get correct spec objects with
+ Python 3.4 (see PEP451)
+ """
+ return hasattr(self.__get_module(fullname), "__path__")
+
+ def get_code(self, fullname):
+ """Return None
+
+ Required, if is_package is implemented"""
+ self.__get_module(fullname) # eventually raises ImportError
+ return None
+ get_source = get_code # same as get_code
+
+_importer = _SixMetaPathImporter(__name__)
+
+
+class _MovedItems(_LazyModule):
+
"""Lazy loading of moved objects"""
+ __path__ = [] # mark as package
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
- MovedAttribute(
- "filterfalse", "itertools", "itertools", "ifilterfalse",
- "filterfalse"),
+ MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
+ MovedAttribute("intern", "__builtin__", "sys"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+ MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
+ MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
- MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
+ MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
+ MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
MovedAttribute("StringIO", "StringIO", "io"),
+ MovedAttribute("UserDict", "UserDict", "collections"),
+ MovedAttribute("UserList", "UserList", "collections"),
MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
- MovedAttribute(
- "zip_longest", "itertools", "itertools", "izip_longest",
- "zip_longest"),
+ MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
+ MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
+ MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
- MovedModule(
- "email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
+ MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
+ MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
@@ -177,14 +273,14 @@ class _MovedItems(types.ModuleType):
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
+ MovedModule("_thread", "thread", "_thread"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
- MovedModule(
- "tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
- MovedModule(
- "tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
+ MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
+ MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+ MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser",
@@ -194,30 +290,41 @@ class _MovedItems(types.ModuleType):
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
- MovedModule(
- "tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog"),
- MovedModule(
- "urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
- MovedModule(
- "urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
- MovedModule(
- "urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
+ MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
+ "tkinter.simpledialog"),
+ MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
+ MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
+ MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
- MovedModule("winreg", "_winreg"),
+ MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
+ MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
]
+# Add windows specific modules.
+if sys.platform == "win32":
+ _moved_attributes += [
+ MovedModule("winreg", "_winreg"),
+ ]
+
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
+ if isinstance(attr, MovedModule):
+ _importer._add_module(attr, "moves." + attr.name)
del attr
-moves = sys.modules[__name__ + ".moves"] = _MovedItems(__name__ + ".moves")
+_MovedItems._moved_attributes = _moved_attributes
+
+moves = _MovedItems(__name__ + ".moves")
+_importer._add_module(moves, "moves")
-class Module_six_moves_urllib_parse(types.ModuleType):
+class Module_six_moves_urllib_parse(_LazyModule):
+
"""Lazy loading of moved objects in six.moves.urllib_parse"""
_urllib_parse_moved_attributes = [
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
+ MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
@@ -231,18 +338,27 @@ class Module_six_moves_urllib_parse(types.ModuleType):
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
+ MovedAttribute("splitquery", "urllib", "urllib.parse"),
+ MovedAttribute("splittag", "urllib", "urllib.parse"),
+ MovedAttribute("splituser", "urllib", "urllib.parse"),
+ MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_params", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_query", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr
-sys.modules[__name__ + ".moves.urllib_parse"] = Module_six_moves_urllib_parse(
- __name__ + ".moves.urllib_parse")
-sys.modules[__name__ + ".moves.urllib.parse"] = Module_six_moves_urllib_parse(
- __name__ + ".moves.urllib.parse")
+Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
+ "moves.urllib_parse", "moves.urllib.parse")
+
-class Module_six_moves_urllib_error(types.ModuleType):
+class Module_six_moves_urllib_error(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_error"""
@@ -255,13 +371,14 @@ class Module_six_moves_urllib_error(types.ModuleType):
setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr
-sys.modules[__name__ + ".moves.urllib_error"] = Module_six_moves_urllib_error(
- __name__ + ".moves.urllib_error")
-sys.modules[__name__ + ".moves.urllib.error"] = Module_six_moves_urllib_error(
- __name__ + ".moves.urllib.error")
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
+_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
+ "moves.urllib_error", "moves.urllib.error")
+
+
-class Module_six_moves_urllib_request(types.ModuleType):
+class Module_six_moves_urllib_request(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_request"""
@@ -280,8 +397,7 @@ class Module_six_moves_urllib_request(types.ModuleType):
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
- MovedAttribute(
- "HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
@@ -299,18 +415,20 @@ class Module_six_moves_urllib_request(types.ModuleType):
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
+ MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr
-sys.modules[__name__ + ".moves.urllib_request"] = \
- Module_six_moves_urllib_request(__name__ + ".moves.urllib_request")
-sys.modules[__name__ + ".moves.urllib.request"] = \
- Module_six_moves_urllib_request(__name__ + ".moves.urllib.request")
+Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
+ "moves.urllib_request", "moves.urllib.request")
-class Module_six_moves_urllib_response(types.ModuleType):
+class Module_six_moves_urllib_response(_LazyModule):
+
"""Lazy loading of moved objects in six.moves.urllib_response"""
@@ -324,13 +442,14 @@ class Module_six_moves_urllib_response(types.ModuleType):
setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr
-sys.modules[__name__ + ".moves.urllib_response"] = \
- Module_six_moves_urllib_response(__name__ + ".moves.urllib_response")
-sys.modules[__name__ + ".moves.urllib.response"] = \
- Module_six_moves_urllib_response(__name__ + ".moves.urllib.response")
+Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
+ "moves.urllib_response", "moves.urllib.response")
-class Module_six_moves_urllib_robotparser(types.ModuleType):
+class Module_six_moves_urllib_robotparser(_LazyModule):
+
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
@@ -341,25 +460,27 @@ class Module_six_moves_urllib_robotparser(types.ModuleType):
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
-sys.modules[__name__ + ".moves.urllib_robotparser"] = \
- Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib_robotparser")
-sys.modules[__name__ + ".moves.urllib.robotparser"] = \
- Module_six_moves_urllib_robotparser(
- __name__ + ".moves.urllib.robotparser")
+Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
+ "moves.urllib_robotparser", "moves.urllib.robotparser")
class Module_six_moves_urllib(types.ModuleType):
- """Create a six.moves.urllib namespace that resembles the Python 3
- namespace"""
- parse = sys.modules[__name__ + ".moves.urllib_parse"]
- error = sys.modules[__name__ + ".moves.urllib_error"]
- request = sys.modules[__name__ + ".moves.urllib_request"]
- response = sys.modules[__name__ + ".moves.urllib_response"]
- robotparser = sys.modules[__name__ + ".moves.urllib_robotparser"]
+ """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
+ __path__ = [] # mark as package
+ parse = _importer._get_module("moves.urllib_parse")
+ error = _importer._get_module("moves.urllib_error")
+ request = _importer._get_module("moves.urllib_request")
+ response = _importer._get_module("moves.urllib_response")
+ robotparser = _importer._get_module("moves.urllib_robotparser")
+
+ def __dir__(self):
+ return ['parse', 'error', 'request', 'response', 'robotparser']
-sys.modules[__name__ + ".moves.urllib"] = Module_six_moves_urllib(
- __name__ + ".moves.urllib")
+_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
+ "moves.urllib")
def add_move(move):
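
# Usage sketch for the six.moves.urllib package assembled above (assuming
# the vendored copy is importable as gluon.contrib.pg8000.six): the same
# import line works on Python 2 and Python 3.
from gluon.contrib.pg8000.six.moves.urllib.parse import urlencode, urlparse

query = urlencode({"db": "postgres", "sslmode": "require"})
url = "postgresql://localhost:5432/web2py?" + query
print(urlparse(url).netloc)  # 'localhost:5432'
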
@@ -386,11 +507,6 @@ def remove_move(name):
_func_code = "__code__"
_func_defaults = "__defaults__"
_func_globals = "__globals__"
-
- _iterkeys = "keys"
- _itervalues = "values"
- _iteritems = "items"
- _iterlists = "lists"
else:
_meth_func = "im_func"
_meth_self = "im_self"
@@ -400,11 +516,6 @@ def remove_move(name):
_func_defaults = "func_defaults"
_func_globals = "func_globals"
- _iterkeys = "iterkeys"
- _itervalues = "itervalues"
- _iteritems = "iteritems"
- _iterlists = "iterlists"
-
try:
advance_iterator = next
@@ -427,6 +538,9 @@ def get_unbound_function(unbound):
create_bound_method = types.MethodType
+ def create_unbound_method(func, cls):
+ return func
+
Iterator = object
else:
def get_unbound_function(unbound):
@@ -435,6 +549,9 @@ def get_unbound_function(unbound):
def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)
+ def create_unbound_method(func, cls):
+ return types.MethodType(func, None, cls)
+
class Iterator(object):
def next(self):
@@ -453,24 +570,49 @@ def next(self):
get_function_globals = operator.attrgetter(_func_globals)
-def iterkeys(d, **kw):
- """Return an iterator over the keys of a dictionary."""
- return iter(getattr(d, _iterkeys)(**kw))
+if PY3:
+ def iterkeys(d, **kw):
+ return iter(d.keys(**kw))
+
+ def itervalues(d, **kw):
+ return iter(d.values(**kw))
+
+ def iteritems(d, **kw):
+ return iter(d.items(**kw))
+
+ def iterlists(d, **kw):
+ return iter(d.lists(**kw))
+ viewkeys = operator.methodcaller("keys")
-def itervalues(d, **kw):
- """Return an iterator over the values of a dictionary."""
- return iter(getattr(d, _itervalues)(**kw))
+ viewvalues = operator.methodcaller("values")
+ viewitems = operator.methodcaller("items")
+else:
+ def iterkeys(d, **kw):
+ return d.iterkeys(**kw)
+
+ def itervalues(d, **kw):
+ return d.itervalues(**kw)
+
+ def iteritems(d, **kw):
+ return d.iteritems(**kw)
-def iteritems(d, **kw):
- """Return an iterator over the (key, value) pairs of a dictionary."""
- return iter(getattr(d, _iteritems)(**kw))
+ def iterlists(d, **kw):
+ return d.iterlists(**kw)
+ viewkeys = operator.methodcaller("viewkeys")
-def iterlists(d, **kw):
- """Return an iterator over the (key, [values]) pairs of a dictionary."""
- return iter(getattr(d, _iterlists)(**kw))
+ viewvalues = operator.methodcaller("viewvalues")
+
+ viewitems = operator.methodcaller("viewitems")
+
+_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
+_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
+_add_doc(iteritems,
+ "Return an iterator over the (key, value) pairs of a dictionary.")
+_add_doc(iterlists,
+ "Return an iterator over the (key, [values]) pairs of a dictionary.")
if PY3:
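
# Usage sketch for the dict helpers reworked above (vendored import path
# assumed): iteritems()/viewkeys() dispatch to the right method name per
# interpreter, so callers never write d.iteritems() or d.items() directly.
from gluon.contrib.pg8000.six import iteritems, viewkeys

params = {"host": "localhost", "port": 5432}
for key, value in iteritems(params):
    print(key, value)
print("host" in viewkeys(params))  # True
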
@@ -480,24 +622,29 @@ def b(s):
def u(s):
return s
unichr = chr
- if sys.version_info[1] <= 1:
- def int2byte(i):
- return bytes((i,))
- else:
- # This is about 2x faster than the implementation above on 3.2+
- int2byte = operator.methodcaller("to_bytes", 1, "big")
+ import struct
+ int2byte = struct.Struct(">B").pack
+ del struct
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
+ _assertCountEqual = "assertCountEqual"
+ if sys.version_info[1] <= 1:
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+ else:
+ _assertRaisesRegex = "assertRaisesRegex"
+ _assertRegex = "assertRegex"
else:
def b(s):
return s
+ # Workaround for standalone backslash
def u(s):
- return unicode(s, "unicode_escape") # noqa
+ return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
unichr = unichr
int2byte = chr
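
# Quick sketch for the byte-level helpers touched in this part of the diff
# (vendored import path assumed): they return the same values whether bytes
# is str (Python 2) or a distinct type (Python 3).
from gluon.contrib.pg8000.six import int2byte, byte2int, iterbytes

assert int2byte(65) == b"A"
assert byte2int(b"ABC") == 65
assert list(iterbytes(b"AB")) == [65, 66]
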
@@ -506,27 +653,38 @@ def byte2int(bs):
def indexbytes(buf, i):
return ord(buf[i])
-
- def iterbytes(buf):
- return (ord(byte) for byte in buf)
+ iterbytes = functools.partial(itertools.imap, ord)
import StringIO
StringIO = BytesIO = StringIO.StringIO
+ _assertCountEqual = "assertItemsEqual"
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
+def assertCountEqual(self, *args, **kwargs):
+ return getattr(self, _assertCountEqual)(*args, **kwargs)
+
+
+def assertRaisesRegex(self, *args, **kwargs):
+ return getattr(self, _assertRaisesRegex)(*args, **kwargs)
+
+
+def assertRegex(self, *args, **kwargs):
+ return getattr(self, _assertRegex)(*args, **kwargs)
+
+
if PY3:
- import builtins
- exec_ = getattr(builtins, "exec")
+ exec_ = getattr(moves.builtins, "exec")
def reraise(tp, value, tb=None):
+ if value is None:
+ value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
- print_ = getattr(builtins, "print")
- del builtins
-
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
@@ -544,26 +702,52 @@ def exec_(_code_, _globs_=None, _locs_=None):
raise tp, value, tb
""")
+
+if sys.version_info[:2] == (3, 2):
+ exec_("""def raise_from(value, from_value):
+ if from_value is None:
+ raise value
+ raise value from from_value
+""")
+elif sys.version_info[:2] > (3, 2):
+ exec_("""def raise_from(value, from_value):
+ raise value from from_value
+""")
+else:
+ def raise_from(value, from_value):
+ raise value
+
+
+print_ = getattr(moves.builtins, "print", None)
+if print_ is None:
def print_(*args, **kwargs):
- """The new-style print function."""
+ """The new-style print function for Python 2.4 and 2.5."""
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
def write(data):
- if not isinstance(data, basestring): # noqa
+ if not isinstance(data, basestring):
data = str(data)
+ # If the file has an encoding, encode unicode with it.
+ if (isinstance(fp, file) and
+ isinstance(data, unicode) and
+ fp.encoding is not None):
+ errors = getattr(fp, "errors", None)
+ if errors is None:
+ errors = "strict"
+ data = data.encode(fp.encoding, errors)
fp.write(data)
want_unicode = False
sep = kwargs.pop("sep", None)
if sep is not None:
- if isinstance(sep, unicode): # noqa
+ if isinstance(sep, unicode):
want_unicode = True
elif not isinstance(sep, str):
raise TypeError("sep must be None or a string")
end = kwargs.pop("end", None)
if end is not None:
- if isinstance(end, unicode): # noqa
+ if isinstance(end, unicode):
want_unicode = True
elif not isinstance(end, str):
raise TypeError("end must be None or a string")
@@ -571,12 +755,12 @@ def write(data):
raise TypeError("invalid keyword arguments to print()")
if not want_unicode:
for arg in args:
- if isinstance(arg, unicode): # noqa
+ if isinstance(arg, unicode):
want_unicode = True
break
if want_unicode:
- newline = unicode("\n") # noqa
- space = unicode(" ") # noqa
+ newline = unicode("\n")
+ space = unicode(" ")
else:
newline = "\n"
space = " "
@@ -589,22 +773,96 @@ def write(data):
write(sep)
write(arg)
write(end)
+if sys.version_info[:2] < (3, 3):
+ _print = print_
+
+ def print_(*args, **kwargs):
+ fp = kwargs.get("file", sys.stdout)
+ flush = kwargs.pop("flush", False)
+ _print(*args, **kwargs)
+ if flush and fp is not None:
+ fp.flush()
_add_doc(reraise, """Reraise an exception.""")
+if sys.version_info[0:2] < (3, 4):
+ def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES):
+ def wrapper(f):
+ f = functools.wraps(wrapped, assigned, updated)(f)
+ f.__wrapped__ = wrapped
+ return f
+ return wrapper
+else:
+ wraps = functools.wraps
+
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
- return meta("NewBase", bases, {})
+ # This requires a bit of explanation: the basic idea is to make a dummy
+ # metaclass for one level of class instantiation that replaces itself with
+ # the actual metaclass.
+ class metaclass(meta):
+
+ def __new__(cls, name, this_bases, d):
+ return meta(name, bases, d)
+ return type.__new__(metaclass, 'temporary_class', (), {})
def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
+ slots = orig_vars.get('__slots__')
+ if slots is not None:
+ if isinstance(slots, str):
+ slots = [slots]
+ for slots_var in slots:
+ orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
- for slots_var in orig_vars.get('__slots__', ()):
- orig_vars.pop(slots_var)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
+
+
+def python_2_unicode_compatible(klass):
+ """
+ A decorator that defines __unicode__ and __str__ methods under Python 2.
+ Under Python 3 it does nothing.
+
+ To support Python 2 and 3 with a single code base, define a __str__ method
+ returning text and apply this decorator to the class.
+ """
+ if PY2:
+ if '__str__' not in klass.__dict__:
+ raise ValueError("@python_2_unicode_compatible cannot be applied "
+ "to %s because it doesn't define __str__()." %
+ klass.__name__)
+ klass.__unicode__ = klass.__str__
+ klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
+ return klass
+
+
+# Complete the moves implementation.
+# This code is at the end of this module to speed up module loading.
+# Turn this module into a package.
+__path__ = [] # required for PEP 302 and PEP 451
+__package__ = __name__ # see PEP 366 @ReservedAssignment
+if globals().get("__spec__") is not None:
+ __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
+# Remove other six meta path importers, since they cause problems. This can
+# happen if six is removed from sys.modules and then reloaded. (Setuptools does
+# this for some reason.)
+if sys.meta_path:
+ for i, importer in enumerate(sys.meta_path):
+ # Here's some real nastiness: Another "instance" of the six module might
+ # be floating around. Therefore, we can't use isinstance() to check for
+ # the six meta path importer, since the other six instance will have
+ # inserted an importer with different class.
+ if (type(importer).__name__ == "_SixMetaPathImporter" and
+ importer.name == __name__):
+ del sys.meta_path[i]
+ break
+ del i, importer
+# Finally, add the importer to the meta path import hook.
+sys.meta_path.append(_importer)
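
# Usage sketch for the metaclass helpers added above (RegistryMeta, Table
# and Field are hypothetical): with_metaclass() and add_metaclass() let a
# single code base declare a metaclass without the PY2 __metaclass__ /
# PY3 "metaclass=" syntax split.
from gluon.contrib.pg8000.six import with_metaclass, add_metaclass

class RegistryMeta(type):
    registry = {}

    def __new__(mcls, name, bases, attrs):
        cls = super(RegistryMeta, mcls).__new__(mcls, name, bases, attrs)
        RegistryMeta.registry[name] = cls
        return cls

class Table(with_metaclass(RegistryMeta, object)):
    pass

@add_metaclass(RegistryMeta)
class Field(object):
    pass

print(sorted(RegistryMeta.registry))  # ['Field', 'Table']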