<86>Jul 25 10:39:37 userdel[2088922]: delete user 'rooter'
<86>Jul 25 10:39:37 userdel[2088922]: removed group 'rooter' owned by 'rooter'
<86>Jul 25 10:39:37 groupadd[2088927]: group added to /etc/group: name=rooter, GID=699
<86>Jul 25 10:39:37 groupadd[2088927]: group added to /etc/gshadow: name=rooter
<86>Jul 25 10:39:37 groupadd[2088927]: new group: name=rooter, GID=699
<86>Jul 25 10:39:37 useradd[2088931]: new user: name=rooter, UID=699, GID=699, home=/root, shell=/bin/bash
<86>Jul 25 10:39:37 userdel[2088939]: delete user 'builder'
<86>Jul 25 10:39:37 userdel[2088939]: removed group 'builder' owned by 'builder'
<86>Jul 25 10:39:37 userdel[2088939]: removed shadow group 'builder' owned by 'builder'
<86>Jul 25 10:39:37 groupadd[2088944]: group added to /etc/group: name=builder, GID=700
<86>Jul 25 10:39:37 groupadd[2088944]: group added to /etc/gshadow: name=builder
<86>Jul 25 10:39:37 groupadd[2088944]: new group: name=builder, GID=700
<86>Jul 25 10:39:37 useradd[2088949]: new user: name=builder, UID=700, GID=700, home=/usr/src, shell=/bin/bash
<13>Jul 25 10:39:38 rpmi: libgdbm-1.8.3-alt10 1454943334 installed
<13>Jul 25 10:39:38 rpmi: libexpat-2.2.9-alt1 sisyphus+252464.200.2.1 1590958865 installed
<13>Jul 25 10:39:38 rpmi: libp11-kit-0.23.15-alt2 sisyphus+252784.100.2.2 1591274901 installed
<13>Jul 25 10:39:38 rpmi: libtasn1-4.16.0-alt1 sisyphus+245480.100.1.1 1580825062 installed
<13>Jul 25 10:39:38 rpmi: rpm-macros-alternatives-0.5.1-alt1 sisyphus+226946.100.1.1 1554830426 installed
<13>Jul 25 10:39:38 rpmi: alternatives-0.5.1-alt1 sisyphus+226946.100.1.1 1554830426 installed
<13>Jul 25 10:39:38 rpmi: ca-certificates-2020.06.29-alt1 sisyphus+254237.300.1.1 1593450881 installed
<13>Jul 25 10:39:38 rpmi: ca-trust-0.1.2-alt1 sisyphus+233348.100.1.1 1561653823 installed
<13>Jul 25 10:39:38 rpmi: p11-kit-trust-0.23.15-alt2 sisyphus+252784.100.2.2 1591274901 installed
<13>Jul 25 10:39:39 rpmi: libcrypto1.1-1.1.1g-alt1 sisyphus+249982.60.8.1 1587743711 installed
<13>Jul 25 10:39:39 rpmi: libssl1.1-1.1.1g-alt1 sisyphus+249982.60.8.1 1587743711 installed
<13>Jul 25 10:39:39 rpmi: python3-3.8.5-alt1 sisyphus+244405.100.3.1 1595544514 installed
<13>Jul 25 10:39:39 rpmi: python3-base-3.8.5-alt1 sisyphus+244405.100.3.1 1595544514 installed
<13>Jul 25 10:39:39 rpmi: libpython3-3.8.5-alt1 sisyphus+244405.100.3.1 1595544514 installed
<13>Jul 25 10:39:39 rpmi: tests-for-installed-python3-pkgs-0.1.13.1-alt2 1535450458 installed
<13>Jul 25 10:39:39 rpmi: rpm-build-python3-0.1.13.1-alt2 1535450458 installed
<13>Jul 25 10:39:39 rpmi: rpm-macros-sphinx-1:1.6.5-alt8 sisyphus+244988.100.1.1 1588163023 installed
<13>Jul 25 10:39:42 rpmi: python-base-2.7.17-alt3 sisyphus+240580.60.5.1 1573516905 installed
<13>Jul 25 10:39:42 rpmi: python-module-repoze-2.2-alt4.git20140327 sisyphus+228552.100.1.2 1556748368 installed
<13>Jul 25 10:39:42 rpmi: python-module-sphinxcontrib-2.1.1-alt2.1.2 sisyphus+227529.1100.1.2 1555924423 installed
<13>Jul 25 10:39:42 rpmi: libsqlite3-3.32.3-alt1 sisyphus+253798.100.1.1 1592756163 installed
<13>Jul 25 10:39:42 rpmi: libffi6-1:3.2.1-alt4 sisyphus+251953.300.2.1 1589891360 installed
<13>Jul 25 10:39:42 rpmi: python-module-imagesize-1.1.0-alt1 sisyphus+229015.100.2.1 1557233756 installed
<13>Jul 25 10:39:42 rpmi: python3-module-pkg_resources-1:46.1.3-alt1 sisyphus+250566.200.3.1 1587973342 installed
<13>Jul 25 10:39:42 rpmi: python-sphinx-objects.inv-1:2.3.10.20200721-alt1 sisyphus+255227.100.1.1 1595375747 installed
<13>Jul 25 10:39:42 rpmi: python-module-google-0.4.2-alt2.1 sisyphus+228356.1500.3.1 1556633057 installed
<13>Jul 25 10:39:42 rpmi: python-module-markupsafe-1.1.1-alt1 sisyphus+248369.100.1.1 1585046136 installed
<13>Jul 25 10:39:42 rpmi: python-module-enum34-1.1.6-alt3 1527251693 installed
<13>Jul 25 10:39:42 rpmi: libxapian-1.4.15-alt1 sisyphus+248304.20.4.1 1585071490 installed
<13>Jul 25 10:39:42 rpmi: libgpg-error-1.36-alt1 sisyphus+225621.300.1.1 1553521082 installed
<13>Jul 25 10:39:42 rpmi: libgcrypt20-1.8.5-alt3 sisyphus+239622.100.1.1 1571746654 installed
<13>Jul 25 10:39:42 rpmi: libxslt-1.1.34-alt2 sisyphus+248264.100.1.1 1584829770 installed
<13>Jul 25 10:39:42 rpmi: python-modules-curses-2.7.18-alt1 sisyphus+250345.100.4.1 1588665080 installed
<13>Jul 25 10:39:42 rpmi: libtinfo-devel-6.1.20180407-alt2 sisyphus+222164.200.1.1 1550686226 installed
<13>Jul 25 10:39:42 rpmi: libncurses-devel-6.1.20180407-alt2 sisyphus+222164.200.1.1 1550686226 installed
<13>Jul 25 10:39:42 rpmi: python3-dev-3.8.5-alt1 sisyphus+244405.100.3.1 1595544514 installed
<13>Jul 25 10:39:42 rpmi: libverto-0.3.0-alt1_7 sisyphus+225932.100.1.1 1553994919 installed
<13>Jul 25 10:39:42 rpmi: liblmdb-0.9.23-alt1 sisyphus+225277.100.2.1 1553001679 installed
<13>Jul 25 10:39:42 rpmi: libkeyutils-1.6-alt2 sisyphus+226520.100.2.1 1554512089 installed
<13>Jul 25 10:39:42 rpmi: libcom_err-1.44.6-alt1 sisyphus+224154.100.1.1 1552091678 installed
<86>Jul 25 10:39:42 groupadd[2094735]: group added to /etc/group: name=_keytab, GID=499
<86>Jul 25 10:39:42 groupadd[2094735]: group added to /etc/gshadow: name=_keytab
<86>Jul 25 10:39:42 groupadd[2094735]: new group: name=_keytab, GID=499
<13>Jul 25 10:39:43 rpmi: libkrb5-1.18.2-alt2 sisyphus+254565.100.4.1 1594375666 installed
<13>Jul 25 10:39:43 rpmi: libtirpc-1.2.6-alt1 sisyphus+250076.100.1.1 1587038270 installed
<13>Jul 25 10:39:43 rpmi: libnsl2-1.1.0-alt1_1 1511548749 installed
<13>Jul 25 10:39:43 rpmi: python-modules-compiler-2.7.18-alt1 sisyphus+250345.100.4.1 1588665080 installed
<13>Jul 25 10:39:43 rpmi: python-modules-email-2.7.18-alt1 sisyphus+250345.100.4.1 1588665080 installed
<13>Jul 25 10:39:43 rpmi: python-modules-unittest-2.7.18-alt1 sisyphus+250345.100.4.1 1588665080 installed
<13>Jul 25 10:39:43 rpmi: python-modules-encodings-2.7.18-alt1 sisyphus+250345.100.4.1 1588665080 installed
<13>Jul 25 10:39:43 rpmi: python-modules-2.7.18-alt1 sisyphus+250345.100.4.1 1588665080 installed
<13>Jul 25 10:39:43 rpmi: python-modules-nis-2.7.18-alt1 sisyphus+250345.100.4.1 1588665080 installed
<13>Jul 25 10:39:43 rpmi: python-module-six-1.14.0-alt1 sisyphus+251567.100.1.1 1589268039 installed
<13>Jul 25 10:39:43 rpmi: python-module-pkg_resources-1:42.0.0-alt2 sisyphus+250566.100.6.1 1588187716 installed
<13>Jul 25 10:39:43 rpmi: python-modules-json-2.7.18-alt1 sisyphus+250345.100.4.1 1588665080 installed
<13>Jul 25 10:39:43 rpmi: python-modules-xml-2.7.18-alt1 sisyphus+250345.100.4.1 1588665080 installed
<13>Jul 25 10:39:43 rpmi: python-modules-ctypes-2.7.18-alt1 sisyphus+250345.100.4.1 1588665080 installed
<13>Jul 25 10:39:43 rpmi: python-modules-multiprocessing-2.7.18-alt1 sisyphus+250345.100.4.1 1588665080 installed
<13>Jul 25 10:39:43 rpmi: python-modules-logging-2.7.18-alt1 sisyphus+250345.100.4.1 1588665080 installed
<13>Jul 25 10:39:43 rpmi: python-module-jinja2-2.11.2-alt1 sisyphus+254573.100.1.1 1594043344 installed
<13>Jul 25 10:39:43 rpmi: python-modules-hotshot-2.7.18-alt1 sisyphus+250345.100.4.1 1588665080 installed
<13>Jul 25 10:39:43 rpmi: python-module-chardet-3.0.4-alt1 sisyphus+227476.1700.1.2 1555756717 installed
<13>Jul 25 10:39:43 rpmi: python-tools-2to3-2.7.18-alt1 sisyphus+250345.100.4.1 1588665080 installed
<13>Jul 25 10:39:43 rpmi: python-module-whoosh-2.7.4-alt1 1527697941 installed
<13>Jul 25 10:39:43 rpmi: python-module-webencodings-0.5.1-alt2 sisyphus+245915.100.1.1 1581496070 installed
<13>Jul 25 10:39:43 rpmi: python-module-webob-1.8.6-alt1 sisyphus+248985.100.1.1 1585651975 installed
<13>Jul 25 10:39:43 rpmi: python-modules-bsddb-2.7.18-alt1 sisyphus+250345.100.4.1 1588665080 installed
<13>Jul 25 10:39:43 rpmi: python-2.7.18-alt1 sisyphus+250345.100.4.1 1588665080 installed
<13>Jul 25 10:39:43 rpmi: python-strict-2.7.18-alt1 sisyphus+250345.100.4.1 1588665039 installed
<13>Jul 25 10:39:43 rpmi: python-modules-distutils-2.7.18-alt1 sisyphus+250345.100.4.1 1588665080 installed
<13>Jul 25 10:39:43 rpmi: python-module-pycparser-2.19-alt2 sisyphus+245734.100.1.1 1581192421 installed
<13>Jul 25 10:39:43 rpmi: python-module-cffi-1.14.0-alt1 sisyphus+244999.200.3.1 1585219427 installed
<13>Jul 25 10:39:43 rpmi: python-module-PyStemmer-1.0.1-alt1.2.1 1321388303 installed
<13>Jul 25 10:39:43 rpmi: python-module-snowballstemmer-1.2.0-alt2.1 1457859319 installed
<13>Jul 25 10:39:43 rpmi: python-module-cssselect-0.9.1-alt2 sisyphus+250566.2300.6.1 1588188959 installed
<13>Jul 25 10:39:43 rpmi: python-module-html5lib-1:1.0.1-alt1 sisyphus+238807.100.2.1 1570465973 installed
<13>Jul 25 10:39:43 rpmi: python-module-lxml-4.5.0-alt2 sisyphus+250566.2700.6.1 1588189778 installed
<13>Jul 25 10:39:44 rpmi: python-module-docutils-0.14-alt3 sisyphus+239215.40.2.1 1576588058 installed
<13>Jul 25 10:39:44 rpmi: python-module-idna-2.10-alt1 sisyphus+255040.100.1.1 1594995857 installed
<13>Jul 25 10:39:44 rpmi: python-module-ntlm-1.1.0-alt1.2 sisyphus+228512.100.1.1 1556654575 installed
<13>Jul 25 10:39:44 rpmi: python-module-pytz-1:2020.1-alt1 sisyphus+254576.100.1.1 1594046074 installed
<13>Jul 25 10:39:44 rpmi: python-module-babel-1:2.6.0-alt1 sisyphus+228351.2400.6.2 1556652168 installed
<13>Jul 25 10:39:44 rpmi: python-module-ipaddress-1.0.18-alt2 sisyphus+245673.200.1.1 1581087895 installed
<13>Jul 25 10:39:44 rpmi: python-module-cryptography-3.0-alt1 sisyphus+255181.100.1.1 1595326578 installed
<13>Jul 25 10:39:44 rpmi: python-module-OpenSSL-19.0.0-alt1 sisyphus+238757.100.1.1 1570339901 installed
<13>Jul 25 10:39:44 rpmi: python-module-simplejson-3.15.0-alt3.qa1 sisyphus+245000.7400.78.1 1583199104 installed
<13>Jul 25 10:39:44 rpmi: python-module-ndg-0.4.2-alt1.qa1 sisyphus+227504.1300.1.2 1555853234 installed
<13>Jul 25 10:39:44 rpmi: python-module-ndg-httpsclient-0.4.2-alt1.qa1 sisyphus+227504.1300.1.2 1555853234 installed
<13>Jul 25 10:39:44 rpmi: python-module-backports-3.5.0.1-alt1.1.1 1517645428 installed
<13>Jul 25 10:39:44 rpmi: python-module-backports.ssl_match_hostname-3.5.0.1-alt1.1.1 1517645428 installed
<13>Jul 25 10:39:44 rpmi: python-module-urllib3-2:1.25.10-alt1 sisyphus+255342.100.1.1 1595592746 installed
<13>Jul 25 10:39:44 rpmi: python-module-requests-2.23.0-alt1 sisyphus+248062.400.2.1 1584568195 installed
<13>Jul 25 10:39:44 rpmi: python-module-typing-3.6.6-alt2 sisyphus+220108.100.1.1 1548749697 installed
<13>Jul 25 10:39:44 rpmi: python-modules-sqlite3-2.7.18-alt1 sisyphus+250345.100.4.1 1588665080 installed
<13>Jul 25 10:39:44 rpmi: python-module-SQLAlchemy-1.3.18-alt1 sisyphus+254609.100.1.1 1594120614 installed
<13>Jul 25 10:39:44 rpmi: python-module-repoze.lru-0.6-alt2.git20140202.2 sisyphus+228533.100.1.1 1556656200 installed
<13>Jul 25 10:39:44 rpmi: python-module-xapian-1.4.15-alt1 sisyphus+248304.200.4.1 1585071699 installed
<13>Jul 25 10:39:44 rpmi: python-module-sphinxcontrib-websupport-1.0.1-alt5 sisyphus+238085.40.2.1 1569316750 installed
<13>Jul 25 10:39:44 rpmi: python-module-alabaster-0.7.6-alt3 sisyphus+228351.700.4.1 1556637370 installed
<13>Jul 25 10:39:45 rpmi: python-module-Pygments-2.4.2-alt3 sisyphus+251222.100.1.1 1588756657 installed
<13>Jul 25 10:39:45 rpmi: python-module-sphinx-1:1.6.5-alt8 sisyphus+244988.100.1.1 1588163023 installed
<13>Jul 25 10:39:45 rpmi: libnsl2-devel-1.1.0-alt1_1 1511548749 installed
<13>Jul 25 10:39:45 rpmi: python-dev-2.7.18-alt1 sisyphus+250345.100.4.1 1588665080 installed
<13>Jul 25 10:39:45 rpmi: python-module-setuptools-1:42.0.0-alt2 sisyphus+250566.100.6.1 1588187716 installed
<13>Jul 25 10:39:45 rpmi: python-module-routes-2.4.1-alt1 sisyphus+227525.1400.1.2 1555924482 installed
<13>Jul 25 10:39:45 rpmi: python-module-nose-1:1.3.7-alt6.git20160316 sisyphus+250566.3300.7.1 1588252211 installed
<13>Jul 25 10:39:45 rpmi: python-module-pysqlite2-2.8.3-alt1 1526055403 installed
<13>Jul 25 10:39:45 rpmi: python3-module-setuptools-1:46.1.3-alt1 sisyphus+250566.200.3.1 1587973342 installed
Building target platforms: i586
Building for target i586
Wrote: /usr/src/in/nosrpm/python-module-webhelpers-1.3-alt2.1.1.1.nosrc.rpm
Installing python-module-webhelpers-1.3-alt2.1.1.1.src.rpm
Building target platforms: i586
Building for target i586
Executing(%prep): /bin/sh -e /usr/src/tmp/rpm-tmp.5645
+ umask 022
+ /bin/mkdir -p /usr/src/RPM/BUILD
+ cd /usr/src/RPM/BUILD
+ cd /usr/src/RPM/BUILD
+ rm -rf python-module-webhelpers-1.3
+ echo 'Source #0 (WebHelpers-1.3.tar.gz):'
Source #0 (WebHelpers-1.3.tar.gz):
+ /bin/gzip -dc /usr/src/RPM/SOURCES/WebHelpers-1.3.tar.gz
+ /bin/tar -xf -
+ cd python-module-webhelpers-1.3
+ /bin/chmod -c -Rf u+rwX,go-w .
+ cp -fR . ../python3
+ find ../python3 -type f -name '*.py' -exec 2to3 -w -n '{}' +
RefactoringTool: Skipping optional fixer: buffer
RefactoringTool: Skipping optional fixer: idioms
RefactoringTool: Skipping optional fixer: set_literal
RefactoringTool: Skipping optional fixer: ws_comma
RefactoringTool: Refactored ../python3/webhelpers/util.py
RefactoringTool: Refactored ../python3/webhelpers/textile.py
--- ../python3/webhelpers/util.py (original)
+++ ../python3/webhelpers/util.py (refactored)
@@ -7,8 +7,8 @@
import cgi
import copy
import sys
-import urllib
-import urlparse
+import urllib.request, urllib.parse, urllib.error
+import urllib.parse
try:
from UserDict import DictMixin
except ImportError:
@@ -16,7 +16,7 @@
from xml.sax.saxutils import XMLGenerator
try:
- from urlparse import parse_qs
+ from urllib.parse import parse_qs
except ImportError: # Python < 2.6
from cgi import parse_qs
@@ -56,20 +56,20 @@
'http://www.mau.de?foo=C&foo=D'
"""
- url, fragment = urlparse.urldefrag(_url)
+ url, fragment = urllib.parse.urldefrag(_url)
if "?" in url:
url, qs = url.split("?", 1)
query = parse_qs(qs)
else:
query = {}
- for key, value in params.iteritems():
+ for key, value in params.items():
if value is not None:
query[key] = value
elif key in query:
del query[key]
if _debug:
return url, query, fragment
- qs = urllib.urlencode(query, True)
+ qs = urllib.parse.urlencode(query, True)
if qs:
qs = "?" + qs
if fragment:
@@ -116,13 +116,13 @@
"""
if s is None:
return ''
- if not isinstance(s, basestring):
+ if not isinstance(s, str):
if hasattr(s, '__unicode__'):
- s = unicode(s)
+ s = str(s)
else:
s = str(s)
s = cgi_escape(s, True)
- if isinstance(s, unicode):
+ if isinstance(s, str):
s = s.encode('ascii', 'xmlcharrefreplace')
return s
@@ -147,7 +147,7 @@
# section 3.1 of RFC 3987.
if iri is None:
return iri
- return urllib.quote(iri, safe='/#%[]=:;$&()+,!?')
+ return urllib.parse.quote(iri, safe='/#%[]=:;$&()+,!?')
class Partial(object):
@@ -267,7 +267,7 @@
"""
unicode_mixed = {}
- for key, value in self.multi.mixed().iteritems():
+ for key, value in self.multi.mixed().items():
if isinstance(value, list):
value = [self._decode_value(value) for value in value]
else:
@@ -278,7 +278,7 @@
def dict_of_lists(self):
"""Return dict where each key is associated with a list of values."""
unicode_dict = {}
- for key, value in self.multi.dict_of_lists().iteritems():
+ for key, value in self.multi.dict_of_lists().items():
value = [self._decode_value(value) for value in value]
unicode_dict[self._decode_key(key)] = value
return unicode_dict
@@ -308,7 +308,7 @@
return (self._decode_key(k), self._decode_value(v))
def __repr__(self):
- items = ', '.join(['(%r, %r)' % v for v in self.items()])
+ items = ', '.join(['(%r, %r)' % v for v in list(self.items())])
return '%s([%s])' % (self.__class__.__name__, items)
def __len__(self):
@@ -319,25 +319,25 @@
##
def keys(self):
- return [self._decode_key(k) for k in self.multi.iterkeys()]
+ return [self._decode_key(k) for k in self.multi.keys()]
def iterkeys(self):
- for k in self.multi.iterkeys():
+ for k in self.multi.keys():
yield self._decode_key(k)
__iter__ = iterkeys
def items(self):
return [(self._decode_key(k), self._decode_value(v)) for \
- k, v in self.multi.iteritems()]
+ k, v in self.multi.items()]
def iteritems(self):
- for k, v in self.multi.iteritems():
+ for k, v in self.multi.items():
yield (self._decode_key(k), self._decode_value(v))
def values(self):
- return [self._decode_value(v) for v in self.multi.itervalues()]
+ return [self._decode_value(v) for v in self.multi.values()]
def itervalues(self):
- for v in self.multi.itervalues():
+ for v in self.multi.values():
yield self._decode_value(v)
--- ../python3/webhelpers/textile.py (original)
+++ ../python3/webhelpers/textile.py (refactored)
RefactoringTool: Refactored ../python3/webhelpers/text.py
@@ -193,7 +193,7 @@
try:
#from twisted.python import htmlizer
import htmlizer
- from StringIO import StringIO
+ from io import StringIO
def _color(code):
"""Colorizer Python code.
@@ -264,7 +264,7 @@
This function outputs debug information if DEBUGLEVEL is higher than a given treshold.
"""
- if DEBUGLEVEL >= level: print >> sys.stderr, s
+ if DEBUGLEVEL >= level: print(s, file=sys.stderr)
#############################
@@ -516,7 +516,7 @@
def normalize_attrs(self, attrs):
# utility method to be called by descendants
- attrs = [(k.lower(), sgmllib.charref.sub(lambda m: unichr(int(m.groups()[0])), v).strip()) for k, v in attrs]
+ attrs = [(k.lower(), sgmllib.charref.sub(lambda m: chr(int(m.groups()[0])), v).strip()) for k, v in attrs]
attrs = [(k, k in ('rel', 'type') and v.lower() or v) for k, v in attrs]
return attrs
@@ -851,7 +851,7 @@
# Convert to desired output.
if isinstance(text, str):
- text = unicode(text, encoding)
+ text = str(text, encoding)
text = text.encode(output, 'xmlcharrefreplace')
# Sanitize?
@@ -984,19 +984,19 @@
if extending and not captures.get('dot', None):
output[-1][1]['text'] += block
break
- elif captures.has_key('dot'):
+ elif 'dot' in captures:
del captures['dot']
# If a signature matches, we are not extending a block.
extending = 0
# Check if we should extend this block.
- if captures.has_key('extend'):
+ if 'extend' in captures:
extending = captures['extend']
del captures['extend']
# Apply head_offset.
- if captures.has_key('header'):
+ if 'header' in captures:
captures['header'] = int(captures['header']) + self.head_offset
# Apply clear.
@@ -1165,7 +1165,7 @@
output['style'] = output.get('style', '') + ''.join(style)
# Remove excess whitespace.
- if output.has_key('class'):
+ if 'class' in output:
output['class'] = output['class'].strip()
return output
@@ -1180,7 +1180,7 @@
"""
# Open tag.
open_tag = ['<%s' % tag]
- for k,v in attributes.items():
+ for k,v in list(attributes.items()):
# The ALT attribute can be empty.
if k == 'alt' or v:
open_tag.append(' %s="%s"' % (k, v))
@@ -1237,7 +1237,7 @@
close_tag = '
'
# Pop the id because it must be unique.
- if attributes.has_key('id'): del attributes['id']
# Break lines.
line = preg_replace(r'( can't have the attribute lang.
- if attributes.has_key('lang'):
+ if 'lang' in attributes:
lang = attributes['lang']
del attributes['lang']
else:
@@ -2804,7 +2804,7 @@
query = query.replace(' ', '+')
# Look for smart search.
- if self.searches.has_key(proto):
+ if proto in self.searches:
link = self.searches[proto] % query
# Fix URL.
@@ -2879,4 +2879,4 @@
if __name__ == '__main__':
- print textile('tell me about textile.', head_offset=1)
+ print(textile('tell me about textile.', head_offset=1))
--- ../python3/webhelpers/text.py (original)
+++ ../python3/webhelpers/text.py (refactored)
RefactoringTool: No changes to ../python3/webhelpers/pylonslib/secure_form.py
RefactoringTool: Refactored ../python3/webhelpers/pylonslib/minify.py
RefactoringTool: No changes to ../python3/webhelpers/pylonslib/grid.py
RefactoringTool: No changes to ../python3/webhelpers/pylonslib/flash.py
RefactoringTool: No changes to ../python3/webhelpers/pylonslib/_jsmin.py
RefactoringTool: No changes to ../python3/webhelpers/pylonslib/__init__.py
RefactoringTool: Refactored ../python3/webhelpers/markdown.py
@@ -6,7 +6,7 @@
import re
import textwrap
-import urllib
+import urllib.request, urllib.parse, urllib.error
from webhelpers.html.tools import strip_tags
@@ -275,7 +275,7 @@
s = remove_formatting(string).lower()
s = replace_whitespace(s, '-')
s = collapse(s, '-')
- return urllib.quote(s)
+ return urllib.parse.quote(s)
def remove_formatting(string):
@@ -356,7 +356,7 @@
"(#190|frac34)": "three fourths",
"(#176|deg)": " degrees"
}
- for textiled, normal in replace_dict.items():
+ for textiled, normal in list(replace_dict.items()):
string = re.sub(r'\&%s;' % textiled, normal, string)
return re.sub(r'\&[^;]+;', '', string)
--- ../python3/webhelpers/pylonslib/minify.py (original)
+++ ../python3/webhelpers/pylonslib/minify.py (refactored)
@@ -29,7 +29,7 @@
import re
import os
import logging
-import StringIO
+import io
import warnings
from webhelpers.html.tags import javascript_link as __javascript_link
@@ -63,7 +63,7 @@
return sources
names = list()
- js_buffer = StringIO.StringIO()
+ js_buffer = io.StringIO()
base = os.path.commonprefix([os.path.dirname(s) for s in sources])
for source in sources:
@@ -175,7 +175,7 @@
def do_css_CSSStyleDeclaration(self, style, separator=None):
try:
color = style.getPropertyValue('color')
- if color and color is not u'':
+ if color and color is not '':
color = self.change_colors(color)
style.setProperty('color', color)
except:
--- ../python3/webhelpers/markdown.py (original)
+++ ../python3/webhelpers/markdown.py (refactored)
@@ -62,9 +62,9 @@
SMART_EMPHASIS = 1 # this_or_that does not become thisorthat
HTML_REMOVED_TEXT = "[HTML_REMOVED]" # text used instead of HTML in safe mode
-RTL_BIDI_RANGES = ( (u'\u0590', u'\u07FF'),
+RTL_BIDI_RANGES = ( ('\u0590', '\u07FF'),
# from Hebrew to Nko (includes Arabic, Syriac and Thaana)
- (u'\u2D30', u'\u2D7F'),
+ ('\u2D30', '\u2D7F'),
# Tifinagh
)
@@ -82,7 +82,7 @@
}
def removeBOM(text, encoding):
- convert = isinstance(text, unicode)
+ convert = isinstance(text, str)
for bom in BOMS[encoding]:
bom = convert and bom.decode(encoding) or bom
if text.startswith(bom):
@@ -141,7 +141,7 @@
ch = text[0]
- if not isinstance(ch, unicode) or not ch.isalpha():
+ if not isinstance(ch, str) or not ch.isalpha():
return None
else:
@@ -325,7 +325,7 @@
if self.nodeName in ['p', 'li', 'ul', 'ol',
'h1', 'h2', 'h3', 'h4', 'h5', 'h6']:
- if not self.attribute_values.has_key("dir"):
+ if "dir" not in self.attribute_values:
if self.bidi:
bidi = self.bidi
else:
@@ -832,7 +832,7 @@
# we'll use "google" as the id
id = m.group(2).lower()
- if not self.references.has_key(id): # ignore undefined refs
+ if id not in self.references: # ignore undefined refs
return None
href, title = self.references[id]
text = m.group(2)
@@ -1136,7 +1136,7 @@
def __init__ (self):
self.regExp = {}
- for key in self.patterns.keys():
+ for key in list(self.patterns.keys()):
self.regExp[key] = re.compile("^%s$" % self.patterns[key],
re.DOTALL)
@@ -1237,7 +1237,7 @@
% (ext, extension_module_name) )
else:
- if configs.has_key(ext):
+ if ext in configs:
configs_for_ext = configs[ext]
else:
configs_for_ext = []
@@ -1621,7 +1621,7 @@
x = parts[i]
- if isinstance(x, (str, unicode)):
+ if isinstance(x, str):
RefactoringTool: Refactored ../python3/webhelpers/html/tools.py
RefactoringTool: Refactored ../python3/webhelpers/html/tags.py
result = self._applyPattern(x, \
self.inlinePatterns[patternIndex], \
patternIndex)
@@ -1637,7 +1637,7 @@
for i in range(len(parts)):
x = parts[i]
- if isinstance(x, (str, unicode)):
+ if isinstance(x, str):
parts[i] = self.doc.createTextNode(x)
return parts
@@ -1693,7 +1693,7 @@
for item in result:
- if isinstance(item, (str, unicode)):
+ if isinstance(item, str):
if len(item) > 0:
node.insertChild(position,
self.doc.createTextNode(item))
@@ -1721,13 +1721,13 @@
self.source = source
if not self.source:
- return u""
+ return ""
try:
- self.source = unicode(self.source)
+ self.source = str(self.source)
except UnicodeDecodeError:
message(CRITICAL, 'UnicodeDecodeError: Markdown only accepts unicode or ascii input.')
- return u""
+ return ""
for pp in self.textPreprocessors:
self.source = pp.run(self.source)
@@ -1828,13 +1828,13 @@
self.config = configs
def getConfig(self, key):
- if self.config.has_key(key):
+ if key in self.config:
return self.config[key][0]
else:
return ""
def getConfigInfo(self):
- return [(key, self.config[key][1]) for key in self.config.keys()]
+ return [(key, self.config[key][1]) for key in list(self.config.keys())]
def setConfig(self, key, value):
self.config[key][0] = value
@@ -1862,7 +1862,7 @@
'encoding': None }
else:
- print OPTPARSE_WARNING
+ print(OPTPARSE_WARNING)
return None
parser = optparse.OptionParser(usage="%prog INPUTFILE [options]")
--- ../python3/webhelpers/html/tools.py (original)
+++ ../python3/webhelpers/html/tools.py (refactored)
@@ -5,7 +5,7 @@
"""
import re
-import urllib
+import urllib.request, urllib.parse, urllib.error
import warnings
from webhelpers.html import HTML, literal, lit_sub, escape
@@ -206,7 +206,7 @@
if not isinstance(option, literal):
item = (item[0], escape(option))
extras.append(item)
- options_query = urllib.urlencode(extras).replace("+", "%20")
+ options_query = urllib.parse.urlencode(extras).replace("+", "%20")
protocol = 'mailto:'
email_address_obfuscated = email_address
@@ -300,7 +300,7 @@
flags = re.IGNORECASE
if highlighter:
return _legacy_highlight(text, phrase, highlighter, flags)
- if isinstance(phrase, basestring):
+ if isinstance(phrase, str):
pat = re.escape(phrase)
rx = re.compile(pat, flags)
elif isinstance(phrase, (list, tuple)):
@@ -342,7 +342,7 @@
"""
if not text:
- return literal(u"")
+ return literal("")
text = escape(text)
if link == "all":
return _auto_link_urls(_auto_link_email_addresses(text), **href_attrs)
--- ../python3/webhelpers/html/tags.py (original)
+++ ../python3/webhelpers/html/tags.py (refactored)
@@ -16,8 +16,8 @@
import logging
import os
import re
-import urllib
-import urlparse
+import urllib.request, urllib.parse, urllib.error
+import urllib.parse
from webhelpers import containers
from webhelpers.html import escape, HTML, literal, url_escape
@@ -109,7 +109,7 @@
fields.append(field)
if hidden_fields is not None:
try:
- it = hidden_fields.items()
+ it = list(hidden_fields.items())
except AttributeError:
it = hidden_fields
for name, value in it:
@@ -368,10 +368,10 @@
if selected_values is None:
selected_values = ('',)
RefactoringTool: Refactored ../python3/webhelpers/html/render.py
RefactoringTool: Refactored ../python3/webhelpers/html/grid_demo.py
# Turn a single string or integer into a list
- elif isinstance(selected_values, (basestring, int, long)):
+ elif isinstance(selected_values, (str, int)):
selected_values = (selected_values,)
# Cast integer values to strings
- selected_values = map(unicode, selected_values)
+ selected_values = list(map(str, selected_values))
# Prepend the prompt
prompt = attrs.pop("prompt", None)
if prompt:
@@ -687,10 +687,10 @@
continue
else:
value = label = opt
- if not isinstance(value, unicode):
- value = unicode(value)
- if not isinstance(label, unicode): # Preserves literal.
- label = unicode(label)
+ if not isinstance(value, str):
+ value = str(value)
+ if not isinstance(label, str): # Preserves literal.
+ label = str(label)
opt = Option(value, label)
opts.append(opt)
return super(Options, class_).__new__(class_, opts)
@@ -1239,9 +1239,9 @@
"""
for a in bool_attrs:
- if attrs.has_key(a) and attrs[a]:
+ if a in attrs and attrs[a]:
attrs[a] = a
- elif attrs.has_key(a):
+ elif a in attrs:
del attrs[a]
def _set_input_attrs(attrs, type, name, value):
--- ../python3/webhelpers/html/render.py (original)
+++ ../python3/webhelpers/html/render.py (refactored)
@@ -39,8 +39,8 @@
"""An HTML-to-text formatter and HTML sanitizer.
"""
-from HTMLParser import HTMLParser
-import htmlentitydefs
+from html.parser import HTMLParser
+import html.entities
import re
import textwrap
@@ -189,12 +189,12 @@
def handle_entityref(self, name):
name = name.lower()
- if name not in htmlentitydefs.entitydefs:
+ if name not in html.entities.entitydefs:
# bad entity, just let it through
# (like a &var=value in a URL)
self.handle_data('&'+name)
return
- result = htmlentitydefs.entitydefs[name]
+ result = html.entities.entitydefs[name]
if result.startswith('&'):
self.handle_charref(result[2:-1])
else:
@@ -202,7 +202,7 @@
def handle_charref(self, name):
try:
- self.handle_data(unichr(int(name)))
+ self.handle_data(chr(int(name)))
except ValueError:
self.handle_data('&' + name)
@@ -315,7 +315,7 @@
return value.lower()
return self._default_align
- def __nonzero__(self):
+ def __bool__(self):
for t in self.text:
if t:
return True
@@ -335,7 +335,7 @@
def add_cell(self, value):
self.rows[-1].append(value)
- def __nonzero__(self):
+ def __bool__(self):
return not not self.rows
def to_text(self, context):
@@ -380,7 +380,7 @@
def normalize(text):
text = re.sub(r'\s+', ' ', text)
# nbsp:
- if not isinstance(text, unicode):
+ if not isinstance(text, str):
text = text.replace('\xa0', ' ')
return text
@@ -392,6 +392,6 @@
prog = os.path.basename(sys.argv[0])
sys.exit("usage: %s >> format_attrs(p=None)
literal(u'')
"""
- strings = [u' %s="%s"' % (_attr_decode(attr), escape(value))
- for attr, value in sorted(attrs.iteritems())
+ strings = [' %s="%s"' % (_attr_decode(attr), escape(value))
+ for attr, value in sorted(attrs.items())
if value is not None]
return literal("".join(strings))
@@ -357,8 +357,8 @@
HTML = HTMLBuilder()
# Constants depending on ``literal()`` and/or ``HTML``.
-NL = literal(u"\n")
-EMPTY = literal(u"")
+NL = literal("\n")
+EMPTY = literal("")
BR = HTML.br(_nl=True)
-_CDATA_START = literal(u"")
+_CDATA_START = literal("")
--- ../python3/webhelpers/feedgenerator.py (original)
+++ ../python3/webhelpers/feedgenerator.py (refactored)
@@ -118,7 +118,7 @@
if date is not None:
tag = re.sub('/', ',%s:/' % date.strftime('%Y-%m-%d'), tag, 1)
tag = re.sub('#', '/', tag)
- return u'tag:' + tag
+ return 'tag:' + tag
class SyndicationFeed(object):
"Base class for all syndication feeds. Subclasses should provide write()"
@@ -212,7 +212,7 @@
"""
Returns the feed in the given encoding as a string.
"""
- from StringIO import StringIO
+ from io import StringIO
s = StringIO()
self.write(s, encoding)
return s.getvalue()
@@ -238,178 +238,178 @@
class RssFeed(SyndicationFeed):
mime_type = 'application/rss+xml'
- _version = u"?"
+ _version = "?"
def write(self, outfile, encoding):
handler = SimplerXMLGenerator(outfile, encoding)
handler.startDocument()
- handler.startElement(u"rss", self.rss_attributes())
- handler.startElement(u"channel", self.root_attributes())
+ handler.startElement("rss", self.rss_attributes())
+ handler.startElement("channel", self.root_attributes())
self.add_root_elements(handler)
self.write_items(handler)
self.endChannelElement(handler)
- handler.endElement(u"rss")
+ handler.endElement("rss")
def rss_attributes(self):
- return {u"version": self._version}
+ return {"version": self._version}
def write_items(self, handler):
for item in self.items:
- handler.startElement(u'item', self.item_attributes(item))
+ handler.startElement('item', self.item_attributes(item))
self.add_item_elements(handler, item)
- handler.endElement(u"item")
+ handler.endElement("item")
def add_root_elements(self, handler):
- handler.addQuickElement(u"title", self.feed['title'])
- handler.addQuickElement(u"link", self.feed['link'])
- handler.addQuickElement(u"description", self.feed['description'])
+ handler.addQuickElement("title", self.feed['title'])
+ handler.addQuickElement("link", self.feed['link'])
+ handler.addQuickElement("description", self.feed['description'])
if self.feed['language'] is not None:
- handler.addQuickElement(u"language", self.feed['language'])
+ handler.addQuickElement("language", self.feed['language'])
for cat in self.feed['categories']:
- handler.addQuickElement(u"category", cat)
+ handler.addQuickElement("category", cat)
if self.feed['feed_copyright'] is not None:
- handler.addQuickElement(u"copyright", self.feed['feed_copyright'])
- handler.addQuickElement(u"lastBuildDate", rfc2822_date(self.latest_post_date()).decode('utf-8'))
+ handler.addQuickElement("copyright", self.feed['feed_copyright'])
+ handler.addQuickElement("lastBuildDate", rfc2822_date(self.latest_post_date()).decode('utf-8'))
if self.feed['ttl'] is not None:
- handler.addQuickElement(u"ttl", self.feed['ttl'])
+ handler.addQuickElement("ttl", self.feed['ttl'])
def endChannelElement(self, handler):
- handler.endElement(u"channel")
+ handler.endElement("channel")
class RssUserland091Feed(RssFeed):
- _version = u"0.91"
+ _version = "0.91"
def add_item_elements(self, handler, item):
- handler.addQuickElement(u"title", item['title'])
- handler.addQuickElement(u"link", item['link'])
+ handler.addQuickElement("title", item['title'])
+ handler.addQuickElement("link", item['link'])
if item['description'] is not None:
- handler.addQuickElement(u"description", item['description'])
+ handler.addQuickElement("description", item['description'])
class Rss201rev2Feed(RssFeed):
# Spec: http://blogs.law.harvard.edu/tech/rss
- _version = u"2.0"
+ _version = "2.0"
def add_item_elements(self, handler, item):
- handler.addQuickElement(u"title", item['title'])
- handler.addQuickElement(u"link", item['link'])
+ handler.addQuickElement("title", item['title'])
+ handler.addQuickElement("link", item['link'])
if item['description'] is not None:
- handler.addQuickElement(u"description", item['description'])
+ handler.addQuickElement("description", item['description'])
# Author information.
if item["author_name"] and item["author_email"]:
- handler.addQuickElement(u"author", "%s (%s)" % \
+ handler.addQuickElement("author", "%s (%s)" % \
(item['author_email'], item['author_name']))
elif item["author_email"]:
- handler.addQuickElement(u"author", item["author_email"])
+ handler.addQuickElement("author", item["author_email"])
elif item["author_name"]:
- handler.addQuickElement(u"dc:creator", item["author_name"], {"xmlns:dc": u"http://purl.org/dc/elements/1.1/"})
+ handler.addQuickElement("dc:creator", item["author_name"], {"xmlns:dc": "http://purl.org/dc/elements/1.1/"})
if item['pubdate'] is not None:
- handler.addQuickElement(u"pubDate", rfc2822_date(item['pubdate']).decode('utf-8'))
+ handler.addQuickElement("pubDate", rfc2822_date(item['pubdate']).decode('utf-8'))
if item['comments'] is not None:
- handler.addQuickElement(u"comments", item['comments'])
+ handler.addQuickElement("comments", item['comments'])
if item['unique_id'] is not None:
- handler.addQuickElement(u"guid", item['unique_id'])
+ handler.addQuickElement("guid", item['unique_id'])
if item['ttl'] is not None:
- handler.addQuickElement(u"ttl", item['ttl'])
+ handler.addQuickElement("ttl", item['ttl'])
# Enclosure.
if item['enclosure'] is not None:
- handler.addQuickElement(u"enclosure", '',
- {u"url": item['enclosure'].url, u"length": item['enclosure'].length,
- u"type": item['enclosure'].mime_type})
+ handler.addQuickElement("enclosure", '',
+ {"url": item['enclosure'].url, "length": item['enclosure'].length,
+ "type": item['enclosure'].mime_type})
# Categories.
for cat in item['categories']:
- handler.addQuickElement(u"category", cat)
+ handler.addQuickElement("category", cat)
class Atom1Feed(SyndicationFeed):
# Spec: http://atompub.org/2005/07/11/draft-ietf-atompub-format-10.html
mime_type = 'application/atom+xml'
- ns = u"http://www.w3.org/2005/Atom"
+ ns = "http://www.w3.org/2005/Atom"
def write(self, outfile, encoding):
handler = SimplerXMLGenerator(outfile, encoding)
handler.startDocument()
- handler.startElement(u'feed', self.root_attributes())
+ handler.startElement('feed', self.root_attributes())
self.add_root_elements(handler)
self.write_items(handler)
- handler.endElement(u"feed")
+ handler.endElement("feed")
def root_attributes(self):
if self.feed['language'] is not None:
- return {u"xmlns": self.ns, u"xml:lang": self.feed['language']}
+ return {"xmlns": self.ns, "xml:lang": self.feed['language']}
else:
- return {u"xmlns": self.ns}
+ return {"xmlns": self.ns}
def add_root_elements(self, handler):
- handler.addQuickElement(u"title", self.feed['title'])
- handler.addQuickElement(u"link", "", {u"rel": u"alternate", u"href": self.feed['link']})
+ handler.addQuickElement("title", self.feed['title'])
+ handler.addQuickElement("link", "", {"rel": "alternate", "href": self.feed['link']})
if self.feed['feed_url'] is not None:
- handler.addQuickElement(u"link", "", {u"rel": u"self", u"href": self.feed['feed_url']})
- handler.addQuickElement(u"id", self.feed['id'])
- handler.addQuickElement(u"updated", rfc3339_date(self.latest_post_date()).decode('utf-8'))
+ handler.addQuickElement("link", "", {"rel": "self", "href": self.feed['feed_url']})
+ handler.addQuickElement("id", self.feed['id'])
+ handler.addQuickElement("updated", rfc3339_date(self.latest_post_date()).decode('utf-8'))
if self.feed['author_name'] is not None:
- handler.startElement(u"author", {})
- handler.addQuickElement(u"name", self.feed['author_name'])
+ handler.startElement("author", {})
+ handler.addQuickElement("name", self.feed['author_name'])
if self.feed['author_email'] is not None:
- handler.addQuickElement(u"email", self.feed['author_email'])
+ handler.addQuickElement("email", self.feed['author_email'])
if self.feed['author_link'] is not None:
- handler.addQuickElement(u"uri", self.feed['author_link'])
- handler.endElement(u"author")
+ handler.addQuickElement("uri", self.feed['author_link'])
+ handler.endElement("author")
if self.feed['subtitle'] is not None:
- handler.addQuickElement(u"subtitle", self.feed['subtitle'])
+ handler.addQuickElement("subtitle", self.feed['subtitle'])
for cat in self.feed['categories']:
- handler.addQuickElement(u"category", "", {u"term": cat})
+ handler.addQuickElement("category", "", {"term": cat})
if self.feed['feed_copyright'] is not None:
- handler.addQuickElement(u"rights", self.feed['feed_copyright'])
+ handler.addQuickElement("rights", self.feed['feed_copyright'])
def write_items(self, handler):
for item in self.items:
- handler.startElement(u"entry", self.item_attributes(item))
+ handler.startElement("entry", self.item_attributes(item))
self.add_item_elements(handler, item)
- handler.endElement(u"entry")
+ handler.endElement("entry")
def add_item_elements(self, handler, item):
- handler.addQuickElement(u"title", item['title'])
- handler.addQuickElement(u"link", u"", {u"href": item['link'], u"rel": u"alternate"})
+ handler.addQuickElement("title", item['title'])
+ handler.addQuickElement("link", "", {"href": item['link'], "rel": "alternate"})
if item['pubdate'] is not None:
- handler.addQuickElement(u"updated", rfc3339_date(item['pubdate']).decode('utf-8'))
- handler.addQuickElement(u"published", rfc3339_date(item['pubdate']).decode('utf-8'))
+ handler.addQuickElement("updated", rfc3339_date(item['pubdate']).decode('utf-8'))
+ handler.addQuickElement("published", rfc3339_date(item['pubdate']).decode('utf-8'))
# Author information.
if item['author_name'] is not None:
- handler.startElement(u"author", {})
- handler.addQuickElement(u"name", item['author_name'])
+ handler.startElement("author", {})
+ handler.addQuickElement("name", item['author_name'])
if item['author_email'] is not None:
- handler.addQuickElement(u"email", item['author_email'])
+ handler.addQuickElement("email", item['author_email'])
if item['author_link'] is not None:
- handler.addQuickElement(u"uri", item['author_link'])
- handler.endElement(u"author")
+ handler.addQuickElement("uri", item['author_link'])
+ handler.endElement("author")
# Unique ID.
if item['unique_id'] is not None:
unique_id = item['unique_id']
else:
unique_id = get_tag_uri(item['link'], item['pubdate'])
- handler.addQuickElement(u"id", unique_id)
+ handler.addQuickElement("id", unique_id)
# Summary.
if item['description'] is not None:
- handler.addQuickElement(u"summary", item['description'], {u"type": u"html"})
+ handler.addQuickElement("summary", item['description'], {"type": "html"})
# Enclosure.
if item['enclosure'] is not None:
- handler.addQuickElement(u"link", '',
- {u"rel": u"enclosure",
- u"href": item['enclosure'].url,
- u"length": item['enclosure'].length,
- u"type": item['enclosure'].mime_type})
+ handler.addQuickElement("link", '',
+ {"rel": "enclosure",
+ "href": item['enclosure'].url,
+ "length": item['enclosure'].length,
+ "type": item['enclosure'].mime_type})
# Categories.
for cat in item['categories']:
- handler.addQuickElement(u"category", u"", {u"term": cat})
+ handler.addQuickElement("category", "", {"term": cat})
# Rights.
if item['item_copyright'] is not None:
- handler.addQuickElement(u"rights", item['item_copyright'])
+ handler.addQuickElement("rights", item['item_copyright'])
# This isolates the decision of what the system default is, so calling code can
# do "feedgenerator.DefaultFeed" instead of "feedgenerator.Rss201rev2Feed".
@@ -465,9 +465,9 @@
a unicode GeoRSS representation.
"""
if self.is_input_latitude_first:
- return u' '.join([u'%f %f' % x for x in coords])
+ return ' '.join(['%f %f' % x for x in coords])
else:
- return u' '.join([u'%f %f' % (x[1], x[0]) for x in coords])
+ return ' '.join(['%f %f' % (x[1], x[0]) for x in coords])
RefactoringTool: No changes to ../python3/webhelpers/date.py
RefactoringTool: Refactored ../python3/webhelpers/containers.py
def add_georss_point(self, handler, coords, w3c_geo=False):
"""
@@ -480,10 +480,10 @@
lat, lon = coords[:2]
else:
lon, lat = coords[:2]
- handler.addQuickElement(u'geo:lat', u'%f' % lat)
- handler.addQuickElement(u'geo:lon', u'%f' % lon)
+ handler.addQuickElement('geo:lat', '%f' % lat)
+ handler.addQuickElement('geo:lon', '%f' % lon)
else:
- handler.addQuickElement(u'georss:point', self.georss_coords((coords,)))
+ handler.addQuickElement('georss:point', self.georss_coords((coords,)))
def add_georss_element(self, handler, item, w3c_geo=False):
"""
@@ -514,7 +514,7 @@
# If a GeoRSS box was given via tuple.
if not box_coords is None:
if w3c_geo: raise ValueError('Cannot use simple GeoRSS box in W3C Geo feeds.')
- handler.addQuickElement(u'georss:box', self.georss_coords(box_coords))
+ handler.addQuickElement('georss:box', self.georss_coords(box_coords))
else:
# Getting the lower-case geometry type.
gtype = str(geom.geom_type).lower()
@@ -525,10 +525,10 @@
# For formatting consistent w/the GeoRSS simple standard:
# http://georss.org/1.0#simple
if gtype in ('linestring', 'linearring'):
- handler.addQuickElement(u'georss:line', self.georss_coords(geom.coords))
+ handler.addQuickElement('georss:line', self.georss_coords(geom.coords))
elif gtype in ('polygon',):
# Only support the exterior ring.
- handler.addQuickElement(u'georss:polygon', self.georss_coords(geom[0].coords))
+ handler.addQuickElement('georss:polygon', self.georss_coords(geom[0].coords))
else:
raise ValueError('Geometry type "%s" not supported.' % geom.geom_type)
@@ -536,7 +536,7 @@
class GeoRSSFeed(Rss201rev2Feed, GeoFeedMixin):
def rss_attributes(self):
attrs = super(GeoRSSFeed, self).rss_attributes()
- attrs[u'xmlns:georss'] = u'http://www.georss.org/georss'
+ attrs['xmlns:georss'] = 'http://www.georss.org/georss'
return attrs
def add_item_elements(self, handler, item):
@@ -550,7 +550,7 @@
class GeoAtom1Feed(Atom1Feed, GeoFeedMixin):
def root_attributes(self):
attrs = super(GeoAtom1Feed, self).root_attributes()
- attrs[u'xmlns:georss'] = u'http://www.georss.org/georss'
+ attrs['xmlns:georss'] = 'http://www.georss.org/georss'
return attrs
def add_item_elements(self, handler, item):
@@ -564,7 +564,7 @@
class W3CGeoFeed(Rss201rev2Feed, GeoFeedMixin):
def rss_attributes(self):
attrs = super(W3CGeoFeed, self).rss_attributes()
- attrs[u'xmlns:geo'] = u'http://www.w3.org/2003/01/geo/wgs84_pos#'
+ attrs['xmlns:geo'] = 'http://www.w3.org/2003/01/geo/wgs84_pos#'
return attrs
def add_item_elements(self, handler, item):
--- ../python3/webhelpers/containers.py (original)
+++ ../python3/webhelpers/containers.py (refactored)
@@ -40,7 +40,7 @@
args = tuple()
else:
args = self.default_factory,
- return type(self), args, None, None, self.items()
+ return type(self), args, None, None, list(self.items())
def copy(self):
return self.__copy__()
def __copy__(self):
@@ -48,7 +48,7 @@
def __deepcopy__(self, memo):
import copy
return type(self)(self.default_factory,
- copy.deepcopy(self.items()))
+ copy.deepcopy(list(self.items())))
def __repr__(self):
return 'defaultdict(%s, %s)' % (self.default_factory,
dict.__repr__(self))
RefactoringTool: Refactored ../python3/webhelpers/constants.py
WARNING: couldn't encode ../python3/webhelpers/constants.py's diff for your terminal
RefactoringTool: No changes to ../python3/webhelpers/__init__.py
RefactoringTool: Refactored ../python3/webhelpers/paginate.py
@@ -116,8 +116,8 @@
If ``max_items`` is provided, return no more than that many items.
"""
- data = [(x[1], x[0]) for x in self.result.iteritems()]
- data.sort(key=lambda x: (sys.maxint - x[0], x[1]))
+ data = [(x[1], x[0]) for x in self.result.items()]
+ data.sort(key=lambda x: (sys.maxsize - x[0], x[1]))
if max_items:
return data[:max_items]
else:
@@ -126,7 +126,7 @@
def get_sorted_items(self):
"""Return the result as a list of ``(item, count)`` pairs sorted by item.
"""
- data = self.result.items()
+ data = list(self.result.items())
data.sort()
return data
@@ -279,7 +279,7 @@
raise KeyError("key %r is not in original mapping" % k)
r1 = {}
r2 = {}
- for k, v in dic.items():
+ for k, v in list(dic.items()):
if k in keys:
r1[k] = v
else:
@@ -316,7 +316,7 @@
elif default is not NotGiven:
yield key, default
if other_keys:
- for key, value in d.iteritems():
+ for key, value in d.items():
yield key, value
def get_many(d, required=None, optional=None, one_of=None):
--- ../python3/webhelpers/constants.py (original)
+++ ../python3/webhelpers/constants.py (refactored)
@@ -107,7 +107,7 @@
return _country_codes
else:
- text_directly_from_iso_website = u"""
+ text_directly_from_iso_website = """
A
AFGHANISTAN AF
--- ../python3/webhelpers/paginate.py (original)
+++ ../python3/webhelpers/paginate.py (refactored)
@@ -173,7 +173,7 @@
import re
from string import Template
-import urllib
+import urllib.request, urllib.parse, urllib.error
import warnings
from webhelpers.html import literal, HTML
@@ -247,7 +247,7 @@
def __getitem__(self, range):
if not isinstance(range, slice):
- raise Exception, "__getitem__ without slicing not supported"
+ raise Exception("__getitem__ without slicing not supported")
offset = range.start
limit = range.stop - range.start
select = self.obj.offset(offset).limit(limit)
@@ -265,7 +265,7 @@
def __getitem__(self, range):
if not isinstance(range, slice):
- raise Exception, "__getitem__ without slicing not supported"
+ raise Exception("__getitem__ without slicing not supported")
return self.obj[range]
def __len__(self):
@@ -441,7 +441,7 @@
first = self.first_item - 1
last = self.last_item
self.items = list(self.collection[first:last])
- except TypeError, e:
+ except TypeError as e:
if str(e) == "unhashable type":
# Assume this means collection is unsliceable.
raise TypeError(INCOMPATIBLE_COLLECTION_TYPE)
@@ -759,7 +759,7 @@
text = HTML.span(c=text, **self.dotdot_attr)
nav_items.append(text)
- for thispage in xrange(leftmost_page, rightmost_page+1):
+ for thispage in range(leftmost_page, rightmost_page+1):
# Hilight the current page number and do not use a link
if thispage == self.page:
text = '%s' % (thispage,)
@@ -833,7 +833,7 @@
# the controller and action manually
if config.mapper.explicit:
if hasattr(config, 'mapper_dict'):
- for k, v in config.mapper_dict.items():
+ for k, v in list(config.mapper_dict.items()):
if k != self.page_param:
link_params[k] = v
@@ -881,9 +881,9 @@
if partial:
params["partial"] = "1"
if sort:
- params = params.items()
+ params = list(params.items())
params.sort()
- qs = urllib.urlencode(params, True)
+ qs = urllib.parse.urlencode(params, True)
RefactoringTool: Refactored ../python3/webhelpers/number.py
RefactoringTool: Refactored ../python3/webhelpers/misc.py
RefactoringTool: No changes to ../python3/webhelpers/mimehelper.py
RefactoringTool: Refactored ../python3/webhelpers/media.py
RefactoringTool: Refactored ../python3/unfinished/multimedia.py
return "%s?%s" % (path, qs)
class PageURL(object):
--- ../python3/webhelpers/number.py (original)
+++ ../python3/webhelpers/number.py (refactored)
@@ -199,7 +199,7 @@
self.max = None
self._init_stats()
- def __nonzero__(self):
+ def __bool__(self):
"""The instance is true if it has seen any data."""
return bool(self.count)
--- ../python3/webhelpers/misc.py (original)
+++ ../python3/webhelpers/misc.py (refactored)
@@ -25,7 +25,7 @@
From recipe in itertools docs.
"""
- for elm in itertools.ifilterfalse(pred, seq):
+ for elm in itertools.filterfalse(pred, seq):
return False
return True
@@ -48,7 +48,7 @@
From recipe in itertools docs.
"""
- for elm in itertools.ifilter(pred, seq):
+ for elm in filter(pred, seq):
return True
return False
@@ -70,7 +70,7 @@
From recipe in itertools docs.
"""
- for elm in itertools.ifilter(pred, seq):
+ for elm in filter(pred, seq):
return False
return True
@@ -145,8 +145,8 @@
This is mainly used to exclude abstract subclasses.
"""
if isinstance(it, dict):
- it = it.itervalues()
- class_types = (type, types.ClassType)
+ it = iter(it.values())
+ class_types = (type, type)
ignore = [class_]
if exclude:
ignore.extend(exclude)
--- ../python3/webhelpers/media.py (original)
+++ ../python3/webhelpers/media.py (refactored)
@@ -117,19 +117,19 @@
sys.exit("usage: %s FILES ...\nPrints dimensions of each image")
for file in files:
apath = os.path.abspath(file)
- print "%s:" % apath,
+ print("%s:" % apath, end=' ')
if not os.path.isfile(file):
- print "does not exist or is not a plain file"
+ print("does not exist or is not a plain file")
continue
width, height = get_dimensions(file)
if width is None and height is None:
- print "could not get dimensions"
+ print("could not get dimensions")
else:
if width is None:
width = "UNKNOWN"
if height is None:
height = "UNKNOWN"
- print "%s x %s" % (width, height)
+ print("%s x %s" % (width, height))
if __name__ == "__main__": test_get_dimensions()
--- ../python3/unfinished/multimedia.py (original)
+++ ../python3/unfinished/multimedia.py (refactored)
@@ -87,7 +87,7 @@
"""
try:
im = Image.open(image_path)
- except IOError, e:
+ except IOError as e:
if str(e) == "cannot identify image file":
return None
else:
@@ -118,7 +118,7 @@
im = im.convert() # Convert GIF palette to RGB mode.
try:
im.thumbnail((width, height), Image.ANTIALIAS)
- except IOError, e:
+ except IOError as e:
reason = str(e)
if RX_DECODER_NOT_AVAILABLE.search(reason):
return None # PIL error, cannot thumbnail.
@@ -184,9 +184,9 @@
return os.path.join(dir, new_name)
def test():
- print "Height for 600x480 @ width 200 is", choose_height(200, 600, 480)
- print "Path 200 for a/foo.jpg is", get_thumb_path('a/foo.jpg', 200)
- print "Path 200 for a/foo.png is", get_thumb_path('a/foo.png', 200)
+ print("Height for 600x480 @ width 200 is", choose_height(200, 600, 480))
+ print("Path 200 for a/foo.jpg is", get_thumb_path('a/foo.jpg', 200))
+ print("Path 200 for a/foo.png is", get_thumb_path('a/foo.png', 200))
if __name__ == "__main__": test()
@@ -328,7 +328,7 @@
width = 200
dst = make_pdf_thumbnail2(source_file, width)
- print "Thumbnail made:", dst
+ print("Thumbnail made:", dst)
#ps_cmd = "save pop currentglobal true setglobal false/product where{pop product(Ghostscript)search{pop pop pop revision 600 ge{pop true}if}{pop}ifelse}if{/pdfdict where{pop pdfdict begin/pdfshowpage_setpage[pdfdict/pdfshowpage_setpage get{dup type/nametype eq{dup/OutputFileRefactoringTool: Refactored ../python3/unfinished/logging_optparse.py
RefactoringTool: No changes to ../python3/unfinished/document.py
RefactoringTool: Refactored ../python3/unfinished/disabled_test_pylonslib_minify.py
eq{pop/AntiRotationHack}{dup/MediaBox eq revision 650 ge and{/THB.CropHack{1 index/CropBox pget{2 index exch/MediaBox exch put}if}def/THB.CropHack cvx}if}ifelse}if}forall]cvx def end}if}if setglobal"
--- ../python3/unfinished/logging_optparse.py (original)
+++ ../python3/unfinished/logging_optparse.py (refactored)
@@ -215,5 +215,5 @@
if logger == "__main__":
# Set root logger to same level.
logging.getLogger().setLevel(level)
- except LogLevelError, e:
+ except LogLevelError as e:
parser.error("log level '%s' not defined" % e.level)
--- ../python3/unfinished/disabled_test_pylonslib_minify.py (original)
+++ ../python3/unfinished/disabled_test_pylonslib_minify.py (refactored)
@@ -79,42 +79,42 @@
# minify and combine
js_source = self.minify.javascript_link('/deep/a.js', '/b.js', combined=True, minified=True)
css_source = self.minify.stylesheet_link('/deep/a.css', '/b.css', combined=True, minified=True)
- self.assert_('"/a.b.COMBINED.min.css"' in css_source)
- self.assert_('"/a.b.COMBINED.min.js"' in js_source)
+ self.assertTrue('"/a.b.COMBINED.min.css"' in css_source)
+ self.assertTrue('"/a.b.COMBINED.min.js"' in js_source)
# combine
js_source = self.minify.javascript_link('/deep/a.js', '/b.js', combined=True)
css_source = self.minify.stylesheet_link('/deep/a.css', '/b.css', combined=True)
- self.assert_('"/a.b.COMBINED.css"' in css_source)
- self.assert_('"/a.b.COMBINED.js"' in js_source)
+ self.assertTrue('"/a.b.COMBINED.css"' in css_source)
+ self.assertTrue('"/a.b.COMBINED.js"' in js_source)
# minify
js_source = self.minify.javascript_link('/deep/a.js', '/b.js', minified=True)
css_source = self.minify.stylesheet_link('/deep/a.css', '/b.css', minified=True)
- self.assert_('"/deep/a.min.css"' in css_source)
- self.assert_('"/b.min.css"' in css_source)
- self.assert_('"/deep/a.min.js"' in js_source)
- self.assert_('"/b.min.js"' in js_source)
+ self.assertTrue('"/deep/a.min.css"' in css_source)
+ self.assertTrue('"/b.min.css"' in css_source)
+ self.assertTrue('"/deep/a.min.js"' in js_source)
+ self.assertTrue('"/b.min.js"' in js_source)
# root minify and combined
js_source = self.minify.javascript_link('/c.js', '/b.js', combined=True, minified=True)
css_source = self.minify.stylesheet_link('/c.css', '/b.css', combined=True, minified=True)
- self.assert_('"/c.b.COMBINED.min.css"' in css_source)
- self.assert_('"/c.b.COMBINED.min.js"' in js_source)
+ self.assertTrue('"/c.b.COMBINED.min.css"' in css_source)
+ self.assertTrue('"/c.b.COMBINED.min.js"' in js_source)
# root minify
js_source = self.minify.javascript_link('/c.js', '/b.js', minified=True)
css_source = self.minify.stylesheet_link('/c.css', '/b.css', minified=True)
- self.assert_('"/b.min.css"' in css_source)
- self.assert_('"/b.min.js"' in js_source)
- self.assert_('"/c.min.js"' in js_source)
- self.assert_('"/c.min.js"' in js_source)
+ self.assertTrue('"/b.min.css"' in css_source)
+ self.assertTrue('"/b.min.js"' in js_source)
+ self.assertTrue('"/c.min.js"' in js_source)
+ self.assertTrue('"/c.min.js"' in js_source)
# both root minify and combined
js_source = self.minify.javascript_link('/deep/a.js', '/deep/d.js', combined=True, minified=True)
css_source = self.minify.stylesheet_link('/deep/a.css', '/deep/d.css', combined=True, minified=True)
- self.assert_('"/deep/a.d.COMBINED.min.css"' in css_source)
- self.assert_('"/deep/a.d.COMBINED.min.js"' in js_source)
+ self.assertTrue('"/deep/a.d.COMBINED.min.css"' in css_source)
+ self.assertTrue('"/deep/a.d.COMBINED.min.js"' in js_source)
# Cleanup -- done by .tearDown()
#self.purge_files('a.b.COMBINED.min.js', 'a.b.COMBINED.min.css')
RefactoringTool: Refactored ../python3/unfinished/containers.py
RefactoringTool: Refactored ../python3/unfinished/baseN.py
RefactoringTool: No changes to ../python3/unfinished/sanitize_filename.py
RefactoringTool: No changes to ../python3/unfinished/opener.py
RefactoringTool: No changes to ../python3/unfinished/number_to_human_size.py
RefactoringTool: No changes to ../python3/tests/util.py
RefactoringTool: Refactored ../python3/tests/test_tools.py
--- ../python3/unfinished/containers.py (original)
+++ ../python3/unfinished/containers.py (refactored)
@@ -16,7 +16,7 @@
if n > 0:
extension = [fill] * n
lis.extend(extension)
- return zip(*data)
+ return list(zip(*data))
def izip_fill(*iterables, **kw):
"""Like itertools.izip but use a default value for the missing elements
@@ -26,18 +26,18 @@
``default`` is the default value (default ``None``, must be a keyword
arg.
"""
- iterables = map(iter, iterables)
+ iterables = list(map(iter, iterables))
default = kw.pop('default', None)
if kw:
raise TypeError("unrecognized keyword arguments")
columns = len(iterables)
- columns_range = range(columns)
+ columns_range = list(range(columns))
while True:
found_data = False
row = [None] * columns
for i in columns_range:
try:
- row[i] = iterables[i].next()
+ row[i] = next(iterables[i])
found_data = True
except StopIteration:
row[i] = default
--- ../python3/unfinished/baseN.py (original)
+++ ../python3/unfinished/baseN.py (refactored)
@@ -59,7 +59,7 @@
s = list(s)
alphabet = list(alphabet)
base = len(alphabet)
- inverse_alphabet = dict(zip(alphabet, xrange(0, base)))
+ inverse_alphabet = dict(list(zip(alphabet, list(range(0, base)))))
n = 0
exp = 0
for i in reversed(s):
--- ../python3/tests/test_tools.py (original)
+++ ../python3/tests/test_tools.py (refactored)
@@ -74,50 +74,50 @@
}
result_values = {}
- for k, v in result_values_templates.iteritems():
+ for k, v in result_values_templates.items():
result_values[k] = Template(v).substitute(raw_values)
self.assertEqual(result_values["email_result"], auto_link(raw_values['email_raw'], 'email_addresses'))
- self.assertEqual(u"hello %(email_result)s" % result_values, auto_link("hello %(email_raw)s" % raw_values, 'email_addresses'))
- self.assertEqual(u"Go to %(link_result)s" % result_values, auto_link("Go to %(link_raw)s" % raw_values, 'urls'))
- self.assertEqual(u"Go to %(link_raw)s" % raw_values, auto_link("Go to %(link_raw)s" % raw_values, 'email_addresses'))
- self.assertEqual(u"Go to %(link_result)s and say hello to %(email_result)s" % result_values, auto_link("Go to %(link_raw)s and say hello to %(email_raw)s" % raw_values))
- self.assertEqual(u"Link %(link_result)s
" % result_values, auto_link(literal("Link %(link_raw)s
") % raw_values))
- self.assertEqual(u"%(link_result)s Link
" % result_values, auto_link(literal("%(link_raw)s Link
") % raw_values))
- self.assertEqual(u"Link %(link_result_with_options)s
" % result_values, auto_link(literal("Link %(link_raw)s
") % raw_values, 'all', target='_blank'))
- self.assertEqual(u"Go to %(link_result)s." % result_values, auto_link("Go to %(link_raw)s." % raw_values))
- self.assertEqual(u"Go to %(link_result)s, then say hello to %(email_result)s.
" % result_values, auto_link(literal("Go to %(link_raw)s, then say hello to %(email_raw)s.
") % raw_values))
- self.assertEqual(u"Go to %(link2_result)s" % result_values, auto_link("Go to %(link2_raw)s" % raw_values, 'urls'))
- self.assertEqual(u"Go to %(link2_raw)s" % raw_values, auto_link("Go to %(link2_raw)s" % raw_values, 'email_addresses'))
- self.assertEqual(u"Link %(link2_result)s
" % result_values, auto_link(literal("Link %(link2_raw)s
") % raw_values))
- self.assertEqual(u"%(link2_result)s Link
" % result_values, auto_link(literal("%(link2_raw)s Link
") % raw_values))
- self.assertEqual(u"Go to %(link2_result)s." % result_values, auto_link(literal("Go to %(link2_raw)s.") % raw_values))
- self.assertEqual(u"Say hello to %(email_result)s, then go to %(link2_result)s.
" % result_values, auto_link(literal("Say hello to %(email_raw)s, then go to %(link2_raw)s.
") % raw_values))
- self.assertEqual(u"Go to %(link3_result)s" % result_values, auto_link("Go to %(link3_raw)s" % raw_values, 'urls'))
- self.assertEqual(u"Go to %(link3_raw)s" % raw_values, auto_link("Go to %(link3_raw)s" % raw_values, 'email_addresses'))
- self.assertEqual(u"Link %(link3_result)s
" % result_values, auto_link(literal("Link %(link3_raw)s
") % raw_values))
- self.assertEqual(u"%(link3_result)s Link
" % result_values, auto_link(literal("%(link3_raw)s Link
") % raw_values))
- self.assertEqual(u"Go to %(link3_result)s." % result_values, auto_link("Go to %(link3_raw)s." % raw_values))
- self.assertEqual(u"Go to %(link3_result)s. seriously, %(link3_result)s? i think I'll say hello to %(email_result)s. instead.
" % result_values, auto_link(literal("Go to %(link3_raw)s. seriously, %(link3_raw)s? i think I'll say hello to %(email_raw)s. instead.
") % raw_values))
- self.assertEqual(u"Link %(link4_result)s
" % result_values, auto_link(literal("Link %(link4_raw)s
") % raw_values))
- self.assertEqual(u"%(link4_result)s Link
" % result_values, auto_link(literal("%(link4_raw)s Link
") % raw_values))
- self.assertEqual(u"%(link5_result)s Link
" % result_values, auto_link(literal("%(link5_raw)s Link
") % raw_values))
- self.assertEqual(u"%(link6_result)s Link
" % result_values, auto_link(literal("%(link6_raw)s Link
") % raw_values))
- self.assertEqual(u"%(link7_result)s Link
" % result_values, auto_link(literal("%(link7_raw)s Link
") % raw_values))
- self.assertEqual(u"Go to %(link8_result)s" % result_values, auto_link("Go to %(link8_raw)s" % raw_values, 'urls'))
- self.assertEqual(u"Go to %(link8_raw)s" % raw_values, auto_link("Go to %(link8_raw)s" % raw_values, 'email_addresses'))
- self.assertEqual(u"Link %(link8_result)s
" % result_values, auto_link(literal("Link %(link8_raw)s
") % raw_values))
- self.assertEqual(u"%(link8_result)s Link
" % result_values, auto_link(literal("%(link8_raw)s Link
") % raw_values))
- self.assertEqual(u"Go to %(link8_result)s." % result_values, auto_link("Go to %(link8_raw)s." % raw_values))
- self.assertEqual(u"Go to %(link8_result)s. seriously, %(link8_result)s? i think I'll say hello to %(email_result)s. instead.
" % result_values, auto_link(literal("Go to %(link8_raw)s. seriously, %(link8_raw)s? i think I'll say hello to %(email_raw)s. instead.
") % raw_values))
- self.assertEqual(u"Go to %(link9_result)s" % result_values, auto_link("Go to %(link9_raw)s" % raw_values, 'urls'))
- self.assertEqual(u"Go to %(link9_raw)s" % raw_values, auto_link("Go to %(link9_raw)s" % raw_values, 'email_addresses'))
- self.assertEqual(u"Link %(link9_result)s
" % result_values, auto_link(literal("Link %(link9_raw)s
") % raw_values))
- self.assertEqual(u"%(link9_result)s Link
" % result_values, auto_link(literal("%(link9_raw)s Link
") % raw_values))
- self.assertEqual(u"Go to %(link9_result)s." % result_values, auto_link("Go to %(link9_raw)s." % raw_values))
- self.assertEqual(u"Go to %(link9_result)s. seriously, %(link9_result)s? i think I'll say hello to %(email_result)s. instead.
" % result_values, auto_link(literal("Go to %(link9_raw)s. seriously, %(link9_raw)s? i think I'll say hello to %(email_raw)s. instead.
") % raw_values))
- self.assertEqual(u"", auto_link(None))
- self.assertEqual(u"", auto_link(""))
+ self.assertEqual("hello %(email_result)s" % result_values, auto_link("hello %(email_raw)s" % raw_values, 'email_addresses'))
+ self.assertEqual("Go to %(link_result)s" % result_values, auto_link("Go to %(link_raw)s" % raw_values, 'urls'))
+ self.assertEqual("Go to %(link_raw)s" % raw_values, auto_link("Go to %(link_raw)s" % raw_values, 'email_addresses'))
+ self.assertEqual("Go to %(link_result)s and say hello to %(email_result)s" % result_values, auto_link("Go to %(link_raw)s and say hello to %(email_raw)s" % raw_values))
+ self.assertEqual("Link %(link_result)s
" % result_values, auto_link(literal("Link %(link_raw)s
") % raw_values))
+ self.assertEqual("%(link_result)s Link
" % result_values, auto_link(literal("%(link_raw)s Link
") % raw_values))
+ self.assertEqual("Link %(link_result_with_options)s
" % result_values, auto_link(literal("Link %(link_raw)s
") % raw_values, 'all', target='_blank'))
+ self.assertEqual("Go to %(link_result)s." % result_values, auto_link("Go to %(link_raw)s." % raw_values))
+ self.assertEqual("Go to %(link_result)s, then say hello to %(email_result)s.
" % result_values, auto_link(literal("Go to %(link_raw)s, then say hello to %(email_raw)s.
") % raw_values))
+ self.assertEqual("Go to %(link2_result)s" % result_values, auto_link("Go to %(link2_raw)s" % raw_values, 'urls'))
+ self.assertEqual("Go to %(link2_raw)s" % raw_values, auto_link("Go to %(link2_raw)s" % raw_values, 'email_addresses'))
+ self.assertEqual("Link %(link2_result)s
" % result_values, auto_link(literal("Link %(link2_raw)s
") % raw_values))
+ self.assertEqual("%(link2_result)s Link
" % result_values, auto_link(literal("%(link2_raw)s Link
") % raw_values))
+ self.assertEqual("Go to %(link2_result)s." % result_values, auto_link(literal("Go to %(link2_raw)s.") % raw_values))
+ self.assertEqual("Say hello to %(email_result)s, then go to %(link2_result)s.
" % result_values, auto_link(literal("Say hello to %(email_raw)s, then go to %(link2_raw)s.
") % raw_values))
+ self.assertEqual("Go to %(link3_result)s" % result_values, auto_link("Go to %(link3_raw)s" % raw_values, 'urls'))
+ self.assertEqual("Go to %(link3_raw)s" % raw_values, auto_link("Go to %(link3_raw)s" % raw_values, 'email_addresses'))
+ self.assertEqual("Link %(link3_result)s
" % result_values, auto_link(literal("Link %(link3_raw)s
") % raw_values))
+ self.assertEqual("%(link3_result)s Link
" % result_values, auto_link(literal("%(link3_raw)s Link
") % raw_values))
+ self.assertEqual("Go to %(link3_result)s." % result_values, auto_link("Go to %(link3_raw)s." % raw_values))
+ self.assertEqual("Go to %(link3_result)s. seriously, %(link3_result)s? i think I'll say hello to %(email_result)s. instead.
" % result_values, auto_link(literal("Go to %(link3_raw)s. seriously, %(link3_raw)s? i think I'll say hello to %(email_raw)s. instead.
") % raw_values))
+ self.assertEqual("Link %(link4_result)s
" % result_values, auto_link(literal("Link %(link4_raw)s
") % raw_values))
+ self.assertEqual("%(link4_result)s Link
" % result_values, auto_link(literal("%(link4_raw)s Link
") % raw_values))
+ self.assertEqual("%(link5_result)s Link
" % result_values, auto_link(literal("%(link5_raw)s Link
") % raw_values))
+ self.assertEqual("%(link6_result)s Link
" % result_values, auto_link(literal("%(link6_raw)s Link
") % raw_values))
+ self.assertEqual("%(link7_result)s Link
" % result_values, auto_link(literal("%(link7_raw)s Link
") % raw_values))
+ self.assertEqual("Go to %(link8_result)s" % result_values, auto_link("Go to %(link8_raw)s" % raw_values, 'urls'))
+ self.assertEqual("Go to %(link8_raw)s" % raw_values, auto_link("Go to %(link8_raw)s" % raw_values, 'email_addresses'))
+ self.assertEqual("Link %(link8_result)s
" % result_values, auto_link(literal("Link %(link8_raw)s
") % raw_values))
+ self.assertEqual("%(link8_result)s Link
" % result_values, auto_link(literal("%(link8_raw)s Link
") % raw_values))
+ self.assertEqual("Go to %(link8_result)s." % result_values, auto_link("Go to %(link8_raw)s." % raw_values))
+ self.assertEqual("Go to %(link8_result)s. seriously, %(link8_result)s? i think I'll say hello to %(email_result)s. instead.
" % result_values, auto_link(literal("Go to %(link8_raw)s. seriously, %(link8_raw)s? i think I'll say hello to %(email_raw)s. instead.
") % raw_values))
+ self.assertEqual("Go to %(link9_result)s" % result_values, auto_link("Go to %(link9_raw)s" % raw_values, 'urls'))
+ self.assertEqual("Go to %(link9_raw)s" % raw_values, auto_link("Go to %(link9_raw)s" % raw_values, 'email_addresses'))
+ self.assertEqual("Link %(link9_result)s
" % result_values, auto_link(literal("Link %(link9_raw)s
") % raw_values))
+ self.assertEqual("%(link9_result)s Link
" % result_values, auto_link(literal("%(link9_raw)s Link
") % raw_values))
+ self.assertEqual("Go to %(link9_result)s." % result_values, auto_link("Go to %(link9_raw)s." % raw_values))
+ self.assertEqual("Go to %(link9_result)s. seriously, %(link9_result)s? i think I'll say hello to %(email_result)s. instead.
" % result_values, auto_link(literal("Go to %(link9_raw)s. seriously, %(link9_raw)s? i think I'll say hello to %(email_raw)s. instead.
") % raw_values))
+ self.assertEqual("", auto_link(None))
+ self.assertEqual("", auto_link(""))
# Failing test: PylonsHQ bug #657
#self.assertEqual(u'<www.google.com>', auto_link(""))
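Most of the churn in this hunk is the removal of the u string prefix. Since PEP 414 (Python 3.3) the prefix is accepted again but has no effect, as this quick check (not part of the test suite) shows:

    # u'' and '' produce the same str object in Python 3.
    assert u"Go to" == "Go to"
    assert type(u"Go to") is str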
@@ -131,80 +131,80 @@
class TestURLHelper(WebHelpersTestCase):
def test_button_to_with_straight_url(self):
- self.assertEqual(u"",
+ self.assertEqual("",
button_to("Hello", "http://www.example.com"))
def test_button_to_with_query(self):
- self.assertEqual(u"",
+ self.assertEqual("",
button_to("Hello", "http://www.example.com/q1=v1&q2=v2"))
def test_button_to_with_escaped_query(self):
- self.assertEqual(u"",
+ self.assertEqual("",
button_to("Hello", "http://www.example.com/q1=v1&q2=v2"))
def test_button_to_with_query_and_no_name(self):
- self.assertEqual(u"",
+ self.assertEqual("",
button_to(None, "http://www.example.com?q1=v1&q2=v2"))
def test_button_to_enabled_disabled(self):
- self.assertEqual(u"",
+ self.assertEqual("",
button_to("Hello", "http://www.example.com", disabled=False))
- self.assertEqual(u"",
+ self.assertEqual("",
button_to("Hello", "http://www.example.com", disabled=True))
def test_button_to_with_method_delete(self):
- self.assertEqual(u"",
+ self.assertEqual("",
button_to("Hello", "http://www.example.com", method='DELETE'))
- self.assertEqual(u"",
+ self.assertEqual("",
button_to("Hello", "http://www.example.com", method='delete'))
def test_button_to_with_method_get(self):
- self.assertEqual(u"",
+ self.assertEqual("",
button_to("Hello", "http://www.example.com", method='get'))
- self.assertEqual(u"",
+ self.assertEqual("",
button_to("Hello", "http://www.example.com", method='GET'))
def test_button_to_with_img(self):
- self.assertEqual(u'',
+ self.assertEqual('',
button_to("Edit", '/content/edit/3', type='image', src='/images/icon_delete.gif'))
- self.assertEqual(u'',
+ self.assertEqual('',
button_to("Submit", '/content/submit/3', type='image', src='submit.png', alt='Complete the form'))
def test_mail_to(self):
- self.assertEqual(u'justin@example.com', mail_to("justin@example.com"))
- self.assertEqual(u'Justin Example', mail_to("justin@example.com", "Justin Example"))
- self.assertEqual(u'Justin Example',
+ self.assertEqual('justin@example.com', mail_to("justin@example.com"))
+ self.assertEqual('Justin Example', mail_to("justin@example.com", "Justin Example"))
+ self.assertEqual('Justin Example',
mail_to("justin@example.com", "Justin Example", class_="admin"))
def test_mail_to_with_javascript(self):
- self.assertEqual(u"", mail_to("me@domain.com", "My email", encode = "javascript"))
+ self.assertEqual("", mail_to("me@domain.com", "My email", encode = "javascript"))
def test_mail_to_with_options(self):
- self.assertEqual(u'My email',
+ self.assertEqual('My email',
mail_to("me@example.com", "My email", cc="ccaddress@example.com",
bcc="bccaddress@example.com", subject="This is an example email",
body="This is the body of the message."))
def test_mail_to_with_img(self):
- self.assertEqual(u'',
+ self.assertEqual('',
mail_to('feedback@example.com', HTML.literal('')))
def test_mail_to_with_hex(self):
- self.assertEqual(u"My email",
+ self.assertEqual("My email",
mail_to("me@domain.com", "My email", encode = "hex"))
- self.assertEqual(u"me@domain.com",
+ self.assertEqual("me@domain.com",
mail_to("me@domain.com", None, encode = "hex"))
def test_mail_to_with_replace_options(self):
- self.assertEqual(u'wolfgang(at)stufenlos(dot)net',
+ self.assertEqual('wolfgang(at)stufenlos(dot)net',
mail_to("wolfgang@stufenlos.net", None, replace_at="(at)", replace_dot="(dot)"))
- self.assertEqual(u"me(at)domain.com",
+ self.assertEqual("me(at)domain.com",
mail_to("me@domain.com", None, encode = "hex", replace_at = "(at)"))
- self.assertEqual(u"My email",
+ self.assertEqual("My email",
mail_to("me@domain.com", "My email", encode = "hex", replace_at = "(at)"))
- self.assertEqual(u"me(at)domain(dot)com",
+ self.assertEqual("me(at)domain(dot)com",
mail_to("me@domain.com", None, encode = "hex", replace_at = "(at)", replace_dot = "(dot)"))
- self.assertEqual(u"",
+ self.assertEqual("",
mail_to("me@domain.com", "My email", encode = "javascript", replace_at = "(at)", replace_dot = "(dot)"))
@@ -247,9 +247,9 @@
style="color:red"))
def test_highlight_literal(self):
- eq_(literal(u'The <red> cat.'),
+ eq_(literal('The <red> cat.'),
highlight("The cat.", "at"))
- eq_(literal(u'The cat.'),
+ eq_(literal('The cat.'),
highlight(literal("The cat."), "at"))
def test_highlight_legacy_highlighter(self):
@@ -259,15 +259,15 @@
class TestStripTagsHelper(WebHelpersTestCase):
def test_compare_strip_tags_to_sanitize(self):
- text = u'I really like steak!'
+ text = 'I really like steak!'
eq_(strip_tags(text), render.sanitize(text))
if __name__ == '__main__':
- suite = map(unittest.makeSuite, [
+ suite = list(map(unittest.makeSuite, [
TestToolsHelper,
TestHighlightHelper,
TestURLHelper,
TestStripTagsHelper,
- ])
+ ]))
for testsuite in suite:
unittest.TextTestRunner(verbosity=1).run(testsuite)
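Besides the iteritems() -> items() and map() -> list(map()) rewrites, the __main__ block above still relies on unittest.makeSuite, which is deprecated in newer Python 3 releases. A sketch of the same suite construction via the loader API, assuming the four test classes from test_tools.py are in scope:

    import unittest

    # Build one combined suite from the test case classes and run it,
    # mirroring the original __main__ block without makeSuite.
    loader = unittest.TestLoader()
    suite = unittest.TestSuite(
        loader.loadTestsFromTestCase(cls)
        for cls in (TestToolsHelper, TestHighlightHelper,
                    TestURLHelper, TestStripTagsHelper)
    )
    unittest.TextTestRunner(verbosity=1).run(suite)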
--- ../python3/tests/test_text.py (original)
+++ ../python3/tests/test_text.py (refactored)
@@ -27,8 +27,8 @@
excerpt('This is a beautiful? morning', 'beautiful', 5))
def test_excerpt_with_utf8(self):
--- ../python3/tests/test_tags.py (original)
+++ ../python3/tests/test_tags.py (refactored)
@@ -11,148 +11,148 @@
def test_check_box(self):
eq_(
checkbox("admin"),
- u'',
+ '',
)
def test_form(self):
eq_(
form(url="http://www.example.com"),
- u'