
Commit 8d3bd1a

Merge remote-tracking branch 'origin/master' into webencodings
2 parents dc96abd + 7cce65b

File tree: 8 files changed, +14 -14 lines

.travis.yml

+3 -2

@@ -4,6 +4,7 @@ python:
   - "2.7"
   - "3.2"
   - "3.3"
+  - "3.4"
   - "pypy"
 
 env:
@@ -14,12 +15,12 @@ matrix:
   exclude:
     - python: "2.7"
       env: USE_OPTIONAL=false
-    - python: "3.3"
+    - python: "3.4"
       env: USE_OPTIONAL=false
   include:
     - python: "2.7"
       env: USE_OPTIONAL=false FLAKE=true
-    - python: "3.3"
+    - python: "3.4"
       env: USE_OPTIONAL=false FLAKE=true
 
 before_install:

html5lib/html5parser.py

+1 -2

@@ -1216,8 +1216,7 @@ def startTagIsIndex(self, token):
             attributes["name"] = "isindex"
             self.processStartTag(impliedTagToken("input", "StartTag",
                                                  attributes=attributes,
-                                                 selfClosing=
-                                                 token["selfClosing"]))
+                                                 selfClosing=token["selfClosing"]))
             self.processEndTag(impliedTagToken("label"))
             self.processStartTag(impliedTagToken("hr", "StartTag"))
             self.processEndTag(impliedTagToken("form"))

html5lib/sanitizer.py

+1 -1

@@ -245,7 +245,7 @@ def sanitize_css(self, style):
             elif prop.split('-')[0].lower() in ['background', 'border', 'margin',
                                                 'padding']:
                 for keyword in value.split():
-                    if not keyword in self.acceptable_css_keywords and \
+                    if keyword not in self.acceptable_css_keywords and \
                             not re.match("^(#[0-9a-f]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$", keyword):
                         break
                 else:
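
Most of the one-line changes in this commit are the same cleanup: rewriting "not x in y" as "x not in y". The two spellings are equivalent membership tests, but the latter reads better and is the form PEP 8 recommends (pycodestyle reports the former as E713). A minimal sketch, using a made-up keyword and set rather than the sanitizer's real data:

# Illustration only; "acceptable" and "keyword" are invented for this example.
acceptable = {"auto", "bold", "none"}
keyword = "expression(alert(1))"

old_style = not keyword in acceptable   # spelling removed by this commit
new_style = keyword not in acceptable   # spelling introduced by this commit

assert old_style == new_style  # identical result, clearer to read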

html5lib/serializer/htmlserializer.py

+2 -2

@@ -35,7 +35,7 @@
                 v = utils.surrogatePairToCodepoint(v)
             else:
                 v = ord(v)
-            if not v in encode_entity_map or k.islower():
+            if v not in encode_entity_map or k.islower():
                 # prefer &lt; over &LT; and similarly for &amp;, &gt;, etc.
                 encode_entity_map[v] = k

@@ -291,7 +291,7 @@ def serialize(self, treewalker, encoding=None):
             elif type == "Entity":
                 name = token["name"]
                 key = name + ";"
-                if not key in entities:
+                if key not in entities:
                     self.serializeError(_("Entity %s not recognized" % name))
                 if self.resolve_entities and key not in xmlEntities:
                     data = entities[key]

html5lib/tests/test_tokenizer.py

+3 -3

@@ -68,8 +68,8 @@ def processParseError(self, token):
 def concatenateCharacterTokens(tokens):
     outputTokens = []
     for token in tokens:
-        if not "ParseError" in token and token[0] == "Character":
-            if (outputTokens and not "ParseError" in outputTokens[-1] and
+        if "ParseError" not in token and token[0] == "Character":
+            if (outputTokens and "ParseError" not in outputTokens[-1] and
                 outputTokens[-1][0] == "Character"):
                 outputTokens[-1][1] += token[1]
             else:
@@ -112,7 +112,7 @@ def tokensMatch(expectedTokens, receivedTokens, ignoreErrorOrder,
     # Sort the tokens into two groups; non-parse errors and parse errors
     tokens = {"expected": [[], []], "received": [[], []]}
     for tokenType, tokenList in zip(list(tokens.keys()),
-                                    (expectedTokens, receivedTokens)):
+                                    (expectedTokens, receivedTokens)):
         for token in tokenList:
             if token != "ParseError":
                 tokens[tokenType][0].append(token)

(The line-115 change adjusts only the indentation of the continuation line; its text is unchanged.)
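
For context, concatenateCharacterTokens merges runs of adjacent Character tokens before the comparison in tokensMatch. A standalone sketch of that behaviour, written against the list-shaped tokens used in these tests (an illustration, not the module's actual code):

# Illustration only: tokens are ["Type", data, ...] lists as in the test suite.
def concatenate_character_tokens(tokens):
    output = []
    for token in tokens:
        if ("ParseError" not in token and token[0] == "Character"
                and output and "ParseError" not in output[-1]
                and output[-1][0] == "Character"):
            output[-1][1] += token[1]  # extend the previous Character token
        else:
            output.append(token)
    return output

assert concatenate_character_tokens(
    [["Character", "foo"], ["Character", "bar"], ["StartTag", "p", []]]
) == [["Character", "foobar"], ["StartTag", "p", []]]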

html5lib/tests/testdata

Submodule testdata updated 62 files

html5lib/treebuilders/dom.py

+1 -1

@@ -158,7 +158,7 @@ def insertText(self, data, parent=None):
             else:
                 # HACK: allow text nodes as children of the document node
                 if hasattr(self.dom, '_child_node_types'):
-                    if not Node.TEXT_NODE in self.dom._child_node_types:
+                    if Node.TEXT_NODE not in self.dom._child_node_types:
                         self.dom._child_node_types = list(self.dom._child_node_types)
                         self.dom._child_node_types.append(Node.TEXT_NODE)
                 self.dom.appendChild(self.dom.createTextNode(data))
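
The HACK comment above refers to xml.dom.minidom's restriction on what a Document may contain; the small check below, independent of this commit, shows why the builder widens the private _child_node_types attribute before appending text:

# Independent illustration (not from the commit) of minidom's default behaviour.
from xml.dom.minidom import Document, Node

doc = Document()
# Document objects normally do not accept text children:
print(Node.TEXT_NODE in doc._child_node_types)  # False

# After widening the allowed types, appendChild(createTextNode(...)) succeeds,
# which is what the builder's hack relies on.
doc._child_node_types = list(doc._child_node_types) + [Node.TEXT_NODE]
doc.appendChild(doc.createTextNode("text at document level"))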

tox.ini

+2 -2

@@ -1,5 +1,5 @@
 [tox]
-envlist = py26,py27,py32,py33,pypy
+envlist = py26,py27,py32,py33,py34,pypy
 
 [testenv]
 deps =
@@ -10,7 +10,7 @@ commands =
     {envbindir}/nosetests -q
     {toxinidir}/flake8-run.sh
 install_command =
-    pip install --allow-external Genshi --allow-insecure Genshi {opts} {packages}
+    pip install {opts} {packages}
 
 [testenv:pypy]
 # lxml doesn't work and datrie doesn't make sense