Skip to content

Commit d107420

Browse files
committed
Merge pull request #208 from dpkp/add_pylint_to_tox_ini
Use PyLint for static error checking
2 parents 695ea22 + e151529 commit d107420

10 files changed

+54
-58
lines changed

.travis.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ python:
66
- pypy
77

88
env:
9-
-
9+
- UNIT_AND_LINT_ONLY=true
1010
- KAFKA_VERSION=0.8.0
1111
- KAFKA_VERSION=0.8.1
1212
- KAFKA_VERSION=0.8.1.1
@@ -35,4 +35,4 @@ deploy:
3535
# branch: master
3636

3737
script:
38-
- tox -e `./travis_selector.sh $TRAVIS_PYTHON_VERSION`
38+
- if [ -n "$UNIT_AND_LINT_ONLY" ]; then tox -e lint,`./travis_selector.sh $TRAVIS_PYTHON_VERSION`; else tox -e `./travis_selector.sh $TRAVIS_PYTHON_VERSION`; fi

kafka/client.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -187,7 +187,7 @@ def __repr__(self):
187187
def _raise_on_response_error(self, resp):
188188
try:
189189
kafka.common.check_error(resp)
190-
except (UnknownTopicOrPartitionError, NotLeaderForPartitionError) as e:
190+
except (UnknownTopicOrPartitionError, NotLeaderForPartitionError):
191191
self.reset_topic_metadata(resp.topic)
192192
raise
193193

kafka/conn.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -93,8 +93,8 @@ def _read_bytes(self, num_bytes):
9393
# that the socket is in error. we will never get
9494
# more data from this socket
9595
if data == '':
96-
raise socket.error('Not enough data to read message -- did server kill socket?')
97-
96+
raise socket.error("Not enough data to read message -- did server kill socket?")
97+
9898
except socket.error:
9999
log.exception('Unable to receive data from Kafka')
100100
self._raise_connection_error()
@@ -170,7 +170,7 @@ def close(self):
170170
except socket.error:
171171
pass
172172

173-
# Closing the socket should always succeed
173+
# Closing the socket should always succeed
174174
self._sock.close()
175175
self._sock = None
176176
else:

test/fixtures.py

Lines changed: 22 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
11
import logging
2-
import glob
32
import os
43
import os.path
54
import shutil
@@ -9,8 +8,8 @@
98
import uuid
109

1110
from urlparse import urlparse
12-
from service import ExternalService, SpawnedService
13-
from testutil import get_open_port
11+
from test.service import ExternalService, SpawnedService
12+
from test.testutil import get_open_port
1413

1514
class Fixture(object):
1615
kafka_version = os.environ.get('KAFKA_VERSION', '0.8.0')
@@ -36,23 +35,23 @@ def download_official_distribution(cls,
3635
output_file = os.path.join(output_dir, distfile + '.tgz')
3736

3837
if os.path.isfile(output_file):
39-
logging.info("Found file already on disk: %s" % output_file)
38+
logging.info("Found file already on disk: %s", output_file)
4039
return output_file
4140

4241
# New tarballs are .tgz, older ones are sometimes .tar.gz
4342
try:
4443
url = url_base + distfile + '.tgz'
45-
logging.info("Attempting to download %s" % (url,))
44+
logging.info("Attempting to download %s", url)
4645
response = urllib2.urlopen(url)
4746
except urllib2.HTTPError:
4847
logging.exception("HTTP Error")
4948
url = url_base + distfile + '.tar.gz'
50-
logging.info("Attempting to download %s" % (url,))
49+
logging.info("Attempting to download %s", url)
5150
response = urllib2.urlopen(url)
5251

53-
logging.info("Saving distribution file to %s" % (output_file,))
54-
with open(os.path.join(output_dir, distfile + '.tgz'), 'w') as f:
55-
f.write(response.read())
52+
logging.info("Saving distribution file to %s", output_file)
53+
with open(output_file, 'w') as output_file_fd:
54+
output_file_fd.write(response.read())
5655

5756
return output_file
5857

@@ -117,11 +116,9 @@ def open(self):
117116
self.render_template(template, properties, vars(self))
118117

119118
# Configure Zookeeper child process
120-
self.child = SpawnedService(args=self.kafka_run_class_args(
121-
"org.apache.zookeeper.server.quorum.QuorumPeerMain",
122-
properties),
123-
env=self.kafka_run_class_env()
124-
)
119+
args = self.kafka_run_class_args("org.apache.zookeeper.server.quorum.QuorumPeerMain", properties)
120+
env = self.kafka_run_class_env()
121+
self.child = SpawnedService(args, env)
125122

126123
# Party!
127124
self.out("Starting...")
@@ -162,7 +159,7 @@ def __init__(self, host, port, broker_id, zk_host, zk_port, zk_chroot, replicas=
162159
self.zk_port = zk_port
163160
self.zk_chroot = zk_chroot
164161

165-
self.replicas = replicas
162+
self.replicas = replicas
166163
self.partitions = partitions
167164

168165
self.tmp_dir = None
@@ -199,21 +196,19 @@ def open(self):
199196
self.render_template(template, properties, vars(self))
200197

201198
# Configure Kafka child process
202-
self.child = SpawnedService(args=self.kafka_run_class_args(
203-
"kafka.Kafka", properties),
204-
env=self.kafka_run_class_env()
205-
)
199+
args = self.kafka_run_class_args("kafka.Kafka", properties)
200+
env = self.kafka_run_class_env()
201+
self.child = SpawnedService(args, env)
206202

207203
# Party!
208204
self.out("Creating Zookeeper chroot node...")
209-
proc = subprocess.Popen(self.kafka_run_class_args(
210-
"org.apache.zookeeper.ZooKeeperMain",
211-
"-server", "%s:%d" % (self.zk_host, self.zk_port),
212-
"create", "/%s" % self.zk_chroot, "kafka-python"
213-
),
214-
env=self.kafka_run_class_env(),
215-
stdout=subprocess.PIPE,
216-
stderr=subprocess.PIPE)
205+
args = self.kafka_run_class_args("org.apache.zookeeper.ZooKeeperMain",
206+
"-server", "%s:%d" % (self.zk_host, self.zk_port),
207+
"create",
208+
"/%s" % self.zk_chroot,
209+
"kafka-python")
210+
env = self.kafka_run_class_env()
211+
proc = subprocess.Popen(args, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
217212

218213
if proc.wait() != 0:
219214
self.out("Failed to create Zookeeper chroot node")

test/service.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,6 @@
22
import re
33
import select
44
import subprocess
5-
import sys
65
import threading
76
import time
87

@@ -14,7 +13,7 @@
1413

1514
class ExternalService(object):
1615
def __init__(self, host, port):
17-
print("Using already running service at %s:%d" % (host, port))
16+
logging.info("Using already running service at %s:%d", host, port)
1817
self.host = host
1918
self.port = port
2019

@@ -26,9 +25,11 @@ def close(self):
2625

2726

2827
class SpawnedService(threading.Thread):
29-
def __init__(self, args=[], env=None):
28+
def __init__(self, args=None, env=None):
3029
threading.Thread.__init__(self)
3130

31+
if args is None:
32+
raise TypeError("args parameter is required")
3233
self.args = args
3334
self.env = env
3435
self.captured_stdout = []
@@ -49,7 +50,7 @@ def run_with_handles(self):
4950
alive = True
5051

5152
while True:
52-
(rds, wds, xds) = select.select([self.child.stdout, self.child.stderr], [], [], 1)
53+
(rds, _, _) = select.select([self.child.stdout, self.child.stderr], [], [], 1)
5354

5455
if self.child.stdout in rds:
5556
line = self.child.stdout.readline()

test/test_client.py

Lines changed: 1 addition & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,3 @@
1-
import os
2-
import random
3-
import struct
41
import unittest2
52

63
from mock import MagicMock, patch
@@ -11,9 +8,7 @@
118
TopicAndPartition, KafkaUnavailableError,
129
LeaderUnavailableError, PartitionUnavailableError
1310
)
14-
from kafka.protocol import (
15-
create_message, KafkaProtocol
16-
)
11+
from kafka.protocol import create_message
1712

1813
class TestKafkaClient(unittest2.TestCase):
1914
def test_init_with_list(self):

test/test_client_integration.py

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,11 @@
11
import os
2-
import random
32
import socket
4-
import time
53
import unittest2
64

75
import kafka
86
from kafka.common import *
9-
from fixtures import ZookeeperFixture, KafkaFixture
10-
from testutil import *
7+
from test.fixtures import ZookeeperFixture, KafkaFixture
8+
from test.testutil import *
119

1210
class TestKafkaClientIntegration(KafkaIntegrationTestCase):
1311
@classmethod
@@ -34,7 +32,7 @@ def test_timeout(self):
3432

3533
with Timer() as t:
3634
with self.assertRaises((socket.timeout, socket.error)):
37-
conn = kafka.conn.KafkaConnection("localhost", server_port, 1.0)
35+
kafka.conn.KafkaConnection("localhost", server_port, 1.0)
3836
self.assertGreaterEqual(t.interval, 1.0)
3937

4038
@kafka_versions("all")

test/test_conn.py

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -24,13 +24,13 @@ def setUp(self):
2424
self.addCleanup(patcher.stop)
2525

2626
# Also mock socket.sendall() to appear successful
27-
socket.create_connection().sendall.return_value = None
27+
self.MockCreateConn().sendall.return_value = None
2828

2929
# And mock socket.recv() to return two payloads, then '', then raise
3030
# Note that this currently ignores the num_bytes parameter to sock.recv()
3131
payload_size = len(self.config['payload'])
3232
payload2_size = len(self.config['payload2'])
33-
socket.create_connection().recv.side_effect = [
33+
self.MockCreateConn().recv.side_effect = [
3434
struct.pack('>i', payload_size),
3535
struct.pack('>%ds' % payload_size, self.config['payload']),
3636
struct.pack('>i', payload2_size),
@@ -42,7 +42,7 @@ def setUp(self):
4242
self.conn = KafkaConnection(self.config['host'], self.config['port'])
4343

4444
# Reset any mock counts caused by __init__
45-
socket.create_connection.reset_mock()
45+
self.MockCreateConn.reset_mock()
4646

4747
def test_collect_hosts__happy_path(self):
4848
hosts = "localhost:1234,localhost"
@@ -81,7 +81,7 @@ def test_send(self):
8181

8282
def test_init_creates_socket_connection(self):
8383
KafkaConnection(self.config['host'], self.config['port'])
84-
socket.create_connection.assert_called_with((self.config['host'], self.config['port']), DEFAULT_SOCKET_TIMEOUT_SECONDS)
84+
self.MockCreateConn.assert_called_with((self.config['host'], self.config['port']), DEFAULT_SOCKET_TIMEOUT_SECONDS)
8585

8686
def test_init_failure_raises_connection_error(self):
8787

@@ -102,9 +102,9 @@ def test_send__reconnects_on_dirty_conn(self):
102102
pass
103103

104104
# Now test that sending attempts to reconnect
105-
self.assertEqual(socket.create_connection.call_count, 0)
105+
self.assertEqual(self.MockCreateConn.call_count, 0)
106106
self.conn.send(self.config['request_id'], self.config['payload'])
107-
self.assertEqual(socket.create_connection.call_count, 1)
107+
self.assertEqual(self.MockCreateConn.call_count, 1)
108108

109109
def test_send__failure_sets_dirty_connection(self):
110110

@@ -131,9 +131,9 @@ def test_recv__reconnects_on_dirty_conn(self):
131131
pass
132132

133133
# Now test that recv'ing attempts to reconnect
134-
self.assertEqual(socket.create_connection.call_count, 0)
134+
self.assertEqual(self.MockCreateConn.call_count, 0)
135135
self.conn.recv(self.config['request_id'])
136-
self.assertEqual(socket.create_connection.call_count, 1)
136+
self.assertEqual(self.MockCreateConn.call_count, 1)
137137

138138
def test_recv__failure_sets_dirty_connection(self):
139139

@@ -160,5 +160,5 @@ def test_close__object_is_reusable(self):
160160
# will re-connect and send data to the socket
161161
self.conn.close()
162162
self.conn.send(self.config['request_id'], self.config['payload'])
163-
self.assertEqual(socket.create_connection.call_count, 1)
163+
self.assertEqual(self.MockCreateConn.call_count, 1)
164164
self.conn._sock.sendall.assert_called_with(self.config['payload'])

test/testutil.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -48,6 +48,7 @@ def get_open_port():
4848
class KafkaIntegrationTestCase(unittest2.TestCase):
4949
create_client = True
5050
topic = None
51+
server = None
5152

5253
def setUp(self):
5354
super(KafkaIntegrationTestCase, self).setUp()

tox.ini

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
[tox]
2-
envlist = py26, py27, pypy
2+
envlist = lint, py26, py27, pypy
33
[testenv]
44
deps =
55
unittest2
@@ -12,3 +12,9 @@ commands =
1212
nosetests {posargs:-v --with-id --with-timer --timer-top-n 10 --with-coverage --cover-erase --cover-package kafka}
1313
setenv =
1414
PROJECT_ROOT = {toxinidir}
15+
[testenv:lint]
16+
deps =
17+
unittest2
18+
mock
19+
pylint
20+
commands = pylint {posargs: -E --ignore=queue.py kafka test}

0 commit comments

Comments (0)