Skip to content

Commit c1dd525

Browse files
committed
collector.py: import json and add more logging for the user
These changes were brought up by @freemanlutsk on GH while testing the new Kafka client code. Change-Id: I812b3ddb108fa849f8a178984b0e20b3a6e536e7 GH: #11
1 parent 93eaecd commit c1dd525

File tree

1 file changed

+10
-4
lines changed

1 file changed

+10
-4
lines changed

druid_exporter/collector.py

+10-4
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import json
 import logging
 import queue
 import threading
@@ -56,10 +57,15 @@ def __init__(self, metrics_config, kafka_config=None):
         # the HTTP server uses. In this way the exporter allows a mixed
         # configuration for Druid Brokers between HTTPEmitter and
         # KafkaEmitter (for daemons emitting too many datapoints/s).
-        if kafka_config and KafkaConsumer:
-            threading.Thread(
-                target=self.pull_datapoints_from_kafka,
-                args=(kafka_config,self.stop_threads)).start()
+        if kafka_config:
+            if KafkaConsumer:
+                threading.Thread(
+                    target=self.pull_datapoints_from_kafka,
+                    args=(kafka_config, self.stop_threads)).start()
+            else:
+                log.error('A Kafka configuration was provided, but it seems '
+                          'that the Kafka client library is not available. '
+                          'Please install the correct dependencies.')

         # Datapoints successfully registered
         self.datapoints_registered = 0

0 commit comments

Comments (0)