1 /**
2 * collectd - src/write_kafka.c
3 * Copyright (C) 2014 Pierre-Yves Ritschard
4 *
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
11 *
12 * The above copyright notice and this permission notice shall be included in
13 * all copies or substantial portions of the Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21 * DEALINGS IN THE SOFTWARE.
22 *
23 * Authors:
24 * Pierre-Yves Ritschard <pyr at spootnik.org>
25 */
27 #include "collectd.h"
28 #include "plugin.h"
29 #include "common.h"
30 #include "configfile.h"
31 #include "utils_cache.h"
32 #include "utils_cmd_putval.h"
33 #include "utils_format_graphite.h"
34 #include "utils_format_json.h"
35 #include "utils_crc32.h"
37 #include <sys/types.h>
38 #include <librdkafka/rdkafka.h>
39 #include <pthread.h>
40 #include <zlib.h>
/*
 * Per-topic state, one instance per <Topic> configuration block.
 * Owned by the write callback's user_data and released through
 * kafka_topic_context_free().
 */
struct kafka_topic_context {
#define KAFKA_FORMAT_JSON 0
#define KAFKA_FORMAT_COMMAND 1
#define KAFKA_FORMAT_GRAPHITE 2
    u_int8_t format;               /* one of the KAFKA_FORMAT_* values above */
    unsigned int graphite_flags;   /* GRAPHITE_* bit flags for format_graphite() */
    _Bool store_rates;             /* convert counters to rates in JSON output */
    rd_kafka_topic_conf_t *conf;   /* topic config; NULL once handed to rd_kafka_topic_new() */
    rd_kafka_topic_t *topic;       /* librdkafka topic handle used by kafka_write() */
    rd_kafka_t *kafka;             /* producer handle this topic was created on */
    int has_key;                   /* non-zero: use the fixed partition key below */
    u_int32_t key;                 /* CRC32 of the configured "Key" option */
    char *prefix;                  /* graphite metric-name prefix (may be NULL) */
    char *postfix;                 /* graphite metric-name postfix (may be NULL) */
    char escape_char;              /* graphite escape character, defaults to '.' */
    char *topic_name;              /* kafka topic to publish to (strdup'd) */
};
60 static int kafka_write(const data_set_t *, const value_list_t *, user_data_t *);
61 static int32_t kafka_partition(const rd_kafka_topic_t *, const void *, size_t,
62 int32_t, void *, void *);
63 static void kafka_log(const rd_kafka_t *, int, const char *, const char *);
65 static void kafka_log(const rd_kafka_t *rkt, int level,
66 const char *fac, const char *msg)
67 {
68 plugin_log(level, "%s", msg);
69 }
71 static int32_t kafka_partition(const rd_kafka_topic_t *rkt,
72 const void *keydata, size_t keylen,
73 int32_t partition_cnt, void *p, void *m)
74 {
75 u_int32_t key = *((u_int32_t *)keydata );
77 return key % partition_cnt;
78 }
80 static int kafka_write(const data_set_t *ds, /* {{{ */
81 const value_list_t *vl,
82 user_data_t *ud)
83 {
84 int status = 0;
85 u_int32_t key;
86 char buffer[8192];
87 size_t bfree = sizeof(buffer);
88 size_t bfill = 0;
89 size_t blen = 0;
90 struct kafka_topic_context *ctx = ud->data;
92 if ((ds == NULL) || (vl == NULL) || (ctx == NULL))
93 return EINVAL;
95 bzero(buffer, sizeof(buffer));
97 switch (ctx->format) {
98 case KAFKA_FORMAT_COMMAND:
99 status = create_putval(buffer, sizeof(buffer), ds, vl);
100 if (status != 0) {
101 ERROR("write_kafka plugin: create_putval failed with status %i.",
102 status);
103 return status;
104 }
105 blen = strlen(buffer);
106 break;
107 case KAFKA_FORMAT_JSON:
109 format_json_initialize(buffer, &bfill, &bfree);
110 format_json_value_list(buffer, &bfill, &bfree, ds, vl,
111 ctx->store_rates);
112 format_json_finalize(buffer, &bfill, &bfree);
113 blen = strlen(buffer);
114 break;
115 case KAFKA_FORMAT_GRAPHITE:
116 status = format_graphite(buffer, sizeof(buffer), ds, vl,
117 ctx->prefix, ctx->postfix, ctx->escape_char,
118 ctx->graphite_flags);
119 if (status != 0) {
120 ERROR("write_kafka plugin: format_graphite failed with status %i.",
121 status);
122 return status;
123 }
124 blen = strlen(buffer);
125 break;
126 default:
127 ERROR("write_kafka plugin: invalid format %i.", ctx->format);
128 return -1;
129 }
131 /*
132 * We partition our stream by metric name
133 */
134 if (ctx->has_key)
135 key = ctx->key;
136 else
137 key = rand();
139 rd_kafka_produce(ctx->topic, RD_KAFKA_PARTITION_UA,
140 RD_KAFKA_MSG_F_COPY, buffer, blen,
141 &key, sizeof(key), NULL);
143 return status;
144 } /* }}} int kafka_write */
146 static void kafka_topic_context_free(void *p) /* {{{ */
147 {
148 struct kafka_topic_context *ctx = p;
150 if (ctx == NULL)
151 return;
153 if (ctx->topic_name != NULL)
154 sfree(ctx->topic_name);
155 if (ctx->topic != NULL)
156 rd_kafka_topic_destroy(ctx->topic);
157 if (ctx->conf != NULL)
158 rd_kafka_topic_conf_destroy(ctx->conf);
160 sfree(ctx);
161 } /* }}} void kafka_topic_context_free */
163 static void kafka_config_topic(rd_kafka_conf_t *conf, oconfig_item_t *ci) /* {{{ */
164 {
165 int status;
166 int i;
167 struct kafka_topic_context *tctx;
168 char *key = NULL;
169 char *val;
170 char callback_name[DATA_MAX_NAME_LEN];
171 char errbuf[1024];
172 user_data_t ud;
173 oconfig_item_t *child;
174 rd_kafka_conf_res_t ret;
176 if ((tctx = calloc(1, sizeof (*tctx))) == NULL) {
177 ERROR ("write_kafka plugin: calloc failed.");
178 return;
179 }
181 tctx->escape_char = '.';
182 tctx->store_rates = 1;
183 tctx->format = KAFKA_FORMAT_JSON;
185 rd_kafka_conf_set_log_cb(conf, kafka_log);
186 if ((tctx->kafka = rd_kafka_new(RD_KAFKA_PRODUCER, conf,
187 errbuf, sizeof(errbuf))) == NULL) {
188 sfree(tctx);
189 ERROR("write_kafka plugin: cannot create kafka handle.");
190 return;
191 }
192 conf = NULL;
194 if ((tctx->conf = rd_kafka_topic_conf_new()) == NULL) {
195 rd_kafka_destroy(tctx->kafka);
196 sfree(tctx);
197 ERROR ("write_kafka plugin: cannot create topic configuration.");
198 return;
199 }
201 if (ci->values_num != 1) {
202 WARNING("kafka topic name needed.");
203 goto errout;
204 }
206 if (ci->values[0].type != OCONFIG_TYPE_STRING) {
207 WARNING("kafka topic needs a string argument.");
208 goto errout;
209 }
211 if ((tctx->topic_name = strdup(ci->values[0].value.string)) == NULL) {
212 ERROR("write_kafka plugin: cannot copy topic name.");
213 goto errout;
214 }
216 for (i = 0; i < ci->children_num; i++) {
217 /*
218 * The code here could be simplified but makes room
219 * for easy adding of new options later on.
220 */
221 child = &ci->children[i];
222 status = 0;
224 if (strcasecmp ("Property", child->key) == 0) {
225 if (child->values_num != 2) {
226 WARNING("kafka properties need both a key and a value.");
227 goto errout;
228 }
229 if (child->values[0].type != OCONFIG_TYPE_STRING ||
230 child->values[1].type != OCONFIG_TYPE_STRING) {
231 WARNING("kafka properties needs string arguments.");
232 goto errout;
233 }
234 key = child->values[0].value.string;
235 val = child->values[0].value.string;
236 ret = rd_kafka_topic_conf_set(tctx->conf,key, val,
237 errbuf, sizeof(errbuf));
238 if (ret != RD_KAFKA_CONF_OK) {
239 WARNING("cannot set kafka topic property %s to %s: %s.",
240 key, val, errbuf);
241 goto errout;
242 }
244 } else if (strcasecmp ("Key", child->key) == 0) {
245 char *tmp_buf = NULL;
246 status = cf_util_get_string(child, &tmp_buf);
247 if (status != 0) {
248 WARNING("write_kafka plugin: invalid key supplied");
249 break;
250 }
252 if (strcasecmp(tmp_buf, "Random") != 0) {
253 tctx->has_key = 1;
254 tctx->key = crc32_buffer((u_char *)tmp_buf, strlen(tmp_buf));
255 }
256 sfree(tmp_buf);
258 } else if (strcasecmp ("Format", child->key) == 0) {
259 status = cf_util_get_string(child, &key);
260 if (status != 0)
261 goto errout;
263 assert(key != NULL);
265 if (strcasecmp(key, "Command") == 0) {
266 tctx->format = KAFKA_FORMAT_COMMAND;
268 } else if (strcasecmp(key, "Graphite") == 0) {
269 tctx->format = KAFKA_FORMAT_GRAPHITE;
271 } else if (strcasecmp(key, "Json") == 0) {
272 tctx->format = KAFKA_FORMAT_JSON;
274 } else {
275 WARNING ("write_kafka plugin: Invalid format string: %s",
276 key);
277 }
279 sfree(key);
281 } else if (strcasecmp ("StoreRates", child->key) == 0) {
282 status = cf_util_get_boolean (child, &tctx->store_rates);
283 (void) cf_util_get_flag (child, &tctx->graphite_flags,
284 GRAPHITE_STORE_RATES);
286 } else if (strcasecmp ("GraphiteSeparateInstances", child->key) == 0) {
287 status = cf_util_get_flag (child, &tctx->graphite_flags,
288 GRAPHITE_SEPARATE_INSTANCES);
290 } else if (strcasecmp ("GraphiteAlwaysAppendDS", child->key) == 0) {
291 status = cf_util_get_flag (child, &tctx->graphite_flags,
292 GRAPHITE_ALWAYS_APPEND_DS);
294 } else if (strcasecmp ("GraphitePrefix", child->key) == 0) {
295 status = cf_util_get_string (child, &tctx->prefix);
296 } else if (strcasecmp ("GraphitePostfix", child->key) == 0) {
297 status = cf_util_get_string (child, &tctx->postfix);
298 } else if (strcasecmp ("GraphiteEscapeChar", child->key) == 0) {
299 char *tmp_buff = NULL;
300 status = cf_util_get_string (child, &tmp_buff);
301 if (strlen (tmp_buff) > 1)
302 WARNING ("write_kafka plugin: The option \"GraphiteEscapeChar\" handles "
303 "only one character. Others will be ignored.");
304 tctx->escape_char = tmp_buff[0];
305 sfree (tmp_buff);
306 } else {
307 WARNING ("write_kafka plugin: Invalid directive: %s.", child->key);
308 }
310 if (status != 0)
311 break;
312 }
314 rd_kafka_topic_conf_set_partitioner_cb(tctx->conf, kafka_partition);
315 rd_kafka_topic_conf_set_opaque(tctx->conf, tctx);
317 if ((tctx->topic = rd_kafka_topic_new(tctx->kafka, tctx->topic_name,
318 tctx->conf)) == NULL) {
319 ERROR("write_kafka plugin: cannot create topic.");
320 goto errout;
321 }
322 tctx->conf = NULL;
324 ssnprintf(callback_name, sizeof(callback_name),
325 "write_kafka/%s", tctx->topic_name);
327 ud.data = tctx;
328 ud.free_func = kafka_topic_context_free;
330 status = plugin_register_write (callback_name, kafka_write, &ud);
331 if (status != 0) {
332 WARNING ("write_kafka plugin: plugin_register_write (\"%s\") "
333 "failed with status %i.",
334 callback_name, status);
335 goto errout;
336 }
337 return;
338 errout:
339 if (conf != NULL)
340 rd_kafka_conf_destroy(conf);
341 if (tctx->kafka != NULL)
342 rd_kafka_destroy(tctx->kafka);
343 if (tctx->topic != NULL)
344 rd_kafka_topic_destroy(tctx->topic);
345 if (tctx->topic_name != NULL)
346 free(tctx->topic_name);
347 if (tctx->conf != NULL)
348 rd_kafka_topic_conf_destroy(tctx->conf);
349 sfree(tctx);
350 } /* }}} int kafka_config_topic */
352 static int kafka_config(oconfig_item_t *ci) /* {{{ */
353 {
354 int i;
355 oconfig_item_t *child;
356 rd_kafka_conf_t *conf;
357 rd_kafka_conf_t *cloned;
358 rd_kafka_conf_res_t ret;
359 char errbuf[1024];
361 if ((conf = rd_kafka_conf_new()) == NULL) {
362 WARNING("cannot allocate kafka configuration.");
363 return -1;
364 }
366 for (i = 0; i < ci->children_num; i++) {
367 child = &ci->children[i];
369 if (strcasecmp("Topic", child->key) == 0) {
370 if ((cloned = rd_kafka_conf_dup(conf)) == NULL) {
371 WARNING("write_kafka plugin: cannot allocate memory for kafka config");
372 goto errout;
373 }
374 kafka_config_topic (cloned, child);
375 } else if (strcasecmp(child->key, "Property") == 0) {
376 char *key = NULL;
377 char *val = NULL;
379 if (child->values_num != 2) {
380 WARNING("kafka properties need both a key and a value.");
381 goto errout;
382 }
383 if (child->values[0].type != OCONFIG_TYPE_STRING ||
384 child->values[1].type != OCONFIG_TYPE_STRING) {
385 WARNING("kafka properties needs string arguments.");
386 goto errout;
387 }
388 if ((key = strdup(child->values[0].value.string)) == NULL) {
389 WARNING("cannot allocate memory for attribute key.");
390 goto errout;
391 }
392 if ((val = strdup(child->values[1].value.string)) == NULL) {
393 WARNING("cannot allocate memory for attribute value.");
394 goto errout;
395 }
396 ret = rd_kafka_conf_set(conf, key, val, errbuf, sizeof(errbuf));
397 if (ret != RD_KAFKA_CONF_OK) {
398 WARNING("cannot set kafka property %s to %s: %s",
399 key, val, errbuf);
400 goto errout;
401 }
402 sfree(key);
403 sfree(val);
404 } else {
405 WARNING ("write_kafka plugin: Ignoring unknown "
406 "configuration option \"%s\" at top level.",
407 child->key);
408 }
409 }
410 if (conf != NULL)
411 rd_kafka_conf_destroy(conf);
412 return (0);
413 errout:
414 if (conf != NULL)
415 rd_kafka_conf_destroy(conf);
416 return -1;
417 } /* }}} int kafka_config */
419 void module_register(void)
420 {
421 plugin_register_complex_config ("write_kafka", kafka_config);
422 }
424 /* vim: set sw=8 sts=8 ts=8 noet : */