diff --git a/README.md b/README.md
index 00a05ccb..c068adfd 100644
--- a/README.md
+++ b/README.md
@@ -44,7 +44,7 @@ Otherwise follow directions below.
 
  * librdkafka - https://github.com/edenhill/librdkafka
  * libyajl (for JSON support, optional)
- * libavro and libserdes (for Avro support, optional. See https://github.com/confluentinc/libserdes)
+ * libavro-c and libserdes (for Avro support, optional. See https://github.com/confluentinc/libserdes)
 
 On Ubuntu or Debian: `sudo apt-get install librdkafka-dev libyajl-dev`
 
@@ -115,18 +115,30 @@ Output consumed messages in JSON envelope:
 
     $ kafkacat -b mybroker -t syslog -J
 
-Decode Avro key (`-a key`), value (`-a value`) or both (`-a .`) to JSON:
 
-    $ kafkacat -b mybroker -t ledger -a . -s http://schema-registry-url:8080
+Decode Avro key (`-s key=avro`), value (`-s value=avro`) or both (`-s avro`) to JSON using the schema from the Schema-Registry:
+
+    $ kafkacat -b mybroker -t ledger -s avro -r http://schema-registry-url:8080
+
 
 Decode Avro message value and extract Avro record's "age" field:
 
-    $ kafkacat -b mybroker -t ledger -a value -s http://schema-registry-url:8080 | jq .payload.age
+    $ kafkacat -b mybroker -t ledger -s value=avro -r http://schema-registry-url:8080 | jq .payload.age
+
+
+Decode the key as a 32-bit signed integer, and the value as a 16-bit signed integer followed by an unsigned byte and a string:
+
+    $ kafkacat -b mybroker -t mytopic -s key='i$' -s value='hB s'
+
+
+*Hint: see `./kafkacat -h` for all available deserializer options.*
+
 
 Output consumed messages according to format string:
 
     $ kafkacat -b mybroker -t syslog -f 'Topic %t[%p], offset: %o, key: %k, payload: %S bytes: %s\n'
 
+
 Read the last 100 messages from topic 'syslog' with librdkafka configuration parameter 'broker.version.fallback' set to '0.8.2.1':
 
     $ kafkacat -C -b mybroker -X broker.version.fallback=0.8.2.1 -t syslog -p 0 -o -100 -e
@@ -136,19 +148,23 @@ Produce a tombstone (a "delete" for compacted topics) for key "abc" by providing
 
     $ echo "abc:" | kafkacat -b mybroker -t mytopic -Z -K:
 
+
 Produce with headers:
 
     $ echo "hello there" | kafkacat -b mybroker -H "header1=header value" -H "nullheader" -H "emptyheader=" -H "header1=duplicateIsOk"
 
+
 Print headers in consumer:
 
     $ kafkacat -b mybroker -C -t mytopic -f 'Headers: %h: Message value: %s\n'
 
+
 Enable the idempotent producer, providing exactly-once and strict-ordering **producer** guarantees:
 
     $ kafkacat -b mybroker -X enable.idempotence=true -P -t mytopic
 ....
 
+
 Metadata listing:
 
 ````
@@ -172,10 +188,12 @@ Metadata for all topics (from broker 1: mybroker:9092/1):
  ....
 ````
 
+
 JSON metadata listing:
 
     $ kafkacat -b mybroker -L -J
 
+
 Pretty-printed JSON metadata listing:
 
     $ kafkacat -b mybroker -L -J | jq .
@@ -209,12 +227,12 @@ Here are two short examples of using kafkacat from Docker. See the [Docker Hub l
 EOF
 ```
 
-* Consume messages: 
+* Consume messages:
 
 ```
 docker run --tty --interactive --rm \
            confluentinc/cp-kafkacat \
-           kafkacat -b kafka-broker:9092 \ 
+           kafkacat -b kafka-broker:9092 \
            -C \
            -f '\nKey (%K bytes): %k\t\nValue (%S bytes): %s\nPartition: %p\tOffset: %o\n--\n' \
            -t test
diff --git a/bootstrap.sh b/bootstrap.sh
index 9ed40485..6ab2f06a 100755
--- a/bootstrap.sh
+++ b/bootstrap.sh
@@ -97,14 +97,17 @@ pushd tmp-bootstrap > /dev/null
 
 export DEST="$PWD/usr"
 export CFLAGS="-I$DEST/include"
-export LDFLAGS="-L$DEST/lib -Wl,-rpath-link=$DEST/lib"
+if [[ $(uname -s) == Linux ]]; then
+    export LDFLAGS="-L$DEST/lib -Wl,-rpath-link=$DEST/lib"
+else
+    export LDFLAGS="-L$DEST/lib"
+fi
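+# Note: -Wl,-rpath-link is specific to GNU ld; the linkers on other
+# platforms (e.g. macOS ld64) do not accept it, hence the Linux-only
+# guard above.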
 export PKG_CONFIG_PATH="$DEST/lib/pkgconfig"
 
 github_download "edenhill/librdkafka" "$LIBRDKAFKA_VERSION" "librdkafka"
-github_download "edenhill/yajl" "edenhill" "libyajl"
-
-build librdkafka "([ -f config.h ] || ./configure --prefix=$DEST) && make && make install" || (echo "Failed to build librdkafka: bootstrap failed" ; false)
+build librdkafka "([ -f config.h ] || ./configure --prefix=$DEST --enable-static --install-deps --disable-lz4-ext) && make -j && make install" || (echo "Failed to build librdkafka: bootstrap failed" ; false)
 
+github_download "edenhill/yajl" "edenhill" "libyajl"
 build libyajl "([ -d build ] || ./configure --prefix $DEST) && make distro && make install" || (echo "Failed to build libyajl: JSON support will probably be disabled" ; true)
 
 download http://www.digip.org/jansson/releases/jansson-2.12.tar.gz libjansson
diff --git a/format.c b/format.c
index 639c7648..25199ab3 100644
--- a/format.c
+++ b/format.c
@@ -27,10 +27,7 @@
  */
 
 #include "kafkacat.h"
-
-#ifndef _MSC_VER
-#include <arpa/inet.h>  /* for htonl() */
-#endif
+#include "rdendian.h"
 
 static void fmt_add (fmt_type_t type, const char *str, int len) {
         if (conf.fmt_cnt == KC_FMT_MAX_SIZE)
@@ -200,6 +197,181 @@ static int print_headers (FILE *fp, const rd_kafka_headers_t *hdrs) {
 }
 #endif
 
+
+/**
+ * @brief Check that the pack-format string is valid.
+ */
+void pack_check (const char *what, const char *fmt) {
+        const char *f = fmt;
+        static const char *valid = " <>bBhHiIqQcs$";
+
+        if (!*fmt)
+                KC_FATAL("%s pack-format must not be empty", what);
+
+        while (*f) {
+                if (!strchr(valid, (int)(*f)))
+                        KC_FATAL("Invalid token '%c' in %s pack-format, "
+                                 "see -s usage.",
+                                 (int)(*f), what);
+                f++;
+        }
+}
+
+
+/**
+ * @brief Unpack (deserialize) the data at \p buf using the
+ *        pack-format string \p fmt.
+ *        \p fmt must be a valid pack-format string.
+ *        Prints result to \p fp.
+ *
+ *        Format is inspired by Python's struct.unpack()
+ *
+ * @returns 0 on success or -1 on error.
+ */
+static int unpack (FILE *fp, const char *what, const char *fmt,
+                   const char *buf, size_t len,
+                   char *errstr, size_t errstr_size) {
+        const char *b = buf;
+        const char *end = buf+len;
+        const char *f = fmt;
+        enum {
+                big_endian,
+                little_endian
+        } endian = big_endian;
+
+#define endian_swap(val,to_little,to_big)                               \
+        (endian == big_endian ? to_big(val) : to_little(val))
+
+#define fup_copy(dst,sz) do {                                           \
+                if ((sz) > remaining) {                                 \
+                        snprintf(errstr, errstr_size,                   \
+                                 "%s truncated, expected %d bytes "     \
+                                 "to unpack %c but only %d bytes "      \
+                                 "remaining",                           \
+                                 what, (int)(sz), (int)(*f),            \
+                                 (int)remaining);                       \
+                        return -1;                                      \
+                }                                                       \
+                memcpy(dst, b, sz);                                     \
+                b += (sz);                                              \
+        } while (0)
+
+        while (*f) {
+                size_t remaining = (size_t)(end - b);
+
+                switch ((int)*f)
+                {
+                case ' ':
+                        fprintf(fp, " ");
+                        break;
+                case '<':
+                        endian = little_endian;
+                        break;
+                case '>':
+                        endian = big_endian;
+                        break;
+                case 'b':
+                {
+                        int8_t v;
+                        fup_copy(&v, sizeof(v));
+                        fprintf(fp, "%d", (int)v);
+                }
+                break;
+                case 'B':
+                {
+                        uint8_t v;
+                        fup_copy(&v, sizeof(v));
+                        fprintf(fp, "%u", (unsigned int)v);
+                }
+                break;
+                case 'h':
+                {
+                        int16_t v;
+                        fup_copy(&v, sizeof(v));
+                        v = (int16_t)endian_swap(v, le16toh, be16toh);
+                        fprintf(fp, "%hd", v);
+                }
+                break;
+                case 'H':
+                {
+                        uint16_t v;
+                        fup_copy(&v, sizeof(v));
+                        v = endian_swap(v, le16toh, be16toh);
+                        fprintf(fp, "%hu", v);
+                }
+                break;
+                case 'i':
+                {
+                        int32_t v;
+                        fup_copy(&v, sizeof(v));
+                        v = (int32_t)endian_swap(v, le32toh, be32toh);
+                        fprintf(fp, "%d", v);
+                }
+                break;
+                case 'I':
+                {
+                        uint32_t v;
+                        fup_copy(&v, sizeof(v));
+                        v = endian_swap(v, le32toh, be32toh);
+                        fprintf(fp, "%u", v);
+                }
+                break;
+                case 'q':
+                {
+                        int64_t v;
+                        fup_copy(&v, sizeof(v));
+                        v = (int64_t)endian_swap(v, le64toh, be64toh);
+                        fprintf(fp, "%"PRId64, v);
+                }
+                break;
+                case 'Q':
+                {
+                        uint64_t v;
+                        fup_copy(&v, sizeof(v));
+                        v = endian_swap(v, le64toh, be64toh);
+                        fprintf(fp, "%"PRIu64, v);
+                }
+                break;
+                case 'c':
+                {
+                        char v;
+                        /* Bounds-checked read (the raw *b dereference
+                         * could read past the end of the input). */
+                        fup_copy(&v, sizeof(v));
+                        fprintf(fp, "%c", v);
+                }
+                break;
+                case 's':
+                {
+                        fprintf(fp, "%.*s", (int)remaining, b);
+                        b += remaining;
+                }
+                break;
+                case '$':
+                {
+                        if (remaining > 0) {
+                                snprintf(errstr, errstr_size,
+                                         "expected %s end-of-input, "
+                                         "but %d bytes remaining",
+                                         what, (int)remaining);
+                                return -1;
+                        }
+                }
+                break;
+
+                default:
+                        KC_FATAL("Invalid pack-format token '%c'",
+                                 (int)*f);
+                        break;
+                }
+
+                f++;
+        }
+
+        /* Ignore remaining input bytes, if any.
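+         * (The '$' token can be used to assert that none remain.)
+         *
+         * E.g., the format string "h B s" prints the five input bytes
+         * { 0x01, 0x02, 0x03, 'h', 'i' } as "258 3 hi", big-endian
+         * being the default byte order.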
*/ + return 0; + +#undef endian_swap +#undef fup_copy +} + + + + /** * Delimited output */ @@ -223,8 +395,8 @@ static void fmt_msg_output_str (FILE *fp, case KC_FMT_KEY: if (rkmessage->key_len) { -#if ENABLE_AVRO if (conf.flags & CONF_F_FMT_AVRO_KEY) { +#if ENABLE_AVRO char *json = kc_avro_to_json( rkmessage->key, rkmessage->key_len, @@ -232,14 +404,26 @@ static void fmt_msg_output_str (FILE *fp, if (!json) { what_failed = + "Avro/Schema-registry " "key deserialization"; goto fail; } r = fprintf(fp, "%s", json); free(json); - } else +#else + KC_FATAL("NOTREACHED"); #endif + } else if (conf.pack[KC_MSG_FIELD_KEY]) { + if (unpack(fp, + "key", + conf.pack[KC_MSG_FIELD_KEY], + rkmessage->key, + rkmessage->key_len, + errstr, sizeof(errstr)) == + -1) + goto fail; + } else r = fwrite(rkmessage->key, rkmessage->key_len, 1, fp); @@ -257,8 +441,8 @@ static void fmt_msg_output_str (FILE *fp, case KC_FMT_PAYLOAD: if (rkmessage->len) { -#if ENABLE_AVRO if (conf.flags & CONF_F_FMT_AVRO_VALUE) { +#if ENABLE_AVRO char *json = kc_avro_to_json( rkmessage->payload, rkmessage->len, @@ -267,6 +451,7 @@ static void fmt_msg_output_str (FILE *fp, if (!json) { what_failed = + "Avro/Schema-registry " "message " "deserialization"; goto fail; @@ -274,8 +459,19 @@ static void fmt_msg_output_str (FILE *fp, r = fprintf(fp, "%s", json); free(json); - } else +#else + KC_FATAL("NOTREACHED"); #endif + } else if (conf.pack[KC_MSG_FIELD_VALUE]) { + if (unpack(fp, + "value", + conf.pack[KC_MSG_FIELD_VALUE], + rkmessage->payload, + rkmessage->len, + errstr, sizeof(errstr)) == + -1) + goto fail; + } else r = fwrite(rkmessage->payload, rkmessage->len, 1, fp); @@ -292,8 +488,9 @@ static void fmt_msg_output_str (FILE *fp, case KC_FMT_PAYLOAD_LEN_BINARY: /* Use -1 to indicate NULL messages */ - belen = htonl((uint32_t)(rkmessage->payload ? - (ssize_t)rkmessage->len : -1)); + belen = htobe32((uint32_t)(rkmessage->payload ? + (ssize_t)rkmessage->len : + -1)); r = fwrite(&belen, sizeof(uint32_t), 1, fp); break; @@ -360,11 +557,12 @@ static void fmt_msg_output_str (FILE *fp, fail: KC_ERROR("Failed to format message in %s [%"PRId32"] " - "at offset %"PRId64": %s: %s", + "at offset %"PRId64": %s%s%s", rd_kafka_topic_name(rkmessage->rkt), rkmessage->partition, rkmessage->offset, - what_failed, errstr); + what_failed, *what_failed ? ": " : "", + errstr); return; } diff --git a/kafkacat.c b/kafkacat.c index 6ed74737..d271db8f 100644 --- a/kafkacat.c +++ b/kafkacat.c @@ -1,7 +1,7 @@ /* * kafkacat - Apache Kafka consumer and producer * - * Copyright (c) 2014, Magnus Edenhill + * Copyright (c) 2014-2019, Magnus Edenhill * All rights reserved. 
 *
 * Redistribution and use in source and binary forms, with or without
@@ -914,7 +914,7 @@ static void RD_NORETURN usage (const char *argv0, int exitcode,
         fprintf(out,
                 "kafkacat - Apache Kafka producer and consumer tool\n"
                 "https://github.com/edenhill/kafkacat\n"
-                "Copyright (c) 2014-2017, Magnus Edenhill\n"
+                "Copyright (c) 2014-2019, Magnus Edenhill\n"
                 "Version %s (%slibrdkafka %s builtin.features=%s)\n"
                 "\n",
                 KAFKACAT_VERSION,
@@ -923,7 +923,7 @@ static void RD_NORETURN usage (const char *argv0, int exitcode,
                 "JSON, "
 #endif
 #if ENABLE_AVRO
-                "AVRO, "
+                "Avro, "
 #endif
                 ,
                 rd_kafka_version_str(), features
@@ -1006,10 +1006,34 @@ static void RD_NORETURN usage (const char *argv0, int exitcode,
 #if ENABLE_JSON
                "  -J                 Output with JSON envelope\n"
 #endif
+               "  -s key=<serdes>    Deserialize non-NULL keys using <serdes>.\n"
+               "  -s value=<serdes>  Deserialize non-NULL values using <serdes>.\n"
+               "  -s <serdes>        Deserialize non-NULL keys and values using <serdes>.\n"
+               "                     Available deserializers (<serdes>):\n"
+               "                       <pack-str> - A combination of:\n"
+               "                          <: little-endian,\n"
+               "                          >: big-endian (recommended),\n"
+               "                          b: signed 8-bit integer\n"
+               "                          B: unsigned 8-bit integer\n"
+               "                          h: signed 16-bit integer\n"
+               "                          H: unsigned 16-bit integer\n"
+               "                          i: signed 32-bit integer\n"
+               "                          I: unsigned 32-bit integer\n"
+               "                          q: signed 64-bit integer\n"
+               "                          Q: unsigned 64-bit integer\n"
+               "                          c: ASCII character\n"
+               "                          s: remaining data is string\n"
+               "                          $: match end-of-input (no more bytes\n"
+               "                             remaining or a parse error is raised).\n"
+               "                             Not including this token skips any\n"
+               "                             remaining data after the pack-str is\n"
+               "                             exhausted.\n"
 #if ENABLE_AVRO
-               "  -a .               Convert Avro keys and values to JSON (requires -s)\n"
-               "  -a key|value       Convert Avro keys or values to JSON (requires -s)\n"
-               "  -s <url>           Schema registry URL (requires -a)\n"
+               "                       avro - Avro-formatted with schema in Schema-Registry (requires -r)\n"
+               "                     E.g.: -s key=i -s value=avro - key is 32-bit integer, value is Avro.\n"
+               "                       or: -s avro - both key and value are Avro-serialized\n"
+#endif
+#if ENABLE_AVRO
+               "  -r <url>           Schema registry URL (requires avro deserializer to be used with -s)\n"
 #endif
                "  -D <delim>         Delimiter to separate messages on output\n"
                "  -K <delim>         Print message keys prefixing the message\n"
@@ -1474,13 +1498,11 @@ static void argparse (int argc, char **argv,
         char tmp_fmt[64];
         int do_conf_dump = 0;
         int conf_files_read = 0;
-#if ENABLE_AVRO
-        char *arg_a = NULL;
-#endif
+        int i;
 
         while ((opt = getopt(argc, argv,
                              ":PCG:LQt:p:b:z:o:eED:K:k:H:Od:qvF:X:c:Tuf:ZlVh"
-                             "a:s:J")) != -1) {
+                             "s:r:J")) != -1) {
                 switch (opt) {
                 case 'P':
                 case 'C':
@@ -1552,15 +1574,33 @@ static void argparse (int argc, char **argv,
 #endif
                         break;
-                case 'a':
-#if ENABLE_AVRO
-                        arg_a = optarg;
-#else
-                        KC_FATAL("This build of kafkacat lacks "
-                                 "Avro/Schema-Registry support");
-#endif
-                        break;
                 case 's':
+                {
+                        int field = -1;
+                        const char *t = optarg;
+
+                        if (!strncmp(t, "key=", strlen("key="))) {
+                                t += strlen("key=");
+                                field = KC_MSG_FIELD_KEY;
+                        } else if (!strncmp(t, "value=", strlen("value="))) {
+                                t += strlen("value=");
+                                field = KC_MSG_FIELD_VALUE;
+                        }
+
+                        if (field == -1 || field == KC_MSG_FIELD_KEY) {
+                                if (strcmp(t, "avro"))
+                                        pack_check("key", t);
+                                conf.pack[KC_MSG_FIELD_KEY] = t;
+                        }
+
+                        if (field == -1 || field == KC_MSG_FIELD_VALUE) {
+                                if (strcmp(t, "avro"))
+                                        pack_check("value", t);
+                                conf.pack[KC_MSG_FIELD_VALUE] = t;
+                        }
+                }
+                        break;
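+                /* -r <url>: Schema-Registry to fetch Avro schemas from,
+                 * used by the "avro" deserializer (see -s above). */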
+                case 'r':
 #if ENABLE_AVRO
                         if (!*optarg)
-                                KC_FATAL("-s url must not be empty");
+                                KC_FATAL("-r url must not be empty");
@@ -1720,18 +1760,61 @@ static void argparse (int argc, char **argv,
 
         fmt_init();
 
+        /*
+         * Verify serdes
+         */
+        for (i = 0 ; i < KC_MSG_FIELD_CNT ; i++) {
+                if (!conf.pack[i])
+                        continue;
+
+                if (!strchr("GC", conf.mode))
+                        KC_FATAL("-s serdes only available in the consumer");
+
+                if (!strcmp(conf.pack[i], "avro")) {
+#if !ENABLE_AVRO
+                        KC_FATAL("This build of kafkacat lacks "
+                                 "Avro/Schema-Registry support");
+#endif
+#if ENABLE_JSON
+                        /* Avro is decoded to JSON which needs to be
+                         * written verbatim to the JSON envelope when
+                         * using -J: libyajl does not support this,
+                         * but my own fork of yajl does. */
+                        if (conf.flags & CONF_F_FMT_JSON &&
+                            !json_can_emit_verbatim())
+                                KC_FATAL("This build of kafkacat lacks "
+                                         "support for emitting "
+                                         "JSON-formatted "
+                                         "message keys and values: "
+                                         "try without -J or build "
+                                         "kafkacat with yajl from "
+                                         "https://github.com/edenhill/yajl");
+#endif
+
+                        if (i == KC_MSG_FIELD_VALUE)
+                                conf.flags |= CONF_F_FMT_AVRO_VALUE;
+                        else if (i == KC_MSG_FIELD_KEY)
+                                conf.flags |= CONF_F_FMT_AVRO_KEY;
+                        continue;
+                }
+        }
+
+
         /*
          * Verify and initialize Avro/SR
          */
 #if ENABLE_AVRO
-        if (!conf.schema_registry_url != !arg_a)
-                KC_FATAL("-s requires -a and vice-versa");
+        if (!!conf.schema_registry_url !=
+            !!(conf.flags & (CONF_F_FMT_AVRO_VALUE|CONF_F_FMT_AVRO_KEY)))
+                KC_FATAL("-r requires -s avro and vice-versa");
 
-        if (arg_a) {
-                char *key_schema_name = NULL, *key_schema_path = NULL;
-                char *value_schema_name = NULL, *value_schema_path = NULL;
+        if (conf.schema_registry_url) {
                 char *t;
 
+                if (!strchr("GC", conf.mode))
+                        KC_FATAL("Schema-registry support is only available "
+                                 "in the consumer");
+
                 /* Trim trailing slashes from URL to avoid 404 */
                 t = &conf.schema_registry_url[strlen(conf.
                                                      schema_registry_url)-1];
@@ -1745,107 +1828,9 @@ static void argparse (int argc, char **argv,
                          errstr, sizeof(errstr)) == -1)
                 KC_FATAL("%s", errstr);
 
-        /* Check that both -s and -a are configured */
-        if (strchr("GC", conf.mode)) {
-                if (!strcmp(arg_a, ".") || !strcmp(arg_a, "key,value"))
-                        conf.flags |= CONF_F_FMT_AVRO_KEY |
-                                CONF_F_FMT_AVRO_VALUE;
-                else if (!strcmp(arg_a, "key"))
-                        conf.flags |= CONF_F_FMT_AVRO_KEY;
-                else if (!strcmp(arg_a, "value"))
-                        conf.flags |= CONF_F_FMT_AVRO_VALUE;
-                else
-                        KC_FATAL("Unknown message field: %s: expected "
-                                 "-a ., -a key, -a value, or "
-                                 "-a key,value", arg_a);
-
-                if (conf.flags &
-                    (CONF_F_FMT_AVRO_KEY|CONF_F_FMT_AVRO_VALUE)) {
-
-#if ENABLE_JSON
-                        /* Avro is decoded to JSON which needs to be
-                         * written verbatim to the JSON envelope when
-                         * using -J: libyajl does not support this,
-                         * but my own fork of yajl does.
*/ - if (conf.flags & CONF_F_FMT_JSON && - !json_can_emit_verbatim()) - KC_FATAL("This build of kafkacat lacks " - "support for emitting " - "JSON-formatted " - "message keys and values: " - "try without -J or build " - "kafkacat with yajl from " - "https://github.com/edenhill/yajl"); -#endif - } - -#if 0 /* FIXME: Avro Producer support */ - } else if (strchr("P", conf.mode)) { - char *s; - -#if ENABLE_JSON - if (!(conf.flags & CONF_F_FMT_JSON)) -#endif - KC_FATAL("Producing with Avro serialization " - "requires JSON-formatted " - "input messages (-J)"); - - /* Parse -a [key=path,][value=path] */ - s = arg_a; - while (*s) { - char *next, *t, *t2 = NULL; - - next = strchr(s, ','); - if (!next) - next = s + strlen(s); - else { - *next = '\0'; - next++; - } - - t = strchr(s, '='); - if (t) { - *t = '\0'; - t++; - - t2 = strchr(t, ':'); - if (t2) { - *t2 = '\0'; - t2++; - } - } - - if (!*s || !t || !*t || !t2 || !*t2 || - !(!strcmp(s, "key") || !strcmp(s, "value"))) - KC_FATAL("Malformed -a: expected " - "-a key=name:path," - "name:value=path, " - "-a key=name:path " - "or -a value=name:path"); - - if (!strcmp(s, "key")) { - key_schema_name = t; - key_schema_path = t2; - }else if (!strcmp(s, "value")) { - value_schema_name = t; - value_schema_path = t2; - } else - KC_FATAL("Malformed -a: %s=%s:%s", - s, t, t2); /* NOTREACHED */ - - s = next; - } -#endif - } else { - KC_FATAL("-a and -s are only relevant " - "in consumer modes"); - } /* Initialize Avro/Schema-Registry client */ - kc_avro_init(key_schema_name, - key_schema_path, - value_schema_name, - value_schema_path); + kc_avro_init(NULL, NULL, NULL, NULL); } #endif diff --git a/kafkacat.h b/kafkacat.h index 28906618..c7a1c11c 100644 --- a/kafkacat.h +++ b/kafkacat.h @@ -1,7 +1,7 @@ /* * kafkacat - Apache Kafka consumer and producer * - * Copyright (c) 2015, Magnus Edenhill + * Copyright (c) 2015-2019, Magnus Edenhill * All rights reserved. * * Redistribution and use in source and binary forms, with or without @@ -76,6 +76,12 @@ typedef enum { #define KC_FMT_MAX_SIZE 128 +typedef enum { + KC_MSG_FIELD_VALUE, + KC_MSG_FIELD_KEY, + KC_MSG_FIELD_CNT +} kc_msg_field_t; + struct conf { int run; int verbosity; @@ -104,6 +110,11 @@ struct conf { int str_len; } fmt[KC_FMT_MAX_SIZE]; int fmt_cnt; + + /**< Producer: per-field pack-format, see pack() + * Consumer: per-field unpack-format, see unpack(). */ + const char *pack[KC_MSG_FIELD_CNT]; + int msg_size; char *brokers; char *topic; @@ -156,6 +167,8 @@ void error0 (int erroronexit, const char *func, int line, /* * format.c */ +void pack_check (const char *what, const char *fmt); + void fmt_msg_output (FILE *fp, const rd_kafka_message_t *rkmessage); void fmt_parse (const char *fmt); diff --git a/rdendian.h b/rdendian.h new file mode 100644 index 00000000..0ab0a007 --- /dev/null +++ b/rdendian.h @@ -0,0 +1,177 @@ +/* + * librdkafka - Apache Kafka C library + * + * Copyright (c) 2012-2015 Magnus Edenhill + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. 
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+#ifndef _RDENDIAN_H_
+#define _RDENDIAN_H_
+
+/**
+ * Provides portable endian-swapping macros/functions.
+ *
+ *   be64toh()
+ *   htobe64()
+ *   be32toh()
+ *   htobe32()
+ *   be16toh()
+ *   htobe16()
+ *   le64toh()
+ */
+
+#ifdef __FreeBSD__
+  #include <sys/endian.h>
+#elif defined __GLIBC__
+  #include <endian.h>
+  #ifndef be64toh
+  /* Support older glibc (<2.9) which lack be64toh */
+  #include <byteswap.h>
+  #if __BYTE_ORDER == __BIG_ENDIAN
+   #define be16toh(x) (x)
+   #define be32toh(x) (x)
+   #define be64toh(x) (x)
+   #define le64toh(x) __bswap_64 (x)
+   #define le32toh(x) __bswap_32 (x)
+  #else
+   #define be16toh(x) __bswap_16 (x)
+   #define be32toh(x) __bswap_32 (x)
+   #define be64toh(x) __bswap_64 (x)
+   #define le64toh(x) (x)
+   #define le32toh(x) (x)
+  #endif
+ #endif
+
+#elif defined __CYGWIN__
+ #include <endian.h>
+#elif defined __BSD__
+  #include <sys/endian.h>
+#elif defined __sun
+  #include <sys/byteorder.h>
+  #include <sys/isa_defs.h>
+#define __LITTLE_ENDIAN 1234
+#define __BIG_ENDIAN 4321
+#ifdef _BIG_ENDIAN
+#define __BYTE_ORDER __BIG_ENDIAN
+#define be64toh(x) (x)
+#define be32toh(x) (x)
+#define be16toh(x) (x)
+#define le16toh(x) ((uint16_t)BSWAP_16(x))
+#define le32toh(x) BSWAP_32(x)
+#define le64toh(x) BSWAP_64(x)
+# else
+#define __BYTE_ORDER __LITTLE_ENDIAN
+#define be64toh(x) BSWAP_64(x)
+#define be32toh(x) ntohl(x)
+#define be16toh(x) ntohs(x)
+#define le16toh(x) (x)
+#define le32toh(x) (x)
+#define le64toh(x) (x)
+#define htole16(x) (x)
+#define htole64(x) (x)
+#endif /* __sun */
+
+#elif defined __APPLE__
+  #include <machine/endian.h>
+  #include <libkern/OSByteOrder.h>
+#if __DARWIN_BYTE_ORDER == __DARWIN_BIG_ENDIAN
+#define be64toh(x) (x)
+#define be32toh(x) (x)
+#define be16toh(x) (x)
+#define le16toh(x) OSSwapInt16(x)
+#define le32toh(x) OSSwapInt32(x)
+#define le64toh(x) OSSwapInt64(x)
+#else
+#define be64toh(x) OSSwapInt64(x)
+#define be32toh(x) OSSwapInt32(x)
+#define be16toh(x) OSSwapInt16(x)
+#define le16toh(x) (x)
+#define le32toh(x) (x)
+#define le64toh(x) (x)
+#endif
+
+#elif defined(_MSC_VER)
+#include <intrin.h>
+
+#define be64toh(x) _byteswap_uint64(x)
+#define be32toh(x) _byteswap_ulong(x)
+#define be16toh(x) _byteswap_ushort(x)
+#define le16toh(x) (x)
+#define le32toh(x) (x)
+#define le64toh(x) (x)
+
+#elif defined _AIX      /* AIX is always big endian */
+#define be64toh(x) (x)
+#define be32toh(x) (x)
+#define be16toh(x) (x)
+#define le32toh(x)                              \
+        ((((x) & 0xff) << 24) |                 \
+         (((x) & 0xff00) << 8) |                \
+         (((x) & 0xff0000) >> 8) |              \
+         (((x) & 0xff000000) >> 24))
+#define le64toh(x)                                              \
+        ((((x) & 0x00000000000000ffL) << 56) |                  \
+         (((x) & 0x000000000000ff00L) << 40) |                  \
+         (((x) & 0x0000000000ff0000L) << 24) |                  \
+         (((x) & 0x00000000ff000000L) << 8) |                   \
+         (((x) & 0x000000ff00000000L) >> 8) |                   \
+         (((x) & 0x0000ff0000000000L) >> 24) |                  \
+         (((x) & 0x00ff000000000000L) >> 40) |                  \
+         (((x) & 0xff00000000000000L) >> 56))
+#else
+ #include <endian.h>
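+ /* The <endian.h> above is a generic fallback; the be64toh check
+  * below catches platforms where it lacks the expected macros. */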
+#endif + + + +/* + * On Solaris, be64toh is a function, not a macro, so there's no need to error + * if it's not defined. + */ +#if !defined(__sun) && !defined(be64toh) +#error Missing definition for be64toh +#endif + +#ifndef be32toh +#define be32toh(x) ntohl(x) +#endif + +#ifndef be16toh +#define be16toh(x) ntohs(x) +#endif + +#ifndef htobe64 +#define htobe64(x) be64toh(x) +#endif +#ifndef htobe32 +#define htobe32(x) be32toh(x) +#endif +#ifndef htobe16 +#define htobe16(x) be16toh(x) +#endif + +#ifndef htole32 +#define htole32(x) le32toh(x) +#endif + +#endif /* _RDENDIAN_H_ */
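
---

To illustrate how the `-s` pack-format added by this patch maps onto raw bytes, here is a small standalone sketch. It is hypothetical and not part of the patch (the file name `decode_demo.c` is made up, and it uses POSIX `ntohs()` rather than the `rdendian.h` macros so it builds outside this tree). It decodes the same five-byte layout that `kafkacat ... -s value='h B s'` would print as `258 3 hi`:

```c
/* decode_demo.c -- standalone illustration, not part of kafkacat.
 * Decodes { int16 (big-endian), uint8, trailing string }, i.e. the
 * byte layout matched by the pack-format string "h B s". */
#include <stdio.h>
#include <stdint.h>
#include <string.h>
#include <arpa/inet.h>  /* ntohs(): big-endian to host, like be16toh() */

int main (void) {
        const unsigned char buf[] = { 0x01, 0x02, 0x03, 'h', 'i' };
        uint16_t h;

        memcpy(&h, buf, sizeof(h));               /* 'h': signed 16-bit */
        printf("%hd ", (int16_t)ntohs(h));        /* -> 258 */
        printf("%u ", (unsigned)buf[2]);          /* 'B': unsigned 8-bit -> 3 */
        printf("%.2s\n", (const char *)&buf[3]);  /* 's': remaining -> "hi" */
        return 0;
}
```

Compile with `cc decode_demo.c` and it prints `258 3 hi`, matching what `unpack()` in format.c produces for the same input.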