# See https://docs.confluent.io/current/kafka-rest/docs/intro.html#produce-and-consume-avro-messages
# Create consumer
-response <- POST(url=paste(kafka_rest_proxy, "consumers", consumer, sep="/"),
- content_type("application/vnd.kafka.v2+json"),
- accept("application/vnd.kafka.v2+json"),
- body=paste0('{"name": "',
- consumer_instance,
- '", "format": "json", "auto.offset.reset": "earliest"}')
- )
-consumerDetails <- fromJSON(content(response, "text"))
+source("R/CreateKafkaConsumer.R")
+consumerDetails <- CreateKafkaConsumer(kafka.rest.proxy = kafka_rest_proxy, consumer = consumer, consumer_instance = consumer_instance)
# Subscribe it to topic
-response <- POST(url=paste(consumerDetails$base_uri,
- "subscription", sep="/"),
- content_type("application/vnd.kafka.v2+json"),
- body = paste0('{"topics":["',
- topic,
- '"]}')
- )
-response
+source("R/SubscribeKafkaTopic.R")
+response <- SubscribeKafkaTopic(consumerDetails$base_uri, topic = topic)
# Obtain all (or latest) messages on the topic
-messagesJSON <- GET(url = paste(consumerDetails$base_uri,"records", sep = "/"),
- accept("application/vnd.kafka.json.v2+json"),
- encode="json")
-
-messages <- fromJSON(content(messagesJSON,"text"))
+source("R/ConsumeKafkaMessages.R")
+messages <- ConsumeKafkaMessages(consumerDetails$base_uri)
createPlot(messages$value)
createPlot(apachelog)
# Remove the consumer
-DELETE(url = consumerDetails$base_uri,
- content_type("application/vnd.kafka.v2+json"))
+source("R/DestroyKafkaConsumer.R")
+response <- DestroyKafkaConsumer(consumer.base.uri = consumerDetails$base_uri)