What can you build with Splunk HTTP Event Collector and Kafka?
Prebuilt actions for the Splunk HTTP Event Collector cover the common operations:
- Check health status: confirms the Splunk HTTP Event Collector is available and ready to receive events (a hedged sketch follows this list).
- Send batch events: sends multiple events to the Splunk HTTP Event Collector in a single request (a sketch follows the single-event component below).
- Send event: sends a single event to the Splunk HTTP Event Collector (shown in the first component below).
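Health can be verified before sending any traffic. Below is a minimal sketch, assuming the Splunk HEC health endpoint at /services/collector/health described in Splunk's documentation; everything else mirrors the components on this page:

import { axios } from "@pipedream/platform"
export default defineComponent({
  props: {
    splunk_http_event_collector: {
      type: "app",
      app: "splunk_http_event_collector",
    }
  },
  async run({ steps, $ }) {
    // GET /services/collector/health reports whether HEC is available and
    // ready to receive events (assumption: your HEC instance exposes the
    // documented health endpoint at the same base URL and port)
    return await axios($, {
      url: `${this.splunk_http_event_collector.$auth.api_url}:${this.splunk_http_event_collector.$auth.port}/services/collector/health`,
    })
  },
})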
To send a single event, the component below POSTs to the /services/collector endpoint with the HEC token in the Authorization header:

import { axios } from "@pipedream/platform"
export default defineComponent({
  props: {
    splunk_http_event_collector: {
      type: "app",
      app: "splunk_http_event_collector",
    }
  },
  async run({ steps, $ }) {
    // The event payload: `event` is the data to index, `sourcetype` tags it
    const data = {
      "event": `Hello world!`,
      "sourcetype": `manual`,
    }
    return await axios($, {
      method: "post",
      url: `${this.splunk_http_event_collector.$auth.api_url}:${this.splunk_http_event_collector.$auth.port}/services/collector`,
      headers: {
        // HEC expects the token in the form "Splunk <token>"
        "authorization": `Splunk ${this.splunk_http_event_collector.$auth.api_token}`,
      },
      params: {
        // A channel GUID, required when indexer acknowledgment is enabled
        channel: `2AC79941-CB26-421C-8826-F57AE23E9702`,
      },
      data,
    })
  },
})
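The same /services/collector endpoint also accepts batches. Here is a minimal sketch, assuming Splunk's documented batching format, in which multiple JSON event objects are concatenated into a single request body (the two sample events are placeholders):

import { axios } from "@pipedream/platform"
export default defineComponent({
  props: {
    splunk_http_event_collector: {
      type: "app",
      app: "splunk_http_event_collector",
    }
  },
  async run({ steps, $ }) {
    const events = [
      { event: `first event`, sourcetype: `manual` },
      { event: `second event`, sourcetype: `manual` },
    ]
    // HEC batching: one JSON object per event, concatenated in the body
    const data = events.map((e) => JSON.stringify(e)).join("\n")
    return await axios($, {
      method: "post",
      url: `${this.splunk_http_event_collector.$auth.api_url}:${this.splunk_http_event_collector.$auth.port}/services/collector`,
      headers: {
        "authorization": `Splunk ${this.splunk_http_event_collector.$auth.api_token}`,
      },
      data,
    })
  },
})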
The Kafka component below uses KafkaJS to produce a test message and read it back with a consumer in the same step:

import { Kafka } from "kafkajs"
export default defineComponent({
  props: {
    kafka: {
      type: "app",
      app: "kafka",
    }
  },
  async run({ steps, $ }) {
    // Connect using the host and port configured on the Kafka app
    const kafka = new Kafka({
      brokers: [`${this.kafka.$auth.host}:${this.kafka.$auth.port}`],
    });

    // Start a consumer on the topic, reading from the earliest offset
    const consumer = kafka.consumer({ groupId: "TestGroup" });
    await consumer.connect();
    await consumer.subscribe({ topic: "SampleTopic", fromBeginning: true });

    // consumer.run() is non-blocking: capture the first message in a
    // promise so the step can wait for it after producing
    let resolveMessage;
    const messageReceived = new Promise((resolve) => {
      resolveMessage = resolve;
    });
    await consumer.run({
      eachMessage: async ({ topic, partition, message }) => {
        resolveMessage(message.value.toString());
      },
    });

    // Produce a message to the same topic
    const producer = kafka.producer();
    await producer.connect();
    await producer.send({
      topic: "SampleTopic",
      messages: [
        { value: "Welcome KafkaJS + Pipedream users! " + new Date().toISOString() },
      ],
    });
    await producer.disconnect();

    // Wait until the consumer has actually processed a message,
    // then fetch metadata about the consumer group
    const consumedMessage = await messageReceived;
    const data = await consumer.describeGroup();
    await consumer.disconnect();
    return { consumedMessage, groupDescription: data };
  },
})
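Because consumer.run() returns before any message is processed, the component hands the consumed value back through a promise rather than reading it immediately after producing; otherwise the step could return an empty result. In a real workflow you would typically separate producing and consuming into different steps, or use a dedicated event source for the consumer side.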