From 532045cce3daf1cbe9ec16fc9c26f8c39fa2c35d Mon Sep 17 00:00:00 2001 From: Vaibhav Date: Mon, 30 Mar 2020 16:20:37 -0400 Subject: [PATCH] docs: added kafka trigger (#567) * fix: tests --- docs/triggers/kafka-trigger.md | 71 ++++++++++++++++++++++++++++++ mkdocs.yml | 1 + sensors/triggers/http/http_test.go | 2 +- 3 files changed, 73 insertions(+), 1 deletion(-) create mode 100644 docs/triggers/kafka-trigger.md diff --git a/docs/triggers/kafka-trigger.md b/docs/triggers/kafka-trigger.md new file mode 100644 index 0000000000..c542f875c9 --- /dev/null +++ b/docs/triggers/kafka-trigger.md @@ -0,0 +1,71 @@ +# Kafka Trigger + +Kafka trigger allows sensor to publish events on Kafka topic. This trigger helps source the events from outside world into your messaging queues. + +## Specification +The Kafka trigger specification is available [here](https://github.com/argoproj/argo-events/blob/master/api/sensor.md#kafkatrigger). + +## Walkthrough + +1. Consider a scenario where you are expecting a file drop onto a Minio bucket and want to place that event + on a Kafka topic. + +1. Set up the Minio Event Source and Gateway [here](https://argoproj.github.io/argo-events/setup/minio/). + Do not create the Minio sensor, we are going to create it in next step. + +1. 
Let's create the sensor:
Drop a file called `hello.txt` onto the bucket `input`, and you will receive the message on the Kafka topic.