From 74a25137c0d483ba8357ffa718e7a547b0605465 Mon Sep 17 00:00:00 2001 From: yoution Date: Fri, 10 Jan 2020 17:28:54 +0800 Subject: [PATCH 1/4] Improve local setup --- README.md | 101 +++++++++++++++++----------- docker-es/docker-compose.yml | 6 -- local/docker-compose.yml | 28 ++++++++ local/kafka-client/Dockerfile | 5 ++ local/kafka-client/create-topics.sh | 5 ++ local/kafka-client/topics.txt | 3 + 6 files changed, 103 insertions(+), 45 deletions(-) delete mode 100644 docker-es/docker-compose.yml create mode 100644 local/docker-compose.yml create mode 100644 local/kafka-client/Dockerfile create mode 100755 local/kafka-client/create-topics.sh create mode 100644 local/kafka-client/topics.txt diff --git a/README.md b/README.md index 15c2086..cf2b33c 100644 --- a/README.md +++ b/README.md @@ -40,40 +40,24 @@ Also note that there is a `/health` endpoint that checks for the health of the a Config for tests are at `config/test.js`, it overrides some default config. -## Local Kafka setup -- Call extracted directory kafka_2.11-0.11.0.1 : `path_to_kafka` -- Call our project root directory : `our_project_root_directory` -- `http://kafka.apache.org/quickstart` contains details to setup and manage Kafka server, - below provides details to setup Kafka server in Mac, Windows will use bat commands in bin/windows instead -- Download kafka at `https://www.apache.org/dyn/closer.cgi?path=/kafka/1.1.0/kafka_2.11-1.1.0.tgz` -- Extract out the doanlowded tgz file -- Go to extracted directory kafka_2.11-0.11.0.1 -- Start ZooKeeper server: - `bin/zookeeper-server-start.sh config/zookeeper.properties` -- Use another terminal, go to same directory, start the Kafka server: - `bin/kafka-server-start.sh config/server.properties` -- Note that the zookeeper server is at localhost:2181, and Kafka server is at localhost:9092 -- Use another terminal, go to same directory, create some topics: - `bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic 
project.action.create` - `bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic project.action.update` - `bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic project.action.delete` -- Verify that the topics are created: - `bin/kafka-topics.sh --list --zookeeper localhost:2181`, - it should list out the created topics -- run the producer and then write some message into the console to send to the `project.action.create` topic: - `bin/kafka-console-producer.sh --broker-list localhost:9092 --topic project.action.create` - in the console, write message, one message per line: - `{"topic":"project.action.create","originator":"project-api","timestamp":"2019-06-20T13:43:25.817Z","mime-type":"application/json","payload":{"resource":"project","createdAt":"2019-06-20T13:43:23.554Z","updatedAt":"2019-06-20T13:43:23.555Z","terms":[],"id":1,"name":"test project","description":"Hello I am a test project","type":"app","createdBy":40051333,"updatedBy":40051333,"projectEligibility":[],"bookmarks":[],"external":null,"status":"draft","lastActivityAt":"2019-06-20T13:43:23.514Z","lastActivityUserId":"40051333","members":[{"createdAt":"2019-06-20T13:43:23.555Z","updatedAt":"2019-06-20T13:43:23.625Z","id":2,"isPrimary":true,"role":"manager","userId":40051333,"updatedBy":40051333,"createdBy":40051333,"projectId":2,"deletedAt":null,"deletedBy":null}],"version":"v2","directProjectId":null,"billingAccountId":null,"estimatedPrice":null,"actualPrice":null,"details":null,"cancelReason":null,"templateId":null,"deletedBy":null,"attachments":null,"phases":null,"projectUrl":"https://connect.topcoder-dev.com/projects/2"}}` -- Optionally, use another terminal, go to same directory, start a consumer to view the messages: - `bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic project.action.create --from-beginning` -- If the kafka don't allow to input long message you can use this script 
to write message from file: - `path_to_kafka/bin/kafka-console-producer.sh --broker-list localhost:9092 --topic project.action.create < our_project_root_directory/test/data/project/project.action.create.json` -- Writing/reading messages to/from other topics are similar. All example for messages are in: -`our_project_root_directory/test/data` -## Local Elasticsearch setup +### Local Deployment for Kafka + +* There exists an alternate `docker-compose.yml` file that can be used to spawn containers for the following services: + + | Service | Name | Port | + |----------|:-----:|:----:| + | ElasticSearch | esearch | 9200 | + | Zookeeper | zookeeper | 2181 | + | Kafka | kafka | 9092 | -- In the `docker-es` folder, run `docker-compose up` +* To have kafka create a list of desired topics on startup, there exists a file with the path `local/kafka-client/topics.txt`. Each line from the file will be added as a topic. +* To run these services simply run the following commands: + + ```bash + cd local + docker-compose up -d + ``` ## Local deployment - Install dependencies `npm i` @@ -135,12 +119,10 @@ npm run test:cov ``` ## Verification - -- Call extracted directory kafka_2.11-0.11.0.1 : `path_to_kafka` - Call our project root directory : `our_project_root_directory` -- Start kafka server, start elasticsearch, initialize Elasticsearch, start processor app +- Start Docker servicees, initialize Elasticsearch, start processor app - Send message: - `path_to_kafka/bin/kafka-console-producer.sh --broker-list localhost:9092 --topic project.action.create < our_project_root_directory/test/data/project/project.action.create.json` + `docker exec -i tc-projects-kafka /opt/kafka//bin/kafka-console-producer.sh --topic project.action.create --broker-list localhost:9092 < our_project_root_directory/test/data/project/project.action.create.json` - run command `npm run view-data projects 1` to view the created data, you will see the data are properly created: ```bash @@ -193,7 +175,8 @@ info: { - 
Run the producer and then write some invalid message into the console to send to the `project.action.create` topic: - `bin/kafka-console-producer.sh --broker-list localhost:9092 --topic project.action.create` + + `docker exec -it tc-projects-kafka /opt/kafka//bin/kafka-console-producer.sh --topic project.action.create --broker-list localhost:9092` in the console, write message, one message per line: `{ "topic": "project.action.create", "originator": "project-api", "timestamp": "2019-02-16T00:00:00", "mime-type": "application/json", "payload": { "id": "invalid", "typeId": "8e17090c-465b-4c17-b6d9-dfa16300b0ff", "track": "Code", "name": "test", "description": "desc", "timelineTemplateId": "8e17090c-465b-4c17-b6d9-dfa16300b0aa", "phases": [{ "id": "8e17090c-465b-4c17-b6d9-dfa16300b012", "name": "review", "isActive": true, "duration": 10000 }], "prizeSets": [{ "type": "prize", "prizes": [{ "type": "winning prize", "value": 500 }] }], "reviewType": "code review", "tags": ["code"], "projectId": 123, "forumId": 456, "status": "Active", "created": "2019-02-16T00:00:00", "createdBy": "admin" } }` @@ -203,7 +186,8 @@ info: { - Then in the app console, you will see error messages - Sent message to update data: - `path_to_kafka/bin/kafka-console-producer.sh --broker-list localhost:9092 --topic project.action.update < our_project_root_directory/test/data/project/project.action.update.json` + + `docker exec -i tc-projects-kafka /opt/kafka//bin/kafka-console-producer.sh --topic project.action.update --broker-list localhost:9092 < our_project_root_directory/test/data/project/project.action.update.json` - Run command `npm run view-data projects 1` to view the updated data, you will see the data are properly updated: ```bash @@ -258,7 +242,7 @@ info: { - Run the producer and then write some invalid message into the console to send to the `project.action.create` topic: - `bin/kafka-console-producer.sh --broker-list localhost:9092 --topic project.action.create` + `docker exec -it 
tc-projects-kafka /opt/kafka//bin/kafka-console-producer.sh --topic project.action.create` in the console, write message, one message per line: `{ "topic": "project.action.update", "originator": "project-api", "timestamp": "2019-02-17T01:00:00", "mime-type": "application/json", "payload": { "id": "173803d3-019e-4033-b1cf-d7205c7f774c", "typeId": "123", "track": "Code", "name": "test3", "description": "desc3", "timelineTemplateId": "8e17090c-465b-4c17-b6d9-dfa16300b0dd", "groups": ["group2", "group3"], "updated": "2019-02-17T01:00:00", "updatedBy": "admin" } }` @@ -270,3 +254,42 @@ info: { - To test the health check API, run `export PORT=5000`, start the processor, then browse `http://localhost:5000/health` in a browser, and you will see result `{"checksRun":1}` + + + +### Kafka Commands + +If you've used `docker-compose` with the file `local/docker-compose.yml` to spawn kafka & zookeeper, you can use the following commands to manipulate kafka topics and messages: +(Replace TOPIC_NAME with the name of the desired topic) + +**Create Topic** + +```bash +docker exec tc-projects-kafka /opt/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --topic TOPIC_NAME +``` + +**List Topics** + +```bash +docker exec tc-projects-kafka /opt/kafka/bin/kafka-topics.sh --list --zookeeper zookeeper:2181 +``` + +**Watch Topic** + +```bash +docker exec tc-projects-kafka /opt/kafka/bin/kafka-console-consumer --bootstrap-server localhost:9092 --zookeeper zookeeper:2181 --topic TOPIC_NAME +``` + +**Post Message to Topic** + +```bash +docker exec -it tc-projects-kafka /opt/kafka/bin/kafka-console-producer --topic TOPIC_NAME --broker-list localhost:9092 +``` +The message can be passed using `stdin` + +### Test +```bash +docker exec -i tc-projects-kafka /opt/kafka//bin/kafka-console-producer.sh --topic project.action.create --broker-list localhost:9092 < test_message.json + +``` +All example for messages are in: 
our_project_root_directory/test/data diff --git a/docker-es/docker-compose.yml b/docker-es/docker-compose.yml deleted file mode 100644 index efc2bdc..0000000 --- a/docker-es/docker-compose.yml +++ /dev/null @@ -1,6 +0,0 @@ -version: "2" -services: - esearch: - image: "elasticsearch:2.3" - ports: - - "9200:9200" diff --git a/local/docker-compose.yml b/local/docker-compose.yml new file mode 100644 index 0000000..b1775aa --- /dev/null +++ b/local/docker-compose.yml @@ -0,0 +1,28 @@ +version: "2" +services: + esearch: + image: "elasticsearch:2.3" + ports: + - "9200:9200" + zookeeper: + image: confluent/zookeeper + ports: + - "2181:2181" + environment: + zk_id: "1" + kafka: + image: wurstmeister/kafka + container_name: tc-projects-kafka + depends_on: + - zookeeper + ports: + - "9092:9092" + environment: + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://localhost:9092 + KAFKA_LISTENERS: PLAINTEXT://0.0.0.0:9092 + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + kafka-client: + build: ./kafka-client + depends_on: + - kafka + - zookeeper diff --git a/local/kafka-client/Dockerfile b/local/kafka-client/Dockerfile new file mode 100644 index 0000000..3d332b8 --- /dev/null +++ b/local/kafka-client/Dockerfile @@ -0,0 +1,5 @@ +From confluent/kafka +WORKDIR /app/ +COPY topics.txt . +COPY create-topics.sh . 
+ENTRYPOINT ["/app/create-topics.sh"] diff --git a/local/kafka-client/create-topics.sh b/local/kafka-client/create-topics.sh new file mode 100755 index 0000000..c5b33e6 --- /dev/null +++ b/local/kafka-client/create-topics.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +while read topic; do + /usr/bin/kafka-topics --create --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --topic $topic +done < topics.txt diff --git a/local/kafka-client/topics.txt b/local/kafka-client/topics.txt new file mode 100644 index 0000000..0dde473 --- /dev/null +++ b/local/kafka-client/topics.txt @@ -0,0 +1,3 @@ +project.action.create +project.action.delete +project.action.update From 3ae25a2d72b911f19fe5bc96874100dba2a52b39 Mon Sep 17 00:00:00 2001 From: yoution Date: Sun, 12 Jan 2020 15:12:29 +0800 Subject: [PATCH 2/4] change confluent/kafka to wurstmeister/kafka --- README.md | 18 +++++++++--------- local/docker-compose.yml | 4 ++-- local/kafka-client/Dockerfile | 2 +- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index cf2b33c..0641777 100644 --- a/README.md +++ b/README.md @@ -122,7 +122,7 @@ npm run test:cov - Call our project root directory : `our_project_root_directory` - Start Docker servicees, initialize Elasticsearch, start processor app - Send message: - `docker exec -i tc-projects-kafka /opt/kafka//bin/kafka-console-producer.sh --topic project.action.create --broker-list localhost:9092 < our_project_root_directory/test/data/project/project.action.create.json` + `docker exec -i project-processor-es-kafka /opt/kafka/bin/kafka-console-producer.sh --topic project.action.create --broker-list localhost:9092 < our_project_root_directory/test/data/project/project.action.create.json` - run command `npm run view-data projects 1` to view the created data, you will see the data are properly created: ```bash @@ -176,7 +176,7 @@ info: { - Run the producer and then write some invalid message into the console to send to the `project.action.create` 
topic: - `docker exec -it tc-projects-kafka /opt/kafka//bin/kafka-console-producer.sh --topic project.action.create --broker-list localhost:9092` + `docker exec -it project-processor-es-kafka /opt/kafka/bin/kafka-console-producer.sh --topic project.action.create --broker-list localhost:9092` in the console, write message, one message per line: `{ "topic": "project.action.create", "originator": "project-api", "timestamp": "2019-02-16T00:00:00", "mime-type": "application/json", "payload": { "id": "invalid", "typeId": "8e17090c-465b-4c17-b6d9-dfa16300b0ff", "track": "Code", "name": "test", "description": "desc", "timelineTemplateId": "8e17090c-465b-4c17-b6d9-dfa16300b0aa", "phases": [{ "id": "8e17090c-465b-4c17-b6d9-dfa16300b012", "name": "review", "isActive": true, "duration": 10000 }], "prizeSets": [{ "type": "prize", "prizes": [{ "type": "winning prize", "value": 500 }] }], "reviewType": "code review", "tags": ["code"], "projectId": 123, "forumId": 456, "status": "Active", "created": "2019-02-16T00:00:00", "createdBy": "admin" } }` @@ -187,7 +187,7 @@ info: { - Sent message to update data: - `docker exec -i tc-projects-kafka /opt/kafka//bin/kafka-console-producer.sh --topic project.action.update --broker-list localhost:9092 < our_project_root_directory/test/data/project/project.action.update.json` + `docker exec -i project-processor-es-kafka /opt/kafka/bin/kafka-console-producer.sh --topic project.action.update --broker-list localhost:9092 < our_project_root_directory/test/data/project/project.action.update.json` - Run command `npm run view-data projects 1` to view the updated data, you will see the data are properly updated: ```bash @@ -242,7 +242,7 @@ info: { - Run the producer and then write some invalid message into the console to send to the `project.action.create` topic: - `docker exec -it tc-projects-kafka /opt/kafka//bin/kafka-console-producer.sh --topic project.action.create` + `docker exec -it project-processor-es-kafka 
/opt/kafka/bin/kafka-console-producer.sh --topic project.action.create` in the console, write message, one message per line: `{ "topic": "project.action.update", "originator": "project-api", "timestamp": "2019-02-17T01:00:00", "mime-type": "application/json", "payload": { "id": "173803d3-019e-4033-b1cf-d7205c7f774c", "typeId": "123", "track": "Code", "name": "test3", "description": "desc3", "timelineTemplateId": "8e17090c-465b-4c17-b6d9-dfa16300b0dd", "groups": ["group2", "group3"], "updated": "2019-02-17T01:00:00", "updatedBy": "admin" } }` @@ -265,31 +265,31 @@ If you've used `docker-compose` with the file `local/docker-compose.yml` to spaw **Create Topic** ```bash -docker exec tc-projects-kafka /opt/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --topic TOPIC_NAME +docker exec project-processor-es-kafka /opt/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --topic TOPIC_NAME ``` **List Topics** ```bash -docker exec tc-projects-kafka /opt/kafka/bin/kafka-topics.sh --list --zookeeper zookeeper:2181 +docker exec project-processor-es-kafka /opt/kafka/bin/kafka-topics.sh --list --zookeeper zookeeper:2181 ``` **Watch Topic** ```bash -docker exec tc-projects-kafka /opt/kafka/bin/kafka-console-consumer --bootstrap-server localhost:9092 --zookeeper zookeeper:2181 --topic TOPIC_NAME +docker exec project-processor-es-kafka /opt/kafka/bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --zookeeper zookeeper:2181 --topic TOPIC_NAME ``` **Post Message to Topic** ```bash -docker exec -it tc-projects-kafka /opt/kafka/bin/kafka-console-producer --topic TOPIC_NAME --broker-list localhost:9092 +docker exec -it project-processor-es-kafka /opt/kafka/bin/kafka-console-producer.sh --topic TOPIC_NAME --broker-list localhost:9092 ``` The message can be passed using `stdin` ### Test ```bash -docker exec -i tc-projects-kafka /opt/kafka//bin/kafka-console-producer.sh --topic 
project.action.create --broker-list localhost:9092 < test_message.json +docker exec -i project-processor-es-kafka /opt/kafka/bin/kafka-console-producer.sh --topic project.action.create --broker-list localhost:9092 < test_message.json ``` All example for messages are in: our_project_root_directory/test/data diff --git a/local/docker-compose.yml b/local/docker-compose.yml index b1775aa..e066659 100644 --- a/local/docker-compose.yml +++ b/local/docker-compose.yml @@ -5,14 +5,14 @@ services: ports: - "9200:9200" zookeeper: - image: confluent/zookeeper + image: wurstmeister/zookeeper ports: - "2181:2181" environment: zk_id: "1" kafka: image: wurstmeister/kafka - container_name: tc-projects-kafka + container_name: project-processor-es-kafka depends_on: - zookeeper ports: diff --git a/local/kafka-client/Dockerfile b/local/kafka-client/Dockerfile index 3d332b8..15c2083 100644 --- a/local/kafka-client/Dockerfile +++ b/local/kafka-client/Dockerfile @@ -1,4 +1,4 @@ -From confluent/kafka +From wurstmeister/kafka WORKDIR /app/ COPY topics.txt . COPY create-topics.sh . From 04d3faad11475821479fc4aff05620d12f9e8ddd Mon Sep 17 00:00:00 2001 From: Maksym Mykhailenko Date: Mon, 13 Jan 2020 12:03:46 +0800 Subject: [PATCH 3/4] feat: improved README to be easier to follow during local setup --- README.md | 199 +++++++++++++++++++++++++++++++++--------------------- 1 file changed, 121 insertions(+), 78 deletions(-) diff --git a/README.md b/README.md index 0641777..0411e1a 100644 --- a/README.md +++ b/README.md @@ -7,6 +7,122 @@ - ElasticSearch - Docker, Docker Compose +## Local setup + +1. Install node dependencies: + + ```bash + npm install + ``` + +2. Run docker compose with dependant services: + + ```bash + cd local/ + docker-compose up + ``` + +
Click to see details + + This docker-compose run all the dependencies which are necessary for `project-processor-es` to work. + + | Service | Name | Port | + |----------|:-----:|:----:| + | Elasticsearch | esearch | 9200 | + | Zookeeper | zookeeper | 2181 | + | Kafka | kafka | 9092 | + + `docker-compose` automatically creates Kafka topics which are used by `project-processor-es` listed in `local/kafka-client/topics.txt`. + +
+ + +3. Set environment variables for M2M authentication: `AUTH0_CLIENT_ID`, `AUTH0_CLIENT_SECRET`, `AUTH0_URL`, `AUTH0_AUDIENCE`, `AUTH0_PROXY_SERVER_URL`: + + ```bash + export AUTH0_CLIENT_ID= + export AUTH0_CLIENT_SECRET= + export AUTH0_URL= + export AUTH0_AUDIENCE= + export AUTH0_PROXY_SERVER_URL= + ``` + +4. Initialize Elasticsearch indexes: + + ```bash + npm run sync:es + ``` + +5. Start processor app: + + ```bash + npm start + ``` + +## Commands + +### Lint & Tests commands + +| Command | Description | +|----------|--------------| +| `npm run lint` | Run lint check. | +| `npm run lin:fix` | Run lint check with automatic fixing of errors and warnings where possible. | +| `npm run test` | Run integration tests. | +| `npm run test:cov` | Run integration tests with coverage report. | + +### View data in Elasticsearch indexes + +You may run the next command to output documents in the Elasticsearch indexes for debugging purposes. + +```bash +npm run view-data +``` + +##### Examples + +- `npm run view-data projects 1` view document with id `1` in `projects` index +- `npm run view-data timelines 1` view document with id `1` in `timelines` index +- `npm run view-data metadata 1` view document with id `1` in `timelines` index *(this index has only one document and all the data is stored inside one document which might be very big)*. 
+ +### Kafka commands + +If you've used `docker-compose` with the file `local/docker-compose.yml` during local setup to spawn kafka & zookeeper, you can use the following commands to manipulate kafka topics and messages: +(Replace `TOPIC_NAME` with the name of the desired topic) + +#### Create Topic + +```bash +docker exec project-processor-es-kafka /opt/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --topic TOPIC_NAME +``` + +#### List Topics + +```bash +docker exec project-processor-es-kafka /opt/kafka/bin/kafka-topics.sh --list --zookeeper zookeeper:2181 +``` + +#### Watch Topic + +```bash +docker exec project-processor-es-kafka /opt/kafka/bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic TOPIC_NAME +``` + +#### Post Message to Topic (from stdin) + +```bash +docker exec -it project-processor-es-kafka /opt/kafka/bin/kafka-console-producer.sh --broker-list localhost:9092 --topic TOPIC_NAME +``` + +- Enter or copy/paste the message into the console after starting this command. + +#### Post Message to Topic (from file) + +```bash +docker exec -i project-processor-es-kafka /opt/kafka/bin/kafka-console-producer.sh --topic project.action.create --broker-list localhost:9092 < test_message.json +``` + +- All example for messages are in: `./test/data`. + ## Configuration Configuration for the processor is at `config/default.js`. @@ -40,40 +156,6 @@ Also note that there is a `/health` endpoint that checks for the health of the a Config for tests are at `config/test.js`, it overrides some default config. 
- -### Local Deployment for Kafka - -* There exists an alternate `docker-compose.yml` file that can be used to spawn containers for the following services: - - | Service | Name | Port | - |----------|:-----:|:----:| - | ElasticSearch | esearch | 9200 | - | Zookeeper | zookeeper | 2181 | - | Kafka | kafka | 9092 | - -* To have kafka create a list of desired topics on startup, there exists a file with the path `local/kafka-client/topics.txt`. Each line from the file will be added as a topic. -* To run these services simply run the following commands: - - ```bash - cd local - docker-compose up -d - ``` - -## Local deployment -- Install dependencies `npm i` -- Run code lint check `npm run lint`, running `npm run lint:fix` can fix some lint errors if any -- Initialize Elasticsearch, create configured Elasticsearch index if not present: `npm run sync:es` -- Start processor app `npm start` - -Note that you need to set AUTH0 related environment variables belows before you can start the processor. - -- AUTH0_URL -- AUTH0_AUDIENCE -- TOKEN_CACHE_TIME -- AUTH0_CLIENT_ID -- AUTH0_CLIENT_SECRET -- AUTH0_PROXY_SERVER_URL - ## Local Deployment with Docker To run the Challenge ES Processor using docker, follow the below steps @@ -119,10 +201,10 @@ npm run test:cov ``` ## Verification -- Call our project root directory : `our_project_root_directory` -- Start Docker servicees, initialize Elasticsearch, start processor app +- Start Docker services, initialize Elasticsearch, start processor app +- Navigate to the repository root directory. 
- Send message: - `docker exec -i project-processor-es-kafka /opt/kafka/bin/kafka-console-producer.sh --topic project.action.create --broker-list localhost:9092 < our_project_root_directory/test/data/project/project.action.create.json` + `docker exec -i project-processor-es-kafka /opt/kafka/bin/kafka-console-producer.sh --topic project.action.create --broker-list localhost:9092 < ./test/data/project/project.action.create.json` - run command `npm run view-data projects 1` to view the created data, you will see the data are properly created: ```bash @@ -187,7 +269,7 @@ info: { - Sent message to update data: - `docker exec -i project-processor-es-kafka /opt/kafka/bin/kafka-console-producer.sh --topic project.action.update --broker-list localhost:9092 < our_project_root_directory/test/data/project/project.action.update.json` + `docker exec -i project-processor-es-kafka /opt/kafka/bin/kafka-console-producer.sh --topic project.action.update --broker-list localhost:9092 < ./test/data/project/project.action.update.json` - Run command `npm run view-data projects 1` to view the updated data, you will see the data are properly updated: ```bash @@ -242,7 +324,7 @@ info: { - Run the producer and then write some invalid message into the console to send to the `project.action.create` topic: - `docker exec -it project-processor-es-kafka /opt/kafka/bin/kafka-console-producer.sh --topic project.action.create` + `docker exec -it project-processor-es-kafka /opt/kafka/bin/kafka-console-producer.sh --broker-list localhost:9092 --topic project.action.create` in the console, write message, one message per line: `{ "topic": "project.action.update", "originator": "project-api", "timestamp": "2019-02-17T01:00:00", "mime-type": "application/json", "payload": { "id": "173803d3-019e-4033-b1cf-d7205c7f774c", "typeId": "123", "track": "Code", "name": "test3", "description": "desc3", "timelineTemplateId": "8e17090c-465b-4c17-b6d9-dfa16300b0dd", "groups": ["group2", "group3"], "updated": 
"2019-02-17T01:00:00", "updatedBy": "admin" } }` @@ -254,42 +336,3 @@ info: { - To test the health check API, run `export PORT=5000`, start the processor, then browse `http://localhost:5000/health` in a browser, and you will see result `{"checksRun":1}` - - - -### Kafka Commands - -If you've used `docker-compose` with the file `local/docker-compose.yml` to spawn kafka & zookeeper, you can use the following commands to manipulate kafka topics and messages: -(Replace TOPIC_NAME with the name of the desired topic) - -**Create Topic** - -```bash -docker exec project-processor-es-kafka /opt/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --topic TOPIC_NAME -``` - -**List Topics** - -```bash -docker exec project-processor-es-kafka /opt/kafka/bin/kafka-topics.sh --list --zookeeper zookeeper:2181 -``` - -**Watch Topic** - -```bash -docker exec project-processor-es-kafka /opt/kafka/bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --zookeeper zookeeper:2181 --topic TOPIC_NAME -``` - -**Post Message to Topic** - -```bash -docker exec -it project-processor-es-kafka /opt/kafka/bin/kafka-console-producer.sh --topic TOPIC_NAME --broker-list localhost:9092 -``` -The message can be passed using `stdin` - -### Test -```bash -docker exec -i project-processor-es-kafka /opt/kafka/bin/kafka-console-producer.sh --topic project.action.create --broker-list localhost:9092 < test_message.json - -``` -All example for messages are in: our_project_root_directory/test/data From c70bd7d42861eeb75f9d7eb5d4f3a15d3591de93 Mon Sep 17 00:00:00 2001 From: Maksym Mykhailenko Date: Fri, 17 Jan 2020 15:39:56 +0800 Subject: [PATCH 4/4] fix: command to create topics --- local/kafka-client/create-topics.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/local/kafka-client/create-topics.sh b/local/kafka-client/create-topics.sh index c5b33e6..a411690 100755 --- a/local/kafka-client/create-topics.sh +++ 
b/local/kafka-client/create-topics.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
 
 while read topic; do
-  /usr/bin/kafka-topics --create --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --topic $topic
+  /opt/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --topic "$topic"
 done < topics.txt