docker/docker-compose-all.yaml:
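# Full Eco Schema Catalog stack in one file: ZooKeeper, Kafka, Confluent Schema Registry,
# Elasticsearch, the Schema Catalog REST service, and the Schema Catalog UI.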
version: '3.5'
services:
  zookeeper:
    image: 'confluentinc/cp-zookeeper:7.4.0'
    hostname: 'zookeeper'
    ports:
      - '2181:2181'
    environment:
      - 'ZOOKEEPER_CLIENT_PORT=2181'
      - 'ZOOKEEPER_TICK_TIME=2000'
    restart: 'on-failure'
  kafka:
    # "`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-
    # An important note about accessing Kafka from clients on other machines:
    # -----------------------------------------------------------------------
    #
    # The config used here exposes port 9092 for _external_ connections to the broker,
    # i.e. those from _outside_ the docker network. This could be from the host machine
    # running docker, or maybe further afield if you've got a more complicated setup.
    # If the latter is true, you will need to change the value 'localhost' in
    # KAFKA_ADVERTISED_LISTENERS to one that is resolvable to the docker host from those
    # remote clients.
    #
    # For connections _internal_ to the docker network, such as from other services
    # and components, use kafka:29092.
    #
    # See https://rmoff.net/2018/08/02/kafka-listeners-explained/ for details
    # "`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-
    #
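    # For example, a quick check of the external 9092 listener using the CLI bundled in the
    # cp-kafka image:
    #   docker compose exec kafka kafka-topics --bootstrap-server localhost:9092 --list
    # Services defined in this file (schema-registry, schema-catalog-rest) connect via kafka:29092.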
    image: 'confluentinc/cp-kafka:7.4.0'
    hostname: 'kafka'
    depends_on:
      - 'zookeeper'
    ports:
      - '9092:9092'
    environment:
      - 'KAFKA_BROKER_ID=1'
      - 'KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181'
      - 'KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092'
      - 'KAFKA_LISTENER_SECURITY_PROTOCOL_MAP=PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
      - 'KAFKA_INTER_BROKER_LISTENER_NAME=PLAINTEXT'
      - 'KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1'
      - 'KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR=1'
      - 'KAFKA_TRANSACTION_STATE_LOG_MIN_ISR=1'
      - 'KAFKA_AUTHORIZER_CLASS_NAME=kafka.security.authorizer.AclAuthorizer'
      - 'KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND=true'
    restart: 'on-failure'
  schema-registry:
    image: 'confluentinc/cp-schema-registry:7.4.0'
    hostname: 'schema-registry'
    depends_on:
      - 'zookeeper'
      - 'kafka'
    ports:
      - '8081:8081'
    environment:
      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'PLAINTEXT://kafka:29092'
      SCHEMA_REGISTRY_HOST_NAME: 'schema-registry'
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
      #SCHEMA_REGISTRY_MODE_MUTABILITY: 'true'
    restart: 'on-failure'
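  # Once the stack is up, the Schema Registry REST API is published on the host; e.g.
  #   curl http://localhost:8081/subjects
  # returns the list of registered subjects (an empty JSON array on a fresh install).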
  schema-catalog-rest:
    # build:
    #   context: '../'
    #   dockerfile: './rest/Dockerfile'
    image: 'epam/eco-schema-catalog-rest:latest'
    hostname: 'schema-catalog-rest'
    depends_on:
      - 'schema-registry'
      - 'elasticsearch'
    environment:
      - 'KAFKA_SERVERS_URL=kafka:29092'
      - 'SCHEMA_REGISTRY_URL=http://schema-registry:8081'
      # Elasticsearch is reachable from other containers by its service name,
      # not localhost, on the compose network.
      - 'ELASTIC_URIS=elasticsearch:9200'
    ports:
      - '8082:8082'
    restart: 'on-failure'
  # Elasticsearch needs more memory-mapped areas than most hosts allow by default;
  # if it fails to start, raise the limit on the docker host:
  #   sudo sysctl -w vm.max_map_count=262144
  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:7.9.3
    environment:
      - node.name=es01
      - cluster.name=schema_catalog
      - bootstrap.memory_lock=true
      - discovery.type=single-node
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
      - xpack.security.enabled=false
    ulimits:
      memlock:
        soft: -1
        hard: -1
    ports:
      - "9200:9200"
  schema-catalog-ui:
    # build: '../ui'
    image: 'epam/eco-schema-catalog-ui:latest'
    hostname: 'schema-catalog-ui'
    depends_on:
      - 'schema-catalog-rest'
    environment:
      - 'PORT=8282'
      - 'BASE_HREF='
      - 'NODE_ENV=${NODE_ENV:-development}'
      - 'GA_UA='
      - 'TARGET_API=http://schema-catalog-rest:8082'
    ports:
      - '8282:8282'
    restart: 'on-failure'
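# To start the whole stack from the repository root:
#   docker compose -f docker/docker-compose-all.yaml up -d
# The Schema Catalog UI is then published at http://localhost:8282 and the REST service at
# http://localhost:8082 on the docker host.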