# ============LICENSE_START=======================================================
# org.onap.dmaap
# ================================================================================
# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
# Copyright (C) 2020 Wipro Limited.
# ================================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============LICENSE_END=========================================================
#
# ECOMP is a trademark and service mark of AT&T Intellectual Property.
#
###############################################################################
###############################################################################
##
## Cambria API Server config
##
## - Default values are shown as commented settings.
##

###############################################################################
##
## HTTP service
##
## - 3904 is standard as of 7/29/14.
#
## Zookeeper Connection
##
## Both Cambria and Kafka make use of Zookeeper.
##
#config.zk.servers=172.18.1.1
config.zk.servers=zookeeper:2181
#config.zk.root=/fe3c/cambria/config
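## For reference, a multi-node Zookeeper ensemble is given as a comma-separated
## host:port list (the hostnames below are illustrative only):
#config.zk.servers=zk-0:2181,zk-1:2181,zk-2:2181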


###############################################################################
##
## Kafka Connection
##
## Items below are passed through to Kafka's producer and consumer
## configurations (after removing "kafka.").
## If you want to change request.required.acks, it can take the value shown below (-1).
#kafka.metadata.broker.list=localhost:9092,localhost:9093
kafka.metadata.broker.list=kafka:9092
##kafka.request.required.acks=-1
#kafka.client.zookeeper=${config.zk.servers}
consumer.timeout.ms=100
zookeeper.connection.timeout.ms=6000
zookeeper.session.timeout.ms=20000
zookeeper.sync.time.ms=2000
auto.commit.interval.ms=1000
fetch.message.max.bytes=1000000
auto.commit.enable=false
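## For example, kafka.request.required.acks (commented above) reaches the Kafka
## client as request.required.acks once the "kafka." prefix is stripped; a value
## of -1 requests acknowledgement from all in-sync replicas.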

#(backoff*retries > zksessiontimeout)
kafka.rebalance.backoff.ms=10000
kafka.rebalance.max.retries=6
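## With the values above: 10000 ms backoff * 6 retries = 60000 ms, which
## satisfies the (backoff * retries > zookeeper.session.timeout.ms = 20000 ms)
## constraint noted above.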


###############################################################################
##
## Secured Config
##
## Some data stored in the config system is sensitive -- API keys and secrets,
## for example. To protect it, we use an encryption layer for this section
## of the config.
##
## The key is a base64-encoded AES key. This must be created/configured for
## each installation.
#cambria.secureConfig.key=
##
## The initialization vector is a 16-byte value specific to the secured store.
## This must be created/configured for each installation.
#cambria.secureConfig.iv=

## Southfield Sandbox
cambria.secureConfig.key=b/7ouTn9FfEw2PQwL0ov/Q==
cambria.secureConfig.iv=wR9xP5k5vbz/xD0LmtqQLw==
authentication.adminSecret=fe3cCompound
#cambria.secureConfig.key[pc569h]=YT3XPyxEmKCTLI2NK+Sjbw==
#cambria.secureConfig.iv[pc569h]=rMm2jhR3yVnU+u2V9Ugu3Q==
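## One way to generate fresh values, as a sketch (assumes OpenSSL is available;
## any tool that emits 16 base64-encoded random bytes will do):
##   openssl rand -base64 16    # 128-bit AES key
##   openssl rand -base64 16    # 16-byte initialization vector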


###############################################################################
##
## Consumer Caching
##
## Kafka expects live connections from the consumer to the broker, which
## obviously doesn't work over connectionless HTTP requests. The Cambria
## server proxies HTTP requests into Kafka consumer sessions that are kept
## around for later re-use. Without the cache, every request would pay the
## connection-setup cost, which would substantially impact a high-volume
## consumer's performance.
##
## This complicates Cambria server failover, because we often need server
## A to close its connection before server B brings up the replacement.
##

## The consumer cache is normally enabled.
#cambria.consumer.cache.enabled=true

## Cached consumers are cleaned up after a period of disuse. The server inspects
## consumers every sweepFreqSeconds and will clean up any connections that are
## dormant for touchFreqMs.
#cambria.consumer.cache.sweepFreqSeconds=15
cambria.consumer.cache.touchFreqMs=120000
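## With the values above, a consumer that has been idle for more than
## 120000 ms (2 minutes) is cleaned up on the next sweep, which runs every
## 15 seconds by default.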
##stickforallconsumerrequests=false
## The cache is managed through ZK. The default value for the ZK connection
## string is the same as config.zk.servers.
#cambria.consumer.cache.zkConnect=${config.zk.servers}

##
## Shared cache information is associated with this node's name. The default
## name is the hostname plus the HTTP service port this host runs on. (The
## hostname is determined via InetAddress.getLocalHost().getCanonicalHostName(),
## which is not always adequate.) You can set this value explicitly here.
##
#cambria.api.node.identifier=<use-something-unique-to-this-instance>

#cambria.rateLimit.maxEmptyPollsPerMinute=30
#cambria.rateLimitActual.delay.ms=10
129
130###############################################################################
131##
132## Metrics Reporting
133##
134## This server can report its metrics periodically on a topic.
135##
136#metrics.send.cambria.enabled=true
137#metrics.send.cambria.topic=cambria.apinode.metrics #msgrtr.apinode.metrics.dmaap
138#metrics.send.cambria.sendEverySeconds=60
139
140cambria.consumer.cache.zkBasePath=/fe3c/cambria/consumerCache
141consumer.timeout=17
142default.partitions=3
143default.replicas=3
144##############################################################################
145#100mb
146maxcontentlength=10000


##############################################################################
#AAF Properties
msgRtr.namespace.aaf=org.onap.dmaap.mr.topic
msgRtr.topicfactory.aaf=org.onap.dmaap.mr.topicFactory|:org.onap.dmaap.mr.topic:
enforced.topic.name.AAF=org.onap
forceAAF=false
transidUEBtopicreqd=false
defaultNSforUEB=org.onap.dmaap.mr
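## For orientation: AAF permissions are pipe-separated, generally of the form
## type|instance|action (the mirrormaker admin/user entries below show the
## full triple).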
##############################################################################
#Mirror Maker Agent
msgRtr.mirrormakeradmin.aaf=com.onap.dmaap.mr.dev.mirrormaker|*|admin
msgRtr.mirrormakeruser.aaf=com.onap.dmaap.mr.dev.mirrormaker|*|user
msgRtr.mirrormakeruser.aaf.create=com.onap.dmaap.mr.dev.topicFactory|:com.onap.dmaap.mr.dev.topic:
msgRtr.mirrormaker.timeout=15000
msgRtr.mirrormaker.topic=com.onap.dmaap.mr.prod.mm.agent
msgRtr.mirrormaker.consumergroup=mmagentserver
msgRtr.mirrormaker.consumerid=1

kafka.max.poll.interval.ms=300000
kafka.heartbeat.interval.ms=60000
kafka.session.timeout.ms=240000
kafka.max.poll.records=1000
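## As configured above, the consumer may go up to 300000 ms (5 minutes) between
## polls and fetch up to 1000 records per poll; the 60000 ms heartbeat interval
## stays well below the 240000 ms session timeout, as the Kafka consumer
## requires.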