8 changes: 8 additions & 0 deletions app/controllers/Logkafka.scala
@@ -92,6 +92,10 @@ class Logkafka (val messagesApi: MessagesApi, val kafkaManagerContext: KafkaMana
LogkafkaNewConfigs.configMaps(Kafka_1_1_1).map{case(k,v) => LKConfig(k,Some(v))}.toList)
val kafka_2_0_0_Default = CreateLogkafka("","",
LogkafkaNewConfigs.configMaps(Kafka_2_0_0).map{case(k,v) => LKConfig(k,Some(v))}.toList)
val kafka_2_1_0_Default = CreateLogkafka("","",
LogkafkaNewConfigs.configMaps(Kafka_2_1_0).map{case(k,v) => LKConfig(k,Some(v))}.toList)
val kafka_2_1_1_Default = CreateLogkafka("","",
LogkafkaNewConfigs.configMaps(Kafka_2_1_1).map{case(k,v) => LKConfig(k,Some(v))}.toList)

val defaultCreateForm = Form(
mapping(
@@ -149,6 +153,8 @@ class Logkafka (val messagesApi: MessagesApi, val kafkaManagerContext: KafkaMana
case Kafka_1_1_0 => (defaultCreateForm.fill(kafka_1_1_0_Default), clusterContext)
case Kafka_1_1_1 => (defaultCreateForm.fill(kafka_1_1_1_Default), clusterContext)
case Kafka_2_0_0 => (defaultCreateForm.fill(kafka_2_0_0_Default), clusterContext)
case Kafka_2_1_0 => (defaultCreateForm.fill(kafka_2_1_0_Default), clusterContext)
case Kafka_2_1_1 => (defaultCreateForm.fill(kafka_2_1_1_Default), clusterContext)
}
}
}
@@ -250,6 +256,8 @@ class Logkafka (val messagesApi: MessagesApi, val kafkaManagerContext: KafkaMana
case Kafka_1_1_0 => LogkafkaNewConfigs.configNames(Kafka_1_1_0).map(n => (n,LKConfig(n,None))).toMap
case Kafka_1_1_1 => LogkafkaNewConfigs.configNames(Kafka_1_1_1).map(n => (n,LKConfig(n,None))).toMap
case Kafka_2_0_0 => LogkafkaNewConfigs.configNames(Kafka_2_0_0).map(n => (n,LKConfig(n,None))).toMap
case Kafka_2_1_0 => LogkafkaNewConfigs.configNames(Kafka_2_1_0).map(n => (n,LKConfig(n,None))).toMap
case Kafka_2_1_1 => LogkafkaNewConfigs.configNames(Kafka_2_1_1).map(n => (n,LKConfig(n,None))).toMap
}
val identityOption = li.identityMap.get(log_path)
if (identityOption.isDefined) {
7 changes: 7 additions & 0 deletions app/controllers/Topic.scala
@@ -65,6 +65,9 @@ class Topic (val messagesApi: MessagesApi, val kafkaManagerContext: KafkaManager
val kafka_1_1_0_Default = CreateTopic("",1,1,TopicConfigs.configNames(Kafka_1_1_0).map(n => TConfig(n,None)).toList)
val kafka_1_1_1_Default = CreateTopic("",1,1,TopicConfigs.configNames(Kafka_1_1_1).map(n => TConfig(n,None)).toList)
val kafka_2_0_0_Default = CreateTopic("",1,1,TopicConfigs.configNames(Kafka_2_0_0).map(n => TConfig(n,None)).toList)
val kafka_2_1_0_Default = CreateTopic("",1,1,TopicConfigs.configNames(Kafka_2_1_0).map(n => TConfig(n,None)).toList)
val kafka_2_1_1_Default = CreateTopic("",1,1,TopicConfigs.configNames(Kafka_2_1_1).map(n => TConfig(n,None)).toList)


val defaultCreateForm = Form(
mapping(
@@ -162,6 +165,8 @@ class Topic (val messagesApi: MessagesApi, val kafkaManagerContext: KafkaManager
case Kafka_1_1_0 => (defaultCreateForm.fill(kafka_1_1_0_Default), clusterContext)
case Kafka_1_1_1 => (defaultCreateForm.fill(kafka_1_1_1_Default), clusterContext)
case Kafka_2_0_0 => (defaultCreateForm.fill(kafka_2_0_0_Default), clusterContext)
case Kafka_2_1_0 => (defaultCreateForm.fill(kafka_2_1_0_Default), clusterContext)
case Kafka_2_1_1 => (defaultCreateForm.fill(kafka_2_1_1_Default), clusterContext)
}
}
}
@@ -409,6 +414,8 @@ class Topic (val messagesApi: MessagesApi, val kafkaManagerContext: KafkaManager
case Kafka_1_1_0 => TopicConfigs.configNames(Kafka_1_1_0).map(n => (n,TConfig(n,None))).toMap
case Kafka_1_1_1 => TopicConfigs.configNames(Kafka_1_1_1).map(n => (n,TConfig(n,None))).toMap
case Kafka_2_0_0 => TopicConfigs.configNames(Kafka_2_0_0).map(n => (n,TConfig(n,None))).toMap
case Kafka_2_1_0 => TopicConfigs.configNames(Kafka_2_1_0).map(n => (n,TConfig(n,None))).toMap
case Kafka_2_1_1 => TopicConfigs.configNames(Kafka_2_1_1).map(n => (n,TConfig(n,None))).toMap
}
val combinedMap = defaultConfigMap ++ ti.config.toMap.map(tpl => tpl._1 -> TConfig(tpl._1,Option(tpl._2)))
(defaultUpdateConfigForm.fill(UpdateTopicConfig(ti.topic,combinedMap.toList.map(_._2),ti.configReadVersion)),
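Both controllers extend the same pattern in every place they match on the cluster's Kafka version: the per-version defaults, the create-form fill, and the update-config name map each gain a case for Kafka_2_1_0 and Kafka_2_1_1. A minimal, self-contained sketch of that pattern (the types below are illustrative stand-ins, not the repository's actual CreateTopic and TConfig definitions):

object VersionDefaultsSketch {
  // Hypothetical, simplified stand-ins for illustration only.
  sealed trait Version
  case object V2_0_0 extends Version
  case object V2_1_0 extends Version
  case object V2_1_1 extends Version

  final case class TopicDefault(partitions: Int, replication: Int, configNames: Set[String])

  // Every supported version needs its own case; a missing version surfaces
  // as a non-exhaustive match rather than a silent fallback.
  def defaultFor(v: Version, namesFor: Version => Set[String]): TopicDefault = v match {
    case V2_0_0 => TopicDefault(1, 1, namesFor(V2_0_0))
    case V2_1_0 => TopicDefault(1, 1, namesFor(V2_1_0))
    case V2_1_1 => TopicDefault(1, 1, namesFor(V2_1_1))
  }
}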
2 changes: 1 addition & 1 deletion app/kafka/manager/actor/cluster/KafkaStateActor.scala
@@ -177,7 +177,7 @@ class KafkaAdminClient(context: => ActorContext, adminClientActorPath: ActorPath


object KafkaManagedOffsetCache {
- val supportedVersions: Set[KafkaVersion] = Set(Kafka_0_8_2_0, Kafka_0_8_2_1, Kafka_0_8_2_2, Kafka_0_9_0_0, Kafka_0_9_0_1, Kafka_0_10_0_0, Kafka_0_10_0_1, Kafka_0_10_1_0, Kafka_0_10_1_1, Kafka_0_10_2_0, Kafka_0_10_2_1, Kafka_0_11_0_0, Kafka_0_11_0_2, Kafka_1_0_0, Kafka_1_0_1, Kafka_1_1_0, Kafka_1_1_1, Kafka_2_0_0)
+ val supportedVersions: Set[KafkaVersion] = Set(Kafka_0_8_2_0, Kafka_0_8_2_1, Kafka_0_8_2_2, Kafka_0_9_0_0, Kafka_0_9_0_1, Kafka_0_10_0_0, Kafka_0_10_0_1, Kafka_0_10_1_0, Kafka_0_10_1_1, Kafka_0_10_2_0, Kafka_0_10_2_1, Kafka_0_11_0_0, Kafka_0_11_0_2, Kafka_1_0_0, Kafka_1_0_1, Kafka_1_1_0, Kafka_1_1_1, Kafka_2_0_0, Kafka_2_1_0, Kafka_2_1_1)
val ConsumerOffsetTopic = "__consumer_offsets"

def isSupported(version: KafkaVersion) : Boolean = {
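KafkaManagedOffsetCache only consumes __consumer_offsets for versions listed in supportedVersions, so this one-line change appears to be what gates the consumer-offset cache for 2.1.x clusters. A quick sanity sketch, with package paths assumed from the file locations in this PR:

import kafka.manager.actor.cluster.KafkaManagedOffsetCache
import kafka.manager.model.{Kafka_2_0_0, Kafka_2_1_0, Kafka_2_1_1}

object OffsetCacheSupportCheck extends App {
  // With the extended set, both 2.1.x versions report as supported,
  // matching the existing behavior for 2.0.0.
  Seq(Kafka_2_0_0, Kafka_2_1_0, Kafka_2_1_1).foreach { v =>
    println(s"$v supported: ${KafkaManagedOffsetCache.isSupported(v)}") // expected: true for all three
  }
}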
12 changes: 11 additions & 1 deletion app/kafka/manager/model/model.scala
@@ -88,6 +88,14 @@ case object Kafka_2_0_0 extends KafkaVersion {
override def toString = "2.0.0"
}

case object Kafka_2_1_0 extends KafkaVersion {
override def toString = "2.1.0"
}

case object Kafka_2_1_1 extends KafkaVersion {
override def toString = "2.1.1"
}

object KafkaVersion {
val supportedVersions: Map[String,KafkaVersion] = Map(
"0.8.1.1" -> Kafka_0_8_1_1,
@@ -109,7 +117,9 @@ object KafkaVersion {
"1.0.1" -> Kafka_1_0_1,
"1.1.0" -> Kafka_1_1_0,
"1.1.1" -> Kafka_1_1_1,
- "2.0.0" -> Kafka_2_0_0
+ "2.0.0" -> Kafka_2_0_0,
+ "2.1.0" -> Kafka_2_1_0,
+ "2.1.1" -> Kafka_2_1_1
)

val formSelectList : IndexedSeq[(String,String)] = supportedVersions.toIndexedSeq.filterNot(_._1.contains("beta")).map(t => (t._1,t._2.toString)).sortWith((a, b) => sortVersion(a._1, b._1))
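The two new case objects plus their entries in supportedVersions are what make "2.1.0" and "2.1.1" valid cluster-config values, and formSelectList, derived from the same map, presumably feeds the version picker in the UI. A hedged usage sketch, assuming the package layout shown in this diff:

import kafka.manager.model.{KafkaVersion, Kafka_2_1_0, Kafka_2_1_1}

object VersionRegistrationCheck extends App {
  // The map keys are the strings persisted in cluster configs.
  assert(KafkaVersion.supportedVersions.get("2.1.0").contains(Kafka_2_1_0))
  assert(KafkaVersion.supportedVersions.get("2.1.1").contains(Kafka_2_1_1))
  // formSelectList is built from the same map, so it should now end with
  // the 2.1.x entries (see the KafkaVersionTest change below).
  println(KafkaVersion.formSelectList.takeRight(2))
}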
6 changes: 4 additions & 2 deletions app/kafka/manager/utils/LogkafkaNewConfigs.scala
@@ -36,8 +36,10 @@ object LogkafkaNewConfigs {
Kafka_1_0_1 -> logkafka82.LogConfig,
Kafka_1_1_0 -> logkafka82.LogConfig,
Kafka_1_1_1 -> logkafka82.LogConfig,
- Kafka_2_0_0 -> logkafka82.LogConfig
- )
+ Kafka_2_0_0 -> logkafka82.LogConfig,
+ Kafka_2_1_0 -> logkafka82.LogConfig,
+ Kafka_2_1_1 -> logkafka82.LogConfig
+ )

def configNames(version: KafkaVersion) : Set[String] = {
logkafkaConfigsByVersion.get(version) match {
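Both new versions map to the existing logkafka82.LogConfig, so their logkafka config defaults are identical to 2.0.0's. A small parity check, sketched under the assumption that configMaps keeps the signature used by the Logkafka controller above:

import kafka.manager.model.{Kafka_2_0_0, Kafka_2_1_1}
import kafka.manager.utils.LogkafkaNewConfigs

object LogkafkaConfigParityCheck extends App {
  // 2.1.1 resolves to the same logkafka82.LogConfig entry as 2.0.0.
  val forKafka200 = LogkafkaNewConfigs.configMaps(Kafka_2_0_0)
  val forKafka211 = LogkafkaNewConfigs.configMaps(Kafka_2_1_1)
  assert(forKafka200 == forKafka211)
  println(s"${forKafka211.size} logkafka config defaults for 2.1.1")
}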
6 changes: 4 additions & 2 deletions app/kafka/manager/utils/TopicConfigs.scala
@@ -36,8 +36,10 @@ object TopicConfigs {
Kafka_1_0_1 -> zero90.LogConfig,
Kafka_1_1_0 -> zero90.LogConfig,
Kafka_1_1_1 -> zero90.LogConfig,
- Kafka_2_0_0 -> zero90.LogConfig
- )
+ Kafka_2_0_0 -> zero90.LogConfig,
+ Kafka_2_1_0 -> zero90.LogConfig,
+ Kafka_2_1_1 -> zero90.LogConfig
+ )

def configNames(version: KafkaVersion) : Set[String] = {
topicConfigsByVersion.get(version) match {
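Topic configs follow the same delegation: 2.1.0 and 2.1.1 reuse zero90.LogConfig, so configNames should return exactly the 2.0.0 set. A hedged check, not part of this PR's test suite:

import kafka.manager.model.{Kafka_2_0_0, Kafka_2_1_0}
import kafka.manager.utils.TopicConfigs

object TopicConfigParityCheck extends App {
  val names200: Set[String] = TopicConfigs.configNames(Kafka_2_0_0)
  val names210: Set[String] = TopicConfigs.configNames(Kafka_2_1_0)
  // Both versions resolve through topicConfigsByVersion to zero90.LogConfig.
  assert(names200 == names210)
}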
2 changes: 1 addition & 1 deletion test/controller/api/TestKafkaStateCheck.scala
@@ -76,7 +76,7 @@ class TestKafkaStateCheck extends CuratorAwareTest with KafkaServerInTest with M

private[this] def createCluster() = {
val future = kafkaManagerContext.get.getKafkaManager.addCluster(
- testClusterName,"2.0.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(kafkaManagerContext.get.getKafkaManager.defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None
+ testClusterName,"2.1.1",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(kafkaManagerContext.get.getKafkaManager.defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None
)
val result = Await.result(future,duration)
result.toEither.left.foreach(apiError => sys.error(apiError.msg))
6 changes: 3 additions & 3 deletions test/kafka/manager/TestKafkaManager.scala
@@ -125,7 +125,7 @@ class TestKafkaManager extends CuratorAwareTest with BaseTest {
}

test("add cluster") {
- val future = kafkaManager.addCluster("dev","2.0.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(kafkaManager.defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
+ val future = kafkaManager.addCluster("dev","2.1.1",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(kafkaManager.defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
val result = Await.result(future,duration)
assert(result.isRight === true)
Thread.sleep(2000)
@@ -376,7 +376,7 @@ class TestKafkaManager extends CuratorAwareTest with BaseTest {
}

test("update cluster zkhost") {
- val future = kafkaManager.updateCluster("dev","2.0.0",testServer.getConnectString, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxSsl = false, jmxPass = None, tuning = Option(defaultTuning), securityProtocol = "PLAINTEXT", saslMechanism = None, jaasConfig = None)
+ val future = kafkaManager.updateCluster("dev","2.1.1",testServer.getConnectString, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxSsl = false, jmxPass = None, tuning = Option(defaultTuning), securityProtocol = "PLAINTEXT", saslMechanism = None, jaasConfig = None)
val result = Await.result(future,duration)
assert(result.isRight === true)

@@ -433,7 +433,7 @@ class TestKafkaManager extends CuratorAwareTest with BaseTest {
}

test("update cluster logkafka enabled and activeOffsetCache enabled") {
- val future = kafkaManager.updateCluster("dev","2.0.0",testServer.getConnectString, jmxEnabled = false, pollConsumers = true, filterConsumers = true, logkafkaEnabled = true, activeOffsetCacheEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol = "PLAINTEXT", saslMechanism = None, jaasConfig = None)
+ val future = kafkaManager.updateCluster("dev","2.1.1",testServer.getConnectString, jmxEnabled = false, pollConsumers = true, filterConsumers = true, logkafkaEnabled = true, activeOffsetCacheEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol = "PLAINTEXT", saslMechanism = None, jaasConfig = None)
val result = Await.result(future,duration)
assert(result.isRight === true)

14 changes: 7 additions & 7 deletions test/kafka/manager/TestKafkaManagerActor.scala
@@ -68,7 +68,7 @@ class TestKafkaManagerActor extends CuratorAwareTest with BaseTest {
}

test("add cluster") {
- val cc = ClusterConfig("dev","2.0.0",testServer.getConnectString, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
+ val cc = ClusterConfig("dev","2.1.1",testServer.getConnectString, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
withKafkaManagerActor(KMAddCluster(cc)) { result: KMCommandResult =>
result.result.get
Thread.sleep(1000)
@@ -79,7 +79,7 @@ class TestKafkaManagerActor extends CuratorAwareTest with BaseTest {
}

test("update cluster zkhost") {
- val cc2 = ClusterConfig("dev","2.0.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
+ val cc2 = ClusterConfig("dev","2.1.1",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
withKafkaManagerActor(KMUpdateCluster(cc2)) { result: KMCommandResult =>
result.result.get
Thread.sleep(3000)
@@ -111,7 +111,7 @@ class TestKafkaManagerActor extends CuratorAwareTest with BaseTest {
}

test("update cluster version") {
- val cc2 = ClusterConfig("dev","2.0.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
+ val cc2 = ClusterConfig("dev","2.1.1",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
withKafkaManagerActor(KMUpdateCluster(cc2)) { result: KMCommandResult =>
result.result.get
Thread.sleep(3000)
@@ -138,7 +138,7 @@ class TestKafkaManagerActor extends CuratorAwareTest with BaseTest {
println(result)
result.msg.contains("dev")
}
- val cc2 = ClusterConfig("dev","2.0.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
+ val cc2 = ClusterConfig("dev","2.1.1",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
withKafkaManagerActor(KMAddCluster(cc2)) { result: KMCommandResult =>
result.result.get
Thread.sleep(1000)
@@ -155,7 +155,7 @@ class TestKafkaManagerActor extends CuratorAwareTest with BaseTest {
}

test("update cluster logkafka enabled") {
- val cc2 = ClusterConfig("dev","2.0.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, logkafkaEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
+ val cc2 = ClusterConfig("dev","2.1.1",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, logkafkaEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
withKafkaManagerActor(KMUpdateCluster(cc2)) { result: KMCommandResult =>
result.result.get
Thread.sleep(3000)
@@ -167,7 +167,7 @@

test("update cluster tuning") {
val newTuning = getClusterTuning(3, 101, 11, 10000, 10000, 1)
- val cc2 = ClusterConfig("dev","2.0.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, logkafkaEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false,
+ val cc2 = ClusterConfig("dev","2.1.1",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, logkafkaEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false,
tuning = Option(newTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None
)
withKafkaManagerActor(KMUpdateCluster(cc2)) { result: KMCommandResult =>
@@ -184,7 +184,7 @@
}

test("update cluster security protocol") {
- val cc2 = ClusterConfig("dev","2.0.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, logkafkaEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="SASL_PLAINTEXT", saslMechanism = Option("PLAIN"), jaasConfig = Option("blah"))
+ val cc2 = ClusterConfig("dev","2.1.1",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, logkafkaEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="SASL_PLAINTEXT", saslMechanism = Option("PLAIN"), jaasConfig = Option("blah"))
withKafkaManagerActor(KMUpdateCluster(cc2)) { result: KMCommandResult =>
result.result.get
Thread.sleep(3000)
8 changes: 6 additions & 2 deletions test/kafka/manager/model/KafkaVersionTest.scala
@@ -31,7 +31,9 @@ class KafkaVersionTest extends FunSuite {
"1.0.1" -> Kafka_1_0_1,
"1.1.0" -> Kafka_1_1_0,
"1.1.1" -> Kafka_1_1_1,
- "2.0.0" -> Kafka_2_0_0
+ "2.0.0" -> Kafka_2_0_0,
+ "2.1.0" -> Kafka_2_1_0,
+ "2.1.1" -> Kafka_2_1_1
)

test("apply method: supported version.") {
@@ -69,7 +71,9 @@
("1.0.1","1.0.1"),
("1.1.0","1.1.0"),
("1.1.1","1.1.1"),
- ("2.0.0","2.0.0")
+ ("2.0.0","2.0.0"),
+ ("2.1.0","2.1.0"),
+ ("2.1.1","2.1.1")
)
assertResult(expected)(KafkaVersion.formSelectList)
}
18 changes: 17 additions & 1 deletion test/kafka/manager/utils/TestClusterConfig.scala
@@ -197,12 +197,28 @@ class TestClusterConfig extends FunSuite with Matchers {
assert(cc == deserialize.get)
}

test("serialize and deserialize 2.0.0") {
val cc = ClusterConfig("qa", "2.0.0", "localhost:2181", jmxEnabled = false, pollConsumers = true, filterConsumers = true, activeOffsetCacheEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = None, securityProtocol = "SASL_PLAINTEXT", saslMechanism = Option("PLAIN"), jaasConfig = Option("blah"))
val serialize: String = ClusterConfig.serialize(cc)
val deserialize = ClusterConfig.deserialize(serialize)
assert(deserialize.isSuccess === true)
assert(cc == deserialize.get)
}

test("serialize and deserialize 2.1.0") {
val cc = ClusterConfig("qa", "2.1.0", "localhost:2181", jmxEnabled = false, pollConsumers = true, filterConsumers = true, activeOffsetCacheEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = None, securityProtocol = "SASL_PLAINTEXT", saslMechanism = Option("PLAIN"), jaasConfig = Option("blah"))
val serialize: String = ClusterConfig.serialize(cc)
val deserialize = ClusterConfig.deserialize(serialize)
assert(deserialize.isSuccess === true)
assert(cc == deserialize.get)
}

test("serialize and deserialize 2.1.1") {
val cc = ClusterConfig("qa", "2.1.1", "localhost:2181", jmxEnabled = false, pollConsumers = true, filterConsumers = true, activeOffsetCacheEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = None, securityProtocol = "SASL_PLAINTEXT", saslMechanism = Option("PLAIN"), jaasConfig = Option("blah"))
val serialize: String = ClusterConfig.serialize(cc)
val deserialize = ClusterConfig.deserialize(serialize)
assert(deserialize.isSuccess === true)
assert(cc == deserialize.get)
}

}