Conflicts caused by the use of Kafka and HBase
When I use Spark Streaming to read from a Kafka cluster that requires SASL authentication and then write the data to HBase, HBase throws the following error:
java.io.IOException: java.lang.reflect.InvocationTargetException
at org.apache.hadoop.hbase.client.ConnectionFactory.createConnection(ConnectionFactory.java:240)
at org.apache.hadoop.hbase.client.ConnectionFactory.createConnection(ConnectionFactory.java:218)
at org.apache.hadoop.hbase.client.ConnectionFactory.createConnection(ConnectionFactory.java:181)
at com.xueersi.datamining.ups.database.implement.HbaseClient.connect(HbaseClient.scala:91)
at com.xueersi.datamining.ups.stream.start.BaseInfoLogAnalysisStart$$anonfun$main$1$$anonfun$apply$2.apply(BaseInfoLogAnalysisStart.scala:78)
at com.xueersi.datamining.ups.stream.start.BaseInfoLogAnalysisStart$$anonfun$main$1$$anonfun$apply$2.apply(BaseInfoLogAnalysisStart.scala:75)
at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1$$anonfun$apply$29.apply(RDD.scala:925)
at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1$$anonfun$apply$29.apply(RDD.scala:925)
at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1956)
at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1956)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
at org.apache.spark.scheduler.Task.run(Task.scala:99)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:325)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.hadoop.hbase.client.ConnectionFactory.createConnection(ConnectionFactory.java:238)
... 15 more
Caused by: java.lang.ExceptionInInitializerError
at org.apache.hadoop.hbase.ClusterId.parseFrom(ClusterId.java:64)
at org.apache.hadoop.hbase.zookeeper.ZKClusterId.readClusterIdZNode(ZKClusterId.java:75)
at org.apache.hadoop.hbase.client.ZooKeeperRegistry.getClusterId(ZooKeeperRegistry.java:105)
at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.retrieveClusterId(ConnectionManager.java:931)
at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.<init>(ConnectionManager.java:658)
... 20 more
Caused by: java.lang.NullPointerException
at org.apache.kafka.common.security.plain.PlainSaslServer$PlainSaslServerFactory.getMechanismNames(PlainSaslServer.java:163)
at org.apache.hadoop.security.SaslRpcServer$FastSaslServerFactory.<init>(SaslRpcServer.java:381)
at org.apache.hadoop.security.SaslRpcServer.init(SaslRpcServer.java:186)
at org.apache.hadoop.ipc.RPC.getProtocolProxy(RPC.java:570)
at org.apache.hadoop.hdfs.NameNodeProxies.createNNProxyWithClientProtocol(NameNodeProxies.java:418)
at org.apache.hadoop.hdfs.NameNodeProxies.createNonHAProxy(NameNodeProxies.java:314)
at org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider$DefaultProxyFactory.createProxy(ConfiguredFailoverProxyProvider.java:68)
at org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider.getProxy(ConfiguredFailoverProxyProvider.java:152)
at org.apache.hadoop.io.retry.RetryInvocationHandler.<init>(RetryInvocationHandler.java:75)
at org.apache.hadoop.io.retry.RetryInvocationHandler.<init>(RetryInvocationHandler.java:66)
at org.apache.hadoop.io.retry.RetryProxy.create(RetryProxy.java:58)
at org.apache.hadoop.hdfs.NameNodeProxies.createProxy(NameNodeProxies.java:181)
at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:762)
at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:693)
at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:158)
at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2816)
at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:98)
at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2853)
at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2835)
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:387)
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:186)
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:371)
at org.apache.hadoop.fs.Path.getFileSystem(Path.java:296)
at org.apache.hadoop.hbase.util.DynamicClassLoader.initTempDir(DynamicClassLoader.java:120)
at org.apache.hadoop.hbase.util.DynamicClassLoader.<init>(DynamicClassLoader.java:98)
at org.apache.hadoop.hbase.protobuf.ProtobufUtil.<clinit>(ProtobufUtil.java:246)
... 25 more
But when I read from another Kafka cluster (one without SASL authentication), HBase works fine.
In addition, HBase requires Kerberos authentication.
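For context, a Kerberos-authenticated HBase connection is typically set up along these lines. This is a minimal sketch, not my actual HbaseClient.scala; the principal and keytab path are placeholders, and ConnectionFactory.createConnection is the call that fails in the trace above:

import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.ConnectionFactory
import org.apache.hadoop.security.UserGroupInformation

object KerberosHBaseConnect {
  def main(args: Array[String]): Unit = {
    val conf = HBaseConfiguration.create()
    // Standard settings for a Kerberos-secured cluster.
    conf.set("hadoop.security.authentication", "kerberos")
    conf.set("hbase.security.authentication", "kerberos")
    UserGroupInformation.setConfiguration(conf)
    // Placeholder principal and keytab path.
    UserGroupInformation.loginUserFromKeytab("user@EXAMPLE.COM", "/etc/security/user.keytab")
    // Creating the connection is what fails: it reads the cluster id from
    // ZooKeeper and touches HDFS, which initializes Hadoop's SASL layer.
    val connection = ConnectionFactory.createConnection(conf)
    try println(s"connected: ${!connection.isClosed}")
    finally connection.close()
  }
}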
I suspect there is a conflict between Kafka's SASL authentication and HBase's Kerberos authentication.
Can anyone give me some advice?
apache-spark apache-kafka hbase
asked Nov 15 '18 at 14:15 by there · edited Nov 19 '18 at 12:21
Please paste the error so that other people can search for it. You're much more likely to receive an answer that way. – Ben Watson, Nov 16 '18 at 9:19
1 Answer
I seem to have found the answer: https://issues.apache.org/jira/browse/KAFKA-5294
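In short: when Hadoop's SaslRpcServer$FastSaslServerFactory is constructed, it enumerates every registered SaslServerFactory and calls getMechanismNames with a null properties map; Kafka's PlainSaslServerFactory up to 0.10.2.x dereferences that map without a null check, which is the NullPointerException at the bottom of the trace. A minimal sketch of the mechanism, assuming a pre-0.11 kafka-clients jar on the classpath (calling PlainSaslServerProvider.initialize() by hand stands in for what the Kafka client does once SASL is configured):

import javax.security.sasl.Sasl
import org.apache.kafka.common.security.plain.PlainSaslServerProvider

object SaslNpeDemo {
  def main(args: Array[String]): Unit = {
    // Register Kafka's SASL/PLAIN JCA provider, as the Kafka client does
    // internally when a SASL channel is set up.
    PlainSaslServerProvider.initialize()

    // Hadoop's FastSaslServerFactory does essentially this in its
    // constructor: ask every registered factory for its mechanism names
    // with a NULL props map.
    val factories = Sasl.getSaslServerFactories()
    while (factories.hasMoreElements) {
      val factory = factories.nextElement()
      // Kafka <= 0.10.2.x dereferences `props` without a null check, so
      // this call throws NullPointerException for Kafka's factory
      // (KAFKA-5294); 0.11.0.0 added the missing null check.
      val mechs = factory.getMechanismNames(null)
      println(s"${factory.getClass.getName} -> ${mechs.mkString(", ")}")
    }
  }
}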
I then manually specified the Kafka dependency (the version I had been using was 0.10.2.1):
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.11</artifactId>
<version>0.11.0.0</version>
<scope>compile</scope>
</dependency>
It works.
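One quick way to confirm at runtime that the override took effect (AppInfoParser is a small utility class that ships inside kafka-clients; using it here is just a convenience for checking, not part of the fix):

import org.apache.kafka.common.utils.AppInfoParser

object KafkaVersionCheck {
  def main(args: Array[String]): Unit = {
    // Should print 0.11.0.0 (or later) once the dependency override is in
    // effect on the executor classpath.
    println(s"kafka-clients version: ${AppInfoParser.getVersion}")
  }
}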
answered Nov 22 '18 at 9:45 by there