Cannot start secure DataNode due to incorrect config?


All principals are created with their own keytabs. The REALM is HADOOP.COM and the HOSTNAME is server.hadoop.com. SSH keys are created; I copied the public key into authorized_keys and gave it read/write permission for the owner (chmod 600). I installed the Unlimited Strength JCE policy files for the AES-256 encryption type, and I use JSVC to run the DataNode as root.
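For reference, the principals and keytabs were created along these lines (a sketch with kadmin.local; my exact commands may have differed slightly):

kadmin.local -q "addprinc -randkey hdfs/[email protected]"
kadmin.local -q "addprinc -randkey dn/[email protected]"
kadmin.local -q "addprinc -randkey HTTP/[email protected]"
# export each principal into its own keytab under /etc/security/keytabs
kadmin.local -q "xst -k /etc/security/keytabs/hdfs.service.keytab hdfs/[email protected]"
kadmin.local -q "xst -k /etc/security/keytabs/dn.service.keytab dn/[email protected]"
kadmin.local -q "xst -k /etc/security/keytabs/http.service.keytab HTTP/[email protected]"
# sanity check: list the keytab entries and try a login with one of them
klist -kt /etc/security/keytabs/dn.service.keytab
kinit -kt /etc/security/keytabs/dn.service.keytab dn/[email protected]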

core-site.xml

<property>
  <name>fs.default.name</name>
  <value>hdfs://localhost:9000</value>
</property>
<property>
  <name>hadoop.security.authentication</name>
  <value>kerberos</value>
</property>
<property>
  <name>hadoop.security.authorization</name>
  <value>true</value>
</property>
<property>
  <name>hadoop.rpc.protection</name>
  <value>privacy</value>
</property>
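(For what it's worth, a quick way to confirm these values are actually being picked up on the node is hdfs getconf:)

hdfs getconf -confKey hadoop.security.authentication   # should print kerberos
hdfs getconf -confKey hadoop.rpc.protection            # should print privacy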

hdfs-site.xml

<property>
 <name>dfs.replication</name>
 <value>2</value>
</property>

<property>
  <name>dfs.name.dir</name>
  <value>/home/hdoop/hadoopdata/hdfs/namenode</value>
</property>

<property>
  <name>dfs.data.dir</name>
  <value>/home/hdoop/hadoopdata/hdfs/datanode</value>
</property>

<!-- General HDFS security config -->
<property>
   <name>dfs.block.access.token.enable</name>
   <value>true</value>
</property>
<!-- NameNode security config -->
<property>
   <name>dfs.https.address</name>
   <value>server.hadoop.com:50470</value>
</property>
<property>
   <name>dfs.https.port</name>
   <value>50470</value>
</property>
<property>
   <name>dfs.namenode.keytab.file</name>
   <value>/etc/security/keytabs/hdfs.service.keytab</value> <!-- path to the HDFS keytab -->
</property>
<property>
   <name>dfs.namenode.kerberos.principal</name>
   <value>hdfs/[email protected]</value>
</property>
<property>
  <name>dfs.namenode.kerberos.internal.spnego.principal</name>
  <value>HTTP/[email protected]</value>
</property>

<!-- secondaryNameNode security config -->
<property>
  <name>dfs.secondary.namenode.keytab.file</name>
  <value>/etc/security/keytabs/hdfs.service.keytab</value>
</property>
<property>
  <name>dfs.secondary.namenode.kerberos.principal</name>
  <value>hdfs/[email protected]</value>
</property>
<property>
  <name>dfs.secondary.namenode.kerberos.internal.spnego.principal</name>
  <value>HTTP/[email protected]</value>
</property>

<!-- DataNode security config -->
<property>
   <name>dfs.datanode.data.dir.perm</name>
   <value>750</value>
</property>

<property>
   <name>dfs.datanode.address</name>
   <value>0.0.0.0:1004</value>
</property>
<property>
   <name>dfs.datanode.http.address</name>
   <value>0.0.0.0:1006</value>
</property>


<property>
   <name>dfs.datanode.keytab.file</name>
   <value>/etc/security/keytabs/dn.service.keytab</value>  <!-- path to the DataNode keytab -->
</property>
<property>
   <name>dfs.datanode.kerberos.principal</name>
   <value>dn/[email protected]</value>
</property>
<property>
  <name>dfs.web.authentication.kerberos.principal</name>
  <value>HTTP/[email protected]</value>
</property>
<property>
  <name>dfs.web.authentication.kerberos.keytab</name>
  <value>/etc/security/keytabs/http.service.keytab</value>
</property>
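Since dfs.datanode.address and dfs.datanode.http.address above use privileged ports (1004 and 1006, i.e. below 1024), the DataNode has to be started as root via jsvc, which is why I set up JSVC. A quick sanity check before starting it (sketch):

ss -tlnp | grep -E ':1004|:1006'                     # nothing should be listening on these ports yet
klist -kt /etc/security/keytabs/dn.service.keytab    # entries should match dfs.datanode.kerberos.principal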

hadoop-env.sh

# Set Hadoop-specific environment variables here.
export HDFS_NAMENODE_USER=hdoop
export HDFS_DATANODE_USER=hdoop
export HDFS_SECONDARYNAMENODE_USER=hdoop


export HADOOP_OS_TYPE=${HADOOP_OS_TYPE:-$(uname -s)}
export JAVA_HOME=/usr/lib/jvm/java-11-openjdk-amd64


export JSVC_HOME=/home/hdoop  # this is where I have my commons-daemon-1.3.1-src
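For comparison, the Hadoop docs describe the jsvc-related part of hadoop-env.sh for a secure DataNode roughly like this (a sketch, not my exact file; the jsvc path below is an assumption about where the commons-daemon build puts the binary):

# user the DataNode drops to after jsvc has bound the privileged ports
export HDFS_DATANODE_SECURE_USER=hdoop
# directory containing the built jsvc binary (assumed build location inside the source tree)
export JSVC_HOME=/home/hdoop/commons-daemon-1.3.1-src/src/native/unix

The DataNode is then started as root so jsvc can bind ports 1004/1006, e.g. with something like sudo -E $HADOOP_HOME/bin/hdfs --daemon start datanode.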