@@ -159,6 +159,7 @@ private void initHadoopAuthenticator() {
}
if (this.hiveMetastoreAuthenticationType.equalsIgnoreCase("kerberos")) {
hiveConf.set("hadoop.security.authentication", "kerberos");
hiveConf.set("hive.metastore.sasl.enabled", "true");
KerberosAuthenticationConfig authenticationConfig = new KerberosAuthenticationConfig(
this.hiveMetastoreClientPrincipal, this.hiveMetastoreClientKeytab, hiveConf);
this.hmsAuthenticator = HadoopAuthenticator.getHadoopAuthenticator(authenticationConfig);
@@ -175,6 +176,7 @@ private void initHadoopAuthenticator() {
KerberosAuthenticationConfig authenticationConfig = new KerberosAuthenticationConfig(
this.hdfsKerberosPrincipal, this.hdfsKerberosKeytab, hiveConf);
hiveConf.set("hadoop.security.authentication", "kerberos");
hiveConf.set("hive.metastore.sasl.enabled", "true");
this.hmsAuthenticator = HadoopAuthenticator.getHadoopAuthenticator(authenticationConfig);
return;
}
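The pattern these two hunks enforce — whenever Kerberos is the metastore authentication type, SASL must also be enabled on the client connection — boils down to the following minimal sketch. The class name, principal, and keytab path are placeholders, not the Doris authenticator wiring itself:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.UserGroupInformation;

    public class KerberosHmsLoginSketch {
        public static void main(String[] args) throws IOException {
            Configuration conf = new Configuration();
            // Kerberos selects the authentication mechanism; SASL makes the
            // metastore thrift client negotiate it instead of opening a plain,
            // unauthenticated transport that a kerberized HMS would reject.
            conf.set("hadoop.security.authentication", "kerberos");
            conf.set("hive.metastore.sasl.enabled", "true");
            UserGroupInformation.setConfiguration(conf);
            // Placeholder credentials; in the hunks above these come from
            // hiveMetastoreClientPrincipal / hiveMetastoreClientKeytab.
            UserGroupInformation ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
                    "hive/_HOST@EXAMPLE.COM", "/etc/security/keytabs/hive.keytab");
            System.out.println("logged in as " + ugi.getUserName());
        }
    }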
@@ -174,6 +174,7 @@ private void initBackendConfigProperties() {
}
props.put("hdfs.security.authentication", hdfsAuthenticationType);
if ("kerberos".equalsIgnoreCase(hdfsAuthenticationType)) {
props.put("hadoop.security.authentication", "kerberos");
props.put("hadoop.kerberos.principal", hdfsKerberosPrincipal);
props.put("hadoop.kerberos.keytab", hdfsKerberosKeytab);
}
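Read together with the tests further down, this hunk completes the translation from the new user-facing hdfs.* catalog properties to the Hadoop-level keys handed to the backend. A rough sketch of the mapping, with placeholder values:

    // user-facing catalog property                      backend property
    // hdfs.authentication.type = kerberos           ->  hdfs.security.authentication = kerberos
    //                                                   hadoop.security.authentication = kerberos
    // hdfs.authentication.kerberos.principal = <p>  ->  hadoop.kerberos.principal = <p>
    // hdfs.authentication.kerberos.keytab = <path>  ->  hadoop.kerberos.keytab = <path>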
@@ -19,6 +19,8 @@

import org.apache.doris.common.Config;
import org.apache.doris.common.UserException;
import org.apache.doris.common.security.authentication.HadoopKerberosAuthenticator;
import org.apache.doris.common.security.authentication.HadoopSimpleAuthenticator;
import org.apache.doris.datasource.property.storage.exception.StoragePropertiesException;

import com.google.common.collect.Maps;
@@ -38,12 +40,13 @@ public void testBasicHdfsCreate() throws UserException {
// Test 1: Check default authentication type (should be "simple")
Map<String, String> simpleHdfsProperties = new HashMap<>();
simpleHdfsProperties.put("uri", "hdfs://test/1.orc");
Assertions.assertEquals(HdfsProperties.class, StorageProperties.createPrimary(simpleHdfsProperties).getClass());
Map<String, String> origProps = createBaseHdfsProperties();
List<StorageProperties> storageProperties = StorageProperties.createAll(origProps);
HdfsProperties hdfsProperties = (HdfsProperties) storageProperties.get(0);
Configuration conf = hdfsProperties.getHadoopStorageConfig();
Assertions.assertEquals("simple", conf.get("hadoop.security.authentication"));
Assertions.assertEquals(HadoopSimpleAuthenticator.class, hdfsProperties.getHadoopAuthenticator().getClass());

// Test 2: Kerberos without necessary configurations (should throw exception)
origProps.put("hdfs.authentication.type", "kerberos");
@@ -61,6 +64,7 @@
Assertions.assertEquals("kerberos", configuration.get("hdfs.security.authentication"));
Assertions.assertEquals("hadoop", configuration.get("hadoop.kerberos.principal"));
Assertions.assertEquals("keytab", configuration.get("hadoop.kerberos.keytab"));
Assertions.assertEquals(HadoopKerberosAuthenticator.class, properties.hadoopAuthenticator.getClass());
}

@Test
@@ -126,12 +126,18 @@ suite("test_paimon_hms_catalog", "p2,external,paimon,new_catalog_property") {
"hadoop.kerberos.principal"="hive/[email protected]",
"hadoop.kerberos.keytab" = "${keytab_root_dir}/hive-presto-master.keytab"
"""

String hdfs_new_kerberos_properties = """
"fs.defaultFS" = "hdfs://${extHiveHmsHost}:8520",
"hdfs.authentication.type" = "kerberos",
"hdfs.authentication.kerberos.principal"="hive/[email protected]",
"hdfs.authentication.kerberos.keytab" = "${keytab_root_dir}/hive-presto-master.keytab"
"""
String hms_kerberos_new_prop = """
"hive.metastore.uris" = "thrift://${extHiveHmsHost}:9583",
"hive.metastore.client.principal"="hive/[email protected]",
"hive.metastore.client.keytab" = "${keytab_root_dir}/hive-presto-master.keytab",
"hive.metastore.service.principal" = "hive/[email protected]",
"hive.metastore.sasl.enabled " = "true",
"hadoop.security.auth_to_local" = "RULE:[2:\\\$1@\\\$0](.*@LABS.TERADATA.COM)s/@.*//
RULE:[2:\\\$1@\\\$0](.*@OTHERLABS.TERADATA.COM)s/@.*//
RULE:[2:\\\$1@\\\$0](.*@OTHERREALM.COM)s/@.*//
@@ -182,6 +188,7 @@ suite("test_paimon_hms_catalog", "p2,external,paimon,new_catalog_property") {
"""
testQuery(paimon_hms_catalog_properties + hdfs_warehouse_properties + hdfs_storage_properties, "hdfs", "hdfs_db")
testQuery(paimon_hms_type_prop + hdfs_warehouse_properties + hms_kerberos_new_prop + hdfs_kerberos_properties, "hdfs_kerberos", "hdfs_db")
testQuery(paimon_hms_type_prop + hdfs_warehouse_properties + hms_kerberos_new_prop + hdfs_new_kerberos_properties, "hdfs_new_kerberos", "hdfs_db")
testQuery(paimon_hms_catalog_properties + oss_warehouse_properties + oss_storage_properties, "oss", "ali_db")
testQuery(paimon_hms_catalog_properties + obs_warehouse_properties + obs_storage_properties, "obs", "hw_db")
testQuery(paimon_hms_catalog_properties + cos_warehouse_properties + cos_storage_properties, "cos", "tx_db")
@@ -209,6 +216,7 @@ suite("test_paimon_hms_catalog", "p2,external,paimon,new_catalog_property") {
"""
testQuery(paimon_hms_catalog_properties + paimon_fs_hdfs_support + hdfs_warehouse_properties + hdfs_storage_properties, "support_hdfs", "hdfs_db")
testQuery(paimon_hms_type_prop + paimon_fs_hdfs_support + hdfs_warehouse_properties + hms_kerberos_new_prop + hdfs_kerberos_properties, "support_hdfs_kerberos", "hdfs_db")
testQuery(paimon_hms_type_prop + paimon_fs_hdfs_support + hdfs_warehouse_properties + hms_kerberos_new_prop + hdfs_new_kerberos_properties, "support_hdfs_new_kerberos", "hdfs_db")
testQuery(paimon_hms_catalog_properties + paimon_fs_oss_support + oss_warehouse_properties + oss_storage_properties, "support_oss", "ali_db")
testQuery(paimon_hms_catalog_properties + paimon_fs_obs_support + obs_warehouse_properties + obs_storage_properties, "support_obs", "hw_db")
testQuery(paimon_hms_catalog_properties + paimon_fs_cos_support + cos_warehouse_properties + cos_storage_properties, "support_cos", "tx_db")
@@ -412,6 +412,13 @@ suite("hive_on_hms_and_dlf", "p2,external,new_catalog_property") {
"hadoop.kerberos.principal"="hive/[email protected]",
"hadoop.kerberos.keytab" = "${keytab_root_dir}/hive-presto-master.keytab"
"""
String hdfs_new_kerberos_properties = """
"fs.defaultFS" = "hdfs://${externalEnvIp}:8520",
"hdfs.authentication.type" = "kerberos",
"hdfs.authentication.kerberos.principal"="hive/[email protected]",
"hdfs.authentication.kerberos.keytab" = "${keytab_root_dir}/hive-presto-master.keytab"
"""

String dlf_access_key = context.config.otherConfigs.get("dlf_access_key")
String dlf_secret_key = context.config.otherConfigs.get("dlf_secret_key")
/**************** DLF *******************/
@@ -432,7 +439,6 @@ suite("hive_on_hms_and_dlf", "p2,external,new_catalog_property") {
RULE:[2:\\\$1@\\\$0](.*@OTHERLABS.TERADATA.COM)s/@.*//
RULE:[2:\\\$1@\\\$0](.*@OTHERREALM.COM)s/@.*//
DEFAULT",
"hive.metastore.sasl.enabled " = "true",
"hive.metastore.kerberos.principal" = "hive/[email protected]",
"""

@@ -442,7 +448,6 @@ suite("hive_on_hms_and_dlf", "p2,external,new_catalog_property") {
"hive.metastore.client.principal"="hive/[email protected]",
"hive.metastore.client.keytab" = "${keytab_root_dir}/hive-presto-master.keytab",
"hive.metastore.service.principal" = "hive/[email protected]",
"hive.metastore.sasl.enabled " = "true",
"hive.metastore.authentication.type"="kerberos",
"hadoop.security.auth_to_local" = "RULE:[2:\\\$1@\\\$0](.*@LABS.TERADATA.COM)s/@.*//
RULE:[2:\\\$1@\\\$0](.*@OTHERLABS.TERADATA.COM)s/@.*//
@@ -538,6 +543,7 @@ suite("hive_on_hms_and_dlf", "p2,external,new_catalog_property") {
db_location = "hdfs://${externalEnvIp}:8520/hive/hms/" + System.currentTimeMillis()

testQueryAndInsert(hms_type_properties + hms_kerberos_new_prop + hdfs_kerberos_properties, "hive_hms_hdfs_kerberos_test", db_location)
testQueryAndInsert(hms_type_properties + hms_kerberos_new_prop + hdfs_new_kerberos_properties, "hive_hms_hdfs_new_kerberos_test", db_location)

/**************** DLF *******************/
String dlf_warehouse = "oss://selectdb-qa-datalake-test/hive-dlf-oss-warehouse"
@@ -308,6 +308,15 @@ suite("iceberg_on_hms_and_filesystem_and_dlf", "p2,external,new_catalog_property") {
"hadoop.kerberos.keytab" = "${keytab_root_dir}/hive-presto-master.keytab"
"""

String hdfs_new_kerberos_properties = """
"fs.defaultFS" = "hdfs://${externalEnvIp}:8520",
"io-impl" = "org.apache.doris.datasource.iceberg.fileio.DelegateFileIO",
"hdfs.authentication.type" = "kerberos",
"hdfs.authentication.kerberos.principal"="hive/[email protected]",
"hdfs.authentication.kerberos.keytab" = "${keytab_root_dir}/hive-presto-master.keytab"
"""


String hms_prop = """
'hive.metastore.uris' = 'thrift://${externalEnvIp}:9383',
"""
@@ -321,13 +330,11 @@ suite("iceberg_on_hms_and_filesystem_and_dlf", "p2,external,new_catalog_property") {
RULE:[2:\\\$1@\\\$0](.*@OTHERLABS.TERADATA.COM)s/@.*//
RULE:[2:\\\$1@\\\$0](.*@OTHERREALM.COM)s/@.*//
DEFAULT",
"hive.metastore.sasl.enabled " = "true",
"hive.metastore.kerberos.principal" = "hive/[email protected]",
"""

String hms_kerberos_old_prop_not_include_kerberos_prop = """
"hive.metastore.uris" = "thrift://${externalEnvIp}:9583",
"hive.metastore.sasl.enabled " = "true",
"hive.metastore.kerberos.principal" = "hive/[email protected]",
"""

@@ -336,7 +343,6 @@ suite("iceberg_on_hms_and_filesystem_and_dlf", "p2,external,new_catalog_property") {
"hive.metastore.client.principal"="hive/[email protected]",
"hive.metastore.client.keytab" = "${keytab_root_dir}/hive-presto-master.keytab",
"hive.metastore.service.principal" = "hive/[email protected]",
"hive.metastore.sasl.enabled " = "true",
"hive.metastore.authentication.type"="kerberos",
"hadoop.security.auth_to_local" = "RULE:[2:\\\$1@\\\$0](.*@LABS.TERADATA.COM)s/@.*//
RULE:[2:\\\$1@\\\$0](.*@OTHERLABS.TERADATA.COM)s/@.*//
@@ -437,6 +443,8 @@ suite("iceberg_on_hms_and_filesystem_and_dlf", "p2,external,new_catalog_property") {
testQueryAndInsert(iceberg_hms_type_prop + hms_kerberos_old_prop_not_include_kerberos_prop + warehouse + hdfs_kerberos_properties, "iceberg_hms_on_hdfs_kerberos_old")
//new kerberos
testQueryAndInsert(iceberg_hms_type_prop + hms_kerberos_new_prop + warehouse + hdfs_kerberos_properties, "iceberg_hms_on_hdfs_kerberos_hdfs")
//new hdfs kerberos
testQueryAndInsert(iceberg_hms_type_prop + hms_kerberos_new_prop + warehouse + hdfs_new_kerberos_properties, "iceberg_hms_on_hdfs_new_kerberos_hdfs")


/*--------HMS END-----------*/
@@ -500,6 +508,7 @@ suite("iceberg_on_hms_and_filesystem_and_dlf", "p2,external,new_catalog_property") {
'warehouse' = 'hdfs://${externalEnvIp}:8520/iceberg-fs-hdfs-warehouse',
"""
testQueryAndInsert(iceberg_file_system_catalog_properties + warehouse + hdfs_kerberos_properties, "iceberg_fs_on_hdfs_kerberos")
testQueryAndInsert(iceberg_file_system_catalog_properties + warehouse + hdfs_new_kerberos_properties, "iceberg_fs_on_hdfs_new_kerberos")


/* *//** S3 **/