
Merge branch 'dev' of https://github.com/apache/incubator-dolphinscheduler into dev

break60 4 years ago
parent commit 140ff37dd5

+ 87 - 0
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HiveConfUtils.java

@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.dolphinscheduler.common.utils;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * hive conf utils
+ */
+public class HiveConfUtils {
+
+    private HiveConfUtils() {
+        throw new UnsupportedOperationException("Construct HiveConfUtils");
+    }
+
+    private static class HiveConfHandler {
+        private static HiveConf singleton;
+
+        private static Map<String,Object> hiveConfVars;
+
+        static {
+            singleton = new HiveConf();
+            hiveConfVars = new HashMap<>();
+            Arrays.stream(ConfVars.values()).forEach(confVar -> hiveConfVars.put(confVar.varname,confVar));
+        }
+    }
+
+    /**
+     * get HiveConf instance
+     * @return HiveConf hiveConf
+     */
+    public static HiveConf getInstance() {
+        return HiveConfHandler.singleton;
+    }
+
+    /**
+     * get hive conf vars
+     * @return hive conf vars map
+     */
+    public static Map<String,Object> getHiveConfVars() {
+        return HiveConfHandler.hiveConfVars;
+    }
+
+    /**
+     * Determine if it belongs to a hive conf property
+     * @param conf config
+     * @return boolean result
+     */
+    public static boolean isHiveConfVar(String conf) {
+        // the default hive conf var name
+        String confKey = conf.split("=")[0];
+        Map<String, Object> hiveConfVars = HiveConfUtils.getHiveConfVars();
+        if (hiveConfVars.get(confKey) != null) {
+            return true;
+        }
+
+        // the security authorization hive conf var name
+        HiveConf hiveConf = HiveConfUtils.getInstance();
+        String hiveAuthorizationSqlStdAuthConfigWhitelist = hiveConf.getVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_SQL_STD_AUTH_CONFIG_WHITELIST);
+        Pattern modWhiteListPattern = Pattern.compile(hiveAuthorizationSqlStdAuthConfigWhitelist);
+        Matcher matcher = modWhiteListPattern.matcher(confKey);
+        return matcher.matches();
+    }
+
+}
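
For context, a minimal usage sketch of the new utility (not part of this commit; the class name is illustrative, it assumes the Hive client libraries are on the classpath, and the expected results follow the tests added below):

    import org.apache.dolphinscheduler.common.utils.HiveConfUtils;

    public class HiveConfUtilsExample {
        public static void main(String[] args) {
            // "hive.exec.parallel" is a built-in HiveConf ConfVars entry, so it counts as a hive conf property
            System.out.println(HiveConfUtils.isHiveConfVar("hive.exec.parallel=true"));   // true

            // "tez.queue.name" is not a ConfVars entry, but it matches the SQL-standard
            // authorization config whitelist pattern, so it is still accepted
            System.out.println(HiveConfUtils.isHiveConfVar("tez.queue.name=tezQueue"));   // true

            // plain JDBC session parameters match neither check and stay session vars
            System.out.println(HiveConfUtils.isHiveConfVar("charset=UTF-8"));             // false
        }
    }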

+ 47 - 0
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HiveConfUtilsTest.java

@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.dolphinscheduler.common.utils;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * hive conf utils test
+ */
+public class HiveConfUtilsTest {
+
+    /**
+     * test is hive conf var
+     */
+    @Test
+    public void testIsHiveConfVar() {
+
+        String conf = "hive.exec.script.wrapper=123";
+        boolean hiveConfVar = HiveConfUtils.isHiveConfVar(conf);
+        Assert.assertTrue(hiveConfVar);
+
+        conf = "hive.test.v1=v1";
+        hiveConfVar = HiveConfUtils.isHiveConfVar(conf);
+        Assert.assertFalse(hiveConfVar);
+
+        conf = "tez.queue.name=tezQueue";
+        hiveConfVar = HiveConfUtils.isHiveConfVar(conf);
+        Assert.assertTrue(hiveConfVar);
+
+    }
+}

+ 3 - 10
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/HiveDataSource.java

@@ -20,14 +20,10 @@ package org.apache.dolphinscheduler.dao.datasource;
 import org.apache.dolphinscheduler.common.Constants;
 import org.apache.dolphinscheduler.common.enums.DbType;
 import org.apache.dolphinscheduler.common.utils.CommonUtils;
+import org.apache.dolphinscheduler.common.utils.HiveConfUtils;
 import org.apache.dolphinscheduler.common.utils.StringUtils;
 
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-
 import java.sql.Connection;
-import java.util.Set;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
 
 /**
  * data source of hive
@@ -73,12 +69,8 @@ public class HiveDataSource extends BaseDataSource {
 
         String[] otherArray = otherParams.split(";", -1);
 
-        // get the default hive conf var name
-        Set<String> hiveConfSet = Stream.of(ConfVars.values()).map(confVars -> confVars.varname)
-            .collect(Collectors.toSet());
-
         for (String conf : otherArray) {
-            if (hiveConfSet.contains(conf.split("=")[0])) {
+            if (HiveConfUtils.isHiveConfVar(conf)) {
                 hiveConfListSb.append(conf).append(";");
             } else {
                 sessionVarListSb.append(conf).append(";");
@@ -107,4 +99,5 @@ public class HiveDataSource extends BaseDataSource {
         CommonUtils.loadKerberosConf();
         return super.getConnection();
     }
+
 }

+ 47 - 43
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/datasource/HiveDataSourceTest.java

@@ -25,61 +25,65 @@ import org.junit.Test;
  */
 public class HiveDataSourceTest {
 
-  @Test
-  public void testfilterOther() {
-    BaseDataSource hiveDataSource = new HiveDataSource();
+    @Test
+    public void testFilterOther() {
+        BaseDataSource hiveDataSource = new HiveDataSource();
 
-    // not contain hive_site_conf
-    String other = hiveDataSource.filterOther("charset=UTF-8");
-    Assert.assertEquals("charset=UTF-8", other);
+        // not contain hive_site_conf
+        String other = hiveDataSource.filterOther("charset=UTF-8");
+        Assert.assertEquals("charset=UTF-8", other);
 
-    // not contain
-    other = hiveDataSource.filterOther("");
-    Assert.assertEquals("", other);
+        // not contain
+        other = hiveDataSource.filterOther("");
+        Assert.assertEquals("", other);
 
-    // only contain hive_site_conf
-    other = hiveDataSource.filterOther("hive.mapred.mode=strict");
-    Assert.assertEquals("?hive.mapred.mode=strict", other);
+        // only contain hive_site_conf
+        other = hiveDataSource.filterOther("hive.mapred.mode=strict");
+        Assert.assertEquals("?hive.mapred.mode=strict", other);
 
-    // contain hive_site_conf at the first
-    other = hiveDataSource.filterOther("hive.mapred.mode=strict;charset=UTF-8");
-    Assert.assertEquals("charset=UTF-8?hive.mapred.mode=strict", other);
+        // contain hive_site_conf at the first
+        other = hiveDataSource.filterOther("hive.mapred.mode=strict;charset=UTF-8");
+        Assert.assertEquals("charset=UTF-8?hive.mapred.mode=strict", other);
 
-    // contain hive_site_conf in the middle
-    other = hiveDataSource.filterOther("charset=UTF-8;hive.mapred.mode=strict;foo=bar");
-    Assert.assertEquals("charset=UTF-8;foo=bar?hive.mapred.mode=strict", other);
+        // contain hive_site_conf in the middle
+        other = hiveDataSource.filterOther("charset=UTF-8;hive.mapred.mode=strict;foo=bar");
+        Assert.assertEquals("charset=UTF-8;foo=bar?hive.mapred.mode=strict", other);
 
-    // contain hive_site_conf at the end
-    other = hiveDataSource.filterOther("charset=UTF-8;foo=bar;hive.mapred.mode=strict");
-    Assert.assertEquals("charset=UTF-8;foo=bar?hive.mapred.mode=strict", other);
+        // contain hive_site_conf at the end
+        other = hiveDataSource.filterOther("charset=UTF-8;foo=bar;hive.mapred.mode=strict");
+        Assert.assertEquals("charset=UTF-8;foo=bar?hive.mapred.mode=strict", other);
 
-    // contain multi hive_site_conf
-    other = hiveDataSource.filterOther("charset=UTF-8;foo=bar;hive.mapred.mode=strict;hive.exec.parallel=true");
-    Assert.assertEquals("charset=UTF-8;foo=bar?hive.mapred.mode=strict;hive.exec.parallel=true", other);
-  }
+        // contain multi hive_site_conf
+        other = hiveDataSource.filterOther("charset=UTF-8;foo=bar;hive.mapred.mode=strict;hive.exec.parallel=true");
+        Assert.assertEquals("charset=UTF-8;foo=bar?hive.mapred.mode=strict;hive.exec.parallel=true", other);
 
-  @Test
-  public void testGetHiveJdbcUrlOther() {
+        // the security authorization hive conf var
+        other = hiveDataSource.filterOther("tez.queue.name=tezTest");
+        Assert.assertEquals("?tez.queue.name=tezTest", other);
 
-    BaseDataSource hiveDataSource = new HiveDataSource();
-    hiveDataSource.setAddress("jdbc:hive2://127.0.0.1:10000");
-    hiveDataSource.setDatabase("test");
-    hiveDataSource.setPassword("123456");
-    hiveDataSource.setUser("test");
-    Assert.assertEquals("jdbc:hive2://127.0.0.1:10000/test", hiveDataSource.getJdbcUrl());
+    }
 
-    hiveDataSource.setOther("charset=UTF-8;hive.mapred.mode=strict;hive.server2.thrift.http.path=hs2");
+    @Test
+    public void testGetHiveJdbcUrlOther() {
 
-    Assert.assertEquals(
-        "jdbc:hive2://127.0.0.1:10000/test;charset=UTF-8?hive.mapred.mode=strict;hive.server2.thrift.http.path=hs2",
-        hiveDataSource.getJdbcUrl());
+        BaseDataSource hiveDataSource = new HiveDataSource();
+        hiveDataSource.setAddress("jdbc:hive2://127.0.0.1:10000");
+        hiveDataSource.setDatabase("test");
+        hiveDataSource.setPassword("123456");
+        hiveDataSource.setUser("test");
+        Assert.assertEquals("jdbc:hive2://127.0.0.1:10000/test", hiveDataSource.getJdbcUrl());
 
-    hiveDataSource.setOther("hive.mapred.mode=strict;hive.server2.thrift.http.path=hs2");
+        hiveDataSource.setOther("charset=UTF-8;hive.mapred.mode=strict;hive.server2.thrift.http.path=hs2");
 
-    Assert.assertEquals(
-        "jdbc:hive2://127.0.0.1:10000/test;?hive.mapred.mode=strict;hive.server2.thrift.http.path=hs2",
-        hiveDataSource.getJdbcUrl());
+        Assert.assertEquals(
+                "jdbc:hive2://127.0.0.1:10000/test;charset=UTF-8?hive.mapred.mode=strict;hive.server2.thrift.http.path=hs2",
+                hiveDataSource.getJdbcUrl());
 
-  }
+        hiveDataSource.setOther("hive.mapred.mode=strict;hive.server2.thrift.http.path=hs2");
+        Assert.assertEquals(
+                "jdbc:hive2://127.0.0.1:10000/test;?hive.mapred.mode=strict;hive.server2.thrift.http.path=hs2",
+                hiveDataSource.getJdbcUrl());
 
-}
+    }
+
+}

+ 2 - 3
dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/kinship/index.vue

@@ -99,11 +99,10 @@
         })
       },
       async onChange (item) {
-        const { value, label } = item || {}
         this.isLoading = true
-        this.currentItemName = label
+        this.currentItemName = item
         try {
-          await this.getWorkFlowDAG(value)
+          await this.getWorkFlowDAG(item)
         } catch (error) {
           this.$message.error(error.msg || '')
         }

+ 1 - 0
pom.xml

@@ -790,6 +790,7 @@
                         <include>**/common/utils/HadoopUtilsTest.java</include>
                         <include>**/common/utils/HttpUtilsTest.java</include>
                         <include>**/common/utils/KerberosHttpClientTest.java</include>
+                        <include>**/common/utils/HiveConfUtilsTest.java</include>
                         <include>**/common/ConstantsTest.java</include>
                         <include>**/common/utils/HadoopUtils.java</include>
                         <include>**/common/utils/RetryerUtilsTest.java</include>