refactor hdfs output (branch: refactor_hdfs_output, ref: 159/head)
author: coderzc <zc1217zc@126.com>
Thu, 2 Dec 2021 02:36:51 +0000 (10:36 +0800)
committer: coderzc <zc1217zc@126.com>
Thu, 2 Dec 2021 02:36:51 +0000 (10:36 +0800)
computer-core/src/main/java/com/baidu/hugegraph/computer/core/config/ComputerOptions.java
computer-core/src/main/java/com/baidu/hugegraph/computer/core/output/hdfs/HdfsOutput.java
computer-core/src/main/java/com/baidu/hugegraph/computer/core/output/hdfs/HdfsOutputMerger.java
computer-core/src/main/java/com/baidu/hugegraph/computer/core/util/HdfsUtil.java [new file with mode: 0644]
computer-test/src/main/java/com/baidu/hugegraph/computer/suite/unit/UnitTestSuite.java

index 5ce76fff8e5791698cf706a7055310656104e626..7f62eecbbf916ae6b7ec2694cebca8867ea94ce8 100644 (file)
@@ -183,6 +183,22 @@ public class ComputerOptions extends OptionHolder {
                     4
             );
 
+    public static final ConfigOption<String> HDFS_URL =
+            new ConfigOption<>(
+                    "hdfs.url",
+                    "The hdfs url.",
+                    disallowEmpty(),
+                    "hdfs://127.0.0.1:9000"
+            );
+
+    public static final ConfigOption<String> HDFS_USER =
+            new ConfigOption<>(
+                    "hdfs.user",
+                    "The hdfs user.",
+                    disallowEmpty(),
+                    "hadoop"
+            );
+
     public static final ConfigOption<Class<?>> OUTPUT_CLASS =
             new ConfigOption<>(
                     "output.output_class",
@@ -274,22 +290,6 @@ public class ComputerOptions extends OptionHolder {
                     10
             );
 
-    public static final ConfigOption<String> OUTPUT_HDFS_URL =
-            new ConfigOption<>(
-                    "output.hdfs_url",
-                    "The hdfs url of output.",
-                    disallowEmpty(),
-                    "hdfs://127.0.0.1:9000"
-            );
-
-    public static final ConfigOption<String> OUTPUT_HDFS_USER =
-            new ConfigOption<>(
-                    "output.hdfs_user",
-                    "The hdfs user of output.",
-                    disallowEmpty(),
-                    "hadoop"
-            );
-
     public static final ConfigOption<Short> OUTPUT_HDFS_REPLICATION =
             new ConfigOption<>(
                     "output.hdfs_replication",
index d335e80d033314c6f8787ab61bd4d92b36e67bad..0ae317550372ec31d768df701eb8a89437e94929 100644 (file)
@@ -21,7 +21,6 @@ package com.baidu.hugegraph.computer.core.output.hdfs;
 
 
 import java.io.IOException;
-import java.net.URI;
 import java.net.URISyntaxException;
 
 import org.apache.hadoop.conf.Configuration;
@@ -35,6 +34,7 @@ import com.baidu.hugegraph.computer.core.config.ComputerOptions;
 import com.baidu.hugegraph.computer.core.config.Config;
 import com.baidu.hugegraph.computer.core.graph.vertex.Vertex;
 import com.baidu.hugegraph.computer.core.output.AbstractComputerOutput;
+import com.baidu.hugegraph.computer.core.util.HdfsUtil;
 import com.baidu.hugegraph.computer.core.util.StringEncoding;
 import com.baidu.hugegraph.util.Log;
 
@@ -69,9 +69,7 @@ public class HdfsOutput extends AbstractComputerOutput {
         Configuration configuration = new Configuration();
         Short replication = config.get(ComputerOptions.OUTPUT_HDFS_REPLICATION);
         configuration.set(REPLICATION_KEY, String.valueOf(replication));
-        String url = config.get(ComputerOptions.OUTPUT_HDFS_URL);
-        String user = config.get(ComputerOptions.OUTPUT_HDFS_USER);
-        this.fs = FileSystem.get(new URI(url), configuration, user);
+        this.fs = HdfsUtil.openHdfs(config, configuration);
 
         String dir = config.get(ComputerOptions.OUTPUT_HDFS_DIR);
         String jobId = config.get(ComputerOptions.JOB_ID);
index ee02d3d9dde93416e606295bc372a7d342babc92..9a8e94994fec50445302c662f4cb14ada1a4a3ca 100644 (file)
@@ -20,7 +20,6 @@
 package com.baidu.hugegraph.computer.core.output.hdfs;
 
 import java.io.IOException;
-import java.net.URI;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -31,6 +30,7 @@ import org.apache.hadoop.fs.Path;
 import com.baidu.hugegraph.computer.core.common.exception.ComputerException;
 import com.baidu.hugegraph.computer.core.config.ComputerOptions;
 import com.baidu.hugegraph.computer.core.config.Config;
+import com.baidu.hugegraph.computer.core.util.HdfsUtil;
 
 public class HdfsOutputMerger {
 
@@ -45,9 +45,7 @@ public class HdfsOutputMerger {
     protected void init(Config config) {
         try {
             Configuration configuration = new Configuration();
-            String url = config.get(ComputerOptions.OUTPUT_HDFS_URL);
-            String user = config.get(ComputerOptions.OUTPUT_HDFS_USER);
-            this.fs = FileSystem.get(new URI(url), configuration, user);
+            this.fs = HdfsUtil.openHdfs(config, configuration);
 
             String dir = config.get(ComputerOptions.OUTPUT_HDFS_DIR);
             String jobId = config.get(ComputerOptions.JOB_ID);
diff --git a/computer-core/src/main/java/com/baidu/hugegraph/computer/core/util/HdfsUtil.java b/computer-core/src/main/java/com/baidu/hugegraph/computer/core/util/HdfsUtil.java
new file mode 100644 (file)
index 0000000..b74edbf
--- /dev/null
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2017 HugeGraph Authors
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.baidu.hugegraph.computer.core.util;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+
+import com.baidu.hugegraph.computer.core.config.ComputerOptions;
+import com.baidu.hugegraph.computer.core.config.Config;
+
+public class HdfsUtil {
+
+    public static FileSystem openHdfs(Config config, Configuration conf)
+                                      throws IOException, URISyntaxException,
+                                             InterruptedException {
+        String url = config.get(ComputerOptions.HDFS_URL);
+        String user = config.get(ComputerOptions.HDFS_USER);
+        return FileSystem.get(new URI(url), conf, user);
+    }
+}
index a337ed8e3bab849d5b12e341cdaed3df0118c4ee..6b2c85247fad5a2a6035d448d7a2d60ea870e55b 100644 (file)
@@ -90,10 +90,10 @@ public class UnitTestSuite {
         Whitebox.setInternalState(ComputerOptions.HUGEGRAPH_GRAPH_NAME,
                                   "defaultValue",
                                   "hugegraph");
-        Whitebox.setInternalState(ComputerOptions.OUTPUT_HDFS_URL,
+        Whitebox.setInternalState(ComputerOptions.HDFS_URL,
                                   "defaultValue",
                                   "hdfs://127.0.0.1:9000");
-        Whitebox.setInternalState(ComputerOptions.OUTPUT_HDFS_USER,
+        Whitebox.setInternalState(ComputerOptions.HDFS_USER,
                                   "defaultValue",
                                   System.getProperty("user.name"));
     }