                    4
            );

+    public static final ConfigOption<String> HDFS_URL =
+            new ConfigOption<>(
+                    "hdfs.url",
+                    "The HDFS URL.",
+                    disallowEmpty(),
+                    "hdfs://127.0.0.1:9000"
+            );
+
+    public static final ConfigOption<String> HDFS_USER =
+            new ConfigOption<>(
+                    "hdfs.user",
+                    "The HDFS user.",
+                    disallowEmpty(),
+                    "hadoop"
+            );
+
    public static final ConfigOption<Class<?>> OUTPUT_CLASS =
            new ConfigOption<>(
                    "output.output_class",
                    10
            );
-    public static final ConfigOption<String> OUTPUT_HDFS_URL =
-            new ConfigOption<>(
-                    "output.hdfs_url",
-                    "The hdfs url of output.",
-                    disallowEmpty(),
-                    "hdfs://127.0.0.1:9000"
-            );
-
-    public static final ConfigOption<String> OUTPUT_HDFS_USER =
-            new ConfigOption<>(
-                    "output.hdfs_user",
-                    "The hdfs user of output.",
-                    disallowEmpty(),
-                    "hadoop"
-            );
-
    public static final ConfigOption<Short> OUTPUT_HDFS_REPLICATION =
            new ConfigOption<>(
                    "output.hdfs_replication",
import java.io.IOException;
-import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import com.baidu.hugegraph.computer.core.config.Config;
import com.baidu.hugegraph.computer.core.graph.vertex.Vertex;
import com.baidu.hugegraph.computer.core.output.AbstractComputerOutput;
+import com.baidu.hugegraph.computer.core.util.HdfsUtil;
import com.baidu.hugegraph.computer.core.util.StringEncoding;
import com.baidu.hugegraph.util.Log;
        Configuration configuration = new Configuration();
        Short replication = config.get(ComputerOptions.OUTPUT_HDFS_REPLICATION);
        configuration.set(REPLICATION_KEY, String.valueOf(replication));
-        String url = config.get(ComputerOptions.OUTPUT_HDFS_URL);
-        String user = config.get(ComputerOptions.OUTPUT_HDFS_USER);
-        this.fs = FileSystem.get(new URI(url), configuration, user);
+        this.fs = HdfsUtil.openHdfs(config, configuration);
        String dir = config.get(ComputerOptions.OUTPUT_HDFS_DIR);
        String jobId = config.get(ComputerOptions.JOB_ID);
package com.baidu.hugegraph.computer.core.output.hdfs;
import java.io.IOException;
-import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import com.baidu.hugegraph.computer.core.common.exception.ComputerException;
import com.baidu.hugegraph.computer.core.config.ComputerOptions;
import com.baidu.hugegraph.computer.core.config.Config;
+import com.baidu.hugegraph.computer.core.util.HdfsUtil;
public class HdfsOutputMerger {

    protected void init(Config config) {
        try {
            Configuration configuration = new Configuration();
-            String url = config.get(ComputerOptions.OUTPUT_HDFS_URL);
-            String user = config.get(ComputerOptions.OUTPUT_HDFS_USER);
-            this.fs = FileSystem.get(new URI(url), configuration, user);
+            this.fs = HdfsUtil.openHdfs(config, configuration);
            String dir = config.get(ComputerOptions.OUTPUT_HDFS_DIR);
            String jobId = config.get(ComputerOptions.JOB_ID);
--- /dev/null
+/*
+ * Copyright 2017 HugeGraph Authors
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.baidu.hugegraph.computer.core.util;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+
+import com.baidu.hugegraph.computer.core.config.ComputerOptions;
+import com.baidu.hugegraph.computer.core.config.Config;
+
+public final class HdfsUtil {
+
+    private HdfsUtil() {
+        // Utility class, not meant to be instantiated
+    }
+
+    /**
+     * Open a FileSystem for the cluster at option "hdfs.url",
+     * acting as the user configured by option "hdfs.user".
+     */
+    public static FileSystem openHdfs(Config config, Configuration conf)
+                                      throws IOException, URISyntaxException,
+                                             InterruptedException {
+        String url = config.get(ComputerOptions.HDFS_URL);
+        String user = config.get(ComputerOptions.HDFS_USER);
+        return FileSystem.get(new URI(url), conf, user);
+    }
+}
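
For reference, a minimal usage sketch of the new helper, assuming a `Config` instance is already available from the running job; the example class, method, and output path below are illustrative only, not part of this change:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import com.baidu.hugegraph.computer.core.config.Config;
import com.baidu.hugegraph.computer.core.util.HdfsUtil;

public class HdfsUtilExample {

    // Hypothetical caller: "config" is assumed to carry the job options,
    // including "hdfs.url" and "hdfs.user"
    public static void touchOutputDir(Config config) throws Exception {
        Configuration conf = new Configuration();
        // openHdfs() resolves the URL and user from the config itself,
        // so call sites no longer build the URI or pass the user by hand
        try (FileSystem fs = HdfsUtil.openHdfs(config, conf)) {
            fs.mkdirs(new Path("/tmp/hugegraph-computer-output"));
        }
    }
}
```

This mirrors what HdfsOutput and HdfsOutputMerger now do in their init methods, keeping the URL/user plumbing in one place.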
        Whitebox.setInternalState(ComputerOptions.HUGEGRAPH_GRAPH_NAME,
                                  "defaultValue",
                                  "hugegraph");
-        Whitebox.setInternalState(ComputerOptions.OUTPUT_HDFS_URL,
+        Whitebox.setInternalState(ComputerOptions.HDFS_URL,
                                  "defaultValue",
                                  "hdfs://127.0.0.1:9000");
-        Whitebox.setInternalState(ComputerOptions.OUTPUT_HDFS_USER,
+        Whitebox.setInternalState(ComputerOptions.HDFS_USER,
                                  "defaultValue",
                                  System.getProperty("user.name"));
    }