hadoop - Getting "Exception in thread "main" java.lang.NoSuchMethodError" in HDP 2.3, works fine in CDH 4.7

Tags: hadoop, hdfs

Stack trace:

Exception in thread "main" java.lang.NoSuchMethodError: org.apache.hadoop.util.Shell.getGROUPS_COMMAND()[Ljava/lang/String;
        at org.apache.hadoop.security.UnixUserGroupInformation.getUnixGroups(UnixUserGroupInformation.java:345)
        at org.apache.hadoop.security.UnixUserGroupInformation.login(UnixUserGroupInformation.java:264)
        at org.apache.hadoop.security.UnixUserGroupInformation.login(UnixUserGroupInformation.java:300)
        at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:192)
        at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:170)
        at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:82)

Code:
// Imports required by this snippet
import java.io.OutputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.Progressable;

public static void main(String[] args) throws Exception {

    System.out.println("start");

    OutputStream outputStream = null;
    try {
        // Fields for two pipe-delimited records
        String c11 = "c11";
        String c12 = "c12";
        String c13 = "c13";

        String c21 = "c21";
        String c22 = "c22";
        String c23 = "c23";

        Configuration configuration = new Configuration();
        configuration.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
        configuration.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
        FileSystem hdfs = FileSystem.get(new URI("hdfs://sandbox.hortonworks.com:8020"), configuration);

        // Create (or overwrite) the target file; the no-op Progressable just satisfies the API
        outputStream = hdfs.create(
                new Path("hdfs://sandbox.hortonworks.com:8020/user/hue/oozie/workspaces/my2/windowsCompleted.txt"),
                new Progressable() {
                    @Override
                    public void progress() {
                    }
                });
        System.out.println("HELLO");

        // Write the two records, first one terminated by a newline
        String input1 = c11 + "|" + c12 + "|" + c13 + "\n";
        String input2 = c21 + "|" + c22 + "|" + c23;
        outputStream.write(input1.getBytes());
        outputStream.write(input2.getBytes());

        outputStream.flush();
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        if (null != outputStream)
            outputStream.close();
    }
    System.out.println("END");

}
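If the program completes, one quick way to verify the write (assuming access to the same sandbox host) is to read the file back from HDFS:

    hdfs dfs -cat /user/hue/oozie/workspaces/my2/windowsCompleted.txt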

pom.xml:
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>BBN</groupId>
<artifactId>BBN</artifactId>
<version>0.0.1-SNAPSHOT</version>
<name>BBN</name>
<properties>
    <maven.compiler.source>1.8</maven.compiler.source>
    <maven.compiler.target>1.8</maven.compiler.target>
</properties>

<dependencies>
    <!-- https://mvnrepository.com/artifact/com.googlecode.json-simple/json-simple -->
    <dependency>
        <groupId>com.googlecode.json-simple</groupId>
        <artifactId>json-simple</artifactId>
        <version>1.1</version>
    </dependency>

    <dependency>
        <groupId>org.slf4j</groupId>
        <artifactId>slf4j-api</artifactId>
        <version>1.7.5</version>
    </dependency>
    <dependency>
        <groupId>org.postgresql</groupId>
        <artifactId>postgresql</artifactId>
        <version>9.2-1003-jdbc4</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-jdbc</artifactId>
        <version>0.14.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase-client</artifactId>
        <version>1.1.2</version>
    </dependency>
    <dependency>
        <groupId>org.slf4j</groupId>
        <artifactId>slf4j-log4j12</artifactId>
        <version>1.7.5</version>
    </dependency>
    <dependency>
        <groupId>org.apache.parquet</groupId>
        <artifactId>parquet-hadoop</artifactId>
        <version>1.7.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-core</artifactId>
        <version>0.20.2</version>
    </dependency>
    <dependency>
        <groupId>org.apache.parquet</groupId>
        <artifactId>parquet-avro</artifactId>
        <version>1.7.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.parquet</groupId>
        <artifactId>parquet-column</artifactId>
        <version>1.7.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.parquet</groupId>
        <artifactId>parquet-common</artifactId>
        <version>1.7.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.parquet</groupId>
        <artifactId>parquet-format</artifactId>
        <version>2.3.0-incubating</version>
    </dependency>
    <dependency>
        <groupId>org.apache.parquet</groupId>
        <artifactId>parquet-generator</artifactId>
        <version>1.7.0</version>
    </dependency>
    <dependency>
        <groupId>com.sun.jersey</groupId>
        <artifactId>jersey-bundle</artifactId>
        <version>1.8</version>
    </dependency>
    <dependency>
        <groupId>com.google.code.gson</groupId>
        <artifactId>gson</artifactId>
        <version>2.2.4</version>
    </dependency>
    <dependency>
        <groupId>org.json</groupId>
        <artifactId>json</artifactId>
        <version>20140107</version>
    </dependency>
    <dependency>
        <groupId>com.sun.jersey</groupId>
        <artifactId>jersey-server</artifactId>
        <version>1.8</version>
    </dependency>
    <dependency>
        <groupId>com.sun.jersey</groupId>
        <artifactId>jersey-core</artifactId>
        <version>1.8</version>
    </dependency>
    <dependency>  
        <groupId>jdk.tools</groupId>
        <artifactId>jdk.tools</artifactId>
        <!--<version>1.8.0_101</version>-->
        <version>1.7.0_79</version>
        <scope>system</scope>
        <systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
    </dependency>
    <!-- Added log4j for logging -->
    <dependency>
        <groupId>log4j</groupId>
        <artifactId>log4j</artifactId>
        <version>1.2.17</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-hdfs -->
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>2.3.0</version>
    </dependency>
</dependencies>
<build>
    <plugins>
        <plugin>
            <artifactId>maven-assembly-plugin</artifactId>
            <configuration>
                <archive>
                    <manifest>
                        <mainClass>com.bbn.main.WindowsLoadData</mainClass>
                    </manifest>
                </archive>
                <descriptorRefs>
                    <descriptorRef>jar-with-dependencies</descriptorRef>
                </descriptorRefs>
            </configuration>
        </plugin>
    </plugins>
</build>

</project>
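Since the assembly plugin above has no execution bound to a lifecycle phase, the fat jar is built by invoking the assembly goal explicitly; with mainClass set in the manifest it can then be run directly (the jar name follows from the artifactId and version above):

    mvn clean package assembly:single
    java -jar target/BBN-0.0.1-SNAPSHOT-jar-with-dependencies.jar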

Accepted answer

The org.apache.hadoop.util.Shell class belongs to the hadoop-common jar, which is missing from the pom.xml. Add that dependency (and the jars hadoop-common itself needs), at a version matching your cluster. Mixing Hadoop generations is the likely trigger here: UnixUserGroupInformation in the stack trace is a Hadoop 0.20-era class, pulled in by the old hadoop-core 0.20.2 artifact in the pom, and it calls a Shell method that no longer exists in Hadoop 2.x.
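As an illustration, a hadoop-common entry aligned with the hadoop-hdfs version already declared in the pom might look like this (2.3.0 is an assumption; match the version your cluster actually runs, and consider removing the stale hadoop-core 0.20.2 entry so only one Hadoop generation ends up on the classpath):

    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>2.3.0</version>
    </dependency>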

Regarding "hadoop - Getting "Exception in thread "main" java.lang.NoSuchMethodError" in HDP 2.3, works fine in CDH 4.7", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/41597845/
