没有FileSystem for scheme:hdfs [英] No FileSystem for scheme: hdfs

查看:126
本文介绍了没有FileSystem for scheme:hdfs的处理方法,对大家解决问题具有一定的参考价值,需要的朋友们下面随着小编来一起学习吧!

问题描述

我在运行包含Hbase Bolt的Storm拓扑时出现以下错误。

java.io.IOException: No FileSystem for scheme: hdfs
at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2298) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2305) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:89) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2344) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2326) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:353) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.hadoop.fs.Path.getFileSystem(Path.java:194) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.hadoop.hbase.util.DynamicClassLoader.<init>(DynamicClassLoader.java:104) ~[hbase-common-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.protobuf.ProtobufUtil.<clinit>(ProtobufUtil.java:201) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.ClusterId.parseFrom(ClusterId.java:64) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.zookeeper.ZKClusterId.readClusterIdZNode(ZKClusterId.java:69) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.client.ZooKeeperRegistry.getClusterId(ZooKeeperRegistry.java:83) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.retrieveClusterId(HConnectionManager.java:857) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.<init>(HConnectionManager.java:662) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) [na:1.7.0_72]
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57) [na:1.7.0_72]
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) [na:1.7.0_72]
at java.lang.reflect.Constructor.newInstance(Constructor.java:526) [na:1.7.0_72]
at org.apache.hadoop.hbase.client.HConnectionManager.createConnection(HConnectionManager.java:414) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.client.HConnectionManager.createConnection(HConnectionManager.java:393) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.client.HConnectionManager.getConnection(HConnectionManager.java:274) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:194) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:156) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.storm.hbase.bolt.HBaseBolt$1.run(HBaseBolt.java:97) [storm-hbase-0.1.2.jar:na]
at org.apache.storm.hbase.bolt.HBaseBolt$1.run(HBaseBolt.java:94) [storm-hbase-0.1.2.jar:na]
at java.security.AccessController.doPrivileged(Native Method) [na:1.7.0_72]
at javax.security.auth.Subject.doAs(Subject.java:415) [na:1.7.0_72]
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1438) [hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.storm.hbase.bolt.HBaseBolt.prepare(HBaseBolt.java:94) [storm-hbase-0.1.2.jar:na]
at backtype.storm.daemon.executor$fn__3352$fn__3364.invoke(executor.clj:690) [storm-core-0.9.2-incubating.jar:0.9.2-incubating]
at backtype.storm.util$async_loop$fn__452.invoke(util.clj:429) [storm-core-0.9.2-incubating.jar:0.9.2-incubating]
at clojure.lang.AFn.run(AFn.java:24) [clojure-1.5.1.jar:na]
at java.lang.Thread.run(Thread.java:745) [na:1.7.0_72]

16:44:32.839 [Thread-31-HbasePersistorBolt] INFO backtype.storm.daemon.executor - Prepared bolt HbasePersistorBolt:(5)

这是我的pom.xml

<?xml version="1.0" encoding="UTF-8"?>

<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

    <parent>
        <artifactId>aid-cim</artifactId>
        <groupId>fr.aid.cim</groupId>
        <version>0.9-SNAPSHOT</version>
    </parent>

    <modelVersion>4.0.0</modelVersion>
    <artifactId>event-struct-topology</artifactId>

<dependencies>
    <!-- Hadoop Hbase Storm Kafka dependencies-->
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>${org.apache.hadoop.version}</version>
        <exclusions>
            <exclusion>
                <artifactId>com.google.protobuf</artifactId>
                <groupId>protobuf-java</groupId>
            </exclusion>
        </exclusions>
    </dependency>

    <dependency>
        <groupId>com.google.protobuf</groupId>
        <artifactId>protobuf-java</artifactId>
        <version>2.5.0</version>
    </dependency>

    <dependency>
        <groupId>com.github.ptgoetz</groupId>
        <artifactId>storm-hbase</artifactId>
        <version>${storm-hbase.version}</version>
    </dependency>

    <dependency>
        <groupId>org.apache.storm</groupId>
        <artifactId>storm-kafka</artifactId>
        <version>0.9.2-incubating</version>
    </dependency>

    <dependency>
        <groupId>org.apache.kafka</groupId>
        <artifactId>kafka_2.10</artifactId>
        <version>0.8.1.1</version>
        <exclusions>
            <exclusion>
                <groupId>org.apache.zookeeper</groupId>
                <artifactId>zookeeper</artifactId>
            </exclusion>
            <exclusion>
                <groupId>log4j</groupId>
                <artifactId>log4j</artifactId>
            </exclusion>
        </exclusions>
    </dependency>
    <!-- END Hadoop Hbase Storm Kafka dependencies-->

    <!-- Project Dependencies -->
    <dependency>
        <groupId>fr.aid.cim</groupId>
        <artifactId>commons</artifactId>
        <version>${project.version}</version>
    </dependency>

    <dependency>
        <groupId>fr.aid.cim</groupId>
        <artifactId>storm-hazelcast</artifactId>
        <version>${project.version}</version>
    </dependency>
    <!-- END Project Dependencies -->

    <!-- Integration TEST Dependencies -->
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-core</artifactId>
        <scope>test</scope>
    </dependency>

    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <type>test-jar</type>
        <scope>test</scope>
    </dependency>

    <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase</artifactId>
        <version>${org.apache.hbase.version}</version>
        <type>test-jar</type>
        <scope>test</scope>
    </dependency>

    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>${org.apache.hadoop.version}</version>
        <type>test-jar</type>
        <scope>test</scope>
    </dependency>

    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>${org.apache.hadoop.version}</version>
        <scope>test</scope>
    </dependency>

    <dependency>
        <groupId>org.apache.zookeeper</groupId>
        <artifactId>zookeeper</artifactId>
        <scope>test</scope>
    </dependency>
    <!-- END TEST Dependencies -->

    <!-- Other Dependencies -->
    <dependency>
        <groupId>org.json</groupId>
        <artifactId>json</artifactId>
        <version>20140107</version>
    </dependency>

    <dependency>
        <groupId>com.google.guava</groupId>
        <artifactId>guava</artifactId>
        <version>11.0</version>
    </dependency>

    <dependency>
        <groupId>com.fasterxml.jackson.core</groupId>
        <artifactId>jackson-databind</artifactId>
    </dependency>
    <!-- END Other Dependencies -->

</dependencies>

<build>
    <plugins>
        <plugin>
            <artifactId>maven-assembly-plugin</artifactId>
            <configuration>
                <descriptorRefs>
                    <descriptorRef>jar-with-dependencies</descriptorRef>
                </descriptorRefs>
            </configuration>
            <executions>
                <execution>
                    <id>make-assembly</id>
                    <phase>package</phase>
                    <goals>
                        <goal>single</goal>
                    </goals>
                </execution>
            </executions>
        </plugin>
        <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-dependency-plugin</artifactId>
        </plugin>
    </plugins>
</build>

<profiles>
    <profile>
        <id>local</id>
        <activation>
            <activeByDefault>true</activeByDefault>
        </activation>
        <dependencies>
            <dependency>
                <groupId>org.apache.storm</groupId>
                <artifactId>storm-core</artifactId>
            </dependency>
            <dependency>
                <groupId>org.apache.zookeeper</groupId>
                <artifactId>zookeeper</artifactId>
            </dependency>
        </dependencies>
    </profile>
    <profile>
        <id>cluster</id>
        <dependencies>
            <dependency>
                <groupId>org.apache.storm</groupId>
                <artifactId>storm-core</artifactId>
                <scope>provided</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.zookeeper</groupId>
                <artifactId>zookeeper</artifactId>
                <scope>provided</scope>
            </dependency>
        </dependencies>
    </profile>
</profiles>



有什么想法?谢谢

解决方案：尝试将 hadoop-hdfs 添加为 compile 作用域的依赖项：

<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-hdfs</artifactId>
    <version>${org.apache.hadoop.version}</version>
</dependency>


i have the following error when i'm running my Storm Topology that contains an Hbase Bolt.

java.io.IOException: No FileSystem for scheme: hdfs
at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2298) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2305) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:89) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2344) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2326) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:353) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.hadoop.fs.Path.getFileSystem(Path.java:194) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.hadoop.hbase.util.DynamicClassLoader.<init>(DynamicClassLoader.java:104) ~[hbase-common-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.protobuf.ProtobufUtil.<clinit>(ProtobufUtil.java:201) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.ClusterId.parseFrom(ClusterId.java:64) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.zookeeper.ZKClusterId.readClusterIdZNode(ZKClusterId.java:69) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.client.ZooKeeperRegistry.getClusterId(ZooKeeperRegistry.java:83) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.retrieveClusterId(HConnectionManager.java:857) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.<init>(HConnectionManager.java:662) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) [na:1.7.0_72]
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57) [na:1.7.0_72]
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) [na:1.7.0_72]
at java.lang.reflect.Constructor.newInstance(Constructor.java:526) [na:1.7.0_72]
at org.apache.hadoop.hbase.client.HConnectionManager.createConnection(HConnectionManager.java:414) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.client.HConnectionManager.createConnection(HConnectionManager.java:393) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.client.HConnectionManager.getConnection(HConnectionManager.java:274) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:194) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:156) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.storm.hbase.bolt.HBaseBolt$1.run(HBaseBolt.java:97) [storm-hbase-0.1.2.jar:na]
at org.apache.storm.hbase.bolt.HBaseBolt$1.run(HBaseBolt.java:94) [storm-hbase-0.1.2.jar:na]
at java.security.AccessController.doPrivileged(Native Method) [na:1.7.0_72]
at javax.security.auth.Subject.doAs(Subject.java:415) [na:1.7.0_72]
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1438) [hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.storm.hbase.bolt.HBaseBolt.prepare(HBaseBolt.java:94) [storm-hbase-0.1.2.jar:na]
at backtype.storm.daemon.executor$fn__3352$fn__3364.invoke(executor.clj:690) [storm-core-0.9.2-incubating.jar:0.9.2-incubating]
at backtype.storm.util$async_loop$fn__452.invoke(util.clj:429) [storm-core-0.9.2-incubating.jar:0.9.2-incubating]
at clojure.lang.AFn.run(AFn.java:24) [clojure-1.5.1.jar:na]
at java.lang.Thread.run(Thread.java:745) [na:1.7.0_72]

16:44:32.839 [Thread-31-HbasePersistorBolt] INFO backtype.storm.daemon.executor - Prepared bolt HbasePersistorBolt:(5)

Here is my pom.xml

<?xml version="1.0" encoding="UTF-8"?>

<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

    <parent>
        <artifactId>aid-cim</artifactId>
        <groupId>fr.aid.cim</groupId>
        <version>0.9-SNAPSHOT</version>
    </parent>

    <modelVersion>4.0.0</modelVersion>
    <artifactId>event-struct-topology</artifactId>

<dependencies>
    <!-- Hadoop Hbase Storm Kafka dependencies-->
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>${org.apache.hadoop.version}</version>
        <exclusions>
            <exclusion>
                <artifactId>com.google.protobuf</artifactId>
                <groupId>protobuf-java</groupId>
            </exclusion>
        </exclusions>
    </dependency>

    <dependency>
        <groupId>com.google.protobuf</groupId>
        <artifactId>protobuf-java</artifactId>
        <version>2.5.0</version>
    </dependency>

    <dependency>
        <groupId>com.github.ptgoetz</groupId>
        <artifactId>storm-hbase</artifactId>
        <version>${storm-hbase.version}</version>
    </dependency>

    <dependency>
        <groupId>org.apache.storm</groupId>
        <artifactId>storm-kafka</artifactId>
        <version>0.9.2-incubating</version>
    </dependency>

    <dependency>
        <groupId>org.apache.kafka</groupId>
        <artifactId>kafka_2.10</artifactId>
        <version>0.8.1.1</version>
        <exclusions>
            <exclusion>
                <groupId>org.apache.zookeeper</groupId>
                <artifactId>zookeeper</artifactId>
            </exclusion>
            <exclusion>
                <groupId>log4j</groupId>
                <artifactId>log4j</artifactId>
            </exclusion>
        </exclusions>
    </dependency>
    <!-- END Hadoop Hbase Storm Kafka dependencies-->

    <!-- Project Dependencies -->
    <dependency>
        <groupId>fr.aid.cim</groupId>
        <artifactId>commons</artifactId>
        <version>${project.version}</version>
    </dependency>

    <dependency>
        <groupId>fr.aid.cim</groupId>
        <artifactId>storm-hazelcast</artifactId>
        <version>${project.version}</version>
    </dependency>
    <!-- END Project Dependencies -->

    <!-- Integration TEST Dependencies -->
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-core</artifactId>
        <scope>test</scope>
    </dependency>

    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <type>test-jar</type>
        <scope>test</scope>
    </dependency>

    <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase</artifactId>
        <version>${org.apache.hbase.version}</version>
        <type>test-jar</type>
        <scope>test</scope>
    </dependency>

    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>${org.apache.hadoop.version}</version>
        <type>test-jar</type>
        <scope>test</scope>
    </dependency>

    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>${org.apache.hadoop.version}</version>
        <scope>test</scope>
    </dependency>

    <dependency>
        <groupId>org.apache.zookeeper</groupId>
        <artifactId>zookeeper</artifactId>
        <scope>test</scope>
    </dependency>
    <!-- END TEST Dependencies -->

    <!-- Other Dependencies -->
    <dependency>
        <groupId>org.json</groupId>
        <artifactId>json</artifactId>
        <version>20140107</version>
    </dependency>

    <dependency>
        <groupId>com.google.guava</groupId>
        <artifactId>guava</artifactId>
        <version>11.0</version>
    </dependency>

    <dependency>
        <groupId>com.fasterxml.jackson.core</groupId>
        <artifactId>jackson-databind</artifactId>
    </dependency>
    <!-- END Other Dependencies -->

</dependencies>

<build>
    <plugins>
        <plugin>
            <artifactId>maven-assembly-plugin</artifactId>
            <configuration>
                <descriptorRefs>
                    <descriptorRef>jar-with-dependencies</descriptorRef>
                </descriptorRefs>
            </configuration>
            <executions>
                <execution>
                    <id>make-assembly</id>
                    <phase>package</phase>
                    <goals>
                        <goal>single</goal>
                    </goals>
                </execution>
            </executions>
        </plugin>
        <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-dependency-plugin</artifactId>
        </plugin>
    </plugins>
</build>

<profiles>
    <profile>
        <id>local</id>
        <activation>
            <activeByDefault>true</activeByDefault>
        </activation>
        <dependencies>
            <dependency>
                <groupId>org.apache.storm</groupId>
                <artifactId>storm-core</artifactId>
            </dependency>
            <dependency>
                <groupId>org.apache.zookeeper</groupId>
                <artifactId>zookeeper</artifactId>
            </dependency>
        </dependencies>
    </profile>
    <profile>
        <id>cluster</id>
        <dependencies>
            <dependency>
                <groupId>org.apache.storm</groupId>
                <artifactId>storm-core</artifactId>
                <scope>provided</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.zookeeper</groupId>
                <artifactId>zookeeper</artifactId>
                <scope>provided</scope>
            </dependency>
        </dependencies>
    </profile>

</profiles>

Any ideas ? Thanks

解决方案

Try to add hadoop-hdfs as compile scoped dependency:

<dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-hdfs</artifactId> <version>${org.apache.hadoop.version}</version> </dependency>

这篇关于没有FileSystem for scheme:hdfs的文章就介绍到这了,希望我们推荐的答案对大家有所帮助,也希望大家多多支持IT屋!

查看全文
登录 关闭
扫码关注1秒登录
发送“验证码”获取 | 15天全站免登陆