19:04:34.753 [main] DEBUG org.apache.hadoop.fs.FileSystem - looking for configuration option fs.hdfs.impl
19:04:34.753 [main] DEBUG org.apache.hadoop.fs.FileSystem - Looking in service filesystems for implementation class
19:04:34.755 [main] ERROR i.d.s.filesinks.hdfs.HdfsFileManager - Could not initialize HDFS filesystem or failed to check for existence of publish and/or working directories.
org.apache.hadoop.fs.UnsupportedFileSystemException: No FileSystem for scheme "hdfs"
at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:3332)
at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3352)
at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:124)
at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:3403)
at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:3371)
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:477)
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:226)
at io.divolte.server.filesinks.hdfs.HdfsFileManager$HdfsFileManagerFactory.getFileSystemInstance(HdfsFileManager.java:219)
at io.divolte.server.filesinks.hdfs.HdfsFileManager$HdfsFileManagerFactory.verifyFileSystemConfiguration(HdfsFileManager.java:154)
at io.divolte.server.config.HdfsSinkConfiguration.lambda$getFactory$1(HdfsSinkConfiguration.java:59)
at io.divolte.server.Server.lambda$new$7(Server.java:97)
at com.google.common.collect.CollectCollectors.lambda$toImmutableMap$1(CollectCollectors.java:61)
at java.util.stream.ReduceOps$3ReducingSink.accept(ReduceOps.java:169)
at java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:175)
at java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:175)
at java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:175)
at java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:175)
at java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:175)
at java.util.Iterator.forEachRemaining(Iterator.java:116)
at java.util.Spliterators$IteratorSpliterator.forEachRemaining(Spliterators.java:1801)
at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:481)
at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:471)
at java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:708)
at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
at java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:499)
at io.divolte.server.Server.<init>(Server.java:94)
at io.divolte.server.Server.<init>(Server.java:61)
at io.divolte.server.Server.main(Server.java:225)
Exception in thread "main" java.io.UncheckedIOException: Could not initialize HDFS filesystem.
at io.divolte.server.filesinks.hdfs.HdfsFileManager$HdfsFileManagerFactory.verifyFileSystemConfiguration(HdfsFileManager.java:182)
at io.divolte.server.config.HdfsSinkConfiguration.lambda$getFactory$1(HdfsSinkConfiguration.java:59)
at io.divolte.server.Server.lambda$new$7(Server.java:97)
at com.google.common.collect.CollectCollectors.lambda$toImmutableMap$1(CollectCollectors.java:61)
at java.util.stream.ReduceOps$3ReducingSink.accept(ReduceOps.java:169)
at java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:175)
at java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:175)
at java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:175)
at java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:175)
at java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:175)
at java.util.Iterator.forEachRemaining(Iterator.java:116)
at java.util.Spliterators$IteratorSpliterator.forEachRemaining(Spliterators.java:1801)
at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:481)
at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:471)
at java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:708)
at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
at java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:499)
at io.divolte.server.Server.<init>(Server.java:94)
at io.divolte.server.Server.<init>(Server.java:61)
at io.divolte.server.Server.main(Server.java:225)
Caused by: org.apache.hadoop.fs.UnsupportedFileSystemException: No FileSystem for scheme "hdfs"
at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:3332)
at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3352)
at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:124)
at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:3403)
at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:3371)
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:477)
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:226)
at io.divolte.server.filesinks.hdfs.HdfsFileManager$HdfsFileManagerFactory.getFileSystemInstance(HdfsFileManager.java:219)
at io.divolte.server.filesinks.hdfs.HdfsFileManager$HdfsFileManagerFactory.verifyFileSystemConfiguration(HdfsFileManager.java:154)