1.5.2

Chris Lu 2020-10-25 14:46:03 -07:00
parent a7a80341e7
commit ae6ed170b5
3 changed files with 12 additions and 12 deletions

@@ -26,7 +26,7 @@ Then get the seaweedfs hadoop client jar.
```
cd share/hadoop/common/lib/
-wget https://oss.sonatype.org/service/local/repositories/releases/content/com/github/chrislusf/seaweedfs-hadoop2-client/1.5.1/seaweedfs-hadoop2-client-1.5.1.jar
+wget https://oss.sonatype.org/service/local/repositories/releases/content/com/github/chrislusf/seaweedfs-hadoop2-client/1.5.2/seaweedfs-hadoop2-client-1.5.2.jar
```
# TestDFSIO Benchmark
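
Note: the jar alone is not enough; Hadoop also has to be told about the SeaweedFS filesystem. This is not part of the diff above, but as a minimal sketch, assuming a filer at localhost:8888 (the same endpoint used in the Spark settings later in this commit), core-site.xml would carry:

```
<configuration>
  <!-- register the SeaweedFS Hadoop-compatible filesystem (assumed filer endpoint) -->
  <property>
    <name>fs.seaweedfs.impl</name>
    <value>seaweed.hdfs.SeaweedFileSystem</value>
  </property>
  <property>
    <name>fs.defaultFS</name>
    <value>seaweedfs://localhost:8888</value>
  </property>
</configuration>
```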

@@ -23,7 +23,7 @@ Maven
<dependency>
<groupId>com.github.chrislusf</groupId>
<artifactId>seaweedfs-hadoop3-client</artifactId>
-<version>1.5.1</version>
+<version>1.5.2</version>
</dependency>
or
@@ -31,16 +31,16 @@ or
<dependency>
<groupId>com.github.chrislusf</groupId>
<artifactId>seaweedfs-hadoop2-client</artifactId>
-<version>1.5.1</version>
+<version>1.5.2</version>
</dependency>
```
Or you can download the latest version from MavenCentral
* https://mvnrepository.com/artifact/com.github.chrislusf/seaweedfs-hadoop2-client
-* [seaweedfs-hadoop2-client-1.5.1.jar](https://oss.sonatype.org/service/local/repositories/releases/content/com/github/chrislusf/seaweedfs-hadoop2-client/1.5.1/seaweedfs-hadoop2-client-1.5.1.jar)
+* [seaweedfs-hadoop2-client-1.5.2.jar](https://oss.sonatype.org/service/local/repositories/releases/content/com/github/chrislusf/seaweedfs-hadoop2-client/1.5.2/seaweedfs-hadoop2-client-1.5.2.jar)
* https://mvnrepository.com/artifact/com.github.chrislusf/seaweedfs-hadoop3-client
-* [seaweedfs-hadoop3-client-1.5.1.jar](https://oss.sonatype.org/service/local/repositories/releases/content/com/github/chrislusf/seaweedfs-hadoop3-client/1.5.1/seaweedfs-hadoop3-client-1.5.1.jar)
+* [seaweedfs-hadoop3-client-1.5.2.jar](https://oss.sonatype.org/service/local/repositories/releases/content/com/github/chrislusf/seaweedfs-hadoop3-client/1.5.2/seaweedfs-hadoop3-client-1.5.2.jar)
# Test SeaweedFS on Hadoop
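
Note: the "Test SeaweedFS on Hadoop" section itself falls outside this diff's context. As an illustrative sketch only, assuming the filer runs at localhost:8888 and core-site.xml is configured as above, a quick smoke test could look like:

```
# list and write through the Hadoop-compatible interface (hypothetical paths)
hdfs dfs -ls seaweedfs://localhost:8888/
hdfs dfs -mkdir seaweedfs://localhost:8888/test
hdfs dfs -put README.md seaweedfs://localhost:8888/test/
hdfs dfs -cat seaweedfs://localhost:8888/test/README.md
```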

@@ -11,12 +11,12 @@ To make these files visible to Spark, set HADOOP_CONF_DIR in $SPARK_HOME/conf/sp
## installation not inheriting from Hadoop cluster configuration
-Copy the seaweedfs-hadoop2-client-1.5.1.jar to all executor machines.
+Copy the seaweedfs-hadoop2-client-1.5.2.jar to all executor machines.
Add the following to spark/conf/spark-defaults.conf on every node running Spark
```
-spark.driver.extraClassPath=/path/to/seaweedfs-hadoop2-client-1.5.1.jar
-spark.executor.extraClassPath=/path/to/seaweedfs-hadoop2-client-1.5.1.jar
+spark.driver.extraClassPath=/path/to/seaweedfs-hadoop2-client-1.5.2.jar
+spark.executor.extraClassPath=/path/to/seaweedfs-hadoop2-client-1.5.2.jar
```
And modify the configuration at runtime:
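
Note: the runtime-configuration example that follows this sentence on the wiki page is outside the diff context. As a hedged sketch, using the same property keys that appear elsewhere in this commit (the spark-shell invocation itself is an assumption), the settings can be passed per job:

```
spark-shell \
  --conf spark.hadoop.fs.seaweedfs.impl=seaweed.hdfs.SeaweedFileSystem \
  --conf spark.hadoop.fs.defaultFS=seaweedfs://localhost:8888
```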
@@ -37,8 +37,8 @@ And modify the configuration at runtime:
1. change the spark-defaults.conf
```
-spark.driver.extraClassPath=/Users/chris/go/src/github.com/chrislusf/seaweedfs/other/java/hdfs2/target/seaweedfs-hadoop2-client-1.5.1.jar
-spark.executor.extraClassPath=/Users/chris/go/src/github.com/chrislusf/seaweedfs/other/java/hdfs2/target/seaweedfs-hadoop2-client-1.5.1.jar
+spark.driver.extraClassPath=/Users/chris/go/src/github.com/chrislusf/seaweedfs/other/java/hdfs2/target/seaweedfs-hadoop2-client-1.5.2.jar
+spark.executor.extraClassPath=/Users/chris/go/src/github.com/chrislusf/seaweedfs/other/java/hdfs2/target/seaweedfs-hadoop2-client-1.5.2.jar
spark.hadoop.fs.seaweedfs.impl=seaweed.hdfs.SeaweedFileSystem
```
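
Note: once the classpath and fs.seaweedfs.impl are in place, a Spark job can address seaweedfs:// URIs directly. A minimal illustration, not part of the page being changed (file path is hypothetical):

```
$ bin/spark-shell
scala> val lines = spark.read.textFile("seaweedfs://localhost:8888/test/README.md")
scala> lines.count()
```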
@@ -81,8 +81,8 @@ spark.history.fs.cleaner.enabled=true
spark.history.fs.logDirectory=seaweedfs://localhost:8888/spark2-history/
spark.eventLog.dir=seaweedfs://localhost:8888/spark2-history/
-spark.driver.extraClassPath=/Users/chris/go/src/github.com/chrislusf/seaweedfs/other/java/hdfs2/target/seaweedfs-hadoop2-client-1.5.1.jar
-spark.executor.extraClassPath=/Users/chris/go/src/github.com/chrislusf/seaweedfs/other/java/hdfs2/target/seaweedfs-hadoop2-client-1.5.1.jar
+spark.driver.extraClassPath=/Users/chris/go/src/github.com/chrislusf/seaweedfs/other/java/hdfs2/target/seaweedfs-hadoop2-client-1.5.2.jar
+spark.executor.extraClassPath=/Users/chris/go/src/github.com/chrislusf/seaweedfs/other/java/hdfs2/target/seaweedfs-hadoop2-client-1.5.2.jar
spark.hadoop.fs.seaweedfs.impl=seaweed.hdfs.SeaweedFileSystem
spark.hadoop.fs.defaultFS=seaweedfs://localhost:8888
```
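
Note: this last hunk points the Spark event log and history server at SeaweedFS. As a hedged follow-up (the directory creation and start script below are assumptions, not part of this diff), the log directory has to exist before the history server is started:

```
# create the event-log directory on SeaweedFS, then start the history server
hdfs dfs -mkdir -p seaweedfs://localhost:8888/spark2-history/
$SPARK_HOME/sbin/start-history-server.sh
```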