paginate through large folders

This commit is contained in:
Chris Lu 2018-12-17 01:25:02 -08:00
parent 52712782a2
commit d11cfdb356
4 changed files with 26 additions and 15 deletions

View file

@@ -4,7 +4,7 @@
<groupId>com.github.chrislusf</groupId> <groupId>com.github.chrislusf</groupId>
<artifactId>seaweedfs-client</artifactId> <artifactId>seaweedfs-client</artifactId>
<version>1.0.2</version> <version>1.0.3</version>
<parent> <parent>
<groupId>org.sonatype.oss</groupId> <groupId>org.sonatype.oss</groupId>

View file

@@ -5,6 +5,7 @@ import org.slf4j.LoggerFactory;
import java.nio.file.Path; import java.nio.file.Path;
import java.nio.file.Paths; import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
@@ -137,7 +138,23 @@ public class FilerClient {
} }
public List<FilerProto.Entry> listEntries(String path) { public List<FilerProto.Entry> listEntries(String path) {
return listEntries(path, "", "", 100000); List<FilerProto.Entry> results = new ArrayList<FilerProto.Entry>();
String lastFileName = "";
for (int limit = Integer.MAX_VALUE; limit > 0; ) {
List<FilerProto.Entry> t = listEntries(path, "", lastFileName, 1024);
if (t == null) {
break;
}
int nSize = t.size();
if (nSize > 0) {
limit -= nSize;
lastFileName = t.get(nSize - 1).getName();
}
if (t.size() < 1024) {
break;
}
}
return results;
} }
public List<FilerProto.Entry> listEntries(String path, String entryPrefix, String lastEntryName, int limit) { public List<FilerProto.Entry> listEntries(String path, String entryPrefix, String lastEntryName, int limit) {

View file

@@ -4,9 +4,14 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<properties>
<seaweedfs.client.version>1.0.3</seaweedfs.client.version>
<hadoop.version>3.1.1</hadoop.version>
</properties>
<groupId>com.github.chrislusf</groupId> <groupId>com.github.chrislusf</groupId>
<artifactId>seaweedfs-hadoop-client</artifactId> <artifactId>seaweedfs-hadoop-client</artifactId>
<version>1.0.2</version> <version>${seaweedfs.client.version}</version>
<parent> <parent>
<groupId>org.sonatype.oss</groupId> <groupId>org.sonatype.oss</groupId>
@@ -133,10 +138,6 @@
</plugins> </plugins>
</build> </build>
<properties>
<hadoop.version>3.1.1</hadoop.version>
</properties>
<dependencies> <dependencies>
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
@@ -146,7 +147,7 @@
<dependency> <dependency>
<groupId>com.github.chrislusf</groupId> <groupId>com.github.chrislusf</groupId>
<artifactId>seaweedfs-client</artifactId> <artifactId>seaweedfs-client</artifactId>
<version>1.0.2</version> <version>${seaweedfs.client.version}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>

View file

@@ -310,9 +310,6 @@ public class SeaweedFileSystem extends org.apache.hadoop.fs.FileSystem {
getClass().getSimpleName() + " FileSystem implementation"); getClass().getSimpleName() + " FileSystem implementation");
} }
/**
* See {@link FileContext#createSymlink(Path, Path, boolean)}.
*/
@Override @Override
public void createSymlink(final Path target, final Path link, public void createSymlink(final Path target, final Path link,
final boolean createParent) throws AccessControlException, final boolean createParent) throws AccessControlException,
@@ -324,10 +321,6 @@ public class SeaweedFileSystem extends org.apache.hadoop.fs.FileSystem {
"Filesystem does not support symlinks!"); "Filesystem does not support symlinks!");
} }
/**
* See {@link AbstractFileSystem#supportsSymlinks()}.
*/
@Override
public boolean supportsSymlinks() { public boolean supportsSymlinks() {
return false; return false;
} }