Mirror of https://github.com/seaweedfs/seaweedfs.git (synced 2024-01-19 02:48:24 +00:00)
split filer read into 2 files

commit 9b603f5ffa
parent 8db9319a06
@@ -4,75 +4,14 @@ import (
 	"io"
 	"net/http"
 	"net/url"
-	"strconv"
 	"strings"
 
 	"github.com/chrislusf/seaweedfs/weed/filer2"
 	"github.com/chrislusf/seaweedfs/weed/glog"
 	"github.com/chrislusf/seaweedfs/weed/operation"
-	ui "github.com/chrislusf/seaweedfs/weed/server/filer_ui"
 	"github.com/chrislusf/seaweedfs/weed/util"
 )
 
-// listDirectoryHandler lists directories and folders under a directory
-// files are sorted by name and paginated via "lastFileName" and "limit".
-// sub directories are listed on the first page, when "lastFileName"
-// is empty.
-func (fs *FilerServer) listDirectoryHandler(w http.ResponseWriter, r *http.Request) {
-	path := r.URL.Path
-	if strings.HasSuffix(path, "/") && len(path) > 1 {
-		path = path[:len(path)-1]
-	}
-
-	limit, limit_err := strconv.Atoi(r.FormValue("limit"))
-	if limit_err != nil {
-		limit = 100
-	}
-
-	lastFileName := r.FormValue("lastFileName")
-
-	entries, err := fs.filer.ListDirectoryEntries(filer2.FullPath(path), lastFileName, false, limit)
-
-	if err != nil {
-		glog.V(0).Infof("listDirectory %s %s %d: %s", path, lastFileName, limit, err)
-		w.WriteHeader(http.StatusNotFound)
-		return
-	}
-
-	shouldDisplayLoadMore := len(entries) == limit
-	if path == "/" {
-		path = ""
-	}
-
-	if len(entries) > 0 {
-		lastFileName = entries[len(entries)-1].Name()
-	}
-
-	glog.V(4).Infof("listDirectory %s, last file %s, limit %d: %d items", path, lastFileName, limit, len(entries))
-
-	args := struct {
-		Path                  string
-		Breadcrumbs           []ui.Breadcrumb
-		Entries               interface{}
-		Limit                 int
-		LastFileName          string
-		ShouldDisplayLoadMore bool
-	}{
-		path,
-		ui.ToBreadcrumb(path),
-		entries,
-		limit,
-		lastFileName,
-		shouldDisplayLoadMore,
-	}
-
-	if r.Header.Get("Accept") == "application/json" {
-		writeJsonQuiet(w, r, http.StatusOK, args)
-	} else {
-		ui.StatusTpl.Execute(w, args)
-	}
-}
-
 func (fs *FilerServer) GetOrHeadHandler(w http.ResponseWriter, r *http.Request, isGetMethod bool) {
 	path := r.URL.Path
 	if strings.HasSuffix(path, "/") && len(path) > 1 {
weed/server/filer_server_handlers_read_dir.go (new file, 70 lines)

@@ -0,0 +1,70 @@
+package weed_server
+
+import (
+	"net/http"
+	"strconv"
+	"strings"
+
+	"github.com/chrislusf/seaweedfs/weed/filer2"
+	"github.com/chrislusf/seaweedfs/weed/glog"
+	ui "github.com/chrislusf/seaweedfs/weed/server/filer_ui"
+)
+
+// listDirectoryHandler lists directories and folders under a directory
+// files are sorted by name and paginated via "lastFileName" and "limit".
+// sub directories are listed on the first page, when "lastFileName"
+// is empty.
+func (fs *FilerServer) listDirectoryHandler(w http.ResponseWriter, r *http.Request) {
+	path := r.URL.Path
+	if strings.HasSuffix(path, "/") && len(path) > 1 {
+		path = path[:len(path)-1]
+	}
+
+	limit, limit_err := strconv.Atoi(r.FormValue("limit"))
+	if limit_err != nil {
+		limit = 100
+	}
+
+	lastFileName := r.FormValue("lastFileName")
+
+	entries, err := fs.filer.ListDirectoryEntries(filer2.FullPath(path), lastFileName, false, limit)
+
+	if err != nil {
+		glog.V(0).Infof("listDirectory %s %s %d: %s", path, lastFileName, limit, err)
+		w.WriteHeader(http.StatusNotFound)
+		return
+	}
+
+	shouldDisplayLoadMore := len(entries) == limit
+	if path == "/" {
+		path = ""
+	}
+
+	if len(entries) > 0 {
+		lastFileName = entries[len(entries)-1].Name()
+	}
+
+	glog.V(4).Infof("listDirectory %s, last file %s, limit %d: %d items", path, lastFileName, limit, len(entries))
+
+	args := struct {
+		Path                  string
+		Breadcrumbs           []ui.Breadcrumb
+		Entries               interface{}
+		Limit                 int
+		LastFileName          string
+		ShouldDisplayLoadMore bool
+	}{
+		path,
+		ui.ToBreadcrumb(path),
+		entries,
+		limit,
+		lastFileName,
+		shouldDisplayLoadMore,
+	}
+
+	if r.Header.Get("Accept") == "application/json" {
+		writeJsonQuiet(w, r, http.StatusOK, args)
+	} else {
+		ui.StatusTpl.Execute(w, args)
+	}
+}
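
The handler above paginates a directory by file name: callers pass "limit" and "lastFileName" as query parameters, get JSON back when the request carries Accept: application/json, and can tell another page may exist from ShouldDisplayLoadMore. Below is a minimal client-side sketch of that loop. It is not part of this commit: the filer address localhost:8888, the directory path, and the JSON key names (assumed to be the exported field names of the handler's anonymous args struct under Go's default encoding/json marshaling) are all assumptions.

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
)

// listPage mirrors the fields of the handler's args struct that a client needs.
// The entry objects are kept raw because their shape depends on filer2.Entry.
type listPage struct {
	Path                  string
	Entries               []json.RawMessage
	Limit                 int
	LastFileName          string
	ShouldDisplayLoadMore bool
}

// listAll walks a directory page by page, resuming each request after the
// last file name returned by the previous page.
func listAll(base, dir string, limit int) error {
	lastFileName := ""
	for {
		u := fmt.Sprintf("%s%s?limit=%d&lastFileName=%s", base, dir, limit, url.QueryEscape(lastFileName))
		req, err := http.NewRequest(http.MethodGet, u, nil)
		if err != nil {
			return err
		}
		req.Header.Set("Accept", "application/json") // ask for JSON instead of the HTML UI template
		resp, err := http.DefaultClient.Do(req)
		if err != nil {
			return err
		}
		var page listPage
		err = json.NewDecoder(resp.Body).Decode(&page)
		resp.Body.Close()
		if err != nil {
			return err
		}
		fmt.Printf("got %d entries under %s\n", len(page.Entries), page.Path)
		if !page.ShouldDisplayLoadMore {
			return nil // fewer than "limit" entries came back, so this was the last page
		}
		lastFileName = page.LastFileName // continue after the last file of this page
	}
}

func main() {
	// hypothetical filer address and directory, for illustration only
	if err := listAll("http://localhost:8888", "/some/dir", 100); err != nil {
		fmt.Println(err)
	}
}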