Chris Lu 2020-12-22 02:34:08 -08:00
parent 6c4f32d173
commit 4f31c1bb94
12 changed files with 25 additions and 28 deletions

View file

@@ -34,7 +34,7 @@ func (store *AbstractSqlStore) KvPut(ctx context.Context, key []byte, value []by
 	}
 	_, err = res.RowsAffected()
 	if err != nil {
 		return fmt.Errorf("kv upsert no rows affected: %s", err)
 	}
 	return nil
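
The hunk above is the tail of a standard database/sql upsert check: run the statement, then call RowsAffected to surface driver-level errors. A minimal sketch of that pattern, assuming a hypothetical kv table and MySQL-style upsert syntax rather than SeaweedFS's actual schema or statement:

// Sketch only; the "kv" table, its columns, and the upsert syntax are assumptions.
// Assumed imports: "context", "database/sql", "fmt".
func kvPut(ctx context.Context, db *sql.DB, key, value []byte) error {
	res, err := db.ExecContext(ctx,
		"INSERT INTO kv (k, v) VALUES (?, ?) ON DUPLICATE KEY UPDATE v = ?",
		key, value, value)
	if err != nil {
		return fmt.Errorf("kv upsert: %s", err)
	}
	// RowsAffected can still report a driver error even after a successful Exec.
	if _, err = res.RowsAffected(); err != nil {
		return fmt.Errorf("kv upsert no rows affected: %s", err)
	}
	return nil
}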

View file

@@ -68,7 +68,7 @@ func (f *Filer) LoadConfiguration(config *viper.Viper) {
 		if err := store.Initialize(config, key+"."); err != nil {
 			glog.Fatalf("Failed to initialize store for %s: %+v", key, err)
 		}
-		location := config.GetString(key+".location")
+		location := config.GetString(key + ".location")
 		if location == "" {
 			glog.Errorf("path-specific filer store needs %s", key+".location")
 			os.Exit(-1)
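
This hunk reads a per-store location by composing the config key with a ".location" suffix and exits when it is missing. A rough sketch of that lookup as a standalone helper; the example key and the "enabled" flag are illustrative assumptions, not the documented filer.toml schema:

// Sketch only. Assumed imports: "os", a *viper.Viper instance, and a glog-style logger.
func pathSpecificStoreLocation(config *viper.Viper, key string) string {
	// key would be something like "redis2.store1" in this sketch.
	if !config.GetBool(key + ".enabled") { // assumption: each section carries an enabled flag
		return ""
	}
	location := config.GetString(key + ".location") // e.g. "/buckets/important"
	if location == "" {
		glog.Errorf("path-specific filer store needs %s", key+".location")
		os.Exit(-1)
	}
	return location
}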

View file

@@ -151,4 +151,3 @@ func (f *Filer) deleteChunksIfNotNew(oldEntry, newEntry *Entry) {
 	}
 	f.DeleteChunks(toDelete)
 }
-

View file

@@ -30,7 +30,7 @@ func NewFilerStorePathTranlator(storeRoot string, store FilerStore) *FilerStoreP
 	}
 }

-func (t *FilerStorePathTranlator) translatePath(fp util.FullPath) (newPath util.FullPath){
+func (t *FilerStorePathTranlator) translatePath(fp util.FullPath) (newPath util.FullPath) {
 	newPath = fp
 	if t.storeRoot == "/" {
 		return
@@ -41,7 +41,7 @@ func (t *FilerStorePathTranlator) translatePath(fp util.FullPath) (newPath util.
 	}
 	return
 }
-func (t *FilerStorePathTranlator) changeEntryPath(entry *Entry) (previousPath util.FullPath){
+func (t *FilerStorePathTranlator) changeEntryPath(entry *Entry) (previousPath util.FullPath) {
 	previousPath = entry.FullPath
 	if t.storeRoot == "/" {
 		return
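
FilerStorePathTranlator (the spelling is the codebase's own) rewrites full paths so a store mounted under storeRoot sees paths relative to that root; the hunk only shows the early return when storeRoot is "/". A sketch of the general idea, assuming a plain prefix strip rather than the exact implementation:

// Sketch only, not the real translatePath body. Assumed import: "strings".
func translate(storeRoot, fullPath string) string {
	if storeRoot == "/" {
		return fullPath // nothing to strip
	}
	// Assumption: storeRoot ends with "/" and is a prefix of fullPath.
	return "/" + strings.TrimPrefix(fullPath, storeRoot)
}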

View file

@@ -183,7 +183,7 @@ func (fsw *FilerStoreWrapper) DeleteOneEntry(ctx context.Context, existingEntry
 }

 func (fsw *FilerStoreWrapper) DeleteFolderChildren(ctx context.Context, fp util.FullPath) (err error) {
-	actualStore := fsw.getActualStore(fp+"/")
+	actualStore := fsw.getActualStore(fp + "/")
 	stats.FilerStoreCounter.WithLabelValues(actualStore.GetName(), "deleteFolderChildren").Inc()
 	start := time.Now()
 	defer func() {
@@ -195,7 +195,7 @@ func (fsw *FilerStoreWrapper) DeleteFolderChildren(ctx context.Context, fp util.
 }

 func (fsw *FilerStoreWrapper) ListDirectoryEntries(ctx context.Context, dirPath util.FullPath, startFileName string, includeStartFile bool, limit int) ([]*Entry, error) {
-	actualStore := fsw.getActualStore(dirPath+"/")
+	actualStore := fsw.getActualStore(dirPath + "/")
 	stats.FilerStoreCounter.WithLabelValues(actualStore.GetName(), "list").Inc()
 	start := time.Now()
 	defer func() {
@@ -215,7 +215,7 @@ func (fsw *FilerStoreWrapper) ListDirectoryEntries(ctx context.Context, dirPath
 }

 func (fsw *FilerStoreWrapper) ListDirectoryPrefixedEntries(ctx context.Context, dirPath util.FullPath, startFileName string, includeStartFile bool, limit int, prefix string) ([]*Entry, error) {
-	actualStore := fsw.getActualStore(dirPath+"/")
+	actualStore := fsw.getActualStore(dirPath + "/")
 	stats.FilerStoreCounter.WithLabelValues(actualStore.GetName(), "prefixList").Inc()
 	start := time.Now()
 	defer func() {
@@ -237,7 +237,7 @@ func (fsw *FilerStoreWrapper) ListDirectoryPrefixedEntries(ctx context.Context,
 }

 func (fsw *FilerStoreWrapper) prefixFilterEntries(ctx context.Context, dirPath util.FullPath, startFileName string, includeStartFile bool, limit int, prefix string) (entries []*Entry, err error) {
-	actualStore := fsw.getActualStore(dirPath+"/")
+	actualStore := fsw.getActualStore(dirPath + "/")
 	entries, err = actualStore.ListDirectoryEntries(ctx, dirPath, startFileName, includeStartFile, limit)
 	if err != nil {
 		return nil, err
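
Each wrapper method above first resolves the backing store with getActualStore(dirPath + "/"); the trailing slash lets a directory match its own path-specific mount. A sketch of that kind of longest-prefix routing, assuming a simple map from mount path to store rather than the wrapper's real data structure:

// Sketch only; the map-based registry and defaultStore parameter are assumptions.
// Assumed import: "strings". FilerStore is the interface visible in this diff.
func pickStore(pathStores map[string]FilerStore, defaultStore FilerStore, path string) FilerStore {
	best := ""
	for mount := range pathStores {
		if strings.HasPrefix(path, mount) && len(mount) > len(best) {
			best = mount
		}
	}
	if best == "" {
		return defaultStore
	}
	return pathStores[best]
}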

View file

@@ -82,8 +82,8 @@ func SaveInsideFiler(client filer_pb.SeaweedFilerClient, dir, name string, conte
 	if err == filer_pb.ErrNotFound {
 		err = filer_pb.CreateEntry(client, &filer_pb.CreateEntryRequest{
 			Directory: dir,
 			Entry: &filer_pb.Entry{
 				Name: name,
 				IsDirectory: false,
 				Attributes: &filer_pb.FuseAttributes{
@@ -92,7 +92,7 @@ func SaveInsideFiler(client filer_pb.SeaweedFilerClient, dir, name string, conte
 					FileMode: uint32(0644),
 					Collection: "",
 					Replication: "",
 					FileSize: uint64(len(content)),
 				},
 				Content: content,
 			},
@@ -103,10 +103,10 @@ func SaveInsideFiler(client filer_pb.SeaweedFilerClient, dir, name string, conte
 		entry.Attributes.Mtime = time.Now().Unix()
 		entry.Attributes.FileSize = uint64(len(content))
 		err = filer_pb.UpdateEntry(client, &filer_pb.UpdateEntryRequest{
 			Directory: dir,
 			Entry: entry,
 		})
 	}

 	return err
 }
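
SaveInsideFiler follows a lookup-then-create-or-update flow: on filer_pb.ErrNotFound it creates the entry, otherwise it refreshes mtime and size and updates it. A condensed sketch of that branch structure, with fields trimmed and the helper signature assumed for illustration:

// Sketch only. Assumed import: "time"; the filer_pb types and helpers are those visible in the hunk.
func createOrUpdate(client filer_pb.SeaweedFilerClient, dir, name string, content []byte, entry *filer_pb.Entry, lookupErr error) error {
	if lookupErr == filer_pb.ErrNotFound {
		return filer_pb.CreateEntry(client, &filer_pb.CreateEntryRequest{
			Directory: dir,
			Entry: &filer_pb.Entry{
				Name:       name,
				Attributes: &filer_pb.FuseAttributes{Mtime: time.Now().Unix(), FileMode: uint32(0644), FileSize: uint64(len(content))},
				Content:    content,
			},
		})
	}
	entry.Attributes.Mtime = time.Now().Unix()
	entry.Attributes.FileSize = uint64(len(content))
	return filer_pb.UpdateEntry(client, &filer_pb.UpdateEntryRequest{Directory: dir, Entry: entry})
}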

View file

@@ -18,7 +18,7 @@ const (
 )

 type UniversalRedis2Store struct {
 	Client redis.UniversalClient
 	superLargeDirectoryHash map[string]bool
 }

View file

@@ -79,7 +79,6 @@ func (iam *IdentityAccessManagement) loadS3ApiConfigurationFromBytes(content []b
 	return nil
 }

-
 func (iam *IdentityAccessManagement) loadS3ApiConfiguration(config *iam_pb.S3ApiConfiguration) error {
 	var identities []*Identity
 	for _, ident := range config.Identities {

View file

@@ -7,4 +7,3 @@ const (
 	ACTION_TAGGING = "Tagging"
 	ACTION_LIST = "List"
 )
-

View file

@@ -47,7 +47,7 @@ type Volume struct {
 	volumeInfo *volume_server_pb.VolumeInfo
 	location *DiskLocation
 	lastIoError error
 }

 func NewVolume(dirname string, dirIdx string, collection string, id needle.VolumeId, needleMapKind NeedleMapType, replicaPlacement *super_block.ReplicaPlacement, ttl *needle.TTL, preallocate int64, memoryMapMaxSizeMb uint32) (v *Volume, e error) {

View file

@@ -92,7 +92,7 @@ func (v *Volume) load(alsoLoadIndex bool, createDatIfMissing bool, needleMapKind
 	if err == nil && alsoLoadIndex {
 		// adjust for existing volumes with .idx together with .dat files
 		if v.dirIdx != v.dir {
-			if util.FileExists(v.DataFileName()+".idx") {
+			if util.FileExists(v.DataFileName() + ".idx") {
 				v.dirIdx = v.dir
 			}
 		}
@@ -100,12 +100,12 @@ func (v *Volume) load(alsoLoadIndex bool, createDatIfMissing bool, needleMapKind
 		if v.noWriteOrDelete {
 			glog.V(0).Infoln("open to read file", v.FileName(".idx"))
 			if indexFile, err = os.OpenFile(v.FileName(".idx"), os.O_RDONLY, 0644); err != nil {
 				return fmt.Errorf("cannot read Volume Index %s: %v", v.FileName(".idx"), err)
 			}
 		} else {
 			glog.V(1).Infoln("open to write file", v.FileName(".idx"))
 			if indexFile, err = os.OpenFile(v.FileName(".idx"), os.O_RDWR|os.O_CREATE, 0644); err != nil {
 				return fmt.Errorf("cannot write Volume Index %s: %v", v.FileName(".idx"), err)
 			}
 		}
 		if v.lastAppendAtNs, err = CheckAndFixVolumeDataIntegrity(v, indexFile); err != nil {
@@ -115,7 +115,7 @@ func (v *Volume) load(alsoLoadIndex bool, createDatIfMissing bool, needleMapKind
 		if v.noWriteOrDelete || v.noWriteCanDelete {
 			if v.nm, err = NewSortedFileNeedleMap(v.IndexFileName(), indexFile); err != nil {
 				glog.V(0).Infof("loading sorted db %s error: %v", v.FileName(".sdx"), err)
 			}
 		} else {
 			switch needleMapKind {
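
The load path opens the .idx file read-only when the volume is flagged noWriteOrDelete, and read-write (creating it if missing) otherwise, before handing it to CheckAndFixVolumeDataIntegrity. The mode switch is the standard os.OpenFile pattern; a small sketch, with the readOnly flag standing in for the volume state:

// Sketch of the pattern, not the actual load() method. Assumed imports: "fmt", "os".
func openIndex(indexPath string, readOnly bool) (*os.File, error) {
	flags := os.O_RDWR | os.O_CREATE
	if readOnly {
		flags = os.O_RDONLY
	}
	f, err := os.OpenFile(indexPath, flags, 0644)
	if err != nil {
		return nil, fmt.Errorf("cannot open volume index %s: %v", indexPath, err)
	}
	return f, nil
}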

View file

@@ -19,11 +19,11 @@ type Configuration interface {
 func LoadConfiguration(configFileName string, required bool) (loaded bool) {
 	// find a filer store
 	viper.SetConfigName(configFileName) // name of config file (without extension)
 	viper.AddConfigPath(".") // optionally look for config in the working directory
 	viper.AddConfigPath("$HOME/.seaweedfs") // call multiple times to add many search paths
 	viper.AddConfigPath("/usr/local/etc/seaweedfs/") // search path for bsd-style config directory in
 	viper.AddConfigPath("/etc/seaweedfs/") // path to look for the config file in

 	glog.V(1).Infof("Reading %s.toml from %s", configFileName, viper.ConfigFileUsed())
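
The comments in this last hunk describe viper's search behavior: SetConfigName takes the file name without its extension, and each AddConfigPath call adds another directory to probe in order. A small sketch of loading a seaweedfs-style TOML file that way; the ReadInConfig call and the fatal-when-required branch are assumptions about usage, not a copy of the real function:

// Sketch only. Assumed imports: "github.com/spf13/viper" and a glog-style logger.
func loadConfiguration(configFileName string, required bool) bool {
	viper.SetConfigName(configFileName)              // e.g. "filer" resolves filer.toml
	viper.AddConfigPath(".")                         // working directory first
	viper.AddConfigPath("$HOME/.seaweedfs")          // per-user config
	viper.AddConfigPath("/usr/local/etc/seaweedfs/") // bsd-style system config
	viper.AddConfigPath("/etc/seaweedfs/")           // linux system config
	if err := viper.ReadInConfig(); err != nil {
		if required {
			glog.Fatalf("reading %s.toml: %v", configFileName, err)
		}
		return false
	}
	return true
}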