2020-04-18 15:26:57 +00:00
|
|
|
package mongodb
|
|
|
|
|
|
|
|
import (
|
|
|
|
"context"
|
2020-04-18 16:48:38 +00:00
|
|
|
"fmt"
|
2020-09-01 07:21:19 +00:00
|
|
|
"github.com/chrislusf/seaweedfs/weed/filer"
|
2020-04-18 17:16:50 +00:00
|
|
|
"github.com/chrislusf/seaweedfs/weed/glog"
|
2020-04-18 16:48:38 +00:00
|
|
|
"github.com/chrislusf/seaweedfs/weed/pb/filer_pb"
|
2020-04-18 15:26:57 +00:00
|
|
|
"github.com/chrislusf/seaweedfs/weed/util"
|
2020-04-18 16:48:38 +00:00
|
|
|
"go.mongodb.org/mongo-driver/bson"
|
2020-04-18 15:26:57 +00:00
|
|
|
"go.mongodb.org/mongo-driver/mongo"
|
|
|
|
"go.mongodb.org/mongo-driver/mongo/options"
|
2020-04-20 01:13:57 +00:00
|
|
|
"go.mongodb.org/mongo-driver/x/bsonx"
|
2020-04-18 15:26:57 +00:00
|
|
|
"time"
|
|
|
|
)
|
|
|
|
|
|
|
|
// init registers the MongoDB-backed store so the filer can select it
// by name ("mongodb") from its configuration at startup.
func init() {
	filer.Stores = append(filer.Stores, &MongodbStore{})
}
|
|
|
|
|
|
|
|
// MongodbStore is a filer store implementation that persists file
// metadata in a single MongoDB collection.
type MongodbStore struct {
	connect        *mongo.Client // shared driver client, created in connection()
	database       string        // database name read from configuration
	collectionName string        // fixed to "filemeta" in Initialize
}
|
|
|
|
|
|
|
|
// Model is the BSON document schema for one file entry: the parent
// directory, the entry name, and the encoded metadata blob.
type Model struct {
	Directory string `bson:"directory"` // parent directory of the entry
	Name      string `bson:"name"`      // base name within the directory
	Meta      []byte `bson:"meta"`      // serialized (possibly gzipped) attributes and chunks
}
|
|
|
|
|
|
|
|
// GetName returns the store type identifier used in the filer configuration.
func (store *MongodbStore) GetName() string {
	return "mongodb"
}
|
|
|
|
|
|
|
|
func (store *MongodbStore) Initialize(configuration util.Configuration, prefix string) (err error) {
|
2020-04-18 16:48:38 +00:00
|
|
|
store.database = configuration.GetString(prefix + "database")
|
|
|
|
store.collectionName = "filemeta"
|
2020-04-19 10:51:32 +00:00
|
|
|
poolSize := configuration.GetInt(prefix + "option_pool_size")
|
2020-04-21 00:48:06 +00:00
|
|
|
return store.connection(configuration.GetString(prefix+"uri"), uint64(poolSize))
|
2020-04-18 15:26:57 +00:00
|
|
|
}
|
|
|
|
|
2020-04-19 10:51:32 +00:00
|
|
|
func (store *MongodbStore) connection(uri string, poolSize uint64) (err error) {
|
2020-04-18 15:26:57 +00:00
|
|
|
ctx, _ := context.WithTimeout(context.Background(), 10*time.Second)
|
2020-04-19 10:51:32 +00:00
|
|
|
opts := options.Client().ApplyURI(uri)
|
|
|
|
|
|
|
|
if poolSize > 0 {
|
|
|
|
opts.SetMaxPoolSize(poolSize)
|
|
|
|
}
|
|
|
|
|
|
|
|
client, err := mongo.Connect(ctx, opts)
|
2020-04-20 01:13:57 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
c := client.Database(store.database).Collection(store.collectionName)
|
|
|
|
err = store.indexUnique(c)
|
2020-04-18 15:26:57 +00:00
|
|
|
store.connect = client
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2020-04-21 00:48:06 +00:00
|
|
|
func (store *MongodbStore) createIndex(c *mongo.Collection, index mongo.IndexModel, opts *options.CreateIndexesOptions) error {
|
2020-04-20 01:13:57 +00:00
|
|
|
_, err := c.Indexes().CreateOne(context.Background(), index, opts)
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
func (store *MongodbStore) indexUnique(c *mongo.Collection) error {
|
|
|
|
opts := options.CreateIndexes().SetMaxTime(10 * time.Second)
|
|
|
|
|
|
|
|
unique := new(bool)
|
|
|
|
*unique = true
|
|
|
|
|
|
|
|
index := mongo.IndexModel{
|
|
|
|
Keys: bsonx.Doc{{Key: "directory", Value: bsonx.Int32(1)}, {Key: "name", Value: bsonx.Int32(1)}},
|
|
|
|
Options: &options.IndexOptions{
|
|
|
|
Unique: unique,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
return store.createIndex(c, index, opts)
|
|
|
|
}
|
|
|
|
|
2020-04-18 15:26:57 +00:00
|
|
|
// BeginTransaction is a no-op: this store does not batch operations into
// MongoDB transactions, so the incoming context is returned unchanged.
func (store *MongodbStore) BeginTransaction(ctx context.Context) (context.Context, error) {
	return ctx, nil
}
|
|
|
|
|
|
|
|
// CommitTransaction is a no-op; see BeginTransaction.
func (store *MongodbStore) CommitTransaction(ctx context.Context) error {
	return nil
}
|
|
|
|
|
|
|
|
// RollbackTransaction is a no-op; see BeginTransaction.
func (store *MongodbStore) RollbackTransaction(ctx context.Context) error {
	return nil
}
|
|
|
|
|
2020-09-01 07:21:19 +00:00
|
|
|
// InsertEntry delegates to UpdateEntry: the underlying UpdateOne call
// uses upsert, so insert and update are the same operation here.
func (store *MongodbStore) InsertEntry(ctx context.Context, entry *filer.Entry) (err error) {

	return store.UpdateEntry(ctx, entry)

}
|
|
|
|
|
|
|
|
func (store *MongodbStore) UpdateEntry(ctx context.Context, entry *filer.Entry) (err error) {
|
|
|
|
|
2020-04-18 16:48:38 +00:00
|
|
|
dir, name := entry.FullPath.DirAndName()
|
|
|
|
meta, err := entry.EncodeAttributesAndChunks()
|
|
|
|
if err != nil {
|
|
|
|
return fmt.Errorf("encode %s: %s", entry.FullPath, err)
|
|
|
|
}
|
|
|
|
|
2020-09-03 18:00:20 +00:00
|
|
|
if len(entry.Chunks) > 50 {
|
|
|
|
meta = util.MaybeGzipData(meta)
|
|
|
|
}
|
|
|
|
|
2020-04-18 16:48:38 +00:00
|
|
|
c := store.connect.Database(store.database).Collection(store.collectionName)
|
|
|
|
|
2020-09-18 04:50:52 +00:00
|
|
|
opts := options.Update().SetUpsert(true)
|
|
|
|
filter := bson.D{{"directory", dir}, {"name", name}}
|
|
|
|
update := bson.D{{"$set", bson.D{{"meta", meta}}}}
|
|
|
|
|
|
|
|
_, err = c.UpdateOne(ctx, filter, update, opts)
|
2020-04-18 16:48:38 +00:00
|
|
|
|
2020-09-03 00:17:44 +00:00
|
|
|
if err != nil {
|
2020-09-18 04:50:52 +00:00
|
|
|
return fmt.Errorf("UpdateEntry %s: %v", entry.FullPath, err)
|
2020-09-03 00:17:44 +00:00
|
|
|
}
|
|
|
|
|
2020-04-18 15:26:57 +00:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2020-09-01 07:21:19 +00:00
|
|
|
func (store *MongodbStore) FindEntry(ctx context.Context, fullpath util.FullPath) (entry *filer.Entry, err error) {
|
2020-04-18 16:48:38 +00:00
|
|
|
|
|
|
|
dir, name := fullpath.DirAndName()
|
|
|
|
var data Model
|
|
|
|
|
2020-04-18 17:16:50 +00:00
|
|
|
var where = bson.M{"directory": dir, "name": name}
|
2020-04-18 16:48:38 +00:00
|
|
|
err = store.connect.Database(store.database).Collection(store.collectionName).FindOne(ctx, where).Decode(&data)
|
|
|
|
if err != mongo.ErrNoDocuments && err != nil {
|
2020-09-03 04:42:12 +00:00
|
|
|
glog.Errorf("find %s: %v", fullpath, err)
|
2020-04-18 16:48:38 +00:00
|
|
|
return nil, filer_pb.ErrNotFound
|
|
|
|
}
|
|
|
|
|
|
|
|
if len(data.Meta) == 0 {
|
|
|
|
return nil, filer_pb.ErrNotFound
|
|
|
|
}
|
|
|
|
|
2020-09-01 07:21:19 +00:00
|
|
|
entry = &filer.Entry{
|
2020-04-18 16:48:38 +00:00
|
|
|
FullPath: fullpath,
|
|
|
|
}
|
|
|
|
|
2020-09-03 18:00:20 +00:00
|
|
|
err = entry.DecodeAttributesAndChunks(util.MaybeDecompressData(data.Meta))
|
2020-04-18 16:48:38 +00:00
|
|
|
if err != nil {
|
|
|
|
return entry, fmt.Errorf("decode %s : %v", entry.FullPath, err)
|
|
|
|
}
|
|
|
|
|
|
|
|
return entry, nil
|
2020-04-18 15:26:57 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
func (store *MongodbStore) DeleteEntry(ctx context.Context, fullpath util.FullPath) error {
|
|
|
|
|
2020-04-18 17:31:49 +00:00
|
|
|
dir, name := fullpath.DirAndName()
|
|
|
|
|
|
|
|
where := bson.M{"directory": dir, "name": name}
|
2020-04-18 17:20:00 +00:00
|
|
|
_, err := store.connect.Database(store.database).Collection(store.collectionName).DeleteOne(ctx, where)
|
|
|
|
if err != nil {
|
|
|
|
return fmt.Errorf("delete %s : %v", fullpath, err)
|
|
|
|
}
|
|
|
|
|
2020-04-18 15:26:57 +00:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2021-07-22 15:23:20 +00:00
|
|
|
func (store *MongodbStore) DeleteFolderChildren(ctx context.Context, fullpath util.FullPath) error {
|
2020-04-18 15:26:57 +00:00
|
|
|
|
2020-04-18 17:31:49 +00:00
|
|
|
where := bson.M{"directory": fullpath}
|
2020-05-07 10:11:26 +00:00
|
|
|
_, err := store.connect.Database(store.database).Collection(store.collectionName).DeleteMany(ctx, where)
|
2020-04-18 17:31:49 +00:00
|
|
|
if err != nil {
|
|
|
|
return fmt.Errorf("delete %s : %v", fullpath, err)
|
|
|
|
}
|
|
|
|
|
2020-04-18 15:26:57 +00:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2021-01-16 07:56:24 +00:00
|
|
|
// ListDirectoryPrefixedEntries is not implemented for MongoDB; returning
// ErrUnsupportedListDirectoryPrefixed makes the filer layer fall back to
// generic prefix filtering over ListDirectoryEntries.
func (store *MongodbStore) ListDirectoryPrefixedEntries(ctx context.Context, dirPath util.FullPath, startFileName string, includeStartFile bool, limit int64, prefix string, eachEntryFunc filer.ListEachEntryFunc) (lastFileName string, err error) {
	return lastFileName, filer.ErrUnsupportedListDirectoryPrefixed
}
|
|
|
|
|
2021-01-16 07:56:24 +00:00
|
|
|
func (store *MongodbStore) ListDirectoryEntries(ctx context.Context, dirPath util.FullPath, startFileName string, includeStartFile bool, limit int64, eachEntryFunc filer.ListEachEntryFunc) (lastFileName string, err error) {
|
2020-04-18 15:26:57 +00:00
|
|
|
|
2021-01-15 06:28:51 +00:00
|
|
|
var where = bson.M{"directory": string(dirPath), "name": bson.M{"$gt": startFileName}}
|
|
|
|
if includeStartFile {
|
2020-04-18 17:16:50 +00:00
|
|
|
where["name"] = bson.M{
|
|
|
|
"$gte": startFileName,
|
|
|
|
}
|
|
|
|
}
|
2021-01-16 07:56:24 +00:00
|
|
|
optLimit := int64(limit)
|
2020-04-21 00:48:06 +00:00
|
|
|
opts := &options.FindOptions{Limit: &optLimit, Sort: bson.M{"name": 1}}
|
2020-04-19 10:51:32 +00:00
|
|
|
cur, err := store.connect.Database(store.database).Collection(store.collectionName).Find(ctx, where, opts)
|
2020-04-18 17:16:50 +00:00
|
|
|
for cur.Next(ctx) {
|
|
|
|
var data Model
|
|
|
|
err := cur.Decode(&data)
|
|
|
|
if err != nil && err != mongo.ErrNoDocuments {
|
2021-01-16 07:56:24 +00:00
|
|
|
return lastFileName, err
|
2020-04-18 17:16:50 +00:00
|
|
|
}
|
|
|
|
|
2020-09-01 07:21:19 +00:00
|
|
|
entry := &filer.Entry{
|
2021-01-15 06:28:51 +00:00
|
|
|
FullPath: util.NewFullPath(string(dirPath), data.Name),
|
2020-04-18 17:16:50 +00:00
|
|
|
}
|
2021-01-16 07:56:24 +00:00
|
|
|
lastFileName = data.Name
|
2020-09-03 18:00:20 +00:00
|
|
|
if decodeErr := entry.DecodeAttributesAndChunks(util.MaybeDecompressData(data.Meta)); decodeErr != nil {
|
2020-04-18 17:16:50 +00:00
|
|
|
err = decodeErr
|
|
|
|
glog.V(0).Infof("list %s : %v", entry.FullPath, err)
|
|
|
|
break
|
|
|
|
}
|
|
|
|
|
2021-01-16 07:56:24 +00:00
|
|
|
if !eachEntryFunc(entry) {
|
|
|
|
break
|
|
|
|
}
|
2020-04-18 17:16:50 +00:00
|
|
|
|
2021-01-15 06:28:51 +00:00
|
|
|
}
|
|
|
|
|
2020-04-18 17:16:50 +00:00
|
|
|
if err := cur.Close(ctx); err != nil {
|
|
|
|
glog.V(0).Infof("list iterator close: %v", err)
|
|
|
|
}
|
|
|
|
|
2021-01-16 07:56:24 +00:00
|
|
|
return lastFileName, err
|
2020-04-18 15:26:57 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
func (store *MongodbStore) Shutdown() {
|
|
|
|
ctx, _ := context.WithTimeout(context.Background(), 10*time.Second)
|
|
|
|
store.connect.Disconnect(ctx)
|
|
|
|
}
|