Mirror of https://github.com/seaweedfs/seaweedfs.git, synced 2024-01-19 02:48:24 +00:00
refactor
This commit is contained in:
parent 04d5dff6bb
commit 7b544576af
@@ -4,6 +4,7 @@ import asql "github.com/chrislusf/seaweedfs/weed/filer/abstract_sql"
 
 const (
     insertQuery = `
+        PRAGMA TablePathPrefix("%v");
         DECLARE $dir_hash AS int64;
         DECLARE $directory AS Utf8;
         DECLARE $name AS Utf8;
@@ -16,6 +17,7 @@ const (
         ($dir_hash, $name, $directory, $meta, $expire_at);`
 
     updateQuery = `
+        PRAGMA TablePathPrefix("%v");
         DECLARE $dir_hash AS int64;
         DECLARE $directory AS Utf8;
         DECLARE $name AS Utf8;
@@ -28,6 +30,7 @@ const (
         ($dir_hash, $name, $directory, $meta, $expire_at);`
 
     deleteQuery = `
+        PRAGMA TablePathPrefix("%v");
         DECLARE $dir_hash AS int64;
         DECLARE $name AS Utf8;
 
@@ -35,6 +38,7 @@ const (
         WHERE dir_hash = $dir_hash AND name = $name;`
 
     findQuery = `
+        PRAGMA TablePathPrefix("%v");
         DECLARE $dir_hash AS int64;
         DECLARE $name AS Utf8;
 
@@ -43,6 +47,7 @@ const (
         WHERE dir_hash = $dir_hash AND name = $name;`
 
     deleteFolderChildrenQuery = `
+        PRAGMA TablePathPrefix("%v");
         DECLARE $dir_hash AS int64;
         DECLARE $directory AS Utf8;
 
@@ -50,6 +55,7 @@ const (
         WHERE dir_hash = $dir_hash AND directory = $directory;`
 
     listDirectoryQuery = `
+        PRAGMA TablePathPrefix("%v");
         DECLARE $dir_hash AS int64;
         DECLARE $directory AS Utf8;
         DECLARE $start_name AS Utf8;
@@ -58,6 +64,19 @@ const (
 
         SELECT name, meta
         FROM ` + asql.DEFAULT_TABLE + `
-        WHERE dir_hash = $dir_hash AND directory = $directory and name %s $start_name and name LIKE $prefix
+        WHERE dir_hash = $dir_hash AND directory = $directory and name > $start_name and name LIKE $prefix
+        ORDER BY name ASC LIMIT $limit;`
+
+    listInclusiveDirectoryQuery = `
+        PRAGMA TablePathPrefix("%v");
+        DECLARE $dir_hash AS int64;
+        DECLARE $directory AS Utf8;
+        DECLARE $start_name AS Utf8;
+        DECLARE $prefix AS Utf8;
+        DECLARE $limit AS Uint64;
+
+        SELECT name, meta
+        FROM ` + asql.DEFAULT_TABLE + `
+        WHERE dir_hash = $dir_hash AND directory = $directory and name >= $start_name and name LIKE $prefix
         ORDER BY name ASC LIMIT $limit;`
 )

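A note on the listing constants above: the old listDirectoryQuery left the comparison operator as a %s to be formatted in at call time; now the exclusive (name > $start_name) and inclusive (name >= $start_name) variants are separate constants and the caller simply picks one, as ListDirectoryPrefixedEntries does further down in this diff. A minimal sketch of that selection, not part of the commit, with the query bodies shortened to placeholders:

// Sketch only: choosing between the exclusive and inclusive listing
// constants now that the operator is no longer spliced in via %s.
// The query texts here are stand-ins, not the real YQL.
package main

import "fmt"

const (
    listDirectoryQuery          = `... WHERE name > $start_name ...`  // excludes the start file
    listInclusiveDirectoryQuery = `... WHERE name >= $start_name ...` // includes the start file
)

func pickListQuery(includeStartFile bool) string {
    if includeStartFile {
        return listInclusiveDirectoryQuery
    }
    return listDirectoryQuery
}

func main() {
    fmt.Println(pickListQuery(false)) // ... WHERE name > $start_name ...
    fmt.Println(pickListQuery(true))  // ... WHERE name >= $start_name ...
}
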
@@ -66,6 +66,9 @@ func (store *YdbStore) Initialize(configuration util.Configuration, prefix strin
 func (store *YdbStore) initialize(dirBuckets string, dsn string, tablePathPrefix string, useBucketPrefix bool, connectionTimeOut int, poolSizeLimit int) (err error) {
     store.dirBuckets = dirBuckets
     store.SupportBucketTable = useBucketPrefix
+    if store.SupportBucketTable {
+        glog.V(0).Infof("enabled BucketPrefix")
+    }
     store.dbs = make(map[string]bool)
     ctx, cancel := context.WithCancel(context.Background())
     defer cancel()
@@ -133,7 +136,7 @@ func (store *YdbStore) doTxOrDB(ctx context.Context, query *string, params *tabl
     return err
 }
 
-func (store *YdbStore) insertOrUpdateEntry(ctx context.Context, entry *filer.Entry, query string) (err error) {
+func (store *YdbStore) insertOrUpdateEntry(ctx context.Context, entry *filer.Entry, isUpdate bool) (err error) {
     dir, name := entry.FullPath.DirAndName()
     meta, err := entry.EncodeAttributesAndChunks()
     if err != nil {
@@ -143,29 +146,36 @@ func (store *YdbStore) insertOrUpdateEntry(ctx context.Context, entry *filer.Ent
     if len(entry.Chunks) > filer.CountEntryChunksForGzip {
         meta = util.MaybeGzipData(meta)
     }
-    queryWithPragma := withPragma(store.getPrefix(ctx, dir), query)
-    fileMeta := FileMeta{util.HashStringToLong(dir), name, dir, meta}
-    return store.doTxOrDB(ctx, &queryWithPragma, fileMeta.queryParameters(entry.TtlSec), rwTX, nil)
+    tablePathPrefix, shortDir := store.getPrefix(ctx, &dir)
+    fileMeta := FileMeta{util.HashStringToLong(dir), name, *shortDir, meta}
+    var query *string
+    if isUpdate {
+        query = withPragma(tablePathPrefix, updateQuery)
+    } else {
+        query = withPragma(tablePathPrefix, insertQuery)
+    }
+    return store.doTxOrDB(ctx, query, fileMeta.queryParameters(entry.TtlSec), rwTX, nil)
 }
 
 func (store *YdbStore) InsertEntry(ctx context.Context, entry *filer.Entry) (err error) {
-    return store.insertOrUpdateEntry(ctx, entry, insertQuery)
+    return store.insertOrUpdateEntry(ctx, entry, false)
 }
 
 func (store *YdbStore) UpdateEntry(ctx context.Context, entry *filer.Entry) (err error) {
-    return store.insertOrUpdateEntry(ctx, entry, updateQuery)
+    return store.insertOrUpdateEntry(ctx, entry, true)
 }
 
 func (store *YdbStore) FindEntry(ctx context.Context, fullpath util.FullPath) (entry *filer.Entry, err error) {
     dir, name := fullpath.DirAndName()
     var data []byte
     entryFound := false
-    queryWithPragma := withPragma(store.getPrefix(ctx, dir), findQuery)
+    tablePathPrefix, shortDir := store.getPrefix(ctx, &dir)
+    query := withPragma(tablePathPrefix, findQuery)
     queryParams := table.NewQueryParameters(
-        table.ValueParam("$dir_hash", types.Int64Value(util.HashStringToLong(dir))),
+        table.ValueParam("$dir_hash", types.Int64Value(util.HashStringToLong(*shortDir))),
         table.ValueParam("$name", types.UTF8Value(name)))
 
-    err = store.doTxOrDB(ctx, &queryWithPragma, queryParams, roTX, func(res result.Result) error {
+    err = store.doTxOrDB(ctx, query, queryParams, roTX, func(res result.Result) error {
         for res.NextResultSet(ctx) {
             for res.NextRow() {
                 if err = res.ScanNamed(named.OptionalWithDefault("meta", &data)); err != nil {
@@ -196,22 +206,24 @@ func (store *YdbStore) FindEntry(ctx context.Context, fullpath util.FullPath) (e
 
 func (store *YdbStore) DeleteEntry(ctx context.Context, fullpath util.FullPath) (err error) {
     dir, name := fullpath.DirAndName()
-    queryWithPragma := withPragma(store.getPrefix(ctx, dir), deleteQuery)
+    tablePathPrefix, shortDir := store.getPrefix(ctx, &dir)
+    query := withPragma(tablePathPrefix, deleteQuery)
     queryParams := table.NewQueryParameters(
-        table.ValueParam("$dir_hash", types.Int64Value(util.HashStringToLong(dir))),
+        table.ValueParam("$dir_hash", types.Int64Value(util.HashStringToLong(*shortDir))),
         table.ValueParam("$name", types.UTF8Value(name)))
 
-    return store.doTxOrDB(ctx, &queryWithPragma, queryParams, rwTX, nil)
+    return store.doTxOrDB(ctx, query, queryParams, rwTX, nil)
 }
 
 func (store *YdbStore) DeleteFolderChildren(ctx context.Context, fullpath util.FullPath) (err error) {
     dir, _ := fullpath.DirAndName()
-    queryWithPragma := withPragma(store.getPrefix(ctx, dir), deleteFolderChildrenQuery)
+    tablePathPrefix, shortDir := store.getPrefix(ctx, &dir)
+    query := withPragma(tablePathPrefix, deleteFolderChildrenQuery)
     queryParams := table.NewQueryParameters(
-        table.ValueParam("$dir_hash", types.Int64Value(util.HashStringToLong(dir))),
-        table.ValueParam("$directory", types.UTF8Value(dir)))
+        table.ValueParam("$dir_hash", types.Int64Value(util.HashStringToLong(*shortDir))),
+        table.ValueParam("$directory", types.UTF8Value(*shortDir)))
 
-    return store.doTxOrDB(ctx, &queryWithPragma, queryParams, rwTX, nil)
+    return store.doTxOrDB(ctx, query, queryParams, rwTX, nil)
 }
 
 func (store *YdbStore) ListDirectoryEntries(ctx context.Context, dirPath util.FullPath, startFileName string, includeStartFile bool, limit int64, eachEntryFunc filer.ListEachEntryFunc) (lastFileName string, err error) {
@@ -220,19 +232,21 @@ func (store *YdbStore) ListDirectoryEntries(ctx context.Context, dirPath util.Fu
 
 func (store *YdbStore) ListDirectoryPrefixedEntries(ctx context.Context, dirPath util.FullPath, startFileName string, includeStartFile bool, limit int64, prefix string, eachEntryFunc filer.ListEachEntryFunc) (lastFileName string, err error) {
     dir := string(dirPath)
-    startFileCompOp := ">"
+    tablePathPrefix, shortDir := store.getPrefix(ctx, &dir)
+    var query *string
     if includeStartFile {
-        startFileCompOp = ">="
+        query = withPragma(tablePathPrefix, listInclusiveDirectoryQuery)
+    } else {
+        query = withPragma(tablePathPrefix, listDirectoryQuery)
     }
-    queryWithPragma := withPragma(store.getPrefix(ctx, dir), fmt.Sprintf(listDirectoryQuery, startFileCompOp))
     queryParams := table.NewQueryParameters(
-        table.ValueParam("$dir_hash", types.Int64Value(util.HashStringToLong(dir))),
-        table.ValueParam("$directory", types.UTF8Value(dir)),
+        table.ValueParam("$dir_hash", types.Int64Value(util.HashStringToLong(*shortDir))),
+        table.ValueParam("$directory", types.UTF8Value(*shortDir)),
         table.ValueParam("$start_name", types.UTF8Value(startFileName)),
         table.ValueParam("$prefix", types.UTF8Value(prefix+"%")),
         table.ValueParam("$limit", types.Uint64Value(uint64(limit))),
     )
-    err = store.doTxOrDB(ctx, &queryWithPragma, queryParams, roTX, func(res result.Result) error {
+    err = store.doTxOrDB(ctx, query, queryParams, roTX, func(res result.Result) error {
         var name string
         var data []byte
         for res.NextResultSet(ctx) {
@@ -337,41 +351,50 @@ func (store *YdbStore) deleteTable(ctx context.Context, prefix string) error {
     if !store.SupportBucketTable {
         return nil
     }
-    return store.DB.Table().Do(ctx, func(ctx context.Context, s table.Session) error {
+    if err := store.DB.Table().Do(ctx, func(ctx context.Context, s table.Session) error {
         return s.DropTable(ctx, path.Join(prefix, abstract_sql.DEFAULT_TABLE))
-    })
+    }); err != nil {
+        return err
+    }
+    glog.V(4).Infof("deleted table %s", prefix)
+
+    return nil
 }
 
-func (store *YdbStore) getPrefix(ctx context.Context, dir string) (tablePathPrefix string) {
-    tablePathPrefix = store.tablePathPrefix
+func (store *YdbStore) getPrefix(ctx context.Context, dir *string) (tablePathPrefix *string, shortDir *string) {
+    tablePathPrefix = &store.tablePathPrefix
+    shortDir = dir
     if !store.SupportBucketTable {
         return
     }
 
     prefixBuckets := store.dirBuckets + "/"
-    if strings.HasPrefix(dir, prefixBuckets) {
+    if strings.HasPrefix(*dir, prefixBuckets) {
         // detect bucket
-        bucketAndDir := dir[len(prefixBuckets):]
-        t := strings.Index(bucketAndDir, "/")
-        if t < 0 {
+        bucketAndDir := (*dir)[len(prefixBuckets):]
+        var bucket string
+        if t := strings.Index(bucketAndDir, "/"); t > 0 {
+            bucket = bucketAndDir[:t]
+        } else if t < 0 {
+            bucket = bucketAndDir
+        }
+        if bucket == "" {
             return
         }
-        bucket := bucketAndDir[:t]
-
-        if bucket != "" {
-            return
-        }
         store.dbsLock.Lock()
         defer store.dbsLock.Unlock()
 
-        tablePathPrefix = path.Join(store.tablePathPrefix, bucket)
+        tablePathPrefixWithBucket := path.Join(store.tablePathPrefix, bucket)
         if _, found := store.dbs[bucket]; !found {
-            if err := store.createTable(ctx, tablePathPrefix); err == nil {
+            if err := store.createTable(ctx, tablePathPrefixWithBucket); err == nil {
                 store.dbs[bucket] = true
+                glog.V(4).Infof("created table %s", tablePathPrefixWithBucket)
             } else {
-                glog.Errorf("createTable %s: %v", tablePathPrefix, err)
+                glog.Errorf("createTable %s: %v", tablePathPrefixWithBucket, err)
             }
         }
+        tablePathPrefix = &tablePathPrefixWithBucket
     }
     return
 }

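A note on the reworked getPrefix above: when bucket-prefixed tables are enabled, the bucket name is whatever sits between the dirBuckets prefix and the next "/" (or the whole remainder if there is no further "/"), and that name is joined onto the table path prefix. Below is a standalone sketch of just that detection rule, assuming paths of the form "/buckets/<bucket>/..."; the function name and sample paths are made up for illustration and are not part of the commit.

// Sketch only: bucket detection as done in the new getPrefix, lifted out of
// the store. detectBucket and the sample paths are hypothetical.
package main

import (
    "fmt"
    "strings"
)

func detectBucket(dir, dirBuckets string) string {
    prefixBuckets := dirBuckets + "/"
    if !strings.HasPrefix(dir, prefixBuckets) {
        return "" // not under the buckets directory
    }
    bucketAndDir := dir[len(prefixBuckets):]
    var bucket string
    if t := strings.Index(bucketAndDir, "/"); t > 0 {
        bucket = bucketAndDir[:t] // "b1/photos/2021" -> "b1"
    } else if t < 0 {
        bucket = bucketAndDir // "b1" -> "b1"
    }
    return bucket
}

func main() {
    fmt.Println(detectBucket("/buckets/b1/photos/2021", "/buckets")) // b1
    fmt.Println(detectBucket("/buckets/b1", "/buckets"))             // b1
    fmt.Println(detectBucket("/some/other/dir", "/buckets"))         // (empty)
}
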
@@ -17,7 +17,7 @@ func (store *YdbStore) KvPut(ctx context.Context, key []byte, value []byte) (err
     dirStr, dirHash, name := abstract_sql.GenDirAndName(key)
     fileMeta := FileMeta{dirHash, name, dirStr, value}
     return store.DB.Table().Do(ctx, func(ctx context.Context, s table.Session) (err error) {
-        _, _, err = s.Execute(ctx, rwTX, withPragma(store.getPrefix(ctx, dirStr), insertQuery),
+        _, _, err = s.Execute(ctx, rwTX, *withPragma(&store.tablePathPrefix, insertQuery),
            fileMeta.queryParameters(0),
            options.WithQueryCachePolicy(options.WithQueryCachePolicyKeepInCache()))
        if err != nil {
@@ -31,7 +31,7 @@ func (store *YdbStore) KvGet(ctx context.Context, key []byte) (value []byte, err
     dirStr, dirHash, name := abstract_sql.GenDirAndName(key)
     valueFound := false
     err = store.DB.Table().Do(ctx, func(ctx context.Context, s table.Session) error {
-        _, res, err := s.Execute(ctx, roTX, withPragma(store.getPrefix(ctx, dirStr), findQuery),
+        _, res, err := s.Execute(ctx, roTX, *withPragma(&store.tablePathPrefix, findQuery),
            table.NewQueryParameters(
                table.ValueParam("$dir_hash", types.Int64Value(dirHash)),
                table.ValueParam("$name", types.UTF8Value(name))),
@@ -62,7 +62,7 @@ func (store *YdbStore) KvGet(ctx context.Context, key []byte) (value []byte, err
 func (store *YdbStore) KvDelete(ctx context.Context, key []byte) (err error) {
     dirStr, dirHash, name := abstract_sql.GenDirAndName(key)
     return store.DB.Table().Do(ctx, func(ctx context.Context, s table.Session) (err error) {
-        _, _, err = s.Execute(ctx, rwTX, withPragma(store.getPrefix(ctx, dirStr), insertQuery),
+        _, _, err = s.Execute(ctx, rwTX, *withPragma(&store.tablePathPrefix, insertQuery),
            table.NewQueryParameters(
                table.ValueParam("$dir_hash", types.Int64Value(dirHash)),
                table.ValueParam("$name", types.UTF8Value(name))),

@@ -1,6 +1,7 @@
 package ydb
 
 import (
+    "fmt"
     "github.com/ydb-platform/ydb-go-sdk/v3/table"
     "github.com/ydb-platform/ydb-go-sdk/v3/table/options"
     "github.com/ydb-platform/ydb-go-sdk/v3/table/types"
@@ -50,6 +51,7 @@ func createTableOptions() []options.CreateTableOption {
         ),
     }
 }
-func withPragma(prefix string, query string) string {
-    return `PRAGMA TablePathPrefix("` + prefix + `");` + query
+func withPragma(prefix *string, query string) *string {
+    queryWithPragma := fmt.Sprintf(query, *prefix)
+    return &queryWithPragma
 }

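The net effect of the withPragma change above: the helper no longer concatenates a PRAGMA in front of the query text; it fills the %v placeholder that each query constant now carries and returns a pointer, which callers pass on to doTxOrDB or s.Execute. A minimal sketch, not part of the commit, assuming a made-up table name and prefix path:

// Sketch only: a local copy of the new helper applied to a trimmed-down
// query constant. "file_meta" and the prefix path are stand-ins.
package main

import "fmt"

func withPragma(prefix *string, query string) *string {
    queryWithPragma := fmt.Sprintf(query, *prefix)
    return &queryWithPragma
}

const findQuery = `
    PRAGMA TablePathPrefix("%v");
    DECLARE $dir_hash AS int64;
    DECLARE $name AS Utf8;
    SELECT name, meta FROM file_meta
    WHERE dir_hash = $dir_hash AND name = $name;`

func main() {
    prefix := "/local/seaweedfs/bucket-a" // hypothetical table path prefix
    fmt.Println(*withPragma(&prefix, findQuery))
}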