Vegetable540
3 months ago
10 changed files with 266 additions and 11 deletions
  1  go.mod
  2  go.sum
 33  weed/command/scaffold/filer.toml
 14  weed/filer/abstract_sql/abstract_sql_store.go
  6  weed/filer/abstract_sql/abstract_sql_store_kv.go
 66  weed/filer/dameng/dameng_sql_gen.go
129  weed/filer/dameng/dameng_store.go
 23  weed/filer/dameng/dameng_store_test.go
  2  weed/filer/store_test/test_suite.go
  1  weed/server/filer_server.go
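The one-line change to weed/server/filer_server.go is not expanded in this view; given the init() registration in dameng_store.go below, it is presumably the conventional blank import that links the new store into the filer binary, and the 33-line addition to weed/command/scaffold/filer.toml presumably documents the configuration keys read by Initialize (dsn, upsertQuery, enableUpsert, username, password, hostname, port, database, connection_max_idle, connection_max_open, connection_max_lifetime_seconds, interpolateParams). A minimal standalone sketch of the registration mechanism, assuming that convention (this is an illustration, not the actual diff content):

package main

import (
	"fmt"

	_ "github.com/seaweedfs/seaweedfs/weed/filer/dameng" // side effect: init() appends DamengStore to filer.Stores

	"github.com/seaweedfs/seaweedfs/weed/filer"
)

func main() {
	// once the blank import above is present, "dameng" shows up in the store list
	for _, store := range filer.Stores {
		fmt.Println(store.GetName())
	}
}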
weed/filer/dameng/dameng_sql_gen.go
@@ -0,0 +1,66 @@
package dameng

import (
	"fmt"

	"github.com/seaweedfs/seaweedfs/weed/filer/abstract_sql"
)

type SqlGenDameng struct {
	CreateTableSqlTemplate string
	DropTableSqlTemplate   string
	UpsertQueryTemplate    string
}

var (
	_ = abstract_sql.SqlGenerator(&SqlGenDameng{})
)

func (gen *SqlGenDameng) GetSqlInsert(tableName string) string {
	sql := ""
	if gen.UpsertQueryTemplate != "" {
		sql = fmt.Sprintf(`MERGE INTO %s AS target
USING (SELECT ? AS dirhash, ? AS name, ? AS directory, ? AS meta FROM dual) AS source
ON (target.dirhash = source.dirhash AND target.name = source.name)
WHEN MATCHED THEN
	UPDATE SET target.meta = source.meta
WHEN NOT MATCHED THEN
	INSERT (dirhash, name, directory, meta)
	VALUES (source.dirhash, source.name, source.directory, source.meta);`, tableName)
	} else {
		sql = fmt.Sprintf("INSERT INTO %s (dirhash,name,directory,meta) VALUES(?,?,?,?)", tableName)
	}
	return sql
}

func (gen *SqlGenDameng) GetSqlUpdate(tableName string) string {
	return fmt.Sprintf("UPDATE %s SET meta = ? WHERE dirhash = ? AND name = ? AND directory = ?", tableName)
}

func (gen *SqlGenDameng) GetSqlFind(tableName string) string {
	return fmt.Sprintf("SELECT meta FROM %s WHERE dirhash = ? AND name = ? AND directory = ?", tableName)
}

func (gen *SqlGenDameng) GetSqlDelete(tableName string) string {
	return fmt.Sprintf("DELETE FROM %s WHERE dirhash = ? AND name = ? AND directory = ?", tableName)
}

func (gen *SqlGenDameng) GetSqlDeleteFolderChildren(tableName string) string {
	return fmt.Sprintf("DELETE FROM %s WHERE dirhash = ? AND directory = ?", tableName)
}

func (gen *SqlGenDameng) GetSqlListExclusive(tableName string) string {
	return fmt.Sprintf("SELECT name, meta FROM %s WHERE dirhash = ? AND rowid > (SELECT IFNULL(MIN(rowid), 0) FROM %s WHERE directory = ? AND name = ?) AND directory = ? ORDER BY rowid ASC LIMIT ?", tableName, tableName)
}

func (gen *SqlGenDameng) GetSqlListInclusive(tableName string) string {
	return fmt.Sprintf("SELECT name, meta FROM %s WHERE dirhash = ? AND rowid >= (SELECT IFNULL(MIN(rowid), 0) FROM %s WHERE directory = ? AND name = ?) AND directory = ? ORDER BY rowid ASC LIMIT ?", tableName, tableName)
}

func (gen *SqlGenDameng) GetSqlCreateTable(tableName string) string {
	return fmt.Sprintf(gen.CreateTableSqlTemplate, tableName)
}

func (gen *SqlGenDameng) GetSqlDropTable(tableName string) string {
	return fmt.Sprintf(gen.DropTableSqlTemplate, tableName)
}
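As a quick illustration of the generator above (not part of the diff): GetSqlInsert falls back to a plain parameterized INSERT when UpsertQueryTemplate is empty, and emits the hard-coded MERGE upsert whenever the template is non-empty; the template's own text is effectively an on/off switch. A small sketch, assuming the package is importable as weed/filer/dameng and using the abstract_sql default table name filemeta:

package main

import (
	"fmt"

	"github.com/seaweedfs/seaweedfs/weed/filer/dameng"
)

func main() {
	// Empty template: plain parameterized INSERT.
	plain := &dameng.SqlGenDameng{}
	fmt.Println(plain.GetSqlInsert("filemeta"))
	// INSERT INTO filemeta (dirhash,name,directory,meta) VALUES(?,?,?,?)

	// Non-empty template: the MERGE statement is generated instead
	// (the template content itself is not interpolated).
	upsert := &dameng.SqlGenDameng{UpsertQueryTemplate: "enabled"}
	fmt.Println(upsert.GetSqlInsert("filemeta"))
}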
weed/filer/dameng/dameng_store.go
@@ -0,0 +1,129 @@
package dameng

import (
	"context"
	"database/sql"
	"fmt"
	"strings"
	"time"

	_ "gitee.com/chunanyong/dm"

	"github.com/seaweedfs/seaweedfs/weed/filer"
	"github.com/seaweedfs/seaweedfs/weed/filer/abstract_sql"
	"github.com/seaweedfs/seaweedfs/weed/glog"
	"github.com/seaweedfs/seaweedfs/weed/util"
)

const (
	CONNECTION_URL_PATTERN = "dm://%s:%s@%s:%d?schema=%s"
)

func init() {
	filer.Stores = append(filer.Stores, &DamengStore{})
}

type DamengStore struct {
	abstract_sql.AbstractSqlStore
}

func (store *DamengStore) GetName() string {
	return "dameng"
}

func (store *DamengStore) Initialize(configuration util.Configuration, prefix string) (err error) {
	return store.initialize(
		configuration.GetString(prefix+"dsn"),
		configuration.GetString(prefix+"upsertQuery"),
		configuration.GetBool(prefix+"enableUpsert"),
		configuration.GetString(prefix+"username"),
		configuration.GetString(prefix+"password"),
		configuration.GetString(prefix+"hostname"),
		configuration.GetInt(prefix+"port"),
		configuration.GetString(prefix+"database"),
		configuration.GetInt(prefix+"connection_max_idle"),
		configuration.GetInt(prefix+"connection_max_open"),
		configuration.GetInt(prefix+"connection_max_lifetime_seconds"),
		configuration.GetBool(prefix+"interpolateParams"),
	)
}

func (store *DamengStore) initialize(dsn string, upsertQuery string, enableUpsert bool, user, password, hostname string, port int, database string, maxIdle, maxOpen,
	maxLifetimeSeconds int, interpolateParams bool) (err error) {

	store.SupportBucketTable = false
	if !enableUpsert {
		upsertQuery = ""
	}
	store.SqlGenerator = &SqlGenDameng{
		CreateTableSqlTemplate: "",
		DropTableSqlTemplate:   "DROP TABLE `%s`",
		UpsertQueryTemplate:    upsertQuery,
	}

	dsn = fmt.Sprintf(CONNECTION_URL_PATTERN, user, password, hostname, port, database)

	var dbErr error
	store.DB, dbErr = sql.Open("dm", dsn)
	if dbErr != nil {
		if store.DB != nil {
			store.DB.Close()
		}
		store.DB = nil
		// redact the password when reporting the DSN
		return fmt.Errorf("can not connect to %s error:%v", strings.ReplaceAll(dsn, password, "<ADAPTED>"), dbErr)
	}

	store.DB.SetMaxIdleConns(maxIdle)
	store.DB.SetMaxOpenConns(maxOpen)
	store.DB.SetConnMaxLifetime(time.Duration(maxLifetimeSeconds) * time.Second)

	if err = store.DB.Ping(); err != nil {
		return fmt.Errorf("connect to %s error:%v", strings.ReplaceAll(dsn, password, "<ADAPTED>"), err)
	}

	return nil
}

func (store *DamengStore) ListDirectoryEntries(ctx context.Context, dirPath util.FullPath, startFileName string, includeStartFile bool, limit int64, eachEntryFunc filer.ListEachEntryFunc) (lastFileName string, err error) {
	return store.ListDirectoryPrefixedEntries(ctx, dirPath, startFileName, includeStartFile, limit, "", eachEntryFunc)
}

func (store *DamengStore) ListDirectoryPrefixedEntries(ctx context.Context, dirPath util.FullPath, startFileName string, includeStartFile bool, limit int64, prefix string, eachEntryFunc filer.ListEachEntryFunc) (lastFileName string, err error) {
	db, bucket, shortPath, err := store.GetTxOrDB(ctx, dirPath, true)
	if err != nil {
		return lastFileName, fmt.Errorf("findDB %s : %v", dirPath, err)
	}

	sqlText := store.GetSqlListExclusive(bucket)
	if includeStartFile {
		sqlText = store.GetSqlListInclusive(bucket)
	}

	rows, err := db.QueryContext(ctx, sqlText, util.HashStringToLong(string(shortPath)), string(shortPath), startFileName, string(shortPath), limit)
	if err != nil {
		return lastFileName, fmt.Errorf("list %s : %v", dirPath, err)
	}
	defer rows.Close()

	for rows.Next() {
		var name string
		var data []byte
		if err = rows.Scan(&name, &data); err != nil {
			glog.V(0).Infof("scan %s : %v", dirPath, err)
			return lastFileName, fmt.Errorf("scan %s: %v", dirPath, err)
		}
		lastFileName = name

		entry := &filer.Entry{
			FullPath: util.NewFullPath(string(dirPath), name),
		}
		if err = entry.DecodeAttributesAndChunks(util.MaybeDecompressData(data)); err != nil {
			glog.V(0).Infof("scan decode %s : %v", entry.FullPath, err)
			return lastFileName, fmt.Errorf("scan decode %s : %v", entry.FullPath, err)
		}

		if !eachEntryFunc(entry) {
			break
		}
	}

	return lastFileName, nil
}
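For readers wiring this up by hand: the QueryContext call in ListDirectoryPrefixedEntries binds five arguments in a fixed order — the directory's dirhash, the directory and start file name for the rowid subquery, the directory again for the outer filter, and finally the limit. A standalone sketch of the same call follows; the connection string, credentials, and the filemeta table name are assumptions (the credentials mirror the test below), not values defined by this PR:

package main

import (
	"context"
	"database/sql"
	"log"

	_ "gitee.com/chunanyong/dm" // the Dameng driver used by this PR

	"github.com/seaweedfs/seaweedfs/weed/filer/dameng"
	"github.com/seaweedfs/seaweedfs/weed/util"
)

func main() {
	// DSN follows CONNECTION_URL_PATTERN; these values are placeholders.
	db, err := sql.Open("dm", "dm://SYSDBA:SYSDBA001@localhost:5236?schema=seaweedfs")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	gen := &dameng.SqlGenDameng{}
	dir := "/some/dir"

	rows, err := db.QueryContext(context.Background(),
		gen.GetSqlListExclusive("filemeta"),
		util.HashStringToLong(dir), // dirhash of the listed directory
		dir, "start.txt",           // rowid subquery: directory, start file name
		dir,                        // outer filter: directory
		int64(100),                 // LIMIT
	)
	if err != nil {
		log.Fatal(err)
	}
	defer rows.Close()

	for rows.Next() {
		var name string
		var meta []byte
		if err := rows.Scan(&name, &meta); err != nil {
			log.Fatal(err)
		}
		log.Printf("entry %s: %d bytes of metadata", name, len(meta))
	}
}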
weed/filer/dameng/dameng_store_test.go
@@ -0,0 +1,23 @@
package dameng

import (
	"testing"

	"github.com/seaweedfs/seaweedfs/weed/filer/store_test"
)

func TestStore(t *testing.T) {
	// a local Dameng instance is required; see "make test_ydb" under the
	// docker folder for an example of setting up a store test env
	store := &DamengStore{}
	store.initialize("localhost", `MERGE INTO %s AS target
USING (SELECT ? AS dirhash, ? AS name, ? AS directory, ? AS meta FROM dual) AS source
ON (target.dirhash = source.dirhash AND target.name = source.name)
WHEN MATCHED THEN
	UPDATE SET target.meta = source.meta
WHEN NOT MATCHED THEN
	INSERT (dirhash, name, directory, meta)
	VALUES (source.dirhash, source.name, source.directory, source.meta);`,
		true, "SYSDBA", "SYSDBA001", "localhost", 5236,
		"seaweedfs", 100, 2, 10, false)
	store_test.TestFilerStore(t, store)
}