Browse Source

Rename PlacementConfig to PlacementRequest for clarity

PlacementRequest better reflects that this is a request for placement
rather than a configuration object. This improves API semantics.
pull/7597/head
Chris Lu 3 days ago
parent
commit
44e1d9a0ed
  1. 14
      weed/storage/erasure_coding/placement/placement.go
  2. 22
      weed/storage/erasure_coding/placement/placement_test.go
  3. 2
      weed/worker/tasks/erasure_coding/detection.go

14
weed/storage/erasure_coding/placement/placement.go

@ -39,8 +39,8 @@ type NodeCandidate struct {
Disks []*DiskCandidate // All disks on this node
}
// PlacementConfig configures EC shard placement behavior
type PlacementConfig struct {
// PlacementRequest describes a request for EC shard placement
type PlacementRequest struct {
// ShardsNeeded is the total number of shards to place
ShardsNeeded int
@ -65,8 +65,8 @@ type PlacementConfig struct {
}
// DefaultConfig returns the default placement configuration
func DefaultConfig() PlacementConfig {
return PlacementConfig{
func DefaultConfig() PlacementRequest {
return PlacementRequest{
ShardsNeeded: 14,
MaxShardsPerServer: 0,
MaxShardsPerRack: 0,
@ -98,7 +98,7 @@ type PlacementResult struct {
// 1. First pass: Select one disk from each rack (maximize rack diversity)
// 2. Second pass: Select one disk from each unused server in used racks (maximize server diversity)
// 3. Third pass: Select additional disks from servers already used (maximize disk diversity)
func SelectDestinations(disks []*DiskCandidate, config PlacementConfig) (*PlacementResult, error) {
func SelectDestinations(disks []*DiskCandidate, config PlacementRequest) (*PlacementResult, error) {
if len(disks) == 0 {
return nil, fmt.Errorf("no disk candidates provided")
}
@ -248,7 +248,7 @@ func SelectDestinations(disks []*DiskCandidate, config PlacementConfig) (*Placem
}
// filterSuitableDisks filters disks that are suitable for EC placement
func filterSuitableDisks(disks []*DiskCandidate, config PlacementConfig) []*DiskCandidate {
func filterSuitableDisks(disks []*DiskCandidate, config PlacementRequest) []*DiskCandidate {
var suitable []*DiskCandidate
for _, disk := range disks {
if disk.FreeSlots <= 0 {
@ -323,7 +323,7 @@ func getSortedRackKeys(rackToDisks map[string][]*DiskCandidate) []string {
// selectBestDiskFromRack selects the best disk from a rack for EC placement
// It prefers servers that haven't been used yet
func selectBestDiskFromRack(disks []*DiskCandidate, usedServers, usedDisks map[string]bool, config PlacementConfig) *DiskCandidate {
func selectBestDiskFromRack(disks []*DiskCandidate, usedServers, usedDisks map[string]bool, config PlacementRequest) *DiskCandidate {
var bestDisk *DiskCandidate
bestScore := -1.0
bestIsFromUnusedServer := false

22
weed/storage/erasure_coding/placement/placement_test.go

@ -31,7 +31,7 @@ func TestSelectDestinations_SingleRack(t *testing.T) {
makeDisk("server3", 1, "dc1", "rack1", 10),
}
config := PlacementConfig{
config := PlacementRequest{
ShardsNeeded: 6,
PreferDifferentServers: true,
PreferDifferentRacks: true,
@ -77,7 +77,7 @@ func TestSelectDestinations_MultipleRacks(t *testing.T) {
makeDisk("server4", 1, "dc1", "rack2", 10),
}
config := PlacementConfig{
config := PlacementRequest{
ShardsNeeded: 8,
PreferDifferentServers: true,
PreferDifferentRacks: true,
@ -125,7 +125,7 @@ func TestSelectDestinations_PrefersDifferentServers(t *testing.T) {
makeDisk("server4", 3, "dc1", "rack1", 10),
}
config := PlacementConfig{
config := PlacementRequest{
ShardsNeeded: 4,
PreferDifferentServers: true,
PreferDifferentRacks: true,
@ -168,7 +168,7 @@ func TestSelectDestinations_SpilloverToMultipleDisksPerServer(t *testing.T) {
makeDisk("server2", 3, "dc1", "rack1", 10),
}
config := PlacementConfig{
config := PlacementRequest{
ShardsNeeded: 6,
PreferDifferentServers: true,
PreferDifferentRacks: true,
@ -210,7 +210,7 @@ func TestSelectDestinations_MaxShardsPerServer(t *testing.T) {
makeDisk("server2", 3, "dc1", "rack1", 10),
}
config := PlacementConfig{
config := PlacementRequest{
ShardsNeeded: 6,
MaxShardsPerServer: 2,
PreferDifferentServers: true,
@ -245,7 +245,7 @@ func TestSelectDestinations_14ShardsAcross7Servers(t *testing.T) {
disks = append(disks, makeDisk(serverID, 1, "dc1", "rack1", 10))
}
config := PlacementConfig{
config := PlacementRequest{
ShardsNeeded: 14,
PreferDifferentServers: true,
PreferDifferentRacks: true,
@ -288,7 +288,7 @@ func TestSelectDestinations_FewerServersThanShards(t *testing.T) {
makeDisk("server3", 2, "dc1", "rack1", 10),
}
config := PlacementConfig{
config := PlacementRequest{
ShardsNeeded: 6,
PreferDifferentServers: true,
PreferDifferentRacks: true,
@ -323,7 +323,7 @@ func TestSelectDestinations_NoSuitableDisks(t *testing.T) {
{NodeID: "server2", DiskID: 0, DataCenter: "dc1", Rack: "rack1", FreeSlots: 0},
}
config := PlacementConfig{
config := PlacementRequest{
ShardsNeeded: 4,
PreferDifferentServers: true,
PreferDifferentRacks: true,
@ -351,7 +351,7 @@ func TestSelectDestinations_FiltersByLoad(t *testing.T) {
{NodeID: "server3", DiskID: 0, DataCenter: "dc1", Rack: "rack1", FreeSlots: 10, LoadCount: 1},
}
config := PlacementConfig{
config := PlacementRequest{
ShardsNeeded: 2,
MaxTaskLoad: 5,
PreferDifferentServers: true,
@ -466,7 +466,7 @@ func TestSelectDestinations_MultiDC(t *testing.T) {
makeDisk("dc2-r2-s2", 1, "dc2", "rack2", 10),
}
config := PlacementConfig{
config := PlacementRequest{
ShardsNeeded: 8,
PreferDifferentServers: true,
PreferDifferentRacks: true,
@ -499,7 +499,7 @@ func TestSelectDestinations_SameRackDifferentDC(t *testing.T) {
makeDisk("dc2-s1", 0, "dc2", "rack1", 10),
}
config := PlacementConfig{
config := PlacementRequest{
ShardsNeeded: 2,
PreferDifferentServers: true,
PreferDifferentRacks: true,

2
weed/worker/tasks/erasure_coding/detection.go

@ -443,7 +443,7 @@ func selectBestECDestinations(disks []*topology.DiskInfo, sourceRack, sourceDC s
}
// Configure placement for EC shards
config := placement.PlacementConfig{
config := placement.PlacementRequest{
ShardsNeeded: shardsNeeded,
MaxShardsPerServer: 0, // No hard limit, but prefer spreading
MaxShardsPerRack: 0, // No hard limit, but prefer spreading

Loading…
Cancel
Save