
works with single level of list

Branch: mq-subscribe
Author: chrislu, 8 months ago
Commit: d88c1872ac

Changed files:
  1. weed/mq/schema/to_parquet_schema.go   (7 changed lines)
  2. weed/mq/schema/to_parquet_value.go    (27 changed lines)
  3. weed/mq/schema/to_schema_value.go     (66 changed lines)
  4. weed/mq/schema/write_parquet_test.go  (31 changed lines)

weed/mq/schema/to_parquet_schema.go  (7 changed lines)

@@ -21,6 +21,7 @@ func toParquetFieldType(fieldType *schema_pb.Type) (dataType parquet.Node, err error) {
     switch fieldType.Kind.(type) {
     case *schema_pb.Type_ScalarType:
         dataType, err = toParquetFieldTypeScalar(fieldType.GetScalarType())
+        dataType = parquet.Optional(dataType)
     case *schema_pb.Type_RecordType:
         dataType, err = toParquetFieldTypeRecord(fieldType.GetRecordType())
     case *schema_pb.Type_ListType:
@@ -29,6 +30,7 @@ func toParquetFieldType(fieldType *schema_pb.Type) (dataType parquet.Node, err error) {
         return nil, fmt.Errorf("unknown field type: %T", fieldType.Kind)
     }
     return dataType, err
 }
@@ -37,7 +39,7 @@ func toParquetFieldTypeList(listType *schema_pb.ListType) (parquet.Node, error) {
     if err != nil {
         return nil, err
     }
-    return parquet.List(elementType), nil
+    return parquet.Repeated(elementType), nil
 }

 func toParquetFieldTypeScalar(scalarType schema_pb.ScalarType) (parquet.Node, error) {
@@ -67,9 +69,6 @@ func toParquetFieldTypeRecord(recordType *schema_pb.RecordType) (parquet.Node, error) {
         if err != nil {
             return nil, err
         }
-        if !field.IsRequired {
-            parquetFieldType = parquet.Optional(parquetFieldType)
-        }
         recordNode[field.Name] = parquetFieldType
     }
     return recordNode, nil
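
Note on the hunks above: every scalar leaf is now wrapped in parquet.Optional inside toParquetFieldType, the per-field IsRequired check is dropped, and a list field is emitted as a Repeated node rather than a parquet.List group, which is what makes a single level of list work. As a rough illustration (not part of this commit; the import path and the hand-written Group layout are assumptions), a schema shaped like the test's record would be declared with the parquet-go API roughly like this:

    package main

    import (
        "os"

        "github.com/parquet-go/parquet-go"
    )

    func main() {
        // Scalar leaves wrapped in Optional, as toParquetFieldType now does for every scalar;
        // a single level of list becomes a Repeated leaf instead of a parquet.List group.
        root := parquet.Group{
            "ID":        parquet.Optional(parquet.Int(64)),
            "CreatedAt": parquet.Optional(parquet.Int(64)),
            "Person": parquet.Group{
                "zName":  parquet.Optional(parquet.String()),
                "emails": parquet.Repeated(parquet.String()),
            },
            "Company": parquet.Optional(parquet.String()),
        }
        schema := parquet.NewSchema("example", root)
        parquet.PrintSchema(os.Stdout, "example", schema) // same helper the updated test calls
    }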

weed/mq/schema/to_parquet_value.go  (27 changed lines)

@@ -6,28 +6,33 @@ import (
     "github.com/seaweedfs/seaweedfs/weed/pb/schema_pb"
 )

-func rowBuilderVisit(rowBuilder *parquet.RowBuilder, fieldType *schema_pb.Type, fieldValue *schema_pb.Value, columnIndex int) error {
+func rowBuilderVisit(rowBuilder *parquet.RowBuilder, fieldType *schema_pb.Type, fieldValue *schema_pb.Value, columnIndex int) (endIndex int, err error) {
     switch fieldType.Kind.(type) {
     case *schema_pb.Type_ScalarType:
-        parquetValue, err := toParquetValue(fieldValue)
+        endIndex = columnIndex+1
+        var parquetValue parquet.Value
+        parquetValue, err = toParquetValue(fieldValue)
         if err != nil {
-            return err
+            return
         }
         rowBuilder.Add(columnIndex, parquetValue)
+        // fmt.Printf("rowBuilder.Add %d %v\n", columnIndex, parquetValue)
     case *schema_pb.Type_ListType:
+        rowBuilder.Next(columnIndex)
+        // fmt.Printf("rowBuilder.Next %d\n", columnIndex)
         elementType := fieldType.GetListType().ElementType
         for _, value := range fieldValue.GetListValue().Values {
-            if err := rowBuilderVisit(rowBuilder, elementType, value, columnIndex); err != nil {
-                return err
+            if endIndex, err = rowBuilderVisit(rowBuilder, elementType, value, columnIndex); err != nil {
+                return
             }
         }
-        rowBuilder.Next(columnIndex)
     }
-    return nil
+    return
 }

 func AddRecordValue(rowBuilder *parquet.RowBuilder, recordType *schema_pb.RecordType, recordValue *schema_pb.RecordValue) error {
-    visitor := func(fieldType *schema_pb.Type, fieldValue *schema_pb.Value, index int) error {
+    visitor := func(fieldType *schema_pb.Type, fieldValue *schema_pb.Value, index int) (endIndex int, err error) {
         return rowBuilderVisit(rowBuilder, fieldType, fieldValue, index)
     }
     fieldType := &schema_pb.Type{Kind: &schema_pb.Type_RecordType{RecordType: recordType}}
@@ -38,7 +43,7 @@ func AddRecordValue(rowBuilder *parquet.RowBuilder, recordType *schema_pb.RecordType, recordValue *schema_pb.RecordValue) error {
 // typeValueVisitor is a function that is called for each value in a schema_pb.Value
 // Find the column index.
 // intended to be used in RowBuilder.Add(columnIndex, value)
-type typeValueVisitor func(fieldType *schema_pb.Type, fieldValue *schema_pb.Value, index int) error
+type typeValueVisitor func(fieldType *schema_pb.Type, fieldValue *schema_pb.Value, index int) (endIndex int, err error)

 func visitValue(fieldType *schema_pb.Type, fieldValue *schema_pb.Value, visitor typeValueVisitor) (err error) {
     _, err = doVisitValue(fieldType, fieldValue, 0, visitor)
@@ -50,9 +55,9 @@ func visitValue(fieldType *schema_pb.Type, fieldValue *schema_pb.Value, visitor typeValueVisitor) (err error) {
 func doVisitValue(fieldType *schema_pb.Type, fieldValue *schema_pb.Value, columnIndex int, visitor typeValueVisitor) (endIndex int, err error) {
     switch fieldType.Kind.(type) {
     case *schema_pb.Type_ScalarType:
-        return columnIndex+1, visitor(fieldType, fieldValue, columnIndex)
+        return visitor(fieldType, fieldValue, columnIndex)
     case *schema_pb.Type_ListType:
-        return columnIndex+1, visitor(fieldType, fieldValue, columnIndex)
+        return visitor(fieldType, fieldValue, columnIndex)
     case *schema_pb.Type_RecordType:
         for _, field := range fieldType.GetRecordType().Fields {
             fieldValue, found := fieldValue.GetRecordValue().Fields[field.Name]
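
Note: rowBuilderVisit now reports endIndex so doVisitValue no longer has to compute how many columns a field consumed, and for a list it calls rowBuilder.Next(columnIndex) once before adding the elements instead of after. A minimal sketch of that Next-then-Add pattern on a repeated column, assuming the parquet-go RowBuilder API and a made-up single-column schema:

    package main

    import (
        "fmt"

        "github.com/parquet-go/parquet-go"
    )

    func main() {
        schema := parquet.NewSchema("example", parquet.Group{
            "emails": parquet.Repeated(parquet.String()),
        })
        rowBuilder := parquet.NewRowBuilder(schema)
        rowBuilder.Reset()
        rowBuilder.Next(0) // start a new repeated record for column 0, as the ListType case now does
        for _, email := range []string{"john_0@a.com", "john_0@b.com", "john_0@c.com"} {
            rowBuilder.Add(0, parquet.ByteArrayValue([]byte(email))) // one Add per list element
        }
        fmt.Printf("row: %+v\n", rowBuilder.Row())
    }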

weed/mq/schema/to_schema_value.go  (66 changed lines)

@@ -8,78 +8,76 @@ import (
 func ToRecordValue(recordType *schema_pb.RecordType, row parquet.Row) (*schema_pb.RecordValue, error) {
     values := []parquet.Value(row)
-    recordValue, _, err := toRecordValue(recordType, values, 0)
+    recordValue, _, _, err := toRecordValue(recordType, values, 0, 0)
     if err != nil {
         return nil, err
     }
     return recordValue.GetRecordValue(), nil
 }

-func ToValue(t *schema_pb.Type, values []parquet.Value, columnIndex int) (value *schema_pb.Value, endIndex int, err error) {
+func ToValue(t *schema_pb.Type, values []parquet.Value, valueIndex, columnIndex int) (value *schema_pb.Value, endValueIndex, endColumnIndex int, err error) {
     switch t.Kind.(type) {
     case *schema_pb.Type_ScalarType:
-        return toScalarValue(t.GetScalarType(), values, columnIndex)
+        value, err = toScalarValue(t.GetScalarType(), values, valueIndex, columnIndex)
+        return value, valueIndex + 1, columnIndex + 1, err
     case *schema_pb.Type_ListType:
-        return toListValue(t.GetListType(), values, columnIndex)
+        return toListValue(t.GetListType(), values, valueIndex, columnIndex)
     case *schema_pb.Type_RecordType:
-        return toRecordValue(t.GetRecordType(), values, columnIndex)
+        return toRecordValue(t.GetRecordType(), values, valueIndex, columnIndex)
     }
-    return nil, 0, fmt.Errorf("unsupported type: %v", t)
+    return nil, 0, 0, fmt.Errorf("unsupported type: %v", t)
 }

-func toRecordValue(recordType *schema_pb.RecordType, values []parquet.Value, columnIndex int) (*schema_pb.Value, int, error) {
+func toRecordValue(recordType *schema_pb.RecordType, values []parquet.Value, valueIndex, columnIndex int) (*schema_pb.Value, int, int, error) {
     recordValue := schema_pb.RecordValue{Fields: make(map[string]*schema_pb.Value)}
     for _, field := range recordType.Fields {
-        fieldValue, endIndex, err := ToValue(field.Type, values, columnIndex)
+        fieldValue, endValueIndex, endColumnIndex, err := ToValue(field.Type, values, valueIndex, columnIndex)
         if err != nil {
-            return nil, 0, err
+            return nil, 0, 0, err
         }
-        if endIndex == columnIndex {
-            continue
-        }
-        columnIndex = endIndex
+        columnIndex = endColumnIndex
+        valueIndex = endValueIndex
         recordValue.Fields[field.Name] = fieldValue
     }
-    return &schema_pb.Value{Kind: &schema_pb.Value_RecordValue{RecordValue: &recordValue}}, columnIndex, nil
+    return &schema_pb.Value{Kind: &schema_pb.Value_RecordValue{RecordValue: &recordValue}}, valueIndex, columnIndex, nil
 }

-func toListValue(listType *schema_pb.ListType, values []parquet.Value, index int) (listValue *schema_pb.Value, endIndex int, err error) {
+func toListValue(listType *schema_pb.ListType, values []parquet.Value, valueIndex, columnIndex int) (listValue *schema_pb.Value, endValueIndex, endColumnIndex int, err error) {
     listValues := make([]*schema_pb.Value, 0)
     var value *schema_pb.Value
-    for i := index; i < len(values); {
-        value, endIndex, err = ToValue(listType.ElementType, values, i)
-        if err != nil {
-            return nil, 0, err
-        }
-        if endIndex == i {
+    for ;valueIndex < len(values); {
+        if values[valueIndex].Column() != columnIndex {
             break
         }
+        value, valueIndex, endColumnIndex, err = ToValue(listType.ElementType, values, valueIndex, columnIndex)
+        if err != nil {
+            return nil, 0, 0, err
+        }
         listValues = append(listValues, value)
-        i = endIndex
     }
-    return &schema_pb.Value{Kind: &schema_pb.Value_ListValue{ListValue: &schema_pb.ListValue{Values: listValues}}}, endIndex, nil
+    return &schema_pb.Value{Kind: &schema_pb.Value_ListValue{ListValue: &schema_pb.ListValue{Values: listValues}}}, valueIndex, endColumnIndex, nil
 }

-func toScalarValue(scalarType schema_pb.ScalarType, values []parquet.Value, columnIndex int) (*schema_pb.Value, int, error) {
-    value := values[columnIndex]
+func toScalarValue(scalarType schema_pb.ScalarType, values []parquet.Value, valueIndex, columnIndex int) (*schema_pb.Value, error) {
+    value := values[valueIndex]
     if value.Column() != columnIndex {
-        return nil, columnIndex, nil
+        return nil, nil
     }
     switch scalarType {
     case schema_pb.ScalarType_BOOLEAN:
-        return &schema_pb.Value{Kind: &schema_pb.Value_BoolValue{BoolValue: value.Boolean()}}, columnIndex + 1, nil
+        return &schema_pb.Value{Kind: &schema_pb.Value_BoolValue{BoolValue: value.Boolean()}}, nil
     case schema_pb.ScalarType_INTEGER:
-        return &schema_pb.Value{Kind: &schema_pb.Value_Int32Value{Int32Value: value.Int32()}}, columnIndex + 1, nil
+        return &schema_pb.Value{Kind: &schema_pb.Value_Int32Value{Int32Value: value.Int32()}}, nil
     case schema_pb.ScalarType_LONG:
-        return &schema_pb.Value{Kind: &schema_pb.Value_Int64Value{Int64Value: value.Int64()}}, columnIndex + 1, nil
+        return &schema_pb.Value{Kind: &schema_pb.Value_Int64Value{Int64Value: value.Int64()}}, nil
     case schema_pb.ScalarType_FLOAT:
-        return &schema_pb.Value{Kind: &schema_pb.Value_FloatValue{FloatValue: value.Float()}}, columnIndex + 1, nil
+        return &schema_pb.Value{Kind: &schema_pb.Value_FloatValue{FloatValue: value.Float()}}, nil
     case schema_pb.ScalarType_DOUBLE:
-        return &schema_pb.Value{Kind: &schema_pb.Value_DoubleValue{DoubleValue: value.Double()}}, columnIndex + 1, nil
+        return &schema_pb.Value{Kind: &schema_pb.Value_DoubleValue{DoubleValue: value.Double()}}, nil
     case schema_pb.ScalarType_BYTES:
-        return &schema_pb.Value{Kind: &schema_pb.Value_BytesValue{BytesValue: value.ByteArray()}}, columnIndex + 1, nil
+        return &schema_pb.Value{Kind: &schema_pb.Value_BytesValue{BytesValue: value.ByteArray()}}, nil
     case schema_pb.ScalarType_STRING:
-        return &schema_pb.Value{Kind: &schema_pb.Value_StringValue{StringValue: string(value.ByteArray())}}, columnIndex + 1, nil
+        return &schema_pb.Value{Kind: &schema_pb.Value_StringValue{StringValue: string(value.ByteArray())}}, nil
     }
-    return nil, columnIndex, fmt.Errorf("unsupported scalar type: %v", scalarType)
+    return nil, fmt.Errorf("unsupported scalar type: %v", scalarType)
 }
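
Note: decoding now tracks two cursors instead of one. valueIndex walks the flattened parquet.Row, which can hold several values for the same column once a field is repeated, while columnIndex identifies the leaf column a value must belong to; toListValue keeps consuming values until the next value's Column() differs. A hypothetical, simplified version of that scan (collectListColumn and the hand-built row below are illustrations, not code from this commit, and assume the parquet-go Value.Level helper):

    package main

    import (
        "fmt"

        "github.com/parquet-go/parquet-go"
    )

    // collectListColumn gathers every consecutive value of the given leaf column starting
    // at valueIndex, returning the elements and the index of the first value of another column.
    func collectListColumn(values []parquet.Value, valueIndex, columnIndex int) (list []parquet.Value, endValueIndex int) {
        for valueIndex < len(values) {
            if values[valueIndex].Column() != columnIndex {
                break // the list for this column is finished
            }
            list = append(list, values[valueIndex])
            valueIndex++
        }
        return list, valueIndex
    }

    func main() {
        // A hand-built flattened row: one value in column 0, then three repeated values in column 1.
        row := []parquet.Value{
            parquet.Int64Value(1).Level(0, 1, 0),
            parquet.ByteArrayValue([]byte("john_0@a.com")).Level(0, 1, 1),
            parquet.ByteArrayValue([]byte("john_0@b.com")).Level(1, 1, 1),
            parquet.ByteArrayValue([]byte("john_0@c.com")).Level(1, 1, 1),
        }
        emails, next := collectListColumn(row, 1, 1)
        fmt.Printf("emails=%d nextValueIndex=%d\n", len(emails), next) // emails=3 nextValueIndex=4
    }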

weed/mq/schema/write_parquet_test.go  (31 changed lines)

@@ -27,7 +27,8 @@ func TestWriteParquet(t *testing.T) {
         t.Fatalf("ToParquetSchema failed: %v", err)
     }
     fmt.Printf("ParquetSchema: %v\n", parquetSchema)
+    parquet.PrintSchema(os.Stdout, "example", parquetSchema)
+    fmt.Printf("Go Type: %+v\n", parquetSchema.GoType())

     filename := "example.parquet"
@@ -50,21 +51,29 @@ func testWritingParquetFile(t *testing.T, filename string, parquetSchema *parquet.Schema, …
     defer file.Close()
     writer := parquet.NewWriter(file, parquetSchema, parquet.Compression(&zstd.Codec{Level: zstd.SpeedDefault}))
     rowBuilder := parquet.NewRowBuilder(parquetSchema)
-    for i := 0; i < 128; i++ {
+    for i := 0; i < 128*1024; i++ {
         rowBuilder.Reset()
         // generate random data
-        AddRecordValue(rowBuilder, recordType, NewRecordValueBuilder().
-            AddLongValue("ID", int64(1+i)).
-            AddLongValue("CreatedAt", 2*int64(i)).
+        recordValue := NewRecordValueBuilder().
+            AddLongValue("ID", 1+int64(i)).
+            AddLongValue("CreatedAt", 2+2*int64(i)).
             AddRecordValue("Person", NewRecordValueBuilder().
                 AddStringValue("zName", fmt.Sprintf("john_%d", i)).
                 AddStringListValue("emails",
-                    fmt.Sprintf("john_%d@y.com", i),
-                    fmt.Sprintf("john_%d@g.com", i),
-                    fmt.Sprintf("john_%d@t.com", i))).
-            AddStringValue("Company", fmt.Sprintf("company_%d", i)).Build())
+                    fmt.Sprintf("john_%d@a.com", i),
+                    fmt.Sprintf("john_%d@b.com", i),
+                    fmt.Sprintf("john_%d@c.com", i),
+                    fmt.Sprintf("john_%d@d.com", i),
+                    fmt.Sprintf("john_%d@e.com", i))).
+            AddStringValue("Company", fmt.Sprintf("company_%d", i)).Build()
+        AddRecordValue(rowBuilder, recordType, recordValue)
+        // fmt.Printf("RecordValue: %v\n", recordValue)

         row := rowBuilder.Row()
+        // fmt.Printf("Row: %+v\n", row)
         if err != nil {
             t.Fatalf("rowBuilder.Build failed: %v", err)
         }
@@ -98,11 +107,11 @@ func testReadingParquetFile(t *testing.T, filename string, parquetSchema *parquet.Schema, …
     for i := 0; i < rowCount; i++ {
         row := rows[i]
         // convert parquet row to schema_pb.RecordValue
-        recordValue, err := ToRecordValue(recordType, row)
+        _, err := ToRecordValue(recordType, row)
         if err != nil {
             t.Fatalf("ToRecordValue failed: %v", err)
         }
-        fmt.Printf("RecordValue: %v\n", recordValue)
+        // fmt.Printf("RecordValue: %v\n", recordValue)
     }
     total += rowCount
 }
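
Note: the test now writes 128*1024 rows, gives each row a five-element emails list, and silences the per-row prints. A hypothetical extra assertion for testReadingParquetFile (not in this commit) that would check the single-level list round-trips, assuming the recordType used by the test declares Person.emails as written above:

    recordValue, err := ToRecordValue(recordType, row)
    if err != nil {
        t.Fatalf("ToRecordValue failed: %v", err)
    }
    // drill into the decoded record using the schema_pb accessors shown in the diff above
    emails := recordValue.Fields["Person"].GetRecordValue().Fields["emails"].GetListValue().Values
    if len(emails) != 5 {
        t.Fatalf("expected 5 emails, got %d", len(emails))
    }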
