chrislu
3 years ago
11 changed files with 723 additions and 57 deletions
-
60.github/workflows/binaries_release4.yml
-
43.github/workflows/codeql.yml
-
3.github/workflows/container_release2.yml
-
58.github/workflows/container_release4.yml
-
3docker/Dockerfile.go_build
-
43docker/Dockerfile.go_build_large
-
2docker/Dockerfile.rocksdb_large
-
9docker/Makefile
-
3weed/s3api/http/header.go
-
124weed/s3api/s3api_object_copy_handlers.go
-
426weed/s3api/s3api_object_copy_handlers_test.go
@ -0,0 +1,60 @@ |
|||||
|
# This is a basic workflow to help you get started with Actions |
||||
|
|
||||
|
name: "go: build versioned binaries for linux with all tags" |
||||
|
|
||||
|
on: |
||||
|
push: |
||||
|
tags: |
||||
|
- '*' |
||||
|
|
||||
|
# Allows you to run this workflow manually from the Actions tab |
||||
|
workflow_dispatch: |
||||
|
|
||||
|
# A workflow run is made up of one or more jobs that can run sequentially or in parallel |
||||
|
permissions: |
||||
|
contents: read |
||||
|
|
||||
|
jobs: |
||||
|
|
||||
|
build-release-binaries_linux: |
||||
|
permissions: |
||||
|
contents: write # for wangyoucao577/go-release-action to upload release assets |
||||
|
runs-on: ubuntu-latest |
||||
|
strategy: |
||||
|
matrix: |
||||
|
goos: [linux] |
||||
|
goarch: [amd64] |
||||
|
|
||||
|
# Steps represent a sequence of tasks that will be executed as part of the job |
||||
|
steps: |
||||
|
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it |
||||
|
- uses: actions/checkout@629c2de402a417ea7690ca6ce3f33229e27606a5 # v2 |
||||
|
- name: Go Release Binaries Normal Volume Size |
||||
|
uses: wangyoucao577/go-release-action@16624612d4e2b73de613857a362d294700207fff # v1.22 |
||||
|
with: |
||||
|
github_token: ${{ secrets.GITHUB_TOKEN }} |
||||
|
goos: ${{ matrix.goos }} |
||||
|
goarch: ${{ matrix.goarch }} |
||||
|
overwrite: true |
||||
|
build_flags: -tags elastic,ydb,gocdk,hdfs,rocksdb |
||||
|
pre_command: export CGO_ENABLED=0 && export GODEBUG=http2client=0 |
||||
|
# build_flags: -tags 5BytesOffset # optional, default is |
||||
|
ldflags: -extldflags -static -X github.com/chrislusf/seaweedfs/weed/util.COMMIT=${{github.sha}} |
||||
|
# Where to run `go build .` |
||||
|
project_path: weed |
||||
|
binary_name: weed |
||||
|
asset_name: "${{ matrix.goos }}_${{ matrix.goarch }}_full" |
||||
|
- name: Go Release Large Disk Binaries |
||||
|
uses: wangyoucao577/go-release-action@16624612d4e2b73de613857a362d294700207fff # v1.22 |
||||
|
with: |
||||
|
github_token: ${{ secrets.GITHUB_TOKEN }} |
||||
|
goos: ${{ matrix.goos }} |
||||
|
goarch: ${{ matrix.goarch }} |
||||
|
overwrite: true |
||||
|
pre_command: export CGO_ENABLED=0 && export GODEBUG=http2client=0 |
||||
|
build_flags: -tags 5BytesOffset,elastic,ydb,gocdk,hdfs,rocksdb |
||||
|
ldflags: -extldflags -static -X github.com/chrislusf/seaweedfs/weed/util.COMMIT=${{github.sha}} |
||||
|
# Where to run `go build .` |
||||
|
project_path: weed |
||||
|
binary_name: weed |
||||
|
asset_name: "${{ matrix.goos }}_${{ matrix.goarch }}_full_large_disk" |
@ -0,0 +1,43 @@ |
|||||
|
name: "Code Scanning - Action" |
||||
|
|
||||
|
on: |
||||
|
pull_request: |
||||
|
|
||||
|
jobs: |
||||
|
CodeQL-Build: |
||||
|
# CodeQL runs on ubuntu-latest, windows-latest, and macos-latest |
||||
|
runs-on: ubuntu-latest |
||||
|
|
||||
|
permissions: |
||||
|
# required for all workflows |
||||
|
security-events: write |
||||
|
|
||||
|
steps: |
||||
|
- name: Checkout repository |
||||
|
uses: actions/checkout@v3 |
||||
|
|
||||
|
# Initializes the CodeQL tools for scanning. |
||||
|
- name: Initialize CodeQL |
||||
|
uses: github/codeql-action/init@v2 |
||||
|
# Override language selection by uncommenting this and choosing your languages |
||||
|
with: |
||||
|
languages: go |
||||
|
|
||||
|
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java). |
||||
|
# If this step fails, then you should remove it and run the build manually (see below). |
||||
|
- name: Autobuild |
||||
|
uses: github/codeql-action/autobuild@v2 |
||||
|
|
||||
|
# ℹ️ Command-line programs to run using the OS shell. |
||||
|
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun |
||||
|
|
||||
|
# ✏️ If the Autobuild fails above, remove it and uncomment the following |
||||
|
# three lines and modify them (or add more) to build your code if your |
||||
|
# project uses a compiled language |
||||
|
|
||||
|
#- run: | |
||||
|
# make bootstrap |
||||
|
# make release |
||||
|
|
||||
|
- name: Perform CodeQL Analysis |
||||
|
uses: github/codeql-action/analyze@v2 |
@ -0,0 +1,58 @@ |
|||||
|
name: "docker: build release containers for all tags" |
||||
|
|
||||
|
on: |
||||
|
push: |
||||
|
tags: |
||||
|
- '*' |
||||
|
workflow_dispatch: [] |
||||
|
|
||||
|
permissions: |
||||
|
contents: read |
||||
|
|
||||
|
jobs: |
||||
|
build-default-release-container: |
||||
|
runs-on: [ubuntu-latest] |
||||
|
|
||||
|
steps: |
||||
|
- |
||||
|
name: Checkout |
||||
|
uses: actions/checkout@629c2de402a417ea7690ca6ce3f33229e27606a5 # v2 |
||||
|
- |
||||
|
name: Docker meta |
||||
|
id: docker_meta |
||||
|
uses: docker/metadata-action@69f6fc9d46f2f8bf0d5491e4aabe0bb8c6a4678a # v3 |
||||
|
with: |
||||
|
images: | |
||||
|
chrislusf/seaweedfs |
||||
|
tags: | |
||||
|
type=ref,event=tag,suffix=_full |
||||
|
flavor: | |
||||
|
latest=false |
||||
|
labels: | |
||||
|
org.opencontainers.image.title=seaweedfs |
||||
|
org.opencontainers.image.description=SeaweedFS is a distributed storage system for blobs, objects, files, and data lake, to store and serve billions of files fast! |
||||
|
org.opencontainers.image.vendor=Chris Lu |
||||
|
- |
||||
|
name: Set up QEMU |
||||
|
uses: docker/setup-qemu-action@8b122486cedac8393e77aa9734c3528886e4a1a8 # v1 |
||||
|
- |
||||
|
name: Set up Docker Buildx |
||||
|
uses: docker/setup-buildx-action@dc7b9719a96d48369863986a06765841d7ea23f6 # v1 |
||||
|
- |
||||
|
name: Login to Docker Hub |
||||
|
if: github.event_name != 'pull_request' |
||||
|
uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # v1 |
||||
|
with: |
||||
|
username: ${{ secrets.DOCKER_USERNAME }} |
||||
|
password: ${{ secrets.DOCKER_PASSWORD }} |
||||
|
- |
||||
|
name: Build |
||||
|
uses: docker/build-push-action@e551b19e49efd4e98792db7592c17c09b89db8d8 # v2 |
||||
|
with: |
||||
|
context: ./docker |
||||
|
push: ${{ github.event_name != 'pull_request' }} |
||||
|
file: ./docker/Dockerfile.go_build |
||||
|
build-args: TAGS=elastic,ydb,gocdk,hdfs |
||||
|
platforms: linux/amd64 |
||||
|
tags: ${{ steps.docker_meta.outputs.tags }} |
||||
|
labels: ${{ steps.docker_meta.outputs.labels }} |
@ -1,43 +0,0 @@ |
|||||
FROM golang:1.18-alpine as builder |
|
||||
RUN apk add git g++ fuse |
|
||||
RUN mkdir -p /go/src/github.com/chrislusf/ |
|
||||
RUN git clone https://github.com/chrislusf/seaweedfs /go/src/github.com/chrislusf/seaweedfs |
|
||||
ARG BRANCH=${BRANCH:-master} |
|
||||
RUN cd /go/src/github.com/chrislusf/seaweedfs && git checkout $BRANCH |
|
||||
RUN cd /go/src/github.com/chrislusf/seaweedfs/weed \ |
|
||||
&& export LDFLAGS="-X github.com/chrislusf/seaweedfs/weed/util.COMMIT=$(git rev-parse --short HEAD)" \ |
|
||||
&& CGO_ENABLED=0 go install -tags 5BytesOffset -ldflags "-extldflags -static ${LDFLAGS}" |
|
||||
|
|
||||
FROM alpine AS final |
|
||||
LABEL author="Chris Lu" |
|
||||
COPY --from=builder /go/bin/weed /usr/bin/ |
|
||||
RUN mkdir -p /etc/seaweedfs |
|
||||
COPY --from=builder /go/src/github.com/chrislusf/seaweedfs/docker/filer.toml /etc/seaweedfs/filer.toml |
|
||||
COPY --from=builder /go/src/github.com/chrislusf/seaweedfs/docker/entrypoint.sh /entrypoint.sh |
|
||||
RUN apk add fuse # for weed mount |
|
||||
|
|
||||
# volume server grpc port |
|
||||
EXPOSE 18080 |
|
||||
# volume server http port |
|
||||
EXPOSE 8080 |
|
||||
# filer server grpc port |
|
||||
EXPOSE 18888 |
|
||||
# filer server http port |
|
||||
EXPOSE 8888 |
|
||||
# master server shared grpc port |
|
||||
EXPOSE 19333 |
|
||||
# master server shared http port |
|
||||
EXPOSE 9333 |
|
||||
# s3 server http port |
|
||||
EXPOSE 8333 |
|
||||
# webdav server http port |
|
||||
EXPOSE 7333 |
|
||||
|
|
||||
RUN mkdir -p /data/filerldb2 |
|
||||
|
|
||||
VOLUME /data |
|
||||
WORKDIR /data |
|
||||
|
|
||||
RUN chmod +x /entrypoint.sh |
|
||||
|
|
||||
ENTRYPOINT ["/entrypoint.sh"] |
|
@ -0,0 +1,426 @@ |
|||||
|
package s3api |
||||
|
|
||||
|
import ( |
||||
|
"fmt" |
||||
|
headers "github.com/chrislusf/seaweedfs/weed/s3api/http" |
||||
|
"net/http" |
||||
|
"reflect" |
||||
|
"sort" |
||||
|
"strings" |
||||
|
"testing" |
||||
|
) |
||||
|
|
||||
|
// H is a shorthand for a set of header key/value pairs used by the test tables.
type H map[string]string

// String renders the map as a sorted, newline-separated "key : value" listing,
// producing deterministic, readable output in test failure messages.
func (h H) String() string {
	pairs := make([]string, 0, len(h))
	for key, value := range h {
		pairs = append(pairs, fmt.Sprintf("%s : %s", key, value))
	}
	sort.Strings(pairs)
	return "\n" + strings.Join(pairs, "\n") + "\n"
}
||||
|
|
||||
|
// processMetadataTestCases drives TestProcessMetadata. Each case provides the
// incoming copy-request headers, the headers stored on the existing entry, the
// tag set the getTags callback reports, and the expected request headers after
// processMetadata applies the metadata/tagging directives.
var processMetadataTestCases = []struct {
	caseId   int // identifier printed in failure output
	request  H   // headers arriving on the copy request
	existing H   // headers already stored on the existing entry
	getTags  H   // tags reported for the existing object by the getTags callback
	want     H   // expected request headers after processing
}{
	{
		// Case 201: no directive headers — metadata and tags are taken from
		// the existing entry (COPY semantics).
		201,
		H{
			"User-Agent":         "firefox",
			"X-Amz-Meta-My-Meta": "request",
			"X-Amz-Tagging":      "A=B&a=b&type=request",
		},
		H{
			"X-Amz-Meta-My-Meta": "existing",
			"X-Amz-Tagging-A":    "B",
			"X-Amz-Tagging-Type": "existing",
		},
		H{
			"A":    "B",
			"a":    "b",
			"type": "existing",
		},
		H{
			"User-Agent":         "firefox",
			"X-Amz-Meta-My-Meta": "existing",
			"X-Amz-Tagging":      "A=B&a=b&type=existing",
		},
	},
	{
		// Case 202: metadata REPLACE directive — metadata comes from the
		// request, tags still come from the existing entry.
		202,
		H{
			"User-Agent":                 "firefox",
			"X-Amz-Meta-My-Meta":         "request",
			"X-Amz-Tagging":              "A=B&a=b&type=request",
			headers.AmzUserMetaDirective: DirectiveReplace,
		},
		H{
			"X-Amz-Meta-My-Meta": "existing",
			"X-Amz-Tagging-A":    "B",
			"X-Amz-Tagging-Type": "existing",
		},
		H{
			"A":    "B",
			"a":    "b",
			"type": "existing",
		},
		H{
			"User-Agent":                 "firefox",
			"X-Amz-Meta-My-Meta":         "request",
			"X-Amz-Tagging":              "A=B&a=b&type=existing",
			headers.AmzUserMetaDirective: DirectiveReplace,
		},
	},

	{
		// Case 203: tagging REPLACE directive — tags come from the request,
		// metadata still comes from the existing entry.
		203,
		H{
			"User-Agent":                      "firefox",
			"X-Amz-Meta-My-Meta":              "request",
			"X-Amz-Tagging":                   "A=B&a=b&type=request",
			headers.AmzObjectTaggingDirective: DirectiveReplace,
		},
		H{
			"X-Amz-Meta-My-Meta": "existing",
			"X-Amz-Tagging-A":    "B",
			"X-Amz-Tagging-Type": "existing",
		},
		H{
			"A":    "B",
			"a":    "b",
			"type": "existing",
		},
		H{
			"User-Agent":                      "firefox",
			"X-Amz-Meta-My-Meta":              "existing",
			"X-Amz-Tagging":                   "A=B&a=b&type=request",
			headers.AmzObjectTaggingDirective: DirectiveReplace,
		},
	},

	{
		// Case 204: both REPLACE directives — request values win for both
		// metadata and tags.
		204,
		H{
			"User-Agent":                      "firefox",
			"X-Amz-Meta-My-Meta":              "request",
			"X-Amz-Tagging":                   "A=B&a=b&type=request",
			headers.AmzUserMetaDirective:      DirectiveReplace,
			headers.AmzObjectTaggingDirective: DirectiveReplace,
		},
		H{
			"X-Amz-Meta-My-Meta": "existing",
			"X-Amz-Tagging-A":    "B",
			"X-Amz-Tagging-a":    "b",
			"X-Amz-Tagging-Type": "existing",
		},
		H{
			"A":    "B",
			"a":    "b",
			"type": "existing",
		},
		H{
			"User-Agent":                      "firefox",
			"X-Amz-Meta-My-Meta":              "request",
			"X-Amz-Tagging":                   "A=B&a=b&type=request",
			headers.AmzUserMetaDirective:      DirectiveReplace,
			headers.AmzObjectTaggingDirective: DirectiveReplace,
		},
	},

	{
		// Case 205: both REPLACE directives with no existing entry state and
		// no tags — the request passes through unchanged.
		205,
		H{
			"User-Agent":                      "firefox",
			"X-Amz-Meta-My-Meta":              "request",
			"X-Amz-Tagging":                   "A=B&a=b&type=request",
			headers.AmzUserMetaDirective:      DirectiveReplace,
			headers.AmzObjectTaggingDirective: DirectiveReplace,
		},
		H{},
		H{},
		H{
			"User-Agent":                      "firefox",
			"X-Amz-Meta-My-Meta":              "request",
			"X-Amz-Tagging":                   "A=B&a=b&type=request",
			headers.AmzUserMetaDirective:      DirectiveReplace,
			headers.AmzObjectTaggingDirective: DirectiveReplace,
		},
	},

	{
		// Case 206: both REPLACE directives but the request carries no
		// metadata or tagging values — nothing is copied from the entry.
		206,
		H{
			"User-Agent":                      "firefox",
			headers.AmzUserMetaDirective:      DirectiveReplace,
			headers.AmzObjectTaggingDirective: DirectiveReplace,
		},
		H{
			"X-Amz-Meta-My-Meta": "existing",
			"X-Amz-Tagging-A":    "B",
			"X-Amz-Tagging-a":    "b",
			"X-Amz-Tagging-Type": "existing",
		},
		H{
			"A":    "B",
			"a":    "b",
			"type": "existing",
		},
		H{
			"User-Agent":                      "firefox",
			headers.AmzUserMetaDirective:      DirectiveReplace,
			headers.AmzObjectTaggingDirective: DirectiveReplace,
		},
	},

	{
		// Case 207: both REPLACE directives, request has metadata but no
		// tagging header — request metadata kept, no tags materialized.
		207,
		H{
			"User-Agent":                      "firefox",
			"X-Amz-Meta-My-Meta":              "request",
			headers.AmzUserMetaDirective:      DirectiveReplace,
			headers.AmzObjectTaggingDirective: DirectiveReplace,
		},
		H{
			"X-Amz-Meta-My-Meta": "existing",
			"X-Amz-Tagging-A":    "B",
			"X-Amz-Tagging-a":    "b",
			"X-Amz-Tagging-Type": "existing",
		},
		H{
			"A":    "B",
			"a":    "b",
			"type": "existing",
		},
		H{
			"User-Agent":                      "firefox",
			"X-Amz-Meta-My-Meta":              "request",
			headers.AmzUserMetaDirective:      DirectiveReplace,
			headers.AmzObjectTaggingDirective: DirectiveReplace,
		},
	},
}
||||
|
// processMetadataBytesTestCases drives TestProcessMetadataBytes. Each case
// provides the incoming copy-request headers, the extended attributes already
// stored on the existing entry, and the expected extended attributes after
// processMetadataBytes applies the metadata/tagging directives.
var processMetadataBytesTestCases = []struct {
	caseId   int // identifier printed in failure output
	request  H   // headers arriving on the copy request
	existing H   // extended attributes stored on the existing entry
	want     H   // expected extended attributes after processing
}{
	{
		// Case 101: no directive headers — the existing metadata and tag
		// attributes are kept unchanged (COPY semantics).
		101,
		H{
			"User-Agent":         "firefox",
			"X-Amz-Meta-My-Meta": "request",
			"X-Amz-Tagging":      "A=B&a=b&type=request",
		},
		H{
			"X-Amz-Meta-My-Meta": "existing",
			"X-Amz-Tagging-A":    "B",
			"X-Amz-Tagging-a":    "b",
			"X-Amz-Tagging-type": "existing",
		},
		H{
			"X-Amz-Meta-My-Meta": "existing",
			"X-Amz-Tagging-A":    "B",
			"X-Amz-Tagging-a":    "b",
			"X-Amz-Tagging-type": "existing",
		},
	},

	{
		// Case 102: metadata REPLACE directive — metadata comes from the
		// request, tag attributes are kept from the existing entry.
		102,
		H{
			"User-Agent":                 "firefox",
			"X-Amz-Meta-My-Meta":         "request",
			"X-Amz-Tagging":              "A=B&a=b&type=request",
			headers.AmzUserMetaDirective: DirectiveReplace,
		},
		H{
			"X-Amz-Meta-My-Meta": "existing",
			"X-Amz-Tagging-A":    "B",
			"X-Amz-Tagging-a":    "b",
			"X-Amz-Tagging-type": "existing",
		},
		H{
			"X-Amz-Meta-My-Meta": "request",
			"X-Amz-Tagging-A":    "B",
			"X-Amz-Tagging-a":    "b",
			"X-Amz-Tagging-type": "existing",
		},
	},

	{
		// Case 103: tagging REPLACE directive — tag attributes come from the
		// request, metadata is kept from the existing entry.
		103,
		H{
			"User-Agent":                      "firefox",
			"X-Amz-Meta-My-Meta":              "request",
			"X-Amz-Tagging":                   "A=B&a=b&type=request",
			headers.AmzObjectTaggingDirective: DirectiveReplace,
		},
		H{
			"X-Amz-Meta-My-Meta": "existing",
			"X-Amz-Tagging-A":    "B",
			"X-Amz-Tagging-a":    "b",
			"X-Amz-Tagging-type": "existing",
		},
		H{
			"X-Amz-Meta-My-Meta": "existing",
			"X-Amz-Tagging-A":    "B",
			"X-Amz-Tagging-a":    "b",
			"X-Amz-Tagging-type": "request",
		},
	},

	{
		// Case 104: both REPLACE directives — request values win for both
		// metadata and tag attributes.
		104,
		H{
			"User-Agent":                      "firefox",
			"X-Amz-Meta-My-Meta":              "request",
			"X-Amz-Tagging":                   "A=B&a=b&type=request",
			headers.AmzUserMetaDirective:      DirectiveReplace,
			headers.AmzObjectTaggingDirective: DirectiveReplace,
		},
		H{
			"X-Amz-Meta-My-Meta": "existing",
			"X-Amz-Tagging-A":    "B",
			"X-Amz-Tagging-a":    "b",
			"X-Amz-Tagging-type": "existing",
		},
		H{
			"X-Amz-Meta-My-Meta": "request",
			"X-Amz-Tagging-A":    "B",
			"X-Amz-Tagging-a":    "b",
			"X-Amz-Tagging-type": "request",
		},
	},

	{
		// Case 105: both REPLACE directives but the request carries no
		// metadata or tagging values — the result is empty.
		105,
		H{
			"User-Agent":                      "firefox",
			headers.AmzUserMetaDirective:      DirectiveReplace,
			headers.AmzObjectTaggingDirective: DirectiveReplace,
		},
		H{
			"X-Amz-Meta-My-Meta": "existing",
			"X-Amz-Tagging-A":    "B",
			"X-Amz-Tagging-a":    "b",
			"X-Amz-Tagging-type": "existing",
		},
		H{},
	},

	{
		// Case 107: both REPLACE directives with no existing attributes —
		// the request metadata and tags are materialized.
		107,
		H{
			"User-Agent":                      "firefox",
			"X-Amz-Meta-My-Meta":              "request",
			"X-Amz-Tagging":                   "A=B&a=b&type=request",
			headers.AmzUserMetaDirective:      DirectiveReplace,
			headers.AmzObjectTaggingDirective: DirectiveReplace,
		},
		H{},
		H{
			"X-Amz-Meta-My-Meta": "request",
			"X-Amz-Tagging-A":    "B",
			"X-Amz-Tagging-a":    "b",
			"X-Amz-Tagging-type": "request",
		},
	},
}
||||
|
|
||||
|
func TestProcessMetadata(t *testing.T) { |
||||
|
for _, tc := range processMetadataTestCases { |
||||
|
reqHeader := transferHToHeader(tc.request) |
||||
|
existing := transferHToHeader(tc.existing) |
||||
|
replaceMeta, replaceTagging := replaceDirective(reqHeader) |
||||
|
|
||||
|
err := processMetadata(reqHeader, existing, replaceMeta, replaceTagging, func(_ string, _ string) (tags map[string]string, err error) { |
||||
|
return tc.getTags, nil |
||||
|
}, "", "") |
||||
|
if err != nil { |
||||
|
t.Error(err) |
||||
|
} |
||||
|
|
||||
|
result := transferHeaderToH(reqHeader) |
||||
|
fmtTagging(result, tc.want) |
||||
|
|
||||
|
if !reflect.DeepEqual(result, tc.want) { |
||||
|
t.Error(fmt.Errorf("\n### CaseID: %d ###"+ |
||||
|
"\nRequest:%v"+ |
||||
|
"\nExisting:%v"+ |
||||
|
"\nGetTags:%v"+ |
||||
|
"\nWant:%v"+ |
||||
|
"\nActual:%v", |
||||
|
tc.caseId, tc.request, tc.existing, tc.getTags, tc.want, result)) |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
func TestProcessMetadataBytes(t *testing.T) { |
||||
|
for _, tc := range processMetadataBytesTestCases { |
||||
|
reqHeader := transferHToHeader(tc.request) |
||||
|
existing := transferHToBytesArr(tc.existing) |
||||
|
replaceMeta, replaceTagging := replaceDirective(reqHeader) |
||||
|
extends := processMetadataBytes(reqHeader, existing, replaceMeta, replaceTagging) |
||||
|
|
||||
|
result := transferBytesArrToH(extends) |
||||
|
fmtTagging(result, tc.want) |
||||
|
|
||||
|
if !reflect.DeepEqual(result, tc.want) { |
||||
|
t.Error(fmt.Errorf("\n### CaseID: %d ###"+ |
||||
|
"\nRequest:%v"+ |
||||
|
"\nExisting:%v"+ |
||||
|
"\nWant:%v"+ |
||||
|
"\nActual:%v", |
||||
|
tc.caseId, tc.request, tc.existing, tc.want, result)) |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
func fmtTagging(maps ...map[string]string) { |
||||
|
for _, m := range maps { |
||||
|
if tagging := m[headers.AmzObjectTagging]; len(tagging) > 0 { |
||||
|
split := strings.Split(tagging, "&") |
||||
|
sort.Strings(split) |
||||
|
m[headers.AmzObjectTagging] = strings.Join(split, "&") |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
// transferHToHeader converts a plain string map into an http.Header; keys are
// canonicalized by Header.Add in the usual net/http fashion.
func transferHToHeader(data map[string]string) http.Header {
	result := http.Header{}
	for name, value := range data {
		result.Add(name, value)
	}
	return result
}
||||
|
|
||||
|
// transferHToBytesArr converts a string map into the map[string][]byte shape
// used for entry extended attributes.
func transferHToBytesArr(data map[string]string) map[string][]byte {
	out := make(map[string][]byte, len(data))
	for key, value := range data {
		out[key] = []byte(value)
	}
	return out
}
||||
|
|
||||
|
func transferBytesArrToH(data map[string][]byte) H { |
||||
|
m := make(map[string]string, len(data)) |
||||
|
for k, v := range data { |
||||
|
m[k] = string(v) |
||||
|
} |
||||
|
return m |
||||
|
} |
||||
|
|
||||
|
func transferHeaderToH(data map[string][]string) H { |
||||
|
m := make(map[string]string, len(data)) |
||||
|
for k, v := range data { |
||||
|
m[k] = v[len(v)-1] |
||||
|
} |
||||
|
return m |
||||
|
} |
Write
Preview
Loading…
Cancel
Save
Reference in new issue