Konstantin Lebedev
2 years ago
32 changed files with 3090 additions and 1 deletion
37    other/master-clients/python/.github/workflows/python.yml
66    other/master-clients/python/.gitignore
25    other/master-clients/python/.gitlab-ci.yml
23    other/master-clients/python/.openapi-generator-ignore
29    other/master-clients/python/.openapi-generator/FILES
1     other/master-clients/python/.openapi-generator/VERSION
17    other/master-clients/python/.travis.yml
113   other/master-clients/python/README.md
164   other/master-clients/python/docs/DefaultApi.md
30    other/master-clients/python/docs/FileKey.md
29    other/master-clients/python/docs/Location.md
57    other/master-clients/python/git_push.sh
33    other/master-clients/python/openapi_client/__init__.py
6     other/master-clients/python/openapi_client/api/__init__.py
420   other/master-clients/python/openapi_client/api/default_api.py
750   other/master-clients/python/openapi_client/api_client.py
426   other/master-clients/python/openapi_client/configuration.py
160   other/master-clients/python/openapi_client/exceptions.py
18    other/master-clients/python/openapi_client/models/__init__.py
86    other/master-clients/python/openapi_client/models/file_key.py
80    other/master-clients/python/openapi_client/models/location.py
296   other/master-clients/python/openapi_client/rest.py
5     other/master-clients/python/requirements.txt
2     other/master-clients/python/setup.cfg
46    other/master-clients/python/setup.py
3     other/master-clients/python/test-requirements.txt
0     other/master-clients/python/test/__init__.py
47    other/master-clients/python/test/test_default_api.py
56    other/master-clients/python/test/test_file_key.py
55    other/master-clients/python/test/test_location.py
9     other/master-clients/python/tox.ini
2     other/swagger/Makefile
other/master-clients/python/.github/workflows/python.yml
@@ -0,0 +1,37 @@
# NOTE: This file is auto generated by OpenAPI Generator.
# URL: https://openapi-generator.tech
#
# ref: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python

name: openapi_client Python package

on: [push, pull_request]

jobs:
  build:

    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ["3.7", "3.8", "3.9", "3.10"]

    steps:
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install flake8 pytest
          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
      - name: Lint with flake8
        run: |
          # stop the build if there are Python syntax errors or undefined names
          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
      - name: Test with pytest
        run: |
          pytest
other/master-clients/python/.gitignore
@@ -0,0 +1,66 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*,cover
.hypothesis/
venv/
.venv/
.python-version
.pytest_cache

# Translations
*.mo
*.pot

# Django stuff:
*.log

# Sphinx documentation
docs/_build/

# PyBuilder
target/

#Ipython Notebook
.ipynb_checkpoints
other/master-clients/python/.gitlab-ci.yml
@@ -0,0 +1,25 @@
# NOTE: This file is auto generated by OpenAPI Generator.
# URL: https://openapi-generator.tech
#
# ref: https://docs.gitlab.com/ee/ci/README.html
# ref: https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Python.gitlab-ci.yml

stages:
  - test

.pytest:
  stage: test
  script:
    - pip install -r requirements.txt
    - pip install -r test-requirements.txt
    - pytest --cov=openapi_client

pytest-3.7:
  extends: .pytest
  image: python:3.7-alpine
pytest-3.8:
  extends: .pytest
  image: python:3.8-alpine
pytest-3.9:
  extends: .pytest
  image: python:3.9-alpine
other/master-clients/python/.openapi-generator-ignore
@@ -0,0 +1,23 @@
# OpenAPI Generator Ignore
# Generated by openapi-generator https://github.com/openapitools/openapi-generator

# Use this file to prevent files from being overwritten by the generator.
# The patterns follow closely to .gitignore or .dockerignore.

# As an example, the C# client generator defines ApiClient.cs.
# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line:
#ApiClient.cs

# You can match any string of characters against a directory, file or extension with a single asterisk (*):
#foo/*/qux
# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux

# You can recursively match patterns against a directory, file or extension with a double asterisk (**):
#foo/**/qux
# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux

# You can also negate patterns with an exclamation (!).
# For example, you can ignore all files in a docs folder with the file extension .md:
#docs/*.md
# Then explicitly reverse the ignore rule for a single file:
#!docs/README.md
other/master-clients/python/.openapi-generator/FILES
@@ -0,0 +1,29 @@
.github/workflows/python.yml
.gitignore
.gitlab-ci.yml
.openapi-generator-ignore
.travis.yml
README.md
docs/DefaultApi.md
docs/FileKey.md
docs/Location.md
git_push.sh
openapi_client/__init__.py
openapi_client/api/__init__.py
openapi_client/api/default_api.py
openapi_client/api_client.py
openapi_client/configuration.py
openapi_client/exceptions.py
openapi_client/models/__init__.py
openapi_client/models/file_key.py
openapi_client/models/location.py
openapi_client/rest.py
requirements.txt
setup.cfg
setup.py
test-requirements.txt
test/__init__.py
test/test_default_api.py
test/test_file_key.py
test/test_location.py
tox.ini
other/master-clients/python/.openapi-generator/VERSION
@@ -0,0 +1 @@
6.4.0
other/master-clients/python/.travis.yml
@@ -0,0 +1,17 @@
# ref: https://docs.travis-ci.com/user/languages/python
language: python
python:
  - "3.7"
  - "3.8"
  - "3.9"
  - "3.10"
  - "3.11"
  # uncomment the following if needed
  #- "3.11-dev"  # 3.11 development branch
  #- "nightly"   # nightly build
# command to install dependencies
install:
  - "pip install -r requirements.txt"
  - "pip install -r test-requirements.txt"
# command to run tests
script: pytest --cov=openapi_client
other/master-clients/python/README.md
@@ -0,0 +1,113 @@
# openapi-client
The Seaweedfs Master Server API allows you to store blobs

This Python package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:

- API version: 3.43.0
- Package version: 1.0.0
- Build package: org.openapitools.codegen.languages.PythonNextgenClientCodegen

## Requirements.

Python 3.7+

## Installation & Usage
### pip install

If the python package is hosted on a repository, you can install directly using:

```sh
pip install git+https://github.com/GIT_USER_ID/GIT_REPO_ID.git
```
(you may need to run `pip` with root permission: `sudo pip install git+https://github.com/GIT_USER_ID/GIT_REPO_ID.git`)

Then import the package:
```python
import openapi_client
```

### Setuptools

Install via [Setuptools](http://pypi.python.org/pypi/setuptools).

```sh
python setup.py install --user
```
(or `sudo python setup.py install` to install the package for all users)

Then import the package:
```python
import openapi_client
```

## Getting Started

Please follow the [installation procedure](#installation--usage) and then run the following:

```python
from __future__ import print_function

import time
import openapi_client
from openapi_client.rest import ApiException
from pprint import pprint

# Defining the host is optional and defaults to https://127.0.0.1:9333
# See configuration.py for a list of all supported configuration parameters.
configuration = openapi_client.Configuration(
    host = "https://127.0.0.1:9333"
)


# Enter a context with an instance of the API client
with openapi_client.ApiClient(configuration) as api_client:
    # Create an instance of the API class
    api_instance = openapi_client.DefaultApi(api_client)
    count = None # object | how many file ids to assign. Use <fid>_1, <fid>_2 for the assigned additional file ids. e.g. 3,01637037d6_1, 3,01637037d6_2 (optional)
    collection = None # object | required collection name (optional)
    data_center = None # object | preferred data center (optional)
    rack = None # object | preferred rack (optional)
    data_node = None # object | preferred volume server, e.g. 127.0.0.1:8080 (optional)
    disk = None # object | If you have disks labelled, this must be supplied to specify the disk type to allocate on. (optional)
    replication = None # object | replica placement strategy (optional)
    ttl = None # object | file expiration time limit, example: 3m for 3 minutes. units: m-minute, h-hour, d-day, w-week, M-month, y-year (optional)
    preallocate = None # object | If no matching volumes, pre-allocate this number of bytes on disk for new volumes. (optional)
    memory_map_max_size_mb = None # object | Only implemented for windows. Use memory mapped files with specified size for new volumes. (optional)
    writable_volume_count = None # object | If no matching volumes, create specified number of new volumes. (optional)

    try:
        # Assign a file key
        api_response = api_instance.dir_assign(count=count, collection=collection, data_center=data_center, rack=rack, data_node=data_node, disk=disk, replication=replication, ttl=ttl, preallocate=preallocate, memory_map_max_size_mb=memory_map_max_size_mb, writable_volume_count=writable_volume_count)
        print("The response of DefaultApi->dir_assign:\n")
        pprint(api_response)
    except ApiException as e:
        print("Exception when calling DefaultApi->dir_assign: %s\n" % e)

```
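
The `dir_assign` call above returns a [FileKey](docs/FileKey.md) with `count`, `fid` and `url` fields. As a rough follow-up sketch (not part of the generated example; it assumes a reachable master and that the volume id is the portion of `fid` before the comma), the assigned volume can then be looked up again with `dir_lookup`:

```python
import openapi_client

configuration = openapi_client.Configuration(host="https://127.0.0.1:9333")

with openapi_client.ApiClient(configuration) as api_client:
    api_instance = openapi_client.DefaultApi(api_client)
    # Assign a single file id (returns a FileKey with count, fid, url)
    file_key = api_instance.dir_assign(count=1)
    # e.g. fid "3,01637037d6" -> volume id "3"
    volume_id = file_key.fid.split(",")[0]
    # Ask the master where that volume currently lives
    locations = api_instance.dir_lookup(volume_id=volume_id)
    print(file_key.fid, file_key.url, locations)
```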

## Documentation for API Endpoints

All URIs are relative to *https://127.0.0.1:9333*

Class | Method | HTTP request | Description
------------ | ------------- | ------------- | -------------
*DefaultApi* | [**dir_assign**](docs/DefaultApi.md#dir_assign) | **GET** /dir/assign | Assign a file key
*DefaultApi* | [**dir_lookup**](docs/DefaultApi.md#dir_lookup) | **GET** /dir/lookup | Lookup volume


## Documentation For Models

 - [FileKey](docs/FileKey.md)
 - [Location](docs/Location.md)


## Documentation For Authorization

None of the endpoints require authorization.

## Author

other/master-clients/python/docs/DefaultApi.md
@@ -0,0 +1,164 @@
# openapi_client.DefaultApi

All URIs are relative to *https://127.0.0.1:9333*

Method | HTTP request | Description
------------- | ------------- | -------------
[**dir_assign**](DefaultApi.md#dir_assign) | **GET** /dir/assign | Assign a file key
[**dir_lookup**](DefaultApi.md#dir_lookup) | **GET** /dir/lookup | Lookup volume


# **dir_assign**
> FileKey dir_assign(count=count, collection=collection, data_center=data_center, rack=rack, data_node=data_node, disk=disk, replication=replication, ttl=ttl, preallocate=preallocate, memory_map_max_size_mb=memory_map_max_size_mb, writable_volume_count=writable_volume_count)

Assign a file key

This operation is very cheap: it just increments a counter in the master server's memory.

### Example

```python
from __future__ import print_function
import time
import os
import openapi_client
from openapi_client.rest import ApiException
from pprint import pprint
# Defining the host is optional and defaults to https://127.0.0.1:9333
# See configuration.py for a list of all supported configuration parameters.
configuration = openapi_client.Configuration(
    host = "https://127.0.0.1:9333"
)


# Enter a context with an instance of the API client
with openapi_client.ApiClient(configuration) as api_client:
    # Create an instance of the API class
    api_instance = openapi_client.DefaultApi(api_client)
    count = None # object | how many file ids to assign. Use <fid>_1, <fid>_2 for the assigned additional file ids. e.g. 3,01637037d6_1, 3,01637037d6_2 (optional)
    collection = None # object | required collection name (optional)
    data_center = None # object | preferred data center (optional)
    rack = None # object | preferred rack (optional)
    data_node = None # object | preferred volume server, e.g. 127.0.0.1:8080 (optional)
    disk = None # object | If you have disks labelled, this must be supplied to specify the disk type to allocate on. (optional)
    replication = None # object | replica placement strategy (optional)
    ttl = None # object | file expiration time limit, example: 3m for 3 minutes. units: m-minute, h-hour, d-day, w-week, M-month, y-year (optional)
    preallocate = None # object | If no matching volumes, pre-allocate this number of bytes on disk for new volumes. (optional)
    memory_map_max_size_mb = None # object | Only implemented for windows. Use memory mapped files with specified size for new volumes. (optional)
    writable_volume_count = None # object | If no matching volumes, create specified number of new volumes. (optional)

    try:
        # Assign a file key
        api_response = api_instance.dir_assign(count=count, collection=collection, data_center=data_center, rack=rack, data_node=data_node, disk=disk, replication=replication, ttl=ttl, preallocate=preallocate, memory_map_max_size_mb=memory_map_max_size_mb, writable_volume_count=writable_volume_count)
        print("The response of DefaultApi->dir_assign:\n")
        pprint(api_response)
    except Exception as e:
        print("Exception when calling DefaultApi->dir_assign: %s\n" % e)
```

### Parameters

Name | Type | Description  | Notes
------------- | ------------- | ------------- | -------------
 **count** | [**object**](.md)| how many file ids to assign. Use <fid>_1, <fid>_2 for the assigned additional file ids. e.g. 3,01637037d6_1, 3,01637037d6_2 | [optional]
 **collection** | [**object**](.md)| required collection name | [optional]
 **data_center** | [**object**](.md)| preferred data center | [optional]
 **rack** | [**object**](.md)| preferred rack | [optional]
 **data_node** | [**object**](.md)| preferred volume server, e.g. 127.0.0.1:8080 | [optional]
 **disk** | [**object**](.md)| If you have disks labelled, this must be supplied to specify the disk type to allocate on. | [optional]
 **replication** | [**object**](.md)| replica placement strategy | [optional]
 **ttl** | [**object**](.md)| file expiration time limit, example: 3m for 3 minutes. units: m-minute, h-hour, d-day, w-week, M-month, y-year | [optional]
 **preallocate** | [**object**](.md)| If no matching volumes, pre-allocate this number of bytes on disk for new volumes. | [optional]
 **memory_map_max_size_mb** | [**object**](.md)| Only implemented for windows. Use memory mapped files with specified size for new volumes. | [optional]
 **writable_volume_count** | [**object**](.md)| If no matching volumes, create specified number of new volumes. | [optional]

### Return type

[**FileKey**](FileKey.md)

### Authorization

No authorization required

### HTTP request headers

 - **Content-Type**: Not defined
 - **Accept**: application/json

### HTTP response details
| Status code | Description | Response headers |
|-------------|-------------|------------------|
**200** | successful operation |  -  |
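
As a minimal sketch of what a caller usually does with the returned [FileKey](FileKey.md) (illustrative only; the upload itself is performed against the volume server with any HTTP client and is not part of this API):

```python
import openapi_client

configuration = openapi_client.Configuration(host="https://127.0.0.1:9333")

with openapi_client.ApiClient(configuration) as api_client:
    api_instance = openapi_client.DefaultApi(api_client)
    # "pictures" is a hypothetical collection name used only for illustration
    file_key = api_instance.dir_assign(count=1, collection="pictures")
    print(file_key.count, file_key.fid, file_key.url)
    # By SeaweedFS convention the blob is then uploaded to the assigned
    # volume server, typically at "http://%s/%s" % (file_key.url, file_key.fid)
```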

[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)

# **dir_lookup**
> object dir_lookup(volume_id=volume_id, collection=collection, file_id=file_id, read=read)

Lookup volume

Use this to find out whether a volume has moved.

### Example

```python
from __future__ import print_function
import time
import os
import openapi_client
from openapi_client.rest import ApiException
from pprint import pprint
# Defining the host is optional and defaults to https://127.0.0.1:9333
# See configuration.py for a list of all supported configuration parameters.
configuration = openapi_client.Configuration(
    host = "https://127.0.0.1:9333"
)


# Enter a context with an instance of the API client
with openapi_client.ApiClient(configuration) as api_client:
    # Create an instance of the API class
    api_instance = openapi_client.DefaultApi(api_client)
    volume_id = None # object | volume id (optional)
    collection = None # object | optionally to speed up the lookup (optional)
    file_id = None # object | If provided, this returns the fileId location and a JWT to update or delete the file. (optional)
    read = None # object | works together with "fileId", if read=yes, JWT is generated for reads. (optional)

    try:
        # Lookup volume
        api_response = api_instance.dir_lookup(volume_id=volume_id, collection=collection, file_id=file_id, read=read)
        print("The response of DefaultApi->dir_lookup:\n")
        pprint(api_response)
    except Exception as e:
        print("Exception when calling DefaultApi->dir_lookup: %s\n" % e)
```

### Parameters

Name | Type | Description  | Notes
------------- | ------------- | ------------- | -------------
 **volume_id** | [**object**](.md)| volume id | [optional]
 **collection** | [**object**](.md)| optionally to speed up the lookup | [optional]
 **file_id** | [**object**](.md)| If provided, this returns the fileId location and a JWT to update or delete the file. | [optional]
 **read** | [**object**](.md)| works together with "fileId", if read=yes, JWT is generated for reads. | [optional]

### Return type

**object**

### Authorization

No authorization required

### HTTP request headers

 - **Content-Type**: Not defined
 - **Accept**: application/json

### HTTP response details
| Status code | Description | Response headers |
|-------------|-------------|------------------|
**200** | successful operation |  -  |
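
The lookup result is typed as a plain `object`. A rough sketch of reading it, assuming it deserializes to a dict with a `locations` list whose entries match the [Location](Location.md) model (`url`, `publicUrl`):

```python
import openapi_client

configuration = openapi_client.Configuration(host="https://127.0.0.1:9333")

with openapi_client.ApiClient(configuration) as api_client:
    api_instance = openapi_client.DefaultApi(api_client)
    result = api_instance.dir_lookup(volume_id="3")
    # Assumed response shape; adjust to what your master actually returns
    for location in result.get("locations", []):
        print(location.get("url"), location.get("publicUrl"))
```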

[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)

other/master-clients/python/docs/FileKey.md
@@ -0,0 +1,30 @@
# FileKey


## Properties
Name | Type | Description | Notes
------------ | ------------- | ------------- | ------------
**count** | **object** |  | [optional]
**fid** | **object** |  | [optional]
**url** | **object** |  | [optional]

## Example

```python
from openapi_client.models.file_key import FileKey

# TODO update the JSON string below
json = "{}"
# create an instance of FileKey from a JSON string
file_key_instance = FileKey.from_json(json)
# print the JSON string representation of the object
print(file_key_instance.to_json())

# convert the object into a dict
file_key_dict = file_key_instance.to_dict()
# create an instance of FileKey from a dict
file_key_from_dict = FileKey.from_dict(file_key_dict)
```
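
For the `TODO` placeholder above, a purely illustrative payload that matches the `count`/`fid`/`url` properties might look like this:

```python
from openapi_client.models.file_key import FileKey

# Hypothetical assignment response, used only as sample data
json = '{"count": 1, "fid": "3,01637037d6", "url": "127.0.0.1:8080"}'
file_key_instance = FileKey.from_json(json)
print(file_key_instance.fid)  # 3,01637037d6
print(file_key_instance.url)  # 127.0.0.1:8080
```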

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)

other/master-clients/python/docs/Location.md
@@ -0,0 +1,29 @@
# Location


## Properties
Name | Type | Description | Notes
------------ | ------------- | ------------- | ------------
**public_url** | **object** |  | [optional]
**url** | **object** |  | [optional]

## Example

```python
from openapi_client.models.location import Location

# TODO update the JSON string below
json = "{}"
# create an instance of Location from a JSON string
location_instance = Location.from_json(json)
# print the JSON string representation of the object
print(location_instance.to_json())

# convert the object into a dict
location_dict = location_instance.to_dict()
# create an instance of Location from a dict
location_from_dict = Location.from_dict(location_dict)
```
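
Similarly, an illustrative `Location` can be built directly from its fields (this sketch assumes the generated model accepts its snake_case field names as constructor keywords, which is the default for python-nextgen clients; the values are made up):

```python
from openapi_client.models.location import Location

# Hypothetical volume server location, used only as sample data
location_instance = Location(url="127.0.0.1:8080", public_url="localhost:8080")
print(location_instance.to_json())
print(location_instance.to_dict())
```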

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)

other/master-clients/python/git_push.sh
@@ -0,0 +1,57 @@
#!/bin/sh
# ref: https://help.github.com/articles/adding-an-existing-project-to-github-using-the-command-line/
#
# Usage example: /bin/sh ./git_push.sh wing328 openapi-petstore-perl "minor update" "gitlab.com"

git_user_id=$1
git_repo_id=$2
release_note=$3
git_host=$4

if [ "$git_host" = "" ]; then
    git_host="github.com"
    echo "[INFO] No command line input provided. Set \$git_host to $git_host"
fi

if [ "$git_user_id" = "" ]; then
    git_user_id="GIT_USER_ID"
    echo "[INFO] No command line input provided. Set \$git_user_id to $git_user_id"
fi

if [ "$git_repo_id" = "" ]; then
    git_repo_id="GIT_REPO_ID"
    echo "[INFO] No command line input provided. Set \$git_repo_id to $git_repo_id"
fi

if [ "$release_note" = "" ]; then
    release_note="Minor update"
    echo "[INFO] No command line input provided. Set \$release_note to $release_note"
fi

# Initialize the local directory as a Git repository
git init

# Adds the files in the local repository and stages them for commit.
git add .

# Commits the tracked changes and prepares them to be pushed to a remote repository.
git commit -m "$release_note"

# Sets the new remote
git_remote=$(git remote)
if [ "$git_remote" = "" ]; then # git remote not defined

    if [ "$GIT_TOKEN" = "" ]; then
        echo "[INFO] \$GIT_TOKEN (environment variable) is not set. Using the git credential in your environment."
        git remote add origin https://${git_host}/${git_user_id}/${git_repo_id}.git
    else
        git remote add origin https://${git_user_id}:"${GIT_TOKEN}"@${git_host}/${git_user_id}/${git_repo_id}.git
    fi

fi

git pull origin master

# Pushes (Forces) the changes in the local repository up to the remote repository
echo "Git pushing to https://${git_host}/${git_user_id}/${git_repo_id}.git"
git push origin master 2>&1 | grep -v 'To https'
other/master-clients/python/openapi_client/__init__.py
@@ -0,0 +1,33 @@
# coding: utf-8

# flake8: noqa

"""
    Seaweedfs Master Server API

    The Seaweedfs Master Server API allows you to store blobs  # noqa: E501

    The version of the OpenAPI document: 3.43.0
    Generated by: https://openapi-generator.tech
"""


from __future__ import absolute_import

__version__ = "1.0.0"

# import apis into sdk package
from openapi_client.api.default_api import DefaultApi

# import ApiClient
from openapi_client.api_client import ApiClient
from openapi_client.configuration import Configuration
from openapi_client.exceptions import OpenApiException
from openapi_client.exceptions import ApiTypeError
from openapi_client.exceptions import ApiValueError
from openapi_client.exceptions import ApiKeyError
from openapi_client.exceptions import ApiAttributeError
from openapi_client.exceptions import ApiException
# import models into sdk package
from openapi_client.models.file_key import FileKey
from openapi_client.models.location import Location
other/master-clients/python/openapi_client/api/__init__.py
@@ -0,0 +1,6 @@
from __future__ import absolute_import

# flake8: noqa

# import apis into api package
from openapi_client.api.default_api import DefaultApi
@ -0,0 +1,420 @@ |
|||||
|
# coding: utf-8 |
||||
|
|
||||
|
""" |
||||
|
Seaweedfs Master Server API |
||||
|
|
||||
|
The Seaweedfs Master Server API allows you to store blobs # noqa: E501 |
||||
|
|
||||
|
The version of the OpenAPI document: 3.43.0 |
||||
|
Generated by: https://openapi-generator.tech |
||||
|
""" |
||||
|
|
||||
|
|
||||
|
from __future__ import absolute_import |
||||
|
|
||||
|
import re # noqa: F401 |
||||
|
|
||||
|
from pydantic import validate_arguments, ValidationError |
||||
|
from typing_extensions import Annotated |
||||
|
|
||||
|
from pydantic import Field |
||||
|
|
||||
|
from typing import Any, Optional |
||||
|
|
||||
|
from openapi_client.models.file_key import FileKey |
||||
|
|
||||
|
from openapi_client.api_client import ApiClient |
||||
|
from openapi_client.exceptions import ( # noqa: F401 |
||||
|
ApiTypeError, |
||||
|
ApiValueError |
||||
|
) |
||||
|
|
||||
|
|
||||
|
class DefaultApi(object): |
||||
|
"""NOTE: This class is auto generated by OpenAPI Generator |
||||
|
Ref: https://openapi-generator.tech |
||||
|
|
||||
|
Do not edit the class manually. |
||||
|
""" |
||||
|
|
||||
|
def __init__(self, api_client=None): |
||||
|
if api_client is None: |
||||
|
api_client = ApiClient.get_default() |
||||
|
self.api_client = api_client |
||||
|
|
||||
|
@validate_arguments |
||||
|
def dir_assign(self, count : Annotated[Optional[Any], Field(description="how many file ids to assign. Use <fid>_1, <fid>_2 for the assigned additional file ids. e.g. 3,01637037d6_1, 3,01637037d6_2")] = None, collection : Annotated[Optional[Any], Field(description="required collection name")] = None, data_center : Annotated[Optional[Any], Field(description="preferred data center")] = None, rack : Annotated[Optional[Any], Field(description="preferred rack")] = None, data_node : Annotated[Optional[Any], Field(description="preferred volume server, e.g. 127.0.0.1:8080")] = None, disk : Annotated[Optional[Any], Field(description="If you have disks labelled, this must be supplied to specify the disk type to allocate on.")] = None, replication : Annotated[Optional[Any], Field(description="replica placement strategy")] = None, ttl : Annotated[Optional[Any], Field(description="file expiration time limit, example: 3m for 3 minutes. units: m-minute, h-hour, d-day, w-week, M-month, y-year")] = None, preallocate : Annotated[Optional[Any], Field(description="If no matching volumes, pre-allocate this number of bytes on disk for new volumes.")] = None, memory_map_max_size_mb : Annotated[Optional[Any], Field(description="Only implemented for windows. Use memory mapped files with specified size for new volumes.")] = None, writable_volume_count : Annotated[Optional[Any], Field(description="If no matching volumes, create specified number of new volumes.")] = None, **kwargs) -> FileKey: # noqa: E501 |
||||
|
"""Assign a file key # noqa: E501 |
||||
|
|
||||
|
This operation is very cheap. Just increase a number in master server's memory. # noqa: E501 |
||||
|
This method makes a synchronous HTTP request by default. To make an |
||||
|
asynchronous HTTP request, please pass async_req=True |
||||
|
|
||||
|
>>> thread = api.dir_assign(count, collection, data_center, rack, data_node, disk, replication, ttl, preallocate, memory_map_max_size_mb, writable_volume_count, async_req=True) |
||||
|
>>> result = thread.get() |
||||
|
|
||||
|
:param count: how many file ids to assign. Use <fid>_1, <fid>_2 for the assigned additional file ids. e.g. 3,01637037d6_1, 3,01637037d6_2 |
||||
|
:type count: object |
||||
|
:param collection: required collection name |
||||
|
:type collection: object |
||||
|
:param data_center: preferred data center |
||||
|
:type data_center: object |
||||
|
:param rack: preferred rack |
||||
|
:type rack: object |
||||
|
:param data_node: preferred volume server, e.g. 127.0.0.1:8080 |
||||
|
:type data_node: object |
||||
|
:param disk: If you have disks labelled, this must be supplied to specify the disk type to allocate on. |
||||
|
:type disk: object |
||||
|
:param replication: replica placement strategy |
||||
|
:type replication: object |
||||
|
:param ttl: file expiration time limit, example: 3m for 3 minutes. units: m-minute, h-hour, d-day, w-week, M-month, y-year |
||||
|
:type ttl: object |
||||
|
:param preallocate: If no matching volumes, pre-allocate this number of bytes on disk for new volumes. |
||||
|
:type preallocate: object |
||||
|
:param memory_map_max_size_mb: Only implemented for windows. Use memory mapped files with specified size for new volumes. |
||||
|
:type memory_map_max_size_mb: object |
||||
|
:param writable_volume_count: If no matching volumes, create specified number of new volumes. |
||||
|
:type writable_volume_count: object |
||||
|
:param async_req: Whether to execute the request asynchronously. |
||||
|
:type async_req: bool, optional |
||||
|
:param _preload_content: if False, the urllib3.HTTPResponse object will |
||||
|
be returned without reading/decoding response |
||||
|
data. Default is True. |
||||
|
:type _preload_content: bool, optional |
||||
|
:param _request_timeout: timeout setting for this request. If one |
||||
|
number provided, it will be total request |
||||
|
timeout. It can also be a pair (tuple) of |
||||
|
(connection, read) timeouts. |
||||
|
:return: Returns the result object. |
||||
|
If the method is called asynchronously, |
||||
|
returns the request thread. |
||||
|
:rtype: FileKey |
||||
|
""" |
||||
|
kwargs['_return_http_data_only'] = True |
||||
|
return self.dir_assign_with_http_info(count, collection, data_center, rack, data_node, disk, replication, ttl, preallocate, memory_map_max_size_mb, writable_volume_count, **kwargs) # noqa: E501 |
||||
|
|
||||
|
@validate_arguments |
||||
|
def dir_assign_with_http_info(self, count : Annotated[Optional[Any], Field(description="how many file ids to assign. Use <fid>_1, <fid>_2 for the assigned additional file ids. e.g. 3,01637037d6_1, 3,01637037d6_2")] = None, collection : Annotated[Optional[Any], Field(description="required collection name")] = None, data_center : Annotated[Optional[Any], Field(description="preferred data center")] = None, rack : Annotated[Optional[Any], Field(description="preferred rack")] = None, data_node : Annotated[Optional[Any], Field(description="preferred volume server, e.g. 127.0.0.1:8080")] = None, disk : Annotated[Optional[Any], Field(description="If you have disks labelled, this must be supplied to specify the disk type to allocate on.")] = None, replication : Annotated[Optional[Any], Field(description="replica placement strategy")] = None, ttl : Annotated[Optional[Any], Field(description="file expiration time limit, example: 3m for 3 minutes. units: m-minute, h-hour, d-day, w-week, M-month, y-year")] = None, preallocate : Annotated[Optional[Any], Field(description="If no matching volumes, pre-allocate this number of bytes on disk for new volumes.")] = None, memory_map_max_size_mb : Annotated[Optional[Any], Field(description="Only implemented for windows. Use memory mapped files with specified size for new volumes.")] = None, writable_volume_count : Annotated[Optional[Any], Field(description="If no matching volumes, create specified number of new volumes.")] = None, **kwargs): # noqa: E501 |
||||
|
"""Assign a file key # noqa: E501 |
||||
|
|
||||
|
This operation is very cheap. Just increase a number in master server's memory. # noqa: E501 |
||||
|
This method makes a synchronous HTTP request by default. To make an |
||||
|
asynchronous HTTP request, please pass async_req=True |
||||
|
|
||||
|
>>> thread = api.dir_assign_with_http_info(count, collection, data_center, rack, data_node, disk, replication, ttl, preallocate, memory_map_max_size_mb, writable_volume_count, async_req=True) |
||||
|
>>> result = thread.get() |
||||
|
|
||||
|
:param count: how many file ids to assign. Use <fid>_1, <fid>_2 for the assigned additional file ids. e.g. 3,01637037d6_1, 3,01637037d6_2 |
||||
|
:type count: object |
||||
|
:param collection: required collection name |
||||
|
:type collection: object |
||||
|
:param data_center: preferred data center |
||||
|
:type data_center: object |
||||
|
:param rack: preferred rack |
||||
|
:type rack: object |
||||
|
:param data_node: preferred volume server, e.g. 127.0.0.1:8080 |
||||
|
:type data_node: object |
||||
|
:param disk: If you have disks labelled, this must be supplied to specify the disk type to allocate on. |
||||
|
:type disk: object |
||||
|
:param replication: replica placement strategy |
||||
|
:type replication: object |
||||
|
:param ttl: file expiration time limit, example: 3m for 3 minutes. units: m-minute, h-hour, d-day, w-week, M-month, y-year |
||||
|
:type ttl: object |
||||
|
:param preallocate: If no matching volumes, pre-allocate this number of bytes on disk for new volumes. |
||||
|
:type preallocate: object |
||||
|
:param memory_map_max_size_mb: Only implemented for windows. Use memory mapped files with specified size for new volumes. |
||||
|
:type memory_map_max_size_mb: object |
||||
|
:param writable_volume_count: If no matching volumes, create specified number of new volumes. |
||||
|
:type writable_volume_count: object |
||||
|
:param async_req: Whether to execute the request asynchronously. |
||||
|
:type async_req: bool, optional |
||||
|
:param _return_http_data_only: response data without head status code |
||||
|
and headers |
||||
|
:type _return_http_data_only: bool, optional |
||||
|
:param _preload_content: if False, the urllib3.HTTPResponse object will |
||||
|
be returned without reading/decoding response |
||||
|
data. Default is True. |
||||
|
:type _preload_content: bool, optional |
||||
|
:param _request_timeout: timeout setting for this request. If one |
||||
|
number provided, it will be total request |
||||
|
timeout. It can also be a pair (tuple) of |
||||
|
(connection, read) timeouts. |
||||
|
:param _request_auth: set to override the auth_settings for an a single |
||||
|
request; this effectively ignores the authentication |
||||
|
in the spec for a single request. |
||||
|
:type _request_auth: dict, optional |
||||
|
:type _content_type: string, optional: force content-type for the request |
||||
|
:return: Returns the result object. |
||||
|
If the method is called asynchronously, |
||||
|
returns the request thread. |
||||
|
:rtype: tuple(FileKey, status_code(int), headers(HTTPHeaderDict)) |
||||
|
""" |
||||
|
|
||||
|
_params = locals() |
||||
|
|
||||
|
_all_params = [ |
||||
|
'count', |
||||
|
'collection', |
||||
|
'data_center', |
||||
|
'rack', |
||||
|
'data_node', |
||||
|
'disk', |
||||
|
'replication', |
||||
|
'ttl', |
||||
|
'preallocate', |
||||
|
'memory_map_max_size_mb', |
||||
|
'writable_volume_count' |
||||
|
] |
||||
|
_all_params.extend( |
||||
|
[ |
||||
|
'async_req', |
||||
|
'_return_http_data_only', |
||||
|
'_preload_content', |
||||
|
'_request_timeout', |
||||
|
'_request_auth', |
||||
|
'_content_type', |
||||
|
'_headers' |
||||
|
] |
||||
|
) |
||||
|
|
||||
|
# validate the arguments |
||||
|
for _key, _val in _params['kwargs'].items(): |
||||
|
if _key not in _all_params: |
||||
|
raise ApiTypeError( |
||||
|
"Got an unexpected keyword argument '%s'" |
||||
|
" to method dir_assign" % _key |
||||
|
) |
||||
|
_params[_key] = _val |
||||
|
del _params['kwargs'] |
||||
|
|
||||
|
_collection_formats = {} |
||||
|
|
||||
|
# process the path parameters |
||||
|
_path_params = {} |
||||
|
|
||||
|
# process the query parameters |
||||
|
_query_params = [] |
||||
|
if _params.get('count') is not None: # noqa: E501 |
||||
|
_query_params.append(('count', _params['count'])) |
||||
|
if _params.get('collection') is not None: # noqa: E501 |
||||
|
_query_params.append(('collection', _params['collection'])) |
||||
|
if _params.get('data_center') is not None: # noqa: E501 |
||||
|
_query_params.append(('dataCenter', _params['data_center'])) |
||||
|
if _params.get('rack') is not None: # noqa: E501 |
||||
|
_query_params.append(('rack', _params['rack'])) |
||||
|
if _params.get('data_node') is not None: # noqa: E501 |
||||
|
_query_params.append(('dataNode', _params['data_node'])) |
||||
|
if _params.get('disk') is not None: # noqa: E501 |
||||
|
_query_params.append(('disk', _params['disk'])) |
||||
|
if _params.get('replication') is not None: # noqa: E501 |
||||
|
_query_params.append(('replication', _params['replication'])) |
||||
|
if _params.get('ttl') is not None: # noqa: E501 |
||||
|
_query_params.append(('ttl', _params['ttl'])) |
||||
|
if _params.get('preallocate') is not None: # noqa: E501 |
||||
|
_query_params.append(('preallocate', _params['preallocate'])) |
||||
|
if _params.get('memory_map_max_size_mb') is not None: # noqa: E501 |
||||
|
_query_params.append(('memoryMapMaxSizeMb', _params['memory_map_max_size_mb'])) |
||||
|
if _params.get('writable_volume_count') is not None: # noqa: E501 |
||||
|
_query_params.append(('writableVolumeCount', _params['writable_volume_count'])) |
||||
|
|
||||
|
# process the header parameters |
||||
|
_header_params = dict(_params.get('_headers', {})) |
||||
|
|
||||
|
# process the form parameters |
||||
|
_form_params = [] |
||||
|
_files = {} |
||||
|
|
||||
|
# process the body parameter |
||||
|
_body_params = None |
||||
|
|
||||
|
# set the HTTP header `Accept` |
||||
|
_header_params['Accept'] = self.api_client.select_header_accept( |
||||
|
['application/json']) # noqa: E501 |
||||
|
|
||||
|
# authentication setting |
||||
|
_auth_settings = [] # noqa: E501 |
||||
|
|
||||
|
_response_types_map = { |
||||
|
'200': "FileKey", |
||||
|
} |
||||
|
|
||||
|
return self.api_client.call_api( |
||||
|
'/dir/assign', 'GET', |
||||
|
_path_params, |
||||
|
_query_params, |
||||
|
_header_params, |
||||
|
body=_body_params, |
||||
|
post_params=_form_params, |
||||
|
files=_files, |
||||
|
response_types_map=_response_types_map, |
||||
|
auth_settings=_auth_settings, |
||||
|
async_req=_params.get('async_req'), |
||||
|
_return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 |
||||
|
_preload_content=_params.get('_preload_content', True), |
||||
|
_request_timeout=_params.get('_request_timeout'), |
||||
|
collection_formats=_collection_formats, |
||||
|
_request_auth=_params.get('_request_auth')) |
||||
|
|
||||
|
@validate_arguments |
||||
|
def dir_lookup(self, volume_id : Annotated[Optional[Any], Field(description="volume id")] = None, collection : Annotated[Optional[Any], Field(description="optionally to speed up the lookup")] = None, file_id : Annotated[Optional[Any], Field(description="If provided, this returns the fileId location and a JWT to update or delete the file.")] = None, read : Annotated[Optional[Any], Field(description="works together with \"fileId\", if read=yes, JWT is generated for reads.")] = None, **kwargs) -> object: # noqa: E501 |
||||
|
"""Lookup volume # noqa: E501 |
||||
|
|
||||
|
We would need to find out whether the volumes have moved. # noqa: E501 |
||||
|
This method makes a synchronous HTTP request by default. To make an |
||||
|
asynchronous HTTP request, please pass async_req=True |
||||
|
|
||||
|
>>> thread = api.dir_lookup(volume_id, collection, file_id, read, async_req=True) |
||||
|
>>> result = thread.get() |
||||
|
|
||||
|
:param volume_id: volume id |
||||
|
:type volume_id: object |
||||
|
:param collection: optionally to speed up the lookup |
||||
|
:type collection: object |
||||
|
:param file_id: If provided, this returns the fileId location and a JWT to update or delete the file. |
||||
|
:type file_id: object |
||||
|
:param read: works together with \"fileId\", if read=yes, JWT is generated for reads. |
||||
|
:type read: object |
||||
|
:param async_req: Whether to execute the request asynchronously. |
||||
|
:type async_req: bool, optional |
||||
|
:param _preload_content: if False, the urllib3.HTTPResponse object will |
||||
|
be returned without reading/decoding response |
||||
|
data. Default is True. |
||||
|
:type _preload_content: bool, optional |
||||
|
:param _request_timeout: timeout setting for this request. If one |
||||
|
number provided, it will be total request |
||||
|
timeout. It can also be a pair (tuple) of |
||||
|
(connection, read) timeouts. |
||||
|
:return: Returns the result object. |
||||
|
If the method is called asynchronously, |
||||
|
returns the request thread. |
||||
|
:rtype: object |
||||
|
""" |
||||
|
kwargs['_return_http_data_only'] = True |
||||
|
return self.dir_lookup_with_http_info(volume_id, collection, file_id, read, **kwargs) # noqa: E501 |
||||
|
|
||||
|
@validate_arguments |
||||
|
def dir_lookup_with_http_info(self, volume_id : Annotated[Optional[Any], Field(description="volume id")] = None, collection : Annotated[Optional[Any], Field(description="optionally to speed up the lookup")] = None, file_id : Annotated[Optional[Any], Field(description="If provided, this returns the fileId location and a JWT to update or delete the file.")] = None, read : Annotated[Optional[Any], Field(description="works together with \"fileId\", if read=yes, JWT is generated for reads.")] = None, **kwargs): # noqa: E501 |
||||
|
"""Lookup volume # noqa: E501 |
||||
|
|
||||
|
We would need to find out whether the volumes have moved. # noqa: E501 |
||||
|
This method makes a synchronous HTTP request by default. To make an |
||||
|
asynchronous HTTP request, please pass async_req=True |
||||
|
|
||||
|
>>> thread = api.dir_lookup_with_http_info(volume_id, collection, file_id, read, async_req=True) |
||||
|
>>> result = thread.get() |
||||
|
|
||||
|
:param volume_id: volume id |
||||
|
:type volume_id: object |
||||
|
:param collection: optionally to speed up the lookup |
||||
|
:type collection: object |
||||
|
:param file_id: If provided, this returns the fileId location and a JWT to update or delete the file. |
||||
|
:type file_id: object |
||||
|
:param read: works together with \"fileId\", if read=yes, JWT is generated for reads. |
||||
|
:type read: object |
||||
|
:param async_req: Whether to execute the request asynchronously. |
||||
|
:type async_req: bool, optional |
||||
|
:param _return_http_data_only: response data without head status code |
||||
|
and headers |
||||
|
:type _return_http_data_only: bool, optional |
||||
|
:param _preload_content: if False, the urllib3.HTTPResponse object will |
||||
|
be returned without reading/decoding response |
||||
|
data. Default is True. |
||||
|
:type _preload_content: bool, optional |
||||
|
:param _request_timeout: timeout setting for this request. If one |
||||
|
number provided, it will be total request |
||||
|
timeout. It can also be a pair (tuple) of |
||||
|
(connection, read) timeouts. |
||||
|
:param _request_auth: set to override the auth_settings for an a single |
||||
|
request; this effectively ignores the authentication |
||||
|
in the spec for a single request. |
||||
|
:type _request_auth: dict, optional |
||||
|
:type _content_type: string, optional: force content-type for the request |
||||
|
:return: Returns the result object. |
||||
|
If the method is called asynchronously, |
||||
|
returns the request thread. |
||||
|
:rtype: tuple(object, status_code(int), headers(HTTPHeaderDict)) |
||||
|
""" |
||||
|
|
||||
|
_params = locals() |
||||
|
|
||||
|
_all_params = [ |
||||
|
'volume_id', |
||||
|
'collection', |
||||
|
'file_id', |
||||
|
'read' |
||||
|
] |
||||
|
_all_params.extend( |
||||
|
[ |
||||
|
'async_req', |
||||
|
'_return_http_data_only', |
||||
|
'_preload_content', |
||||
|
'_request_timeout', |
||||
|
'_request_auth', |
||||
|
'_content_type', |
||||
|
'_headers' |
||||
|
] |
||||
|
) |
||||
|
|
||||
|
# validate the arguments |
||||
|
for _key, _val in _params['kwargs'].items(): |
||||
|
if _key not in _all_params: |
||||
|
raise ApiTypeError( |
||||
|
"Got an unexpected keyword argument '%s'" |
||||
|
" to method dir_lookup" % _key |
||||
|
) |
||||
|
_params[_key] = _val |
||||
|
del _params['kwargs'] |
||||
|
|
||||
|
_collection_formats = {} |
||||
|
|
||||
|
# process the path parameters |
||||
|
_path_params = {} |
||||
|
|
||||
|
# process the query parameters |
||||
|
_query_params = [] |
||||
|
if _params.get('volume_id') is not None: # noqa: E501 |
||||
|
_query_params.append(('volumeId', _params['volume_id'])) |
||||
|
if _params.get('collection') is not None: # noqa: E501 |
||||
|
_query_params.append(('collection', _params['collection'])) |
||||
|
if _params.get('file_id') is not None: # noqa: E501 |
||||
|
_query_params.append(('fileId', _params['file_id'])) |
||||
|
if _params.get('read') is not None: # noqa: E501 |
||||
|
_query_params.append(('read', _params['read'])) |
||||
|
|
||||
|
# process the header parameters |
||||
|
_header_params = dict(_params.get('_headers', {})) |
||||
|
|
||||
|
# process the form parameters |
||||
|
_form_params = [] |
||||
|
_files = {} |
||||
|
|
||||
|
# process the body parameter |
||||
|
_body_params = None |
||||
|
|
||||
|
# set the HTTP header `Accept` |
||||
|
_header_params['Accept'] = self.api_client.select_header_accept( |
||||
|
['application/json']) # noqa: E501 |
||||
|
|
||||
|
# authentication setting |
||||
|
_auth_settings = [] # noqa: E501 |
||||
|
|
||||
|
_response_types_map = { |
||||
|
'200': "object", |
||||
|
} |
||||
|
|
||||
|
return self.api_client.call_api( |
||||
|
'/dir/lookup', 'GET', |
||||
|
_path_params, |
||||
|
_query_params, |
||||
|
_header_params, |
||||
|
body=_body_params, |
||||
|
post_params=_form_params, |
||||
|
files=_files, |
||||
|
response_types_map=_response_types_map, |
||||
|
auth_settings=_auth_settings, |
||||
|
async_req=_params.get('async_req'), |
||||
|
_return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501 |
||||
|
_preload_content=_params.get('_preload_content', True), |
||||
|
_request_timeout=_params.get('_request_timeout'), |
||||
|
collection_formats=_collection_formats, |
||||
|
_request_auth=_params.get('_request_auth')) |
@ -0,0 +1,750 @@ |
|||||
|
# coding: utf-8 |
||||
|
""" |
||||
|
Seaweedfs Master Server API |
||||
|
|
||||
|
The Seaweedfs Master Server API allows you to store blobs # noqa: E501 |
||||
|
|
||||
|
The version of the OpenAPI document: 3.43.0 |
||||
|
Generated by: https://openapi-generator.tech |
||||
|
""" |
||||
|
|
||||
|
from __future__ import absolute_import |
||||
|
|
||||
|
import atexit |
||||
|
import datetime |
||||
|
from dateutil.parser import parse |
||||
|
import json |
||||
|
import mimetypes |
||||
|
from multiprocessing.pool import ThreadPool |
||||
|
import os |
||||
|
import re |
||||
|
import tempfile |
||||
|
|
||||
|
from urllib.parse import quote |
||||
|
|
||||
|
from openapi_client.configuration import Configuration |
||||
|
import openapi_client.models |
||||
|
from openapi_client import rest |
||||
|
from openapi_client.exceptions import ApiValueError, ApiException |
||||
|
|
||||
|
|
||||
|
class ApiClient(object): |
||||
|
"""Generic API client for OpenAPI client library builds. |
||||
|
|
||||
|
OpenAPI generic API client. This client handles the client- |
||||
|
server communication, and is invariant across implementations. Specifics of |
||||
|
the methods and models for each application are generated from the OpenAPI |
||||
|
templates. |
||||
|
|
||||
|
NOTE: This class is auto generated by OpenAPI Generator. |
||||
|
Ref: https://openapi-generator.tech |
||||
|
Do not edit the class manually. |
||||
|
|
||||
|
:param configuration: .Configuration object for this client |
||||
|
:param header_name: a header to pass when making calls to the API. |
||||
|
:param header_value: a header value to pass when making calls to |
||||
|
the API. |
||||
|
:param cookie: a cookie to include in the header when making calls |
||||
|
to the API |
||||
|
:param pool_threads: The number of threads to use for async requests |
||||
|
to the API. More threads means more concurrent API requests. |
||||
|
""" |
||||
|
|
||||
|
PRIMITIVE_TYPES = (float, bool, bytes, str, int) |
||||
|
NATIVE_TYPES_MAPPING = { |
||||
|
'int': int, |
||||
|
'long': int, # TODO remove as only py3 is supported? |
||||
|
'float': float, |
||||
|
'str': str, |
||||
|
'bool': bool, |
||||
|
'date': datetime.date, |
||||
|
'datetime': datetime.datetime, |
||||
|
'object': object, |
||||
|
} |
||||
|
_pool = None |
||||
|
|
||||
|
def __init__(self, configuration=None, header_name=None, header_value=None, |
||||
|
cookie=None, pool_threads=1): |
||||
|
# use default configuraiton if none is provided |
||||
|
if configuration is None: |
||||
|
configuration = Configuration.get_default() |
||||
|
self.configuration = configuration |
||||
|
self.pool_threads = pool_threads |
||||
|
|
||||
|
self.rest_client = rest.RESTClientObject(configuration) |
||||
|
self.default_headers = {} |
||||
|
if header_name is not None: |
||||
|
self.default_headers[header_name] = header_value |
||||
|
self.cookie = cookie |
||||
|
# Set default User-Agent. |
||||
|
self.user_agent = 'OpenAPI-Generator/1.0.0/python' |
||||
|
self.client_side_validation = configuration.client_side_validation |
||||
|
|
||||
|
def __enter__(self): |
||||
|
return self |
||||
|
|
||||
|
def __exit__(self, exc_type, exc_value, traceback): |
||||
|
self.close() |
||||
|
|
||||
|
def close(self): |
||||
|
if self._pool: |
||||
|
self._pool.close() |
||||
|
self._pool.join() |
||||
|
self._pool = None |
||||
|
if hasattr(atexit, 'unregister'): |
||||
|
atexit.unregister(self.close) |
||||
|
|
||||
|
@property |
||||
|
def pool(self): |
||||
|
"""Create thread pool on first request |
||||
|
avoids instantiating unused threadpool for blocking clients. |
||||
|
""" |
||||
|
if self._pool is None: |
||||
|
atexit.register(self.close) |
||||
|
self._pool = ThreadPool(self.pool_threads) |
||||
|
return self._pool |
||||
|
|
||||
|
@property |
||||
|
def user_agent(self): |
||||
|
"""User agent for this API client""" |
||||
|
return self.default_headers['User-Agent'] |
||||
|
|
||||
|
@user_agent.setter |
||||
|
def user_agent(self, value): |
||||
|
self.default_headers['User-Agent'] = value |
||||
|
|
||||
|
def set_default_header(self, header_name, header_value): |
||||
|
self.default_headers[header_name] = header_value |
||||
|
|
||||
|
|
||||
|
_default = None |
||||
|
|
||||
|
@classmethod |
||||
|
def get_default(cls): |
||||
|
"""Return new instance of ApiClient. |
||||
|
|
||||
|
This method returns newly created, based on default constructor, |
||||
|
object of ApiClient class or returns a copy of default |
||||
|
ApiClient. |
||||
|
|
||||
|
:return: The ApiClient object. |
||||
|
""" |
||||
|
if cls._default is None: |
||||
|
cls._default = ApiClient() |
||||
|
return cls._default |
||||
|
|
||||
|
@classmethod |
||||
|
def set_default(cls, default): |
||||
|
"""Set default instance of ApiClient. |
||||
|
|
||||
|
It stores default ApiClient. |
||||
|
|
||||
|
:param default: object of ApiClient. |
||||
|
""" |
||||
|
cls._default = default |
||||
|
|
||||
|
def __call_api( |
||||
|
self, resource_path, method, path_params=None, |
||||
|
query_params=None, header_params=None, body=None, post_params=None, |
||||
|
files=None, response_types_map=None, auth_settings=None, |
||||
|
_return_http_data_only=None, collection_formats=None, |
||||
|
_preload_content=True, _request_timeout=None, _host=None, |
||||
|
_request_auth=None): |
||||
|
|
||||
|
config = self.configuration |
||||
|
|
||||
|
# header parameters |
||||
|
header_params = header_params or {} |
||||
|
header_params.update(self.default_headers) |
||||
|
if self.cookie: |
||||
|
header_params['Cookie'] = self.cookie |
||||
|
if header_params: |
||||
|
header_params = self.sanitize_for_serialization(header_params) |
||||
|
header_params = dict(self.parameters_to_tuples(header_params, |
||||
|
collection_formats)) |
||||
|
|
||||
|
# path parameters |
||||
|
if path_params: |
||||
|
path_params = self.sanitize_for_serialization(path_params) |
||||
|
path_params = self.parameters_to_tuples(path_params, |
||||
|
collection_formats) |
||||
|
for k, v in path_params: |
||||
|
# specified safe chars, encode everything |
||||
|
resource_path = resource_path.replace( |
||||
|
'{%s}' % k, |
||||
|
quote(str(v), safe=config.safe_chars_for_path_param) |
||||
|
) |
||||
|
|
||||
|
# post parameters |
||||
|
if post_params or files: |
||||
|
post_params = post_params if post_params else [] |
||||
|
post_params = self.sanitize_for_serialization(post_params) |
||||
|
post_params = self.parameters_to_tuples(post_params, |
||||
|
collection_formats) |
||||
|
post_params.extend(self.files_parameters(files)) |
||||
|
|
||||
|
# auth setting |
||||
|
self.update_params_for_auth( |
||||
|
header_params, query_params, auth_settings, |
||||
|
resource_path, method, body, |
||||
|
request_auth=_request_auth) |
||||
|
|
||||
|
# body |
||||
|
if body: |
||||
|
body = self.sanitize_for_serialization(body) |
||||
|
|
||||
|
# request url |
||||
|
if _host is None: |
||||
|
url = self.configuration.host + resource_path |
||||
|
else: |
||||
|
# use server/host defined in path or operation instead |
||||
|
url = _host + resource_path |
||||
|
|
||||
|
# query parameters |
||||
|
if query_params: |
||||
|
query_params = self.sanitize_for_serialization(query_params) |
||||
|
url_query = self.parameters_to_url_query(query_params, |
||||
|
collection_formats) |
||||
|
url += "?" + url_query |
||||
|
|
||||
|
try: |
||||
|
# perform request and return response |
||||
|
response_data = self.request( |
||||
|
method, url, |
||||
|
query_params=query_params, |
||||
|
headers=header_params, |
||||
|
post_params=post_params, body=body, |
||||
|
_preload_content=_preload_content, |
||||
|
_request_timeout=_request_timeout) |
||||
|
except ApiException as e: |
||||
|
if e.body: |
||||
|
e.body = e.body.decode('utf-8') |
||||
|
raise e |
||||
|
|
||||
|
self.last_response = response_data |
||||
|
|
||||
|
return_data = response_data |
||||
|
|
||||
|
if not _preload_content: |
||||
|
return return_data |
||||
|
|
||||
|
response_type = response_types_map.get(str(response_data.status), None) |
||||
|
|
||||
|
if response_type not in ["file", "bytes"]: |
||||
|
match = None |
||||
|
content_type = response_data.getheader('content-type') |
||||
|
if content_type is not None: |
||||
|
match = re.search(r"charset=([a-zA-Z\-\d]+)[\s;]?", content_type) |
||||
|
encoding = match.group(1) if match else "utf-8" |
||||
|
response_data.data = response_data.data.decode(encoding) |
||||
|
|
||||
|
# deserialize response data |
||||
|
|
||||
|
if response_type: |
||||
|
return_data = self.deserialize(response_data, response_type) |
||||
|
else: |
||||
|
return_data = None |
||||
|
|
||||
|
if _return_http_data_only: |
||||
|
return (return_data) |
||||
|
else: |
||||
|
return (return_data, response_data.status, |
||||
|
response_data.getheaders()) |
||||
|
|
||||
|
def sanitize_for_serialization(self, obj): |
||||
|
"""Builds a JSON POST object. |
||||
|
|
||||
|
If obj is None, return None. |
||||
|
If obj is str, int, float, or bool, return it directly. |
||||
|
If obj is datetime.datetime, datetime.date |
||||
|
convert to string in iso8601 format. |
||||
|
If obj is list, sanitize each element in the list. |
||||
|
If obj is dict, sanitize each value in the dict. |
||||
|
If obj is OpenAPI model, return the properties dict. |
||||
|
|
||||
|
:param obj: The data to serialize. |
||||
|
:return: The serialized form of data. |
||||
|
""" |
||||
|
if obj is None: |
||||
|
return None |
||||
|
elif isinstance(obj, self.PRIMITIVE_TYPES): |
||||
|
return obj |
||||
|
elif isinstance(obj, list): |
||||
|
return [self.sanitize_for_serialization(sub_obj) |
||||
|
for sub_obj in obj] |
||||
|
elif isinstance(obj, tuple): |
||||
|
return tuple(self.sanitize_for_serialization(sub_obj) |
||||
|
for sub_obj in obj) |
||||
|
elif isinstance(obj, (datetime.datetime, datetime.date)): |
||||
|
return obj.isoformat() |
||||
|
|
||||
|
if isinstance(obj, dict): |
||||
|
obj_dict = obj |
||||
|
else: |
||||
|
# Convert model obj to dict except |
||||
|
# attributes `openapi_types`, `attribute_map` |
||||
|
# and attributes whose value is not None. |
||||
|
# Convert attribute name to json key in |
||||
|
# model definition for request. |
||||
|
obj_dict = obj.to_dict() |
||||
|
|
||||
|
return {key: self.sanitize_for_serialization(val) |
||||
|
for key, val in obj_dict.items()} |
||||
|
|
||||
|
def deserialize(self, response, response_type): |
||||
|
"""Deserializes response into an object. |
||||
|
|
||||
|
:param response: RESTResponse object to be deserialized. |
||||
|
:param response_type: class literal for |
||||
|
deserialized object, or string of class name. |
||||
|
|
||||
|
:return: deserialized object. |
||||
|
""" |
||||
|
# handle file downloading |
||||
|
# save response body into a tmp file and return the instance |
||||
|
if response_type == "file": |
||||
|
return self.__deserialize_file(response) |
||||
|
|
||||
|
# fetch data from response object |
||||
|
try: |
||||
|
data = json.loads(response.data) |
||||
|
except ValueError: |
||||
|
data = response.data |
||||
|
|
||||
|
return self.__deserialize(data, response_type) |
||||
|
|
||||
|
def __deserialize(self, data, klass): |
||||
|
"""Deserializes dict, list, str into an object. |
||||
|
|
||||
|
:param data: dict, list or str. |
||||
|
:param klass: class literal, or string of class name. |
||||
|
|
||||
|
:return: object. |
||||
|
""" |
||||
|
if data is None: |
||||
|
return None |
||||
|
|
||||
|
if type(klass) == str: |
||||
|
if klass.startswith('List['): |
||||
|
sub_kls = re.match(r'List\[(.*)]', klass).group(1) |
||||
|
return [self.__deserialize(sub_data, sub_kls) |
||||
|
for sub_data in data] |
||||
|
|
||||
|
if klass.startswith('Dict['): |
||||
|
sub_kls = re.match(r'Dict\[([^,]*), (.*)]', klass).group(2) |
||||
|
return {k: self.__deserialize(v, sub_kls) |
||||
|
for k, v in data.items()} |
||||
|
|
||||
|
# convert str to class |
||||
|
if klass in self.NATIVE_TYPES_MAPPING: |
||||
|
klass = self.NATIVE_TYPES_MAPPING[klass] |
||||
|
else: |
||||
|
klass = getattr(openapi_client.models, klass) |
||||
|
|
||||
|
if klass in self.PRIMITIVE_TYPES: |
||||
|
return self.__deserialize_primitive(data, klass) |
||||
|
elif klass == object: |
||||
|
return self.__deserialize_object(data) |
||||
|
elif klass == datetime.date: |
||||
|
return self.__deserialize_date(data) |
||||
|
elif klass == datetime.datetime: |
||||
|
return self.__deserialize_datetime(data) |
||||
|
else: |
||||
|
return self.__deserialize_model(data, klass) |
||||
|
|
||||
|
def call_api(self, resource_path, method, |
||||
|
path_params=None, query_params=None, header_params=None, |
||||
|
body=None, post_params=None, files=None, |
||||
|
response_types_map=None, auth_settings=None, |
||||
|
async_req=None, _return_http_data_only=None, |
||||
|
collection_formats=None, _preload_content=True, |
||||
|
_request_timeout=None, _host=None, _request_auth=None): |
||||
|
"""Makes the HTTP request (synchronous) and returns deserialized data. |
||||
|
|
||||
|
To make an asynchronous request, set the async_req parameter. |
||||
|
|
||||
|
:param resource_path: Path to method endpoint. |
||||
|
:param method: Method to call. |
||||
|
:param path_params: Path parameters in the url. |
||||
|
:param query_params: Query parameters in the url. |
||||
|
:param header_params: Header parameters to be |
||||
|
placed in the request header. |
||||
|
:param body: Request body. |
||||
|
:param post_params dict: Request post form parameters, |
||||
|
for `application/x-www-form-urlencoded`, `multipart/form-data`. |
||||
|
:param auth_settings list: Auth Settings names for the request. |
||||
|
:param response_types_map: Response data types mapped by HTTP status code. |
||||
|
:param files dict: key -> filename, value -> filepath, |
||||
|
for `multipart/form-data`. |
||||
|
:param async_req bool: execute request asynchronously |
||||
|
:param _return_http_data_only: return the response data only, without |
||||
|
the HTTP status code and headers. |
||||
|
:param collection_formats: dict of collection formats for path, query, |
||||
|
header, and post parameters. |
||||
|
:param _preload_content: if False, the urllib3.HTTPResponse object will |
||||
|
be returned without reading/decoding response |
||||
|
data. Default is True. |
||||
|
:param _request_timeout: timeout setting for this request. If one |
||||
|
number is provided, it will be the total request |
||||
|
timeout. It can also be a pair (tuple) of |
||||
|
(connection, read) timeouts. |
||||
|
:param _request_auth: set to override the auth_settings for a single |
||||
|
request; this effectively ignores the authentication |
||||
|
in the spec for a single request. |
||||
|
:type _request_auth: dict, optional |
||||
|
:return: |
||||
|
If async_req parameter is True, |
||||
|
the request will be called asynchronously. |
||||
|
The method will return the request thread. |
||||
|
If parameter async_req is False or missing, |
||||
|
then the method will return the response directly. |
||||
|
""" |
||||
|
if not async_req: |
||||
|
return self.__call_api(resource_path, method, |
||||
|
path_params, query_params, header_params, |
||||
|
body, post_params, files, |
||||
|
response_types_map, auth_settings, |
||||
|
_return_http_data_only, collection_formats, |
||||
|
_preload_content, _request_timeout, _host, |
||||
|
_request_auth) |
||||
|
|
||||
|
return self.pool.apply_async(self.__call_api, (resource_path, |
||||
|
method, path_params, |
||||
|
query_params, |
||||
|
header_params, body, |
||||
|
post_params, files, |
||||
|
response_types_map, |
||||
|
auth_settings, |
||||
|
_return_http_data_only, |
||||
|
collection_formats, |
||||
|
_preload_content, |
||||
|
_request_timeout, |
||||
|
_host, _request_auth)) |
||||
|
|
||||
|
def request(self, method, url, query_params=None, headers=None, |
||||
|
post_params=None, body=None, _preload_content=True, |
||||
|
_request_timeout=None): |
||||
|
"""Makes the HTTP request using RESTClient.""" |
||||
|
if method == "GET": |
||||
|
return self.rest_client.get_request(url, |
||||
|
query_params=query_params, |
||||
|
_preload_content=_preload_content, |
||||
|
_request_timeout=_request_timeout, |
||||
|
headers=headers) |
||||
|
elif method == "HEAD": |
||||
|
return self.rest_client.head_request(url, |
||||
|
query_params=query_params, |
||||
|
_preload_content=_preload_content, |
||||
|
_request_timeout=_request_timeout, |
||||
|
headers=headers) |
||||
|
elif method == "OPTIONS": |
||||
|
return self.rest_client.options_request(url, |
||||
|
query_params=query_params, |
||||
|
headers=headers, |
||||
|
_preload_content=_preload_content, |
||||
|
_request_timeout=_request_timeout) |
||||
|
elif method == "POST": |
||||
|
return self.rest_client.post_request(url, |
||||
|
query_params=query_params, |
||||
|
headers=headers, |
||||
|
post_params=post_params, |
||||
|
_preload_content=_preload_content, |
||||
|
_request_timeout=_request_timeout, |
||||
|
body=body) |
||||
|
elif method == "PUT": |
||||
|
return self.rest_client.put_request(url, |
||||
|
query_params=query_params, |
||||
|
headers=headers, |
||||
|
post_params=post_params, |
||||
|
_preload_content=_preload_content, |
||||
|
_request_timeout=_request_timeout, |
||||
|
body=body) |
||||
|
elif method == "PATCH": |
||||
|
return self.rest_client.patch_request(url, |
||||
|
query_params=query_params, |
||||
|
headers=headers, |
||||
|
post_params=post_params, |
||||
|
_preload_content=_preload_content, |
||||
|
_request_timeout=_request_timeout, |
||||
|
body=body) |
||||
|
elif method == "DELETE": |
||||
|
return self.rest_client.delete_request(url, |
||||
|
query_params=query_params, |
||||
|
headers=headers, |
||||
|
_preload_content=_preload_content, |
||||
|
_request_timeout=_request_timeout, |
||||
|
body=body) |
||||
|
else: |
||||
|
raise ApiValueError( |
||||
|
"http method must be `GET`, `HEAD`, `OPTIONS`," |
||||
|
" `POST`, `PATCH`, `PUT` or `DELETE`." |
||||
|
) |
||||
|
|
||||
|
def parameters_to_tuples(self, params, collection_formats): |
||||
|
"""Get parameters as list of tuples, formatting collections. |
||||
|
|
||||
|
:param params: Parameters as dict or list of two-tuples |
||||
|
:param dict collection_formats: Parameter collection formats |
||||
|
:return: Parameters as list of tuples, collections formatted |
||||
|
""" |
||||
|
new_params = [] |
||||
|
if collection_formats is None: |
||||
|
collection_formats = {} |
||||
|
for k, v in params.items() if isinstance(params, dict) else params: # noqa: E501 |
||||
|
if k in collection_formats: |
||||
|
collection_format = collection_formats[k] |
||||
|
if collection_format == 'multi': |
||||
|
new_params.extend((k, value) for value in v) |
||||
|
else: |
||||
|
if collection_format == 'ssv': |
||||
|
delimiter = ' ' |
||||
|
elif collection_format == 'tsv': |
||||
|
delimiter = '\t' |
||||
|
elif collection_format == 'pipes': |
||||
|
delimiter = '|' |
||||
|
else: # csv is the default |
||||
|
delimiter = ',' |
||||
|
new_params.append( |
||||
|
(k, delimiter.join(str(value) for value in v))) |
||||
|
else: |
||||
|
new_params.append((k, v)) |
||||
|
return new_params |
||||
|
|
||||
|
def parameters_to_url_query(self, params, collection_formats): |
||||
|
"""Get parameters as list of tuples, formatting collections. |
||||
|
|
||||
|
:param params: Parameters as dict or list of two-tuples |
||||
|
:param dict collection_formats: Parameter collection formats |
||||
|
:return: URL query string (e.g. a=Hello%20World&b=123) |
||||
|
""" |
||||
|
new_params = [] |
||||
|
if collection_formats is None: |
||||
|
collection_formats = {} |
||||
|
for k, v in params.items() if isinstance(params, dict) else params: # noqa: E501 |
||||
|
if isinstance(v, (int, float)): |
||||
|
v = str(v) |
||||
|
if isinstance(v, bool): |
||||
|
v = str(v).lower() |
||||
|
|
||||
|
if k in collection_formats: |
||||
|
collection_format = collection_formats[k] |
||||
|
if collection_format == 'multi': |
||||
|
new_params.extend((k, value) for value in v) |
||||
|
else: |
||||
|
if collection_format == 'ssv': |
||||
|
delimiter = ' ' |
||||
|
elif collection_format == 'tsv': |
||||
|
delimiter = '\t' |
||||
|
elif collection_format == 'pipes': |
||||
|
delimiter = '|' |
||||
|
else: # csv is the default |
||||
|
delimiter = ',' |
||||
|
new_params.append( |
||||
|
(k, delimiter.join(quote(str(value)) for value in v))) |
||||
|
else: |
||||
|
new_params.append((k, v)) |
||||
|
|
||||
|
return "&".join(["=".join(item) for item in new_params]) |
||||
|
|
||||
|
def files_parameters(self, files=None): |
||||
|
"""Builds form parameters. |
||||
|
|
||||
|
:param files: File parameters. |
||||
|
:return: Form parameters with files. |
||||
|
""" |
||||
|
params = [] |
||||
|
|
||||
|
if files: |
||||
|
for k, v in files.items(): |
||||
|
if not v: |
||||
|
continue |
||||
|
file_names = v if type(v) is list else [v] |
||||
|
for n in file_names: |
||||
|
with open(n, 'rb') as f: |
||||
|
filename = os.path.basename(f.name) |
||||
|
filedata = f.read() |
||||
|
mimetype = (mimetypes.guess_type(filename)[0] or |
||||
|
'application/octet-stream') |
||||
|
params.append( |
||||
|
tuple([k, tuple([filename, filedata, mimetype])])) |
||||
|
|
||||
|
return params |
||||
|
|
||||
|
def select_header_accept(self, accepts): |
||||
|
"""Returns `Accept` based on an array of accepts provided. |
||||
|
|
||||
|
:param accepts: List of headers. |
||||
|
:return: Accept (e.g. application/json). |
||||
|
""" |
||||
|
if not accepts: |
||||
|
return |
||||
|
|
||||
|
for accept in accepts: |
||||
|
if re.search('json', accept, re.IGNORECASE): |
||||
|
return accept |
||||
|
|
||||
|
return accepts[0] |
||||
|
|
||||
|
def select_header_content_type(self, content_types): |
||||
|
"""Returns `Content-Type` based on an array of content_types provided. |
||||
|
|
||||
|
:param content_types: List of content-types. |
||||
|
:return: Content-Type (e.g. application/json). |
||||
|
""" |
||||
|
if not content_types: |
||||
|
return None |
||||
|
|
||||
|
for content_type in content_types: |
||||
|
if re.search('json', content_type, re.IGNORECASE): |
||||
|
return content_type |
||||
|
|
||||
|
return content_types[0] |
||||
|
|
||||
|
def update_params_for_auth(self, headers, queries, auth_settings, |
||||
|
resource_path, method, body, |
||||
|
request_auth=None): |
||||
|
"""Updates header and query params based on authentication setting. |
||||
|
|
||||
|
:param headers: Header parameters dict to be updated. |
||||
|
:param queries: Query parameters tuple list to be updated. |
||||
|
:param auth_settings: Authentication setting identifiers list. |
||||
|
:resource_path: A string representation of the HTTP request resource path. |
||||
|
:method: A string representation of the HTTP request method. |
||||
|
:body: An object representing the body of the HTTP request. |
||||
|
The object type is the return value of sanitize_for_serialization(). |
||||
|
:param request_auth: if set, the provided settings will |
||||
|
override the token in the configuration. |
||||
|
""" |
||||
|
if not auth_settings: |
||||
|
return |
||||
|
|
||||
|
if request_auth: |
||||
|
self._apply_auth_params(headers, queries, |
||||
|
resource_path, method, body, |
||||
|
request_auth) |
||||
|
return |
||||
|
|
||||
|
for auth in auth_settings: |
||||
|
auth_setting = self.configuration.auth_settings().get(auth) |
||||
|
if auth_setting: |
||||
|
self._apply_auth_params(headers, queries, |
||||
|
resource_path, method, body, |
||||
|
auth_setting) |
||||
|
|
||||
|
def _apply_auth_params(self, headers, queries, |
||||
|
resource_path, method, body, |
||||
|
auth_setting): |
||||
|
"""Updates the request parameters based on a single auth_setting |
||||
|
|
||||
|
:param headers: Header parameters dict to be updated. |
||||
|
:param queries: Query parameters tuple list to be updated. |
||||
|
:resource_path: A string representation of the HTTP request resource path. |
||||
|
:method: A string representation of the HTTP request method. |
||||
|
:body: An object representing the body of the HTTP request. |
||||
|
The object type is the return value of sanitize_for_serialization(). |
||||
|
:param auth_setting: auth settings for the endpoint |
||||
|
""" |
||||
|
if auth_setting['in'] == 'cookie': |
||||
|
headers['Cookie'] = auth_setting['value'] |
||||
|
elif auth_setting['in'] == 'header': |
||||
|
if auth_setting['type'] != 'http-signature': |
||||
|
headers[auth_setting['key']] = auth_setting['value'] |
||||
|
elif auth_setting['in'] == 'query': |
||||
|
queries.append((auth_setting['key'], auth_setting['value'])) |
||||
|
else: |
||||
|
raise ApiValueError( |
||||
|
'Authentication token must be in `query`, `header` or `cookie`' |
||||
|
) |
||||
|
|
||||
|
def __deserialize_file(self, response): |
||||
|
"""Deserializes body to file |
||||
|
|
||||
|
Saves response body into a file in a temporary folder, |
||||
|
using the filename from the `Content-Disposition` header if provided. |
||||
|
|
||||
|
:param response: RESTResponse. |
||||
|
:return: file path. |
||||
|
""" |
||||
|
fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path) |
||||
|
os.close(fd) |
||||
|
os.remove(path) |
||||
|
|
||||
|
content_disposition = response.getheader("Content-Disposition") |
||||
|
if content_disposition: |
||||
|
filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?', |
||||
|
content_disposition).group(1) |
||||
|
path = os.path.join(os.path.dirname(path), filename) |
||||
|
|
||||
|
with open(path, "wb") as f: |
||||
|
f.write(response.data) |
||||
|
|
||||
|
return path |
||||
|
|
||||
|
def __deserialize_primitive(self, data, klass): |
||||
|
"""Deserializes string to primitive type. |
||||
|
|
||||
|
:param data: str. |
||||
|
:param klass: class literal. |
||||
|
|
||||
|
:return: int, float, str, bool. |
||||
|
""" |
||||
|
try: |
||||
|
return klass(data) |
||||
|
except UnicodeEncodeError: |
||||
|
return str(data) |
||||
|
except TypeError: |
||||
|
return data |
||||
|
|
||||
|
def __deserialize_object(self, value): |
||||
|
"""Return an original value. |
||||
|
|
||||
|
:return: object. |
||||
|
""" |
||||
|
return value |
||||
|
|
||||
|
def __deserialize_date(self, string): |
||||
|
"""Deserializes string to date. |
||||
|
|
||||
|
:param string: str. |
||||
|
:return: date. |
||||
|
""" |
||||
|
try: |
||||
|
return parse(string).date() |
||||
|
except ImportError: |
||||
|
return string |
||||
|
except ValueError: |
||||
|
raise rest.ApiException( |
||||
|
status=0, |
||||
|
reason="Failed to parse `{0}` as date object".format(string) |
||||
|
) |
||||
|
|
||||
|
def __deserialize_datetime(self, string): |
||||
|
"""Deserializes string to datetime. |
||||
|
|
||||
|
The string should be in iso8601 datetime format. |
||||
|
|
||||
|
:param string: str. |
||||
|
:return: datetime. |
||||
|
""" |
||||
|
try: |
||||
|
return parse(string) |
||||
|
except ImportError: |
||||
|
return string |
||||
|
except ValueError: |
||||
|
raise rest.ApiException( |
||||
|
status=0, |
||||
|
reason=( |
||||
|
"Failed to parse `{0}` as datetime object" |
||||
|
.format(string) |
||||
|
) |
||||
|
) |
||||
|
|
||||
|
def __deserialize_model(self, data, klass): |
||||
|
"""Deserializes list or dict to model. |
||||
|
|
||||
|
:param data: dict, list. |
||||
|
:param klass: class literal. |
||||
|
:return: model object. |
||||
|
""" |
||||
|
|
||||
|
return klass.from_dict(data) |
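The methods above make up the whole request path of the generated ApiClient: default headers, parameter serialization, auth injection, the HTTP dispatch in request(), and response deserialization. A minimal usage sketch follows; it relies only on methods defined in this file, but the "/dir/assign" path, the "count" query parameter, the "object" response type (assumed to be present in the client's native type mapping), and a master listening on the configured host are illustrative assumptions, not anything fixed by this changeset.

    from openapi_client.api_client import ApiClient

    client = ApiClient.get_default()              # lazily creates and stores a default instance
    client.user_agent = "seaweedfs-example/0.1"   # overrides the default User-Agent header
    client.set_default_header("X-Trace-Id", "demo-123")

    # Synchronous call: returns (data, status, headers) unless _return_http_data_only is set.
    data, status, headers = client.call_api(
        "/dir/assign", "GET",                     # assumed master endpoint, for illustration only
        query_params=[("count", 1)],
        response_types_map={"200": "object"},     # assumes "object" is in NATIVE_TYPES_MAPPING
        auth_settings=[],
    )

    # Asynchronous call: async_req=True hands the work to the lazily created thread pool
    # and returns a multiprocessing.pool.AsyncResult; .get() blocks until the
    # (data, status, headers) tuple is ready.
    thread = client.call_api(
        "/dir/assign", "GET",
        response_types_map={"200": "object"},
        auth_settings=[],
        async_req=True,
    )
    result = thread.get()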
@ -0,0 +1,426 @@ |
|||||
|
# coding: utf-8 |
||||
|
|
||||
|
""" |
||||
|
Seaweedfs Master Server API |
||||
|
|
||||
|
The Seaweedfs Master Server API allows you to store blobs # noqa: E501 |
||||
|
|
||||
|
The version of the OpenAPI document: 3.43.0 |
||||
|
Generated by: https://openapi-generator.tech |
||||
|
""" |
||||
|
|
||||
|
|
||||
|
from __future__ import absolute_import |
||||
|
|
||||
|
import copy |
||||
|
import logging |
||||
|
import multiprocessing |
||||
|
import sys |
||||
|
import urllib3 |
||||
|
|
||||
|
import http.client as httplib |
||||
|
from openapi_client.exceptions import ApiValueError |
||||
|
|
||||
|
|
||||
|
JSON_SCHEMA_VALIDATION_KEYWORDS = { |
||||
|
'multipleOf', 'maximum', 'exclusiveMaximum', |
||||
|
'minimum', 'exclusiveMinimum', 'maxLength', |
||||
|
'minLength', 'pattern', 'maxItems', 'minItems' |
||||
|
} |
||||
|
|
||||
|
class Configuration(object): |
||||
|
"""NOTE: This class is auto generated by OpenAPI Generator |
||||
|
|
||||
|
Ref: https://openapi-generator.tech |
||||
|
Do not edit the class manually. |
||||
|
|
||||
|
:param host: Base url. |
||||
|
:param api_key: Dict to store API key(s). |
||||
|
Each entry in the dict specifies an API key. |
||||
|
The dict key is the name of the security scheme in the OAS specification. |
||||
|
The dict value is the API key secret. |
||||
|
:param api_key_prefix: Dict to store API prefix (e.g. Bearer). |
||||
|
The dict key is the name of the security scheme in the OAS specification. |
||||
|
The dict value is an API key prefix when generating the auth data. |
||||
|
:param username: Username for HTTP basic authentication. |
||||
|
:param password: Password for HTTP basic authentication. |
||||
|
:param access_token: Access token. |
||||
|
:param server_index: Index to servers configuration. |
||||
|
:param server_variables: Mapping with string values to replace variables in |
||||
|
templated server configuration. The validation of enums is performed for |
||||
|
variables with defined enum values before. |
||||
|
:param server_operation_index: Mapping from operation ID to an index to server |
||||
|
configuration. |
||||
|
:param server_operation_variables: Mapping from operation ID to a mapping with |
||||
|
string values to replace variables in templated server configuration. |
||||
|
The validation of enums is performed for variables with defined enum values before. |
||||
|
:param ssl_ca_cert: str - the path to a file of concatenated CA certificates |
||||
|
in PEM format. |
||||
|
|
||||
|
""" |
||||
|
|
||||
|
_default = None |
||||
|
|
||||
|
def __init__(self, host=None, |
||||
|
api_key=None, api_key_prefix=None, |
||||
|
username=None, password=None, |
||||
|
access_token=None, |
||||
|
server_index=None, server_variables=None, |
||||
|
server_operation_index=None, server_operation_variables=None, |
||||
|
ssl_ca_cert=None, |
||||
|
): |
||||
|
"""Constructor |
||||
|
""" |
||||
|
self._base_path = "https://127.0.0.1:9333" if host is None else host |
||||
|
"""Default Base url |
||||
|
""" |
||||
|
self.server_index = 0 if server_index is None and host is None else server_index |
||||
|
self.server_operation_index = server_operation_index or {} |
||||
|
"""Default server index |
||||
|
""" |
||||
|
self.server_variables = server_variables or {} |
||||
|
self.server_operation_variables = server_operation_variables or {} |
||||
|
"""Default server variables |
||||
|
""" |
||||
|
self.temp_folder_path = None |
||||
|
"""Temp file folder for downloading files |
||||
|
""" |
||||
|
# Authentication Settings |
||||
|
self.api_key = {} |
||||
|
if api_key: |
||||
|
self.api_key = api_key |
||||
|
"""dict to store API key(s) |
||||
|
""" |
||||
|
self.api_key_prefix = {} |
||||
|
if api_key_prefix: |
||||
|
self.api_key_prefix = api_key_prefix |
||||
|
"""dict to store API prefix (e.g. Bearer) |
||||
|
""" |
||||
|
self.refresh_api_key_hook = None |
||||
|
"""function hook to refresh API key if expired |
||||
|
""" |
||||
|
self.username = username |
||||
|
"""Username for HTTP basic authentication |
||||
|
""" |
||||
|
self.password = password |
||||
|
"""Password for HTTP basic authentication |
||||
|
""" |
||||
|
self.access_token = access_token |
||||
|
"""Access token |
||||
|
""" |
||||
|
self.logger = {} |
||||
|
"""Logging Settings |
||||
|
""" |
||||
|
self.logger["package_logger"] = logging.getLogger("openapi_client") |
||||
|
self.logger["urllib3_logger"] = logging.getLogger("urllib3") |
||||
|
self.logger_format = '%(asctime)s %(levelname)s %(message)s' |
||||
|
"""Log format |
||||
|
""" |
||||
|
self.logger_stream_handler = None |
||||
|
"""Log stream handler |
||||
|
""" |
||||
|
self.logger_file_handler = None |
||||
|
"""Log file handler |
||||
|
""" |
||||
|
self.logger_file = None |
||||
|
"""Debug file location |
||||
|
""" |
||||
|
self.debug = False |
||||
|
"""Debug switch |
||||
|
""" |
||||
|
|
||||
|
self.verify_ssl = True |
||||
|
"""SSL/TLS verification |
||||
|
Set this to False to skip verifying the SSL certificate when calling |
||||
|
the API over HTTPS. |
||||
|
""" |
||||
|
self.ssl_ca_cert = ssl_ca_cert |
||||
|
"""Set this to customize the certificate file to verify the peer. |
||||
|
""" |
||||
|
self.cert_file = None |
||||
|
"""client certificate file |
||||
|
""" |
||||
|
self.key_file = None |
||||
|
"""client key file |
||||
|
""" |
||||
|
self.assert_hostname = None |
||||
|
"""Set this to True/False to enable/disable SSL hostname verification. |
||||
|
""" |
||||
|
|
||||
|
self.connection_pool_maxsize = multiprocessing.cpu_count() * 5 |
||||
|
"""urllib3 connection pool's maximum number of connections saved |
||||
|
per pool. urllib3 uses 1 connection as default value, but this is |
||||
|
not the best value when you are making a lot of possibly parallel |
||||
|
requests to the same host, which is often the case here. |
||||
|
cpu_count * 5 is used as default value to increase performance. |
||||
|
""" |
||||
|
|
||||
|
self.proxy = None |
||||
|
"""Proxy URL |
||||
|
""" |
||||
|
self.proxy_headers = None |
||||
|
"""Proxy headers |
||||
|
""" |
||||
|
self.safe_chars_for_path_param = '' |
||||
|
"""Safe chars for path_param |
||||
|
""" |
||||
|
self.retries = None |
||||
|
"""Adding retries to override urllib3 default value 3 |
||||
|
""" |
||||
|
# Enable client side validation |
||||
|
self.client_side_validation = True |
||||
|
|
||||
|
self.socket_options = None |
||||
|
"""Options to pass down to the underlying urllib3 socket |
||||
|
""" |
||||
|
|
||||
|
def __deepcopy__(self, memo): |
||||
|
cls = self.__class__ |
||||
|
result = cls.__new__(cls) |
||||
|
memo[id(self)] = result |
||||
|
for k, v in self.__dict__.items(): |
||||
|
if k not in ('logger', 'logger_file_handler'): |
||||
|
setattr(result, k, copy.deepcopy(v, memo)) |
||||
|
# shallow copy of loggers |
||||
|
result.logger = copy.copy(self.logger) |
||||
|
# use setters to configure loggers |
||||
|
result.logger_file = self.logger_file |
||||
|
result.debug = self.debug |
||||
|
return result |
||||
|
|
||||
|
def __setattr__(self, name, value): |
||||
|
object.__setattr__(self, name, value) |
||||
|
|
||||
|
@classmethod |
||||
|
def set_default(cls, default): |
||||
|
"""Set default instance of configuration. |
||||
|
|
||||
|
It stores the default configuration, which can be |
||||
|
returned by the get_default_copy method. |
||||
|
|
||||
|
:param default: object of Configuration |
||||
|
""" |
||||
|
cls._default = default |
||||
|
|
||||
|
@classmethod |
||||
|
def get_default_copy(cls): |
||||
|
"""Deprecated. Please use `get_default` instead. |
||||
|
|
||||
|
This method is kept for backwards compatibility and simply calls get_default(). |
||||
|
|
||||
|
:return: The configuration object. |
||||
|
""" |
||||
|
return cls.get_default() |
||||
|
|
||||
|
@classmethod |
||||
|
def get_default(cls): |
||||
|
"""Return the default configuration. |
||||
|
|
||||
|
If no default instance has been set yet, this method creates one |
||||
|
with the default constructor and stores it; otherwise it returns |
||||
|
the stored default configuration. |
||||
|
|
||||
|
:return: The configuration object. |
||||
|
""" |
||||
|
if cls._default is None: |
||||
|
cls._default = Configuration() |
||||
|
return cls._default |
||||
|
|
||||
|
@property |
||||
|
def logger_file(self): |
||||
|
"""The logger file. |
||||
|
|
||||
|
If the logger_file is None, then add stream handler and remove file |
||||
|
handler. Otherwise, add file handler and remove stream handler. |
||||
|
|
||||
|
:param value: The logger_file path. |
||||
|
:type: str |
||||
|
""" |
||||
|
return self.__logger_file |
||||
|
|
||||
|
@logger_file.setter |
||||
|
def logger_file(self, value): |
||||
|
"""The logger file. |
||||
|
|
||||
|
If the logger_file is None, then add stream handler and remove file |
||||
|
handler. Otherwise, add file handler and remove stream handler. |
||||
|
|
||||
|
:param value: The logger_file path. |
||||
|
:type: str |
||||
|
""" |
||||
|
self.__logger_file = value |
||||
|
if self.__logger_file: |
||||
|
# If set logging file, |
||||
|
# then add file handler and remove stream handler. |
||||
|
self.logger_file_handler = logging.FileHandler(self.__logger_file) |
||||
|
self.logger_file_handler.setFormatter(self.logger_formatter) |
||||
|
for _, logger in self.logger.items(): |
||||
|
logger.addHandler(self.logger_file_handler) |
||||
|
|
||||
|
@property |
||||
|
def debug(self): |
||||
|
"""Debug status |
||||
|
|
||||
|
:param value: The debug status, True or False. |
||||
|
:type: bool |
||||
|
""" |
||||
|
return self.__debug |
||||
|
|
||||
|
@debug.setter |
||||
|
def debug(self, value): |
||||
|
"""Debug status |
||||
|
|
||||
|
:param value: The debug status, True or False. |
||||
|
:type: bool |
||||
|
""" |
||||
|
self.__debug = value |
||||
|
if self.__debug: |
||||
|
# if debug status is True, turn on debug logging |
||||
|
for _, logger in self.logger.items(): |
||||
|
logger.setLevel(logging.DEBUG) |
||||
|
# turn on httplib debug |
||||
|
httplib.HTTPConnection.debuglevel = 1 |
||||
|
else: |
||||
|
# if debug status is False, turn off debug logging, |
||||
|
# setting log level to default `logging.WARNING` |
||||
|
for _, logger in self.logger.items(): |
||||
|
logger.setLevel(logging.WARNING) |
||||
|
# turn off httplib debug |
||||
|
httplib.HTTPConnection.debuglevel = 0 |
||||
|
|
||||
|
@property |
||||
|
def logger_format(self): |
||||
|
"""The logger format. |
||||
|
|
||||
|
The logger_formatter is updated whenever logger_format is set. |
||||
|
|
||||
|
:param value: The format string. |
||||
|
:type: str |
||||
|
""" |
||||
|
return self.__logger_format |
||||
|
|
||||
|
@logger_format.setter |
||||
|
def logger_format(self, value): |
||||
|
"""The logger format. |
||||
|
|
||||
|
The logger_formatter is updated whenever logger_format is set. |
||||
|
|
||||
|
:param value: The format string. |
||||
|
:type: str |
||||
|
""" |
||||
|
self.__logger_format = value |
||||
|
self.logger_formatter = logging.Formatter(self.__logger_format) |
||||
|
|
||||
|
def get_api_key_with_prefix(self, identifier, alias=None): |
||||
|
"""Gets API key (with prefix if set). |
||||
|
|
||||
|
:param identifier: The identifier of apiKey. |
||||
|
:param alias: The alternative identifier of apiKey. |
||||
|
:return: The token for api key authentication. |
||||
|
""" |
||||
|
if self.refresh_api_key_hook is not None: |
||||
|
self.refresh_api_key_hook(self) |
||||
|
key = self.api_key.get(identifier, self.api_key.get(alias) if alias is not None else None) |
||||
|
if key: |
||||
|
prefix = self.api_key_prefix.get(identifier) |
||||
|
if prefix: |
||||
|
return "%s %s" % (prefix, key) |
||||
|
else: |
||||
|
return key |
||||
|
|
||||
|
def get_basic_auth_token(self): |
||||
|
"""Gets HTTP basic authentication header (string). |
||||
|
|
||||
|
:return: The token for basic HTTP authentication. |
||||
|
""" |
||||
|
username = "" |
||||
|
if self.username is not None: |
||||
|
username = self.username |
||||
|
password = "" |
||||
|
if self.password is not None: |
||||
|
password = self.password |
||||
|
return urllib3.util.make_headers( |
||||
|
basic_auth=username + ':' + password |
||||
|
).get('authorization') |
||||
|
|
||||
|
def auth_settings(self): |
||||
|
"""Gets Auth Settings dict for api client. |
||||
|
|
||||
|
:return: The Auth Settings information dict. |
||||
|
""" |
||||
|
auth = {} |
||||
|
return auth |
||||
|
|
||||
|
def to_debug_report(self): |
||||
|
"""Gets the essential information for debugging. |
||||
|
|
||||
|
:return: The report for debugging. |
||||
|
""" |
||||
|
return "Python SDK Debug Report:\n"\ |
||||
|
"OS: {env}\n"\ |
||||
|
"Python Version: {pyversion}\n"\ |
||||
|
"Version of the API: 3.43.0\n"\ |
||||
|
"SDK Package Version: 1.0.0".\ |
||||
|
format(env=sys.platform, pyversion=sys.version) |
||||
|
|
||||
|
def get_host_settings(self): |
||||
|
"""Gets an array of host settings |
||||
|
|
||||
|
:return: An array of host settings |
||||
|
""" |
||||
|
return [ |
||||
|
{ |
||||
|
'url': "https://127.0.0.1:9333", |
||||
|
'description': "No description provided", |
||||
|
} |
||||
|
] |
||||
|
|
||||
|
def get_host_from_settings(self, index, variables=None, servers=None): |
||||
|
"""Gets host URL based on the index and variables |
||||
|
:param index: array index of the host settings |
||||
|
:param variables: hash of variable and the corresponding value |
||||
|
:param servers: an array of host settings or None |
||||
|
:return: URL based on host settings |
||||
|
""" |
||||
|
if index is None: |
||||
|
return self._base_path |
||||
|
|
||||
|
variables = {} if variables is None else variables |
||||
|
servers = self.get_host_settings() if servers is None else servers |
||||
|
|
||||
|
try: |
||||
|
server = servers[index] |
||||
|
except IndexError: |
||||
|
raise ValueError( |
||||
|
"Invalid index {0} when selecting the host settings. " |
||||
|
"Must be less than {1}".format(index, len(servers))) |
||||
|
|
||||
|
url = server['url'] |
||||
|
|
||||
|
# go through variables and replace placeholders |
||||
|
for variable_name, variable in server.get('variables', {}).items(): |
||||
|
used_value = variables.get( |
||||
|
variable_name, variable['default_value']) |
||||
|
|
||||
|
if 'enum_values' in variable \ |
||||
|
and used_value not in variable['enum_values']: |
||||
|
raise ValueError( |
||||
|
"The variable `{0}` in the host URL has invalid value " |
||||
|
"{1}. Must be {2}.".format( |
||||
|
variable_name, used_value, |
||||
|
variable['enum_values'])) |
||||
|
|
||||
|
url = url.replace("{" + variable_name + "}", used_value) |
||||
|
|
||||
|
return url |
||||
|
|
||||
|
@property |
||||
|
def host(self): |
||||
|
"""Return generated host.""" |
||||
|
return self.get_host_from_settings(self.server_index, variables=self.server_variables) |
||||
|
|
||||
|
@host.setter |
||||
|
def host(self, value): |
||||
|
"""Fix base path.""" |
||||
|
self._base_path = value |
||||
|
self.server_index = None |
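Configuration is a plain settings object; nothing is read from the environment. Below is a minimal sketch of wiring one up for a local master and making it the process-wide default. The host URL and retry count are illustrative assumptions, and only attributes defined in this class are used.

    from openapi_client.configuration import Configuration

    config = Configuration(host="http://127.0.0.1:9333")   # assumed local master address
    config.debug = True        # property setter switches the package and urllib3 loggers to DEBUG
    config.retries = 3         # overrides urllib3's default retry behaviour
    config.verify_ssl = True   # keep certificate verification on for HTTPS hosts

    # Store it so later Configuration.get_default() calls pick up this instance.
    Configuration.set_default(config)

    print(config.host)              # the explicit host wins over the generated server list
    print(config.to_debug_report()) # OS / Python / API / SDK versions for bug reports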
@ -0,0 +1,160 @@ |
|||||
|
# coding: utf-8 |
||||
|
|
||||
|
""" |
||||
|
Seaweedfs Master Server API |
||||
|
|
||||
|
The Seaweedfs Master Server API allows you to store blobs # noqa: E501 |
||||
|
|
||||
|
The version of the OpenAPI document: 3.43.0 |
||||
|
Generated by: https://openapi-generator.tech |
||||
|
""" |
||||
|
|
||||
|
|
||||
|
class OpenApiException(Exception): |
||||
|
"""The base exception class for all OpenAPIExceptions""" |
||||
|
|
||||
|
|
||||
|
class ApiTypeError(OpenApiException, TypeError): |
||||
|
def __init__(self, msg, path_to_item=None, valid_classes=None, |
||||
|
key_type=None): |
||||
|
""" Raises an exception for TypeErrors |
||||
|
|
||||
|
Args: |
||||
|
msg (str): the exception message |
||||
|
|
||||
|
Keyword Args: |
||||
|
path_to_item (list): a list of keys and indices to get to the |
||||
|
current_item |
||||
|
None if unset |
||||
|
valid_classes (tuple): the primitive classes that current item |
||||
|
should be an instance of |
||||
|
None if unset |
||||
|
key_type (bool): False if our value is a value in a dict |
||||
|
True if it is a key in a dict |
||||
|
False if our item is an item in a list |
||||
|
None if unset |
||||
|
""" |
||||
|
self.path_to_item = path_to_item |
||||
|
self.valid_classes = valid_classes |
||||
|
self.key_type = key_type |
||||
|
full_msg = msg |
||||
|
if path_to_item: |
||||
|
full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) |
||||
|
super(ApiTypeError, self).__init__(full_msg) |
||||
|
|
||||
|
|
||||
|
class ApiValueError(OpenApiException, ValueError): |
||||
|
def __init__(self, msg, path_to_item=None): |
||||
|
""" |
||||
|
Args: |
||||
|
msg (str): the exception message |
||||
|
|
||||
|
Keyword Args: |
||||
|
path_to_item (list): the path to the exception in the |
||||
|
received_data dict. None if unset |
||||
|
""" |
||||
|
|
||||
|
self.path_to_item = path_to_item |
||||
|
full_msg = msg |
||||
|
if path_to_item: |
||||
|
full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) |
||||
|
super(ApiValueError, self).__init__(full_msg) |
||||
|
|
||||
|
|
||||
|
class ApiAttributeError(OpenApiException, AttributeError): |
||||
|
def __init__(self, msg, path_to_item=None): |
||||
|
""" |
||||
|
Raised when an attribute reference or assignment fails. |
||||
|
|
||||
|
Args: |
||||
|
msg (str): the exception message |
||||
|
|
||||
|
Keyword Args: |
||||
|
path_to_item (None/list): the path to the exception in the |
||||
|
received_data dict |
||||
|
""" |
||||
|
self.path_to_item = path_to_item |
||||
|
full_msg = msg |
||||
|
if path_to_item: |
||||
|
full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) |
||||
|
super(ApiAttributeError, self).__init__(full_msg) |
||||
|
|
||||
|
|
||||
|
class ApiKeyError(OpenApiException, KeyError): |
||||
|
def __init__(self, msg, path_to_item=None): |
||||
|
""" |
||||
|
Args: |
||||
|
msg (str): the exception message |
||||
|
|
||||
|
Keyword Args: |
||||
|
path_to_item (None/list): the path to the exception in the |
||||
|
received_data dict |
||||
|
""" |
||||
|
self.path_to_item = path_to_item |
||||
|
full_msg = msg |
||||
|
if path_to_item: |
||||
|
full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) |
||||
|
super(ApiKeyError, self).__init__(full_msg) |
||||
|
|
||||
|
|
||||
|
class ApiException(OpenApiException): |
||||
|
|
||||
|
def __init__(self, status=None, reason=None, http_resp=None): |
||||
|
if http_resp: |
||||
|
self.status = http_resp.status |
||||
|
self.reason = http_resp.reason |
||||
|
self.body = http_resp.data |
||||
|
self.headers = http_resp.getheaders() |
||||
|
else: |
||||
|
self.status = status |
||||
|
self.reason = reason |
||||
|
self.body = None |
||||
|
self.headers = None |
||||
|
|
||||
|
def __str__(self): |
||||
|
"""Custom error messages for exception""" |
||||
|
error_message = "({0})\n"\ |
||||
|
"Reason: {1}\n".format(self.status, self.reason) |
||||
|
if self.headers: |
||||
|
error_message += "HTTP response headers: {0}\n".format( |
||||
|
self.headers) |
||||
|
|
||||
|
if self.body: |
||||
|
error_message += "HTTP response body: {0}\n".format(self.body) |
||||
|
|
||||
|
return error_message |
||||
|
|
||||
|
|
||||
|
class NotFoundException(ApiException): |
||||
|
|
||||
|
def __init__(self, status=None, reason=None, http_resp=None): |
||||
|
super(NotFoundException, self).__init__(status, reason, http_resp) |
||||
|
|
||||
|
|
||||
|
class UnauthorizedException(ApiException): |
||||
|
|
||||
|
def __init__(self, status=None, reason=None, http_resp=None): |
||||
|
super(UnauthorizedException, self).__init__(status, reason, http_resp) |
||||
|
|
||||
|
|
||||
|
class ForbiddenException(ApiException): |
||||
|
|
||||
|
def __init__(self, status=None, reason=None, http_resp=None): |
||||
|
super(ForbiddenException, self).__init__(status, reason, http_resp) |
||||
|
|
||||
|
|
||||
|
class ServiceException(ApiException): |
||||
|
|
||||
|
def __init__(self, status=None, reason=None, http_resp=None): |
||||
|
super(ServiceException, self).__init__(status, reason, http_resp) |
||||
|
|
||||
|
|
||||
|
def render_path(path_to_item): |
||||
|
"""Returns a string representation of a path""" |
||||
|
result = "" |
||||
|
for pth in path_to_item: |
||||
|
if isinstance(pth, int): |
||||
|
result += "[{0}]".format(pth) |
||||
|
else: |
||||
|
result += "['{0}']".format(pth) |
||||
|
return result |
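The exception hierarchy above does the routing work for callers: the typed exceptions carry a path_to_item that render_path() formats, while ApiException and its HTTP-specific subclasses carry status, reason, body, and headers. A short, self-contained sketch of how these behave (all values are illustrative):

    from openapi_client.exceptions import (
        ApiException, ApiTypeError, NotFoundException, render_path,
    )

    # render_path turns a path-to-item list into the bracketed form used in messages.
    print(render_path(["locations", 0, "url"]))   # ['locations'][0]['url']

    # path_to_item is appended to the message of the typed exceptions.
    err = ApiTypeError("must be a str", path_to_item=["fid"], valid_classes=(str,))
    print(str(err))                               # must be a str at ['fid']

    # The HTTP-level exceptions fall back to explicit status/reason when no
    # http_resp is given; catching ApiException also catches the subclasses.
    try:
        raise NotFoundException(status=404, reason="Not Found")
    except ApiException as e:
        print(str(e))                             # "(404)" followed by "Reason: Not Found"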
@ -0,0 +1,18 @@ |
|||||
|
# coding: utf-8 |
||||
|
|
||||
|
# flake8: noqa |
||||
|
""" |
||||
|
Seaweedfs Master Server API |
||||
|
|
||||
|
The Seaweedfs Master Server API allows you to store blobs # noqa: E501 |
||||
|
|
||||
|
The version of the OpenAPI document: 3.43.0 |
||||
|
Generated by: https://openapi-generator.tech |
||||
|
""" |
||||
|
|
||||
|
|
||||
|
from __future__ import absolute_import |
||||
|
|
||||
|
# import models into model package |
||||
|
from openapi_client.models.file_key import FileKey |
||||
|
from openapi_client.models.location import Location |
@ -0,0 +1,86 @@ |
|||||
|
# coding: utf-8 |
||||
|
|
||||
|
""" |
||||
|
Seaweedfs Master Server API |
||||
|
|
||||
|
The Seaweedfs Master Server API allows you to store blobs # noqa: E501 |
||||
|
|
||||
|
The version of the OpenAPI document: 3.43.0 |
||||
|
Generated by: https://openapi-generator.tech |
||||
|
""" |
||||
|
|
||||
|
|
||||
|
from __future__ import annotations |
||||
|
from inspect import getfullargspec |
||||
|
import pprint |
||||
|
import re # noqa: F401 |
||||
|
import json |
||||
|
|
||||
|
|
||||
|
from typing import Any, Optional |
||||
|
from pydantic import BaseModel |
||||
|
|
||||
|
class FileKey(BaseModel): |
||||
|
"""NOTE: This class is auto generated by OpenAPI Generator. |
||||
|
Ref: https://openapi-generator.tech |
||||
|
|
||||
|
Do not edit the class manually. |
||||
|
""" |
||||
|
count: Optional[Any] = None |
||||
|
fid: Optional[Any] = None |
||||
|
url: Optional[Any] = None |
||||
|
__properties = ["count", "fid", "url"] |
||||
|
|
||||
|
class Config: |
||||
|
allow_population_by_field_name = True |
||||
|
validate_assignment = True |
||||
|
|
||||
|
def to_str(self) -> str: |
||||
|
"""Returns the string representation of the model using alias""" |
||||
|
return pprint.pformat(self.dict(by_alias=True)) |
||||
|
|
||||
|
def to_json(self) -> str: |
||||
|
"""Returns the JSON representation of the model using alias""" |
||||
|
return json.dumps(self.to_dict()) |
||||
|
|
||||
|
@classmethod |
||||
|
def from_json(cls, json_str: str) -> FileKey: |
||||
|
"""Create an instance of FileKey from a JSON string""" |
||||
|
return cls.from_dict(json.loads(json_str)) |
||||
|
|
||||
|
def to_dict(self): |
||||
|
"""Returns the dictionary representation of the model using alias""" |
||||
|
_dict = self.dict(by_alias=True, |
||||
|
exclude={ |
||||
|
}, |
||||
|
exclude_none=True) |
||||
|
# set to None if count (nullable) is None |
||||
|
if self.count is None: |
||||
|
_dict['count'] = None |
||||
|
|
||||
|
# set to None if fid (nullable) is None |
||||
|
if self.fid is None: |
||||
|
_dict['fid'] = None |
||||
|
|
||||
|
# set to None if url (nullable) is None |
||||
|
if self.url is None: |
||||
|
_dict['url'] = None |
||||
|
|
||||
|
return _dict |
||||
|
|
||||
|
@classmethod |
||||
|
def from_dict(cls, obj: dict) -> FileKey: |
||||
|
"""Create an instance of FileKey from a dict""" |
||||
|
if obj is None: |
||||
|
return None |
||||
|
|
||||
|
if type(obj) is not dict: |
||||
|
return FileKey.parse_obj(obj) |
||||
|
|
||||
|
_obj = FileKey.parse_obj({ |
||||
|
"count": obj.get("count"), |
||||
|
"fid": obj.get("fid"), |
||||
|
"url": obj.get("url") |
||||
|
}) |
||||
|
return _obj |
||||
|
|
@ -0,0 +1,80 @@ |
|||||
|
# coding: utf-8 |
||||
|
|
||||
|
""" |
||||
|
Seaweedfs Master Server API |
||||
|
|
||||
|
The Seaweedfs Master Server API allows you to store blobs # noqa: E501 |
||||
|
|
||||
|
The version of the OpenAPI document: 3.43.0 |
||||
|
Generated by: https://openapi-generator.tech |
||||
|
""" |
||||
|
|
||||
|
|
||||
|
from __future__ import annotations |
||||
|
from inspect import getfullargspec |
||||
|
import pprint |
||||
|
import re # noqa: F401 |
||||
|
import json |
||||
|
|
||||
|
|
||||
|
from typing import Any, Optional |
||||
|
from pydantic import BaseModel, Field |
||||
|
|
||||
|
class Location(BaseModel): |
||||
|
"""NOTE: This class is auto generated by OpenAPI Generator. |
||||
|
Ref: https://openapi-generator.tech |
||||
|
|
||||
|
Do not edit the class manually. |
||||
|
""" |
||||
|
public_url: Optional[Any] = Field(None, alias="publicUrl") |
||||
|
url: Optional[Any] = None |
||||
|
__properties = ["publicUrl", "url"] |
||||
|
|
||||
|
class Config: |
||||
|
allow_population_by_field_name = True |
||||
|
validate_assignment = True |
||||
|
|
||||
|
def to_str(self) -> str: |
||||
|
"""Returns the string representation of the model using alias""" |
||||
|
return pprint.pformat(self.dict(by_alias=True)) |
||||
|
|
||||
|
def to_json(self) -> str: |
||||
|
"""Returns the JSON representation of the model using alias""" |
||||
|
return json.dumps(self.to_dict()) |
||||
|
|
||||
|
@classmethod |
||||
|
def from_json(cls, json_str: str) -> Location: |
||||
|
"""Create an instance of Location from a JSON string""" |
||||
|
return cls.from_dict(json.loads(json_str)) |
||||
|
|
||||
|
def to_dict(self): |
||||
|
"""Returns the dictionary representation of the model using alias""" |
||||
|
_dict = self.dict(by_alias=True, |
||||
|
exclude={ |
||||
|
}, |
||||
|
exclude_none=True) |
||||
|
# set to None if public_url (nullable) is None |
||||
|
if self.public_url is None: |
||||
|
_dict['publicUrl'] = None |
||||
|
|
||||
|
# set to None if url (nullable) is None |
||||
|
if self.url is None: |
||||
|
_dict['url'] = None |
||||
|
|
||||
|
return _dict |
||||
|
|
||||
|
@classmethod |
||||
|
def from_dict(cls, obj: dict) -> Location: |
||||
|
"""Create an instance of Location from a dict""" |
||||
|
if obj is None: |
||||
|
return None |
||||
|
|
||||
|
if type(obj) is not dict: |
||||
|
return Location.parse_obj(obj) |
||||
|
|
||||
|
_obj = Location.parse_obj({ |
||||
|
"public_url": obj.get("publicUrl"), |
||||
|
"url": obj.get("url") |
||||
|
}) |
||||
|
return _obj |
||||
|
|
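Both models are thin pydantic wrappers whose to_dict()/to_json() use the wire (alias) field names and whose from_dict() accepts either a raw dict or an already-parsed object. A minimal round-trip sketch, with sample values that are illustrative only:

    from openapi_client.models.file_key import FileKey
    from openapi_client.models.location import Location

    # Parse a FileKey from a JSON document carrying the three declared fields (sample values).
    fk = FileKey.from_json('{"count": 1, "fid": "3,01637037d6", "url": "127.0.0.1:8080"}')
    print(fk.fid)        # 3,01637037d6
    print(fk.to_dict())  # {'count': 1, 'fid': '3,01637037d6', 'url': '127.0.0.1:8080'}
    print(fk.to_json())  # serialized back using the aliased field names

    # Location maps the wire name "publicUrl" onto the attribute public_url via an alias.
    loc = Location.from_dict({"publicUrl": "localhost:8080", "url": "127.0.0.1:8080"})
    print(loc.public_url)             # localhost:8080
    print(loc.to_dict()["publicUrl"]) # localhost:8080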
@ -0,0 +1,296 @@ |
|||||
|
# coding: utf-8 |
||||
|
|
||||
|
""" |
||||
|
Seaweedfs Master Server API |
||||
|
|
||||
|
The Seaweedfs Master Server API allows you to store blobs # noqa: E501 |
||||
|
|
||||
|
The version of the OpenAPI document: 3.43.0 |
||||
|
Generated by: https://openapi-generator.tech |
||||
|
""" |
||||
|
|
||||
|
|
||||
|
from __future__ import absolute_import |
||||
|
|
||||
|
import io |
||||
|
import json |
||||
|
import logging |
||||
|
import re |
||||
|
import ssl |
||||
|
|
||||
|
from urllib.parse import urlencode, quote_plus |
||||
|
import urllib3 |
||||
|
|
||||
|
from openapi_client.exceptions import ApiException, UnauthorizedException, ForbiddenException, NotFoundException, ServiceException, ApiValueError |
||||
|
|
||||
|
|
||||
|
logger = logging.getLogger(__name__) |
||||
|
|
||||
|
|
||||
|
class RESTResponse(io.IOBase): |
||||
|
|
||||
|
def __init__(self, resp): |
||||
|
self.urllib3_response = resp |
||||
|
self.status = resp.status |
||||
|
self.reason = resp.reason |
||||
|
self.data = resp.data |
||||
|
|
||||
|
def getheaders(self): |
||||
|
"""Returns a dictionary of the response headers.""" |
||||
|
return self.urllib3_response.headers |
||||
|
|
||||
|
def getheader(self, name, default=None): |
||||
|
"""Returns a given response header.""" |
||||
|
return self.urllib3_response.headers.get(name, default) |
||||
|
|
||||
|
|
||||
|
class RESTClientObject(object): |
||||
|
|
||||
|
def __init__(self, configuration, pools_size=4, maxsize=None): |
||||
|
# urllib3.PoolManager will pass all kw parameters to connectionpool |
||||
|
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501 |
||||
|
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501 |
||||
|
# maxsize is the number of requests to host that are allowed in parallel # noqa: E501 |
||||
|
# Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501 |
||||
|
|
||||
|
# cert_reqs |
||||
|
if configuration.verify_ssl: |
||||
|
cert_reqs = ssl.CERT_REQUIRED |
||||
|
else: |
||||
|
cert_reqs = ssl.CERT_NONE |
||||
|
|
||||
|
addition_pool_args = {} |
||||
|
if configuration.assert_hostname is not None: |
||||
|
addition_pool_args['assert_hostname'] = configuration.assert_hostname # noqa: E501 |
||||
|
|
||||
|
if configuration.retries is not None: |
||||
|
addition_pool_args['retries'] = configuration.retries |
||||
|
|
||||
|
if configuration.socket_options is not None: |
||||
|
addition_pool_args['socket_options'] = configuration.socket_options |
||||
|
|
||||
|
if maxsize is None: |
||||
|
if configuration.connection_pool_maxsize is not None: |
||||
|
maxsize = configuration.connection_pool_maxsize |
||||
|
else: |
||||
|
maxsize = 4 |
||||
|
|
||||
|
# https pool manager |
||||
|
if configuration.proxy: |
||||
|
self.pool_manager = urllib3.ProxyManager( |
||||
|
num_pools=pools_size, |
||||
|
maxsize=maxsize, |
||||
|
cert_reqs=cert_reqs, |
||||
|
ca_certs=configuration.ssl_ca_cert, |
||||
|
cert_file=configuration.cert_file, |
||||
|
key_file=configuration.key_file, |
||||
|
proxy_url=configuration.proxy, |
||||
|
proxy_headers=configuration.proxy_headers, |
||||
|
**addition_pool_args |
||||
|
) |
||||
|
else: |
||||
|
self.pool_manager = urllib3.PoolManager( |
||||
|
num_pools=pools_size, |
||||
|
maxsize=maxsize, |
||||
|
cert_reqs=cert_reqs, |
||||
|
ca_certs=configuration.ssl_ca_cert, |
||||
|
cert_file=configuration.cert_file, |
||||
|
key_file=configuration.key_file, |
||||
|
**addition_pool_args |
||||
|
) |
||||
|
|
||||
|
def request(self, method, url, query_params=None, headers=None, |
||||
|
body=None, post_params=None, _preload_content=True, |
||||
|
_request_timeout=None): |
||||
|
"""Perform requests. |
||||
|
|
||||
|
:param method: http request method |
||||
|
:param url: http request url |
||||
|
:param query_params: query parameters in the url |
||||
|
:param headers: http request headers |
||||
|
:param body: request json body, for `application/json` |
||||
|
:param post_params: request post parameters, |
||||
|
`application/x-www-form-urlencoded` |
||||
|
and `multipart/form-data` |
||||
|
:param _preload_content: if False, the urllib3.HTTPResponse object will |
||||
|
be returned without reading/decoding response |
||||
|
data. Default is True. |
||||
|
:param _request_timeout: timeout setting for this request. If one |
||||
|
number is provided, it will be the total request |
||||
|
timeout. It can also be a pair (tuple) of |
||||
|
(connection, read) timeouts. |
||||
|
""" |
||||
|
method = method.upper() |
||||
|
assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT', |
||||
|
'PATCH', 'OPTIONS'] |
||||
|
|
||||
|
if post_params and body: |
||||
|
raise ApiValueError( |
||||
|
"body parameter cannot be used with post_params parameter." |
||||
|
) |
||||
|
|
||||
|
post_params = post_params or {} |
||||
|
headers = headers or {} |
||||
|
# url already contains the URL query string |
||||
|
# so reset query_params to empty dict |
||||
|
query_params = {} |
||||
|
|
||||
|
timeout = None |
||||
|
if _request_timeout: |
||||
|
if isinstance(_request_timeout, (int, float)): # noqa: E501,F821 |
||||
|
timeout = urllib3.Timeout(total=_request_timeout) |
||||
|
elif (isinstance(_request_timeout, tuple) and |
||||
|
len(_request_timeout) == 2): |
||||
|
timeout = urllib3.Timeout( |
||||
|
connect=_request_timeout[0], read=_request_timeout[1]) |
||||
|
|
||||
|
try: |
||||
|
# For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE` |
||||
|
if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']: |
||||
|
|
||||
|
# no content type provided or payload is json |
||||
|
if not headers.get('Content-Type') or re.search('json', headers['Content-Type'], re.IGNORECASE): |
||||
|
request_body = None |
||||
|
if body is not None: |
||||
|
request_body = json.dumps(body) |
||||
|
r = self.pool_manager.request( |
||||
|
method, url, |
||||
|
body=request_body, |
||||
|
preload_content=_preload_content, |
||||
|
timeout=timeout, |
||||
|
headers=headers) |
||||
|
elif headers['Content-Type'] == 'application/x-www-form-urlencoded': # noqa: E501 |
||||
|
r = self.pool_manager.request( |
||||
|
method, url, |
||||
|
fields=post_params, |
||||
|
encode_multipart=False, |
||||
|
preload_content=_preload_content, |
||||
|
timeout=timeout, |
||||
|
headers=headers) |
||||
|
elif headers['Content-Type'] == 'multipart/form-data': |
||||
|
# must del headers['Content-Type'], or the correct |
||||
|
# Content-Type generated by urllib3 would be |
||||
|
# overwritten. |
||||
|
del headers['Content-Type'] |
||||
|
r = self.pool_manager.request( |
||||
|
method, url, |
||||
|
fields=post_params, |
||||
|
encode_multipart=True, |
||||
|
preload_content=_preload_content, |
||||
|
timeout=timeout, |
||||
|
headers=headers) |
||||
|
# Pass a `string` parameter directly in the body to support |
||||
|
# content types other than JSON when the `body` argument is |
||||
|
# provided in serialized form |
||||
|
elif isinstance(body, str) or isinstance(body, bytes): |
||||
|
request_body = body |
||||
|
r = self.pool_manager.request( |
||||
|
method, url, |
||||
|
body=request_body, |
||||
|
preload_content=_preload_content, |
||||
|
timeout=timeout, |
||||
|
headers=headers) |
||||
|
else: |
||||
|
# Cannot generate the request from given parameters |
||||
|
msg = """Cannot prepare a request message for provided |
||||
|
arguments. Please check that your arguments match |
||||
|
the declared content type.""" |
||||
|
raise ApiException(status=0, reason=msg) |
||||
|
# For `GET`, `HEAD` |
||||
|
else: |
||||
|
r = self.pool_manager.request(method, url, |
||||
|
fields={}, |
||||
|
preload_content=_preload_content, |
||||
|
timeout=timeout, |
||||
|
headers=headers) |
||||
|
except urllib3.exceptions.SSLError as e: |
||||
|
msg = "{0}\n{1}".format(type(e).__name__, str(e)) |
||||
|
raise ApiException(status=0, reason=msg) |
||||
|
|
||||
|
if _preload_content: |
||||
|
r = RESTResponse(r) |
||||
|
|
||||
|
# log response body |
||||
|
logger.debug("response body: %s", r.data) |
||||
|
|
||||
|
if not 200 <= r.status <= 299: |
||||
|
if r.status == 401: |
||||
|
raise UnauthorizedException(http_resp=r) |
||||
|
|
||||
|
if r.status == 403: |
||||
|
raise ForbiddenException(http_resp=r) |
||||
|
|
||||
|
if r.status == 404: |
||||
|
raise NotFoundException(http_resp=r) |
||||
|
|
||||
|
if 500 <= r.status <= 599: |
||||
|
raise ServiceException(http_resp=r) |
||||
|
|
||||
|
raise ApiException(http_resp=r) |
||||
|
|
||||
|
return r |
||||
|
|
||||
|
def get_request(self, url, headers=None, query_params=None, _preload_content=True, |
||||
|
_request_timeout=None): |
||||
|
return self.request("GET", url, |
||||
|
headers=headers, |
||||
|
_preload_content=_preload_content, |
||||
|
_request_timeout=_request_timeout, |
||||
|
query_params=query_params) |
||||
|
|
||||
|
def head_request(self, url, headers=None, query_params=None, _preload_content=True, |
||||
|
_request_timeout=None): |
||||
|
return self.request("HEAD", url, |
||||
|
headers=headers, |
||||
|
_preload_content=_preload_content, |
||||
|
_request_timeout=_request_timeout, |
||||
|
query_params=query_params) |
||||
|
|
||||
|
def options_request(self, url, headers=None, query_params=None, post_params=None, |
||||
|
body=None, _preload_content=True, _request_timeout=None): |
||||
|
return self.request("OPTIONS", url, |
||||
|
headers=headers, |
||||
|
query_params=query_params, |
||||
|
post_params=post_params, |
||||
|
_preload_content=_preload_content, |
||||
|
_request_timeout=_request_timeout, |
||||
|
body=body) |
||||
|
|
||||
|
def delete_request(self, url, headers=None, query_params=None, body=None, |
||||
|
_preload_content=True, _request_timeout=None): |
||||
|
return self.request("DELETE", url, |
||||
|
headers=headers, |
||||
|
query_params=query_params, |
||||
|
_preload_content=_preload_content, |
||||
|
_request_timeout=_request_timeout, |
||||
|
body=body) |
||||
|
|
||||
|
def post_request(self, url, headers=None, query_params=None, post_params=None, |
||||
|
body=None, _preload_content=True, _request_timeout=None): |
||||
|
return self.request("POST", url, |
||||
|
headers=headers, |
||||
|
query_params=query_params, |
||||
|
post_params=post_params, |
||||
|
_preload_content=_preload_content, |
||||
|
_request_timeout=_request_timeout, |
||||
|
body=body) |
||||
|
|
||||
|
def put_request(self, url, headers=None, query_params=None, post_params=None, |
||||
|
body=None, _preload_content=True, _request_timeout=None): |
||||
|
return self.request("PUT", url, |
||||
|
headers=headers, |
||||
|
query_params=query_params, |
||||
|
post_params=post_params, |
||||
|
_preload_content=_preload_content, |
||||
|
_request_timeout=_request_timeout, |
||||
|
body=body) |
||||
|
|
||||
|
def patch_request(self, url, headers=None, query_params=None, post_params=None, |
||||
|
body=None, _preload_content=True, _request_timeout=None): |
||||
|
return self.request("PATCH", url, |
||||
|
headers=headers, |
||||
|
query_params=query_params, |
||||
|
post_params=post_params, |
||||
|
_preload_content=_preload_content, |
||||
|
_request_timeout=_request_timeout, |
||||
|
body=body) |
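The helpers above are thin wrappers that funnel every HTTP verb through request(), which in turn maps non-2xx responses onto the typed exceptions from openapi_client.exceptions (UnauthorizedException, ForbiddenException, NotFoundException, ServiceException, with ApiException as the catch-all). A minimal caller-side sketch of relying on that mapping follows; the master address and the no-argument dir_assign() call are illustrative assumptions, not something this file defines.

# Hedged example: assumes a SeaweedFS master at http://127.0.0.1:9333 and the
# generated Configuration/ApiClient/DefaultApi entry points from this package.
import openapi_client
from openapi_client.api.default_api import DefaultApi
from openapi_client.exceptions import (
    ApiException,
    NotFoundException,
    ServiceException,
)

configuration = openapi_client.Configuration(host="http://127.0.0.1:9333")
api = DefaultApi(openapi_client.ApiClient(configuration))

try:
    # Any non-2xx status surfaces as one of the exceptions raised in rest.py above.
    file_key = api.dir_assign()
    print("assigned fid:", file_key.fid)
except NotFoundException:
    print("endpoint missing on this master")
except ServiceException as e:
    print("master-side failure:", e.status, e.reason)
except ApiException as e:
    print("other API error:", e.status, e.reason)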
@ -0,0 +1,5 @@
python_dateutil >= 2.5.3
setuptools >= 21.0.0
urllib3 >= 1.25.3
pydantic >= 1.10.2
aenum >= 3.1.11
@ -0,0 +1,2 @@
[flake8]
max-line-length=99
@ -0,0 +1,46 @@
# coding: utf-8

"""
    Seaweedfs Master Server API

    The Seaweedfs Master Server API allows you to store blobs  # noqa: E501

    The version of the OpenAPI document: 3.43.0
    Generated by: https://openapi-generator.tech
"""


from setuptools import setup, find_packages  # noqa: H301

# To install the library, run the following
#
# python setup.py install
#
# prerequisite: setuptools
# http://pypi.python.org/pypi/setuptools
NAME = "openapi-client"
VERSION = "1.0.0"
PYTHON_REQUIRES = ">=3.7"
REQUIRES = [
    "urllib3 >= 1.25.3",
    "python-dateutil",
    "pydantic",
    "aenum"
]

setup(
    name=NAME,
    version=VERSION,
    description="Seaweedfs Master Server API",
    author="OpenAPI Generator community",
    author_email="team@openapitools.org",
    url="",
    keywords=["OpenAPI", "OpenAPI-Generator", "Seaweedfs Master Server API"],
    install_requires=REQUIRES,
    packages=find_packages(exclude=["test", "tests"]),
    include_package_data=True,
    long_description_content_type='text/markdown',
    long_description="""\
    The Seaweedfs Master Server API allows you to store blobs  # noqa: E501
    """
)
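With this setup.py in place the client can be installed from other/master-clients/python, for example with pip install . rather than the older python setup.py install mentioned in the comment. A small post-install smoke check might look like the sketch below; the host value is only an example, and it assumes the generated package exposes Configuration, ApiClient, and __version__ from its __init__:

# Assumes the package was installed into the current environment.
import openapi_client

config = openapi_client.Configuration(host="http://localhost:9333")  # example master address
client = openapi_client.ApiClient(config)
print("openapi_client", openapi_client.__version__, "targeting", config.host)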
@ -0,0 +1,3 @@
pytest~=7.1.3
pytest-cov>=2.8.1
pytest-randomly>=3.12.0
@ -0,0 +1,47 @@
# coding: utf-8

"""
    Seaweedfs Master Server API

    The Seaweedfs Master Server API allows you to store blobs  # noqa: E501

    The version of the OpenAPI document: 3.43.0
    Generated by: https://openapi-generator.tech
"""


from __future__ import absolute_import

import unittest

import openapi_client
from openapi_client.api.default_api import DefaultApi  # noqa: E501
from openapi_client.rest import ApiException


class TestDefaultApi(unittest.TestCase):
    """DefaultApi unit test stubs"""

    def setUp(self):
        self.api = openapi_client.api.default_api.DefaultApi()  # noqa: E501

    def tearDown(self):
        pass

    def test_dir_assign(self):
        """Test case for dir_assign

        Assign a file key  # noqa: E501
        """
        pass

    def test_dir_lookup(self):
        """Test case for dir_lookup

        Lookup volume  # noqa: E501
        """
        pass


if __name__ == '__main__':
    unittest.main()
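The generated stubs above leave test_dir_assign and test_dir_lookup as bare pass statements. One possible way to flesh them out is as integration tests against a running master; the sketch below assumes a master at http://127.0.0.1:9333, that dir_assign() can be called without arguments, and that dir_lookup accepts a volume_id keyword, none of which is guaranteed by the stub file itself.

# Hedged integration-style variant of the generated stubs.
import unittest

import openapi_client
from openapi_client.api.default_api import DefaultApi


class TestDefaultApiIntegration(unittest.TestCase):
    """Runs only when a SeaweedFS master is reachable on localhost:9333 (assumption)."""

    def setUp(self):
        config = openapi_client.Configuration(host="http://127.0.0.1:9333")
        self.api = DefaultApi(openapi_client.ApiClient(config))

    def test_dir_assign_returns_a_file_key(self):
        file_key = self.api.dir_assign()
        self.assertTrue(file_key.fid)  # e.g. something like "3,01637037d6"
        self.assertTrue(file_key.url)

    def test_dir_lookup_accepts_a_volume_id(self):
        file_key = self.api.dir_assign()
        volume_id = file_key.fid.split(",")[0]
        # The volume_id keyword is an assumption for this sketch; a failed lookup
        # would surface as an exception from rest.py.
        self.api.dir_lookup(volume_id=volume_id)


if __name__ == '__main__':
    unittest.main()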
@ -0,0 +1,56 @@
# coding: utf-8

"""
    Seaweedfs Master Server API

    The Seaweedfs Master Server API allows you to store blobs  # noqa: E501

    The version of the OpenAPI document: 3.43.0
    Generated by: https://openapi-generator.tech
"""


from __future__ import absolute_import

import unittest
import datetime

import openapi_client
from openapi_client.models.file_key import FileKey  # noqa: E501
from openapi_client.rest import ApiException

class TestFileKey(unittest.TestCase):
    """FileKey unit test stubs"""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def make_instance(self, include_optional):
        """Test FileKey
        include_optional is a boolean: when False only required
        params are included, when True both required and
        optional params are included"""
        # uncomment below to create an instance of `FileKey`
        """
        model = openapi_client.models.file_key.FileKey()  # noqa: E501
        if include_optional:
            return FileKey(
                count = 10,
                fid = '3,01637037d6',
                url = '127.0.0.1:8080'
            )
        else:
            return FileKey(
            )
        """

    def testFileKey(self):
        """Test FileKey"""
        # inst_req_only = self.make_instance(include_optional=False)
        # inst_req_and_optional = self.make_instance(include_optional=True)

if __name__ == '__main__':
    unittest.main()
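Since the generated template above is left commented out, here is a short hedged example of what an uncommented, runnable version of the optional-fields case could look like. It reuses the fid/url sample values from the template as plain strings; the to_dict() round-trip is assumed to be available on the generated pydantic models.

# Hedged sketch of exercising the FileKey model directly.
from openapi_client.models.file_key import FileKey

file_key = FileKey(
    count=10,
    fid='3,01637037d6',
    url='127.0.0.1:8080'
)
assert file_key.fid == '3,01637037d6'
assert file_key.to_dict()['fid'] == '3,01637037d6'  # to_dict() assumed from the generated model base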
@ -0,0 +1,55 @@
# coding: utf-8

"""
    Seaweedfs Master Server API

    The Seaweedfs Master Server API allows you to store blobs  # noqa: E501

    The version of the OpenAPI document: 3.43.0
    Generated by: https://openapi-generator.tech
"""


from __future__ import absolute_import

import unittest
import datetime

import openapi_client
from openapi_client.models.location import Location  # noqa: E501
from openapi_client.rest import ApiException

class TestLocation(unittest.TestCase):
    """Location unit test stubs"""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def make_instance(self, include_optional):
        """Test Location
        include_optional is a boolean: when False only required
        params are included, when True both required and
        optional params are included"""
        # uncomment below to create an instance of `Location`
        """
        model = openapi_client.models.location.Location()  # noqa: E501
        if include_optional:
            return Location(
                public_url = 'localhost:8080',
                url = 'localhost:8080'
            )
        else:
            return Location(
            )
        """

    def testLocation(self):
        """Test Location"""
        # inst_req_only = self.make_instance(include_optional=False)
        # inst_req_and_optional = self.make_instance(include_optional=True)

if __name__ == '__main__':
    unittest.main()
@ -0,0 +1,9 @@
[tox]
envlist = py3

[testenv]
deps=-r{toxinidir}/requirements.txt
  -r{toxinidir}/test-requirements.txt

commands=
  pytest --cov=openapi_client
@ -1,4 +1,4 @@
 gen:
-	for lang in cpp-restsdk csharp go kotlin-client php python ruby; do \
+	for lang in cpp-restsdk csharp go kotlin-client php python-nextgen ruby; do \
 		openapi-generator generate -i master.yaml -g $$lang -o ../master-clients/$$lang; \
 	done