diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index 218ea852e..89fd9c560 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -61,6 +61,9 @@ jobs: - name: Run pylint run: poetry run pylint openapi_python_client + - name: Regenerate Golden Record + run: poetry run task regen_e2e + - name: Run pytest run: poetry run pytest --cov=openapi_python_client --cov-report=term-missing tests end_to_end_tests/test_end_to_end.py --basetemp=tests/tmp env: @@ -73,3 +76,63 @@ jobs: - uses: codecov/codecov-action@v2 with: files: ./coverage.xml + + - uses: stefanzweifel/git-auto-commit-action@v4 + if: runner.os == 'Linux' + with: + commit_message: "chore: Regenerate E2E Golden Record" + file_pattern: end_to_end_tests/golden-record end_to_end_tests/custom-templates-golden-record + + integration: + name: Integration Tests + runs-on: ubuntu-latest + services: + openapi-test-server: + image: ghcr.io/openapi-generators/openapi-test-server:latest + ports: + - "3000:3000" + steps: + - uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Get Python Version + id: get_python_version + run: echo "::set-output name=python_version::$(python --version)" + - name: Cache dependencies + uses: actions/cache@v2 + with: + path: .venv + key: ${{ runner.os }}-${{ steps.get_python_version.outputs.python_version }}-dependencies-${{ hashFiles('**/poetry.lock') }} + restore-keys: | + ${{ runner.os }}-${{ steps.get_python_version.outputs.python_version }}-dependencies + - name: Install dependencies + run: | + pip install poetry + python -m venv .venv + poetry run python -m pip install --upgrade pip + poetry install + - name: Regenerate Integration Client + run: | + poetry run openapi-python-client update --url http://localhost:3000/openapi.json --config integration-tests-config.yaml + - name: Cache Generated Client Dependencies + uses: actions/cache@v2 + with: + path: integration-tests/.venv + key: ${{ runner.os }}-${{ steps.get_python_version.outputs.python_version }}-integration-dependencies-${{ hashFiles('**/poetry.lock') }} + restore-keys: | + ${{ runner.os }}-${{ steps.get_python_version.outputs.python_version }}-integration-dependencies + - name: Install Integration Dependencies + run: | + cd integration-tests + python -m venv .venv + poetry run python -m pip install --upgrade pip + poetry install + - name: Run Tests + run: | + cd integration-tests + poetry run pytest + - uses: stefanzweifel/git-auto-commit-action@v4 + with: + commit_message: "chore: Regenerate Integration Client" diff --git a/CHANGELOG.md b/CHANGELOG.md index eaafdfa2e..f0eda793e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,8 +6,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), Breaking changes to any of the following will cause the **minor** version to be incremented (as long as this project is 0.x). Only these pieces are considered part of the public API: -1. The _behavior_ of the generated code. Specifically, the way in which generated endpoints and classes are called and the way in which those calls communicate with an OpenAPI server. Any other property of the generated code is not considered part of the versioned, public API (e.g., code formatting, comments). -2. The invocation of the CLI (e.g., commands or arguments). +- The _behavior_ of the generated code. 
Specifically, the way in which generated endpoints and classes are called and the way in which those calls communicate with an OpenAPI server. Any other property of the generated code is not considered part of the versioned, public API (e.g., code formatting, comments). +- The invocation of the CLI (e.g., commands or arguments). Programmatic usage of this project (e.g., importing it as a Python module) and the usage of custom templates are not considered part of the public API and therefore may change behavior at any time without notice. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index b31be19aa..c25af06a7 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -21,13 +21,14 @@ 2. When in a Poetry shell (`poetry shell`) run `task check` in order to run most of the same checks CI runs. This will auto-reformat the code, check type annotations, run unit tests, check code coverage, and lint the code. -### Rework end to end tests +### Rework end-to-end tests -3. If you're writing a new feature, try to add it to the end to end test. +3. If you're writing a new feature, try to add it to the end-to-end test. 1. If adding support for a new OpenAPI feature, add it somewhere in `end_to_end_tests/openapi.json` - 2. Regenerate the "golden records" with `task regen`. This client is generated from the OpenAPI document used for end to end testing. + 2. Regenerate the "golden records" with `task regen`. This client is generated from the OpenAPI document used for end-to-end testing. 3. Check the changes to `end_to_end_tests/golden-record` to confirm only what you intended to change did change and that the changes look correct. -4. Run the end to end tests with `task e2e`. This will generate clients against `end_to_end_tests/openapi.json` and compare them with the golden record. The tests will fail if **anything is different**. The end to end tests are not included in `task check` as they take longer to run and don't provide very useful feedback in the event of failure. If an e2e test does fail, the easiest way to check what's wrong is to run `task regen` and check the diffs. You can also use `task re` which will run `regen` and `e2e` in that order. +4. **If you added a test above OR modified the templates**: Run the end-to-end tests with `task e2e`. This will generate clients against `end_to_end_tests/openapi.json` and compare them with the golden record. The tests will fail if **anything is different**. The end-to-end tests are not included in `task check` as they take longer to run and don't provide very useful feedback in the event of failure. If an e2e test does fail, the easiest way to check what's wrong is to run `task regen` and check the diffs. You can also use `task re` which will run `regen` and `e2e` in that order. 
+ ## Creating a Pull Request diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/body_upload_file_tests_upload_post.py b/end_to_end_tests/golden-record/my_test_api_client/models/body_upload_file_tests_upload_post.py index 200dbec53..9db6d85f8 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/body_upload_file_tests_upload_post.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/body_upload_file_tests_upload_post.py @@ -102,36 +102,40 @@ def to_dict(self) -> Dict[str, Any]: def to_multipart(self) -> Dict[str, Any]: some_file = self.some_file.to_tuple() - some_object = (None, json.dumps(self.some_object.to_dict()), "application/json") + some_object = (None, json.dumps(self.some_object.to_dict()).encode(), "application/json") some_optional_file: Union[Unset, FileJsonType] = UNSET if not isinstance(self.some_optional_file, Unset): some_optional_file = self.some_optional_file.to_tuple() - some_string = self.some_string if self.some_string is UNSET else (None, str(self.some_string), "text/plain") - some_number = self.some_number if self.some_number is UNSET else (None, str(self.some_number), "text/plain") - some_array: Union[Unset, Tuple[None, str, str]] = UNSET + some_string = ( + self.some_string if self.some_string is UNSET else (None, str(self.some_string).encode(), "text/plain") + ) + some_number = ( + self.some_number if self.some_number is UNSET else (None, str(self.some_number).encode(), "text/plain") + ) + some_array: Union[Unset, Tuple[None, bytes, str]] = UNSET if not isinstance(self.some_array, Unset): _temp_some_array = self.some_array - some_array = (None, json.dumps(_temp_some_array), "application/json") + some_array = (None, json.dumps(_temp_some_array).encode(), "application/json") - some_optional_object: Union[Unset, Tuple[None, str, str]] = UNSET + some_optional_object: Union[Unset, Tuple[None, bytes, str]] = UNSET if not isinstance(self.some_optional_object, Unset): - some_optional_object = (None, json.dumps(self.some_optional_object.to_dict()), "application/json") + some_optional_object = (None, json.dumps(self.some_optional_object.to_dict()).encode(), "application/json") some_nullable_object = ( - (None, json.dumps(self.some_nullable_object.to_dict()), "application/json") + (None, json.dumps(self.some_nullable_object.to_dict()).encode(), "application/json") if self.some_nullable_object else None ) - some_enum: Union[Unset, Tuple[None, str, str]] = UNSET + some_enum: Union[Unset, Tuple[None, bytes, str]] = UNSET if not isinstance(self.some_enum, Unset): - some_enum = (None, str(self.some_enum.value), "text/plain") + some_enum = (None, str(self.some_enum.value).encode(), "text/plain") field_dict: Dict[str, Any] = {} for prop_name, prop in self.additional_properties.items(): - field_dict[prop_name] = (None, json.dumps(prop.to_dict()), "application/json") + field_dict[prop_name] = (None, json.dumps(prop.to_dict()).encode(), "application/json") field_dict.update( { diff --git a/end_to_end_tests/golden-record/my_test_api_client/types.py b/end_to_end_tests/golden-record/my_test_api_client/types.py index a6f00ece9..d8727579f 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/types.py +++ b/end_to_end_tests/golden-record/my_test_api_client/types.py @@ -1,5 +1,5 @@ """ Contains some shared types for properties """ -from typing import BinaryIO, Generic, MutableMapping, Optional, TextIO, Tuple, TypeVar, Union +from typing import BinaryIO, Generic, MutableMapping, Optional, Tuple, TypeVar import attr @@ -11,14 +11,14 @@ def 
__bool__(self) -> bool: UNSET: Unset = Unset() -FileJsonType = Tuple[Optional[str], Union[BinaryIO, TextIO], Optional[str]] +FileJsonType = Tuple[Optional[str], BinaryIO, Optional[str]] @attr.s(auto_attribs=True) class File: """Contains information for file uploads""" - payload: Union[BinaryIO, TextIO] + payload: BinaryIO file_name: Optional[str] = None mime_type: Optional[str] = None diff --git a/integration-tests-config.yaml b/integration-tests-config.yaml new file mode 100644 index 000000000..bda7890c5 --- /dev/null +++ b/integration-tests-config.yaml @@ -0,0 +1 @@ +project_name_override: integration-tests \ No newline at end of file diff --git a/integration-tests/.gitignore b/integration-tests/.gitignore new file mode 100644 index 000000000..ed29cb977 --- /dev/null +++ b/integration-tests/.gitignore @@ -0,0 +1,23 @@ +__pycache__/ +build/ +dist/ +*.egg-info/ +.pytest_cache/ + +# pyenv +.python-version + +# Environments +.env +.venv + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# JetBrains +.idea/ + +/coverage.xml +/.coverage \ No newline at end of file diff --git a/integration-tests/README.md b/integration-tests/README.md new file mode 100644 index 000000000..f7e1cdfc6 --- /dev/null +++ b/integration-tests/README.md @@ -0,0 +1,87 @@ +# open-api-test-server-client +A client library for accessing OpenAPI Test Server + +## Usage +First, create a client: + +```python +from integration_tests import Client + +client = Client(base_url="https://api.example.com") +``` + +If the endpoints you're going to hit require authentication, use `AuthenticatedClient` instead: + +```python +from integration_tests import AuthenticatedClient + +client = AuthenticatedClient(base_url="https://api.example.com", token="SuperSecretToken") +``` + +Now call your endpoint and use your models: + +```python +from integration_tests.models import MyDataModel +from integration_tests.api.my_tag import get_my_data_model +from integration_tests.types import Response + +my_data: MyDataModel = get_my_data_model.sync(client=client) +# or if you need more info (e.g. status_code) +response: Response[MyDataModel] = get_my_data_model.sync_detailed(client=client) +``` + +Or do the same thing with an async version: + +```python +from integration_tests.models import MyDataModel +from integration_tests.api.my_tag import get_my_data_model +from integration_tests.types import Response + +my_data: MyDataModel = await get_my_data_model.asyncio(client=client) +response: Response[MyDataModel] = await get_my_data_model.asyncio_detailed(client=client) +``` + +By default, when you're calling an HTTPS API it will attempt to verify that SSL is working correctly. Using certificate verification is highly recommended most of the time, but sometimes you may need to authenticate to a server (especially an internal server) using a custom certificate bundle. + +```python +client = AuthenticatedClient( + base_url="https://internal_api.example.com", + token="SuperSecretToken", + verify_ssl="/path/to/certificate_bundle.pem", +) +``` + +You can also disable certificate validation altogether, but beware that **this is a security risk**. + +```python +client = AuthenticatedClient( + base_url="https://internal_api.example.com", + token="SuperSecretToken", + verify_ssl=False +) +``` + +Things to know: +1. Every path/method combo becomes a Python module with four functions: + 1. `sync`: Blocking request that returns parsed data (if successful) or `None` + 1. 
`sync_detailed`: Blocking request that always returns a `Response`, optionally with `parsed` set if the request was successful.
+    1. `asyncio`: Like `sync` but async instead of blocking
+    1. `asyncio_detailed`: Like `sync_detailed` but async instead of blocking
+
+1. All path/query params, and bodies become method arguments.
+1. If your endpoint had any tags on it, the first tag will be used as a module name for the function (my_tag above)
+1. Any endpoint which did not have a tag will be in `open_api_test_server_client.api.default`
+
+## Building / publishing this Client
+This project uses [Poetry](https://python-poetry.org/) to manage dependencies and packaging. Here are the basics:
+1. Update the metadata in pyproject.toml (e.g. authors, version)
+1. If you're using a private repository, configure it with Poetry
+    1. `poetry config repositories. `
+    1. `poetry config http-basic. `
+1. Publish the client with `poetry publish --build -r ` or, if for public PyPI, just `poetry publish --build`
+
+If you want to install this client into another project without publishing it (e.g. for development) then:
+1. If that project **is using Poetry**, you can simply do `poetry add ` from that project
+1. If that project is not using Poetry:
+    1. Build a wheel with `poetry build -f wheel`
+    1. Install that wheel from the other project `pip install `
\ No newline at end of file
diff --git a/integration-tests/integration_tests/__init__.py b/integration-tests/integration_tests/__init__.py
new file mode 100644
index 000000000..846ce4de1
--- /dev/null
+++ b/integration-tests/integration_tests/__init__.py
@@ -0,0 +1,2 @@
+""" A client library for accessing OpenAPI Test Server """
+from .client import AuthenticatedClient, Client
diff --git a/integration-tests/integration_tests/api/__init__.py b/integration-tests/integration_tests/api/__init__.py
new file mode 100644
index 000000000..dc035f4ce
--- /dev/null
+++ b/integration-tests/integration_tests/api/__init__.py
@@ -0,0 +1 @@
+""" Contains methods for accessing the API """
diff --git a/integration-tests/integration_tests/api/body/__init__.py b/integration-tests/integration_tests/api/body/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/integration-tests/integration_tests/api/body/post_body_multipart.py b/integration-tests/integration_tests/api/body/post_body_multipart.py
new file mode 100644
index 000000000..206b7bc13
--- /dev/null
+++ b/integration-tests/integration_tests/api/body/post_body_multipart.py
@@ -0,0 +1,142 @@
+from typing import Any, Dict, Optional, Union
+
+import httpx
+
+from ...client import Client
+from ...models.post_body_multipart_multipart_data import PostBodyMultipartMultipartData
+from ...models.post_body_multipart_response_200 import PostBodyMultipartResponse200
+from ...models.public_error import PublicError
+from ...types import Response
+
+
+def _get_kwargs(
+    *,
+    client: Client,
+    multipart_data: PostBodyMultipartMultipartData,
+) -> Dict[str, Any]:
+    url = "{}/body/multipart".format(client.base_url)
+
+    headers: Dict[str, Any] = client.get_headers()
+    cookies: Dict[str, Any] = client.get_cookies()
+
+    multipart_multipart_data = multipart_data.to_multipart()
+
+    return {
+        "method": "post",
+        "url": url,
+        "headers": headers,
+        "cookies": cookies,
+        "timeout": client.get_timeout(),
+        "files": multipart_multipart_data,
+    }
+
+
+def _parse_response(*, response: httpx.Response) -> Optional[Union[PostBodyMultipartResponse200, PublicError]]:
+    if response.status_code == 200:
+        response_200 = 
PostBodyMultipartResponse200.from_dict(response.json()) + + return response_200 + if response.status_code == 400: + response_400 = PublicError.from_dict(response.json()) + + return response_400 + return None + + +def _build_response(*, response: httpx.Response) -> Response[Union[PostBodyMultipartResponse200, PublicError]]: + return Response( + status_code=response.status_code, + content=response.content, + headers=response.headers, + parsed=_parse_response(response=response), + ) + + +def sync_detailed( + *, + client: Client, + multipart_data: PostBodyMultipartMultipartData, +) -> Response[Union[PostBodyMultipartResponse200, PublicError]]: + """ + Args: + multipart_data (PostBodyMultipartMultipartData): + + Returns: + Response[Union[PostBodyMultipartResponse200, PublicError]] + """ + + kwargs = _get_kwargs( + client=client, + multipart_data=multipart_data, + ) + + response = httpx.request( + verify=client.verify_ssl, + **kwargs, + ) + + return _build_response(response=response) + + +def sync( + *, + client: Client, + multipart_data: PostBodyMultipartMultipartData, +) -> Optional[Union[PostBodyMultipartResponse200, PublicError]]: + """ + Args: + multipart_data (PostBodyMultipartMultipartData): + + Returns: + Response[Union[PostBodyMultipartResponse200, PublicError]] + """ + + return sync_detailed( + client=client, + multipart_data=multipart_data, + ).parsed + + +async def asyncio_detailed( + *, + client: Client, + multipart_data: PostBodyMultipartMultipartData, +) -> Response[Union[PostBodyMultipartResponse200, PublicError]]: + """ + Args: + multipart_data (PostBodyMultipartMultipartData): + + Returns: + Response[Union[PostBodyMultipartResponse200, PublicError]] + """ + + kwargs = _get_kwargs( + client=client, + multipart_data=multipart_data, + ) + + async with httpx.AsyncClient(verify=client.verify_ssl) as _client: + response = await _client.request(**kwargs) + + return _build_response(response=response) + + +async def asyncio( + *, + client: Client, + multipart_data: PostBodyMultipartMultipartData, +) -> Optional[Union[PostBodyMultipartResponse200, PublicError]]: + """ + Args: + multipart_data (PostBodyMultipartMultipartData): + + Returns: + Response[Union[PostBodyMultipartResponse200, PublicError]] + """ + + return ( + await asyncio_detailed( + client=client, + multipart_data=multipart_data, + ) + ).parsed diff --git a/integration-tests/integration_tests/client.py b/integration-tests/integration_tests/client.py new file mode 100644 index 000000000..9d3670988 --- /dev/null +++ b/integration-tests/integration_tests/client.py @@ -0,0 +1,48 @@ +import ssl +from typing import Dict, Union + +import attr + + +@attr.s(auto_attribs=True) +class Client: + """A class for keeping track of data related to the API""" + + base_url: str + cookies: Dict[str, str] = attr.ib(factory=dict, kw_only=True) + headers: Dict[str, str] = attr.ib(factory=dict, kw_only=True) + timeout: float = attr.ib(5.0, kw_only=True) + verify_ssl: Union[str, bool, ssl.SSLContext] = attr.ib(True, kw_only=True) + + def get_headers(self) -> Dict[str, str]: + """Get headers to be used in all endpoints""" + return {**self.headers} + + def with_headers(self, headers: Dict[str, str]) -> "Client": + """Get a new client matching this one with additional headers""" + return attr.evolve(self, headers={**self.headers, **headers}) + + def get_cookies(self) -> Dict[str, str]: + return {**self.cookies} + + def with_cookies(self, cookies: Dict[str, str]) -> "Client": + """Get a new client matching this one with additional cookies""" + return 
attr.evolve(self, cookies={**self.cookies, **cookies}) + + def get_timeout(self) -> float: + return self.timeout + + def with_timeout(self, timeout: float) -> "Client": + """Get a new client matching this one with a new timeout (in seconds)""" + return attr.evolve(self, timeout=timeout) + + +@attr.s(auto_attribs=True) +class AuthenticatedClient(Client): + """A Client which has been authenticated for use on secured endpoints""" + + token: str + + def get_headers(self) -> Dict[str, str]: + """Get headers to be used in authenticated endpoints""" + return {"Authorization": f"Bearer {self.token}", **self.headers} diff --git a/integration-tests/integration_tests/models/__init__.py b/integration-tests/integration_tests/models/__init__.py new file mode 100644 index 000000000..22998f371 --- /dev/null +++ b/integration-tests/integration_tests/models/__init__.py @@ -0,0 +1,6 @@ +""" Contains all the data models used in inputs/outputs """ + +from .post_body_multipart_multipart_data import PostBodyMultipartMultipartData +from .post_body_multipart_response_200 import PostBodyMultipartResponse200 +from .problem import Problem +from .public_error import PublicError diff --git a/integration-tests/integration_tests/models/post_body_multipart_multipart_data.py b/integration-tests/integration_tests/models/post_body_multipart_multipart_data.py new file mode 100644 index 000000000..0766a3a83 --- /dev/null +++ b/integration-tests/integration_tests/models/post_body_multipart_multipart_data.py @@ -0,0 +1,100 @@ +from io import BytesIO +from typing import Any, Dict, List, Type, TypeVar, Union + +import attr + +from ..types import UNSET, File, Unset + +T = TypeVar("T", bound="PostBodyMultipartMultipartData") + + +@attr.s(auto_attribs=True) +class PostBodyMultipartMultipartData: + """ + Attributes: + a_string (str): + file (File): For the sake of this test, include a file name and content type. The payload should also be valid + UTF-8. 
+ description (Union[Unset, str]): + """ + + a_string: str + file: File + description: Union[Unset, str] = UNSET + additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + + def to_dict(self) -> Dict[str, Any]: + a_string = self.a_string + file = self.file.to_tuple() + + description = self.description + + field_dict: Dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "a_string": a_string, + "file": file, + } + ) + if description is not UNSET: + field_dict["description"] = description + + return field_dict + + def to_multipart(self) -> Dict[str, Any]: + a_string = self.a_string if self.a_string is UNSET else (None, str(self.a_string).encode(), "text/plain") + file = self.file.to_tuple() + + description = ( + self.description if self.description is UNSET else (None, str(self.description).encode(), "text/plain") + ) + + field_dict: Dict[str, Any] = {} + field_dict.update( + {key: (None, str(value).encode(), "text/plain") for key, value in self.additional_properties.items()} + ) + field_dict.update( + { + "a_string": a_string, + "file": file, + } + ) + if description is not UNSET: + field_dict["description"] = description + + return field_dict + + @classmethod + def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + d = src_dict.copy() + a_string = d.pop("a_string") + + file = File(payload=BytesIO(d.pop("file"))) + + description = d.pop("description", UNSET) + + post_body_multipart_multipart_data = cls( + a_string=a_string, + file=file, + description=description, + ) + + post_body_multipart_multipart_data.additional_properties = d + return post_body_multipart_multipart_data + + @property + def additional_keys(self) -> List[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/integration-tests/integration_tests/models/post_body_multipart_response_200.py b/integration-tests/integration_tests/models/post_body_multipart_response_200.py new file mode 100644 index 000000000..319c25fd0 --- /dev/null +++ b/integration-tests/integration_tests/models/post_body_multipart_response_200.py @@ -0,0 +1,85 @@ +from typing import Any, Dict, List, Type, TypeVar + +import attr + +T = TypeVar("T", bound="PostBodyMultipartResponse200") + + +@attr.s(auto_attribs=True) +class PostBodyMultipartResponse200: + """ + Attributes: + a_string (str): Echo of the 'a_string' input parameter from the form. + file_data (str): Echo of content of the 'file' input parameter from the form. + description (str): Echo of the 'description' input parameter from the form. + file_name (str): The name of the file uploaded. + file_content_type (str): The content type of the file uploaded. 
+ """ + + a_string: str + file_data: str + description: str + file_name: str + file_content_type: str + additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + + def to_dict(self) -> Dict[str, Any]: + a_string = self.a_string + file_data = self.file_data + description = self.description + file_name = self.file_name + file_content_type = self.file_content_type + + field_dict: Dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "a_string": a_string, + "file_data": file_data, + "description": description, + "file_name": file_name, + "file_content_type": file_content_type, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + d = src_dict.copy() + a_string = d.pop("a_string") + + file_data = d.pop("file_data") + + description = d.pop("description") + + file_name = d.pop("file_name") + + file_content_type = d.pop("file_content_type") + + post_body_multipart_response_200 = cls( + a_string=a_string, + file_data=file_data, + description=description, + file_name=file_name, + file_content_type=file_content_type, + ) + + post_body_multipart_response_200.additional_properties = d + return post_body_multipart_response_200 + + @property + def additional_keys(self) -> List[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/integration-tests/integration_tests/models/problem.py b/integration-tests/integration_tests/models/problem.py new file mode 100644 index 000000000..d343123ab --- /dev/null +++ b/integration-tests/integration_tests/models/problem.py @@ -0,0 +1,65 @@ +from typing import Any, Dict, List, Type, TypeVar, Union + +import attr + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="Problem") + + +@attr.s(auto_attribs=True) +class Problem: + """ + Attributes: + parameter_name (Union[Unset, str]): + description (Union[Unset, str]): + """ + + parameter_name: Union[Unset, str] = UNSET + description: Union[Unset, str] = UNSET + additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + + def to_dict(self) -> Dict[str, Any]: + parameter_name = self.parameter_name + description = self.description + + field_dict: Dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + if parameter_name is not UNSET: + field_dict["parameter_name"] = parameter_name + if description is not UNSET: + field_dict["description"] = description + + return field_dict + + @classmethod + def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + d = src_dict.copy() + parameter_name = d.pop("parameter_name", UNSET) + + description = d.pop("description", UNSET) + + problem = cls( + parameter_name=parameter_name, + description=description, + ) + + problem.additional_properties = d + return problem + + @property + def additional_keys(self) -> List[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def 
__contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/integration-tests/integration_tests/models/public_error.py b/integration-tests/integration_tests/models/public_error.py new file mode 100644 index 000000000..49e928b3d --- /dev/null +++ b/integration-tests/integration_tests/models/public_error.py @@ -0,0 +1,102 @@ +from typing import Any, Dict, List, Type, TypeVar, Union, cast + +import attr + +from ..models.problem import Problem +from ..types import UNSET, Unset + +T = TypeVar("T", bound="PublicError") + + +@attr.s(auto_attribs=True) +class PublicError: + """ + Attributes: + errors (Union[Unset, List[str]]): + extra_parameters (Union[Unset, List[str]]): + invalid_parameters (Union[Unset, List[Problem]]): + missing_parameters (Union[Unset, List[str]]): + """ + + errors: Union[Unset, List[str]] = UNSET + extra_parameters: Union[Unset, List[str]] = UNSET + invalid_parameters: Union[Unset, List[Problem]] = UNSET + missing_parameters: Union[Unset, List[str]] = UNSET + additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + + def to_dict(self) -> Dict[str, Any]: + errors: Union[Unset, List[str]] = UNSET + if not isinstance(self.errors, Unset): + errors = self.errors + + extra_parameters: Union[Unset, List[str]] = UNSET + if not isinstance(self.extra_parameters, Unset): + extra_parameters = self.extra_parameters + + invalid_parameters: Union[Unset, List[Dict[str, Any]]] = UNSET + if not isinstance(self.invalid_parameters, Unset): + invalid_parameters = [] + for invalid_parameters_item_data in self.invalid_parameters: + invalid_parameters_item = invalid_parameters_item_data.to_dict() + + invalid_parameters.append(invalid_parameters_item) + + missing_parameters: Union[Unset, List[str]] = UNSET + if not isinstance(self.missing_parameters, Unset): + missing_parameters = self.missing_parameters + + field_dict: Dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + if errors is not UNSET: + field_dict["errors"] = errors + if extra_parameters is not UNSET: + field_dict["extra_parameters"] = extra_parameters + if invalid_parameters is not UNSET: + field_dict["invalid_parameters"] = invalid_parameters + if missing_parameters is not UNSET: + field_dict["missing_parameters"] = missing_parameters + + return field_dict + + @classmethod + def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + d = src_dict.copy() + errors = cast(List[str], d.pop("errors", UNSET)) + + extra_parameters = cast(List[str], d.pop("extra_parameters", UNSET)) + + invalid_parameters = [] + _invalid_parameters = d.pop("invalid_parameters", UNSET) + for invalid_parameters_item_data in _invalid_parameters or []: + invalid_parameters_item = Problem.from_dict(invalid_parameters_item_data) + + invalid_parameters.append(invalid_parameters_item) + + missing_parameters = cast(List[str], d.pop("missing_parameters", UNSET)) + + public_error = cls( + errors=errors, + extra_parameters=extra_parameters, + invalid_parameters=invalid_parameters, + missing_parameters=missing_parameters, + ) + + public_error.additional_properties = d + return public_error + + @property + def additional_keys(self) -> List[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def 
__contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/integration-tests/integration_tests/py.typed b/integration-tests/integration_tests/py.typed new file mode 100644 index 000000000..1aad32711 --- /dev/null +++ b/integration-tests/integration_tests/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561 \ No newline at end of file diff --git a/integration-tests/integration_tests/types.py b/integration-tests/integration_tests/types.py new file mode 100644 index 000000000..d8727579f --- /dev/null +++ b/integration-tests/integration_tests/types.py @@ -0,0 +1,43 @@ +""" Contains some shared types for properties """ +from typing import BinaryIO, Generic, MutableMapping, Optional, Tuple, TypeVar + +import attr + + +class Unset: + def __bool__(self) -> bool: + return False + + +UNSET: Unset = Unset() + +FileJsonType = Tuple[Optional[str], BinaryIO, Optional[str]] + + +@attr.s(auto_attribs=True) +class File: + """Contains information for file uploads""" + + payload: BinaryIO + file_name: Optional[str] = None + mime_type: Optional[str] = None + + def to_tuple(self) -> FileJsonType: + """Return a tuple representation that httpx will accept for multipart/form-data""" + return self.file_name, self.payload, self.mime_type + + +T = TypeVar("T") + + +@attr.s(auto_attribs=True) +class Response(Generic[T]): + """A response from an endpoint""" + + status_code: int + content: bytes + headers: MutableMapping[str, str] + parsed: Optional[T] + + +__all__ = ["File", "Response", "FileJsonType"] diff --git a/integration-tests/poetry.lock b/integration-tests/poetry.lock new file mode 100644 index 000000000..ae950a660 --- /dev/null +++ b/integration-tests/poetry.lock @@ -0,0 +1,432 @@ +[[package]] +name = "async-generator" +version = "1.10" +description = "Async generators and context managers for Python 3.5+" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "atomicwrites" +version = "1.4.0" +description = "Atomic file writes." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "attrs" +version = "21.4.0" +description = "Classes Without Boilerplate" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.extras] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] +docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] + +[[package]] +name = "certifi" +version = "2021.10.8" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "charset-normalizer" +version = "2.0.10" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" +optional = false +python-versions = ">=3.5.0" + +[package.extras] +unicode_backport = ["unicodedata2"] + +[[package]] +name = "colorama" +version = "0.4.4" +description = "Cross-platform colored terminal text." 
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "contextvars" +version = "2.4" +description = "PEP 567 Backport" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +immutables = ">=0.9" + +[[package]] +name = "h11" +version = "0.12.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "httpcore" +version = "0.13.3" +description = "A minimal low-level HTTP client." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +h11 = ">=0.11,<0.13" +sniffio = ">=1.0.0,<2.0.0" + +[package.extras] +http2 = ["h2 (>=3,<5)"] + +[[package]] +name = "httpx" +version = "0.20.0" +description = "The next generation HTTP client." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +async-generator = {version = "*", markers = "python_version < \"3.7\""} +certifi = "*" +charset-normalizer = "*" +httpcore = ">=0.13.3,<0.14.0" +rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} +sniffio = "*" + +[package.extras] +brotli = ["brotlicffi", "brotli"] +cli = ["click (>=8.0.0,<9.0.0)", "rich (>=10.0.0,<11.0.0)", "pygments (>=2.0.0,<3.0.0)"] +http2 = ["h2 (>=3,<5)"] + +[[package]] +name = "idna" +version = "3.3" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "immutables" +version = "0.16" +description = "Immutable Collections" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""} + +[package.extras] +test = ["flake8 (>=3.8.4,<3.9.0)", "pycodestyle (>=2.6.0,<2.7.0)", "mypy (>=0.910)", "pytest (>=6.2.4,<6.3.0)"] + +[[package]] +name = "importlib-metadata" +version = "4.8.3" +description = "Read metadata from Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} +zipp = ">=0.5" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +perf = ["ipython"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] + +[[package]] +name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "packaging" +version = "21.3" +description = "Core utilities for Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = 
"dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pyparsing" +version = "3.0.6" +description = "Python parsing module" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "rfc3986" +version = "1.5.0" +description = "Validating URI References per RFC 3986" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +idna = {version = "*", optional = true, markers = "extra == \"idna2008\""} + +[package.extras] +idna2008 = ["idna"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "sniffio" +version = "1.2.0" +description = "Sniff out which async library your code is running under" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +contextvars = {version = ">=2.1", markers = "python_version < \"3.7\""} + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "typing-extensions" +version = "4.0.1" +description = "Backported and Experimental Type Hints for Python 3.6+" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "zipp" +version = "3.6.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] + +[metadata] +lock-version = "1.1" +python-versions = "^3.6" +content-hash = "692376b6eae8871d446bf2ac8f1d0e0d3bcb8cc9715192ed0b81524faf5a68de" + +[metadata.files] +async-generator = [ + {file = "async_generator-1.10-py3-none-any.whl", hash = "sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b"}, + {file = "async_generator-1.10.tar.gz", hash = "sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144"}, +] +atomicwrites = [ + {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, + {file = "atomicwrites-1.4.0.tar.gz", hash = 
"sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, +] +attrs = [ + {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, + {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, +] +certifi = [ + {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, + {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, +] +charset-normalizer = [ + {file = "charset-normalizer-2.0.10.tar.gz", hash = "sha256:876d180e9d7432c5d1dfd4c5d26b72f099d503e8fcc0feb7532c9289be60fcbd"}, + {file = "charset_normalizer-2.0.10-py3-none-any.whl", hash = "sha256:cb957888737fc0bbcd78e3df769addb41fd1ff8cf950dc9e7ad7793f1bf44455"}, +] +colorama = [ + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, +] +contextvars = [ + {file = "contextvars-2.4.tar.gz", hash = "sha256:f38c908aaa59c14335eeea12abea5f443646216c4e29380d7bf34d2018e2c39e"}, +] +h11 = [ + {file = "h11-0.12.0-py3-none-any.whl", hash = "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6"}, + {file = "h11-0.12.0.tar.gz", hash = "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"}, +] +httpcore = [ + {file = "httpcore-0.13.3-py3-none-any.whl", hash = "sha256:ff614f0ef875b9e5fe0bdd459b31ea0eea282ff12dc82add83d68b3811ee94ad"}, + {file = "httpcore-0.13.3.tar.gz", hash = "sha256:5d674b57a11275904d4fd0819ca02f960c538e4472533620f322fc7db1ea0edc"}, +] +httpx = [ + {file = "httpx-0.20.0-py3-none-any.whl", hash = "sha256:33af5aad9bdc82ef1fc89219c1e36f5693bf9cd0ebe330884df563445682c0f8"}, + {file = "httpx-0.20.0.tar.gz", hash = "sha256:09606d630f070d07f9ff28104fbcea429ea0014c1e89ac90b4d8de8286c40e7b"}, +] +idna = [ + {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, +] +immutables = [ + {file = "immutables-0.16-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:acbfa79d44228d96296279068441f980dc63dbed52522d9227ff9f4d96c6627e"}, + {file = "immutables-0.16-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29c9ed003eacb92e630ef200e31f47236c2139b39476894f7963b32bd39bafa3"}, + {file = "immutables-0.16-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a396314b9024fa55bf83a27813fd76cf9f27dce51f53b0f19b51de035146251"}, + {file = "immutables-0.16-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4a2a71678348fb95b13ca108d447f559a754c41b47bd1e7e4fb23974e735682d"}, + {file = "immutables-0.16-cp36-cp36m-win32.whl", hash = "sha256:064001638ab5d36f6aa05b6101446f4a5793fb71e522bc81b8fc65a1894266ff"}, + {file = "immutables-0.16-cp36-cp36m-win_amd64.whl", hash = "sha256:1de393f1b188740ca7b38f946f2bbc7edf3910d2048f03bbb8d01f17a038d67c"}, + {file = "immutables-0.16-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fcf678a3074613119385a02a07c469ec5130559f5ea843c85a0840c80b5b71c6"}, + {file = "immutables-0.16-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a307eb0984eb43e815dcacea3ac50c11d00a936ecf694c46991cd5a23bcb0ec0"}, + {file = "immutables-0.16-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7a58825ff2254e2612c5a932174398a4ea8fbddd8a64a02c880cc32ee28b8820"}, + {file = "immutables-0.16-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:798b095381eb42cf40db6876339e7bed84093e5868018a9e73d8e1f7ab4bb21e"}, + {file = "immutables-0.16-cp37-cp37m-win32.whl", hash = "sha256:19bdede174847c2ef1292df0f23868ab3918b560febb09fcac6eec621bd4812b"}, + {file = "immutables-0.16-cp37-cp37m-win_amd64.whl", hash = "sha256:9ccf4c0e3e2e3237012b516c74c49de8872ccdf9129739f7a0b9d7444a8c4862"}, + {file = "immutables-0.16-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d59beef203a3765db72b1d0943547425c8318ecf7d64c451fd1e130b653c2fbb"}, + {file = "immutables-0.16-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0020aaa4010b136056c20a46ce53204e1407a9e4464246cb2cf95b90808d9161"}, + {file = "immutables-0.16-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edd9f67671555af1eb99ad3c7550238487dd7ac0ac5205b40204ed61c9a922ac"}, + {file = "immutables-0.16-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:298a301f85f307b4c056a0825eb30f060e64d73605e783289f3df37dd762bab8"}, + {file = "immutables-0.16-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b779617f5b94486bfd0f22162cd72eb5f2beb0214a14b75fdafb7b2c908ed0cb"}, + {file = "immutables-0.16-cp38-cp38-win32.whl", hash = "sha256:511c93d8b1bbbf103ff3f1f120c5a68a9866ce03dea6ac406537f93ca9b19139"}, + {file = "immutables-0.16-cp38-cp38-win_amd64.whl", hash = "sha256:b651b61c1af6cda2ee201450f2ffe048a5959bc88e43e6c312f4c93e69c9e929"}, + {file = "immutables-0.16-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:aa7bf572ae1e006104c584be70dc634849cf0dc62f42f4ee194774f97e7fd17d"}, + {file = "immutables-0.16-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:50793a44ba0d228ed8cad4d0925e00dfd62ea32f44ddee8854f8066447272d05"}, + {file = "immutables-0.16-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:799621dcdcdcbb2516546a40123b87bf88de75fe7459f7bd8144f079ace6ec3e"}, + {file = "immutables-0.16-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7bcf52aeb983bd803b7c6106eae1b2d9a0c7ab1241bc6b45e2174ba2b7283031"}, + {file = "immutables-0.16-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:734c269e82e5f307fb6e17945953b67659d1731e65309787b8f7ba267d1468f2"}, + {file = "immutables-0.16-cp39-cp39-win32.whl", hash = "sha256:a454d5d3fee4b7cc627345791eb2ca4b27fa3bbb062ccf362ecaaa51679a07ed"}, + {file = "immutables-0.16-cp39-cp39-win_amd64.whl", hash = "sha256:2505d93395d3f8ae4223e21465994c3bc6952015a38dc4f03cb3e07a2b8d8325"}, + {file = "immutables-0.16.tar.gz", hash = "sha256:d67e86859598eed0d926562da33325dac7767b7b1eff84e232c22abea19f4360"}, +] +importlib-metadata = [ + {file = "importlib_metadata-4.8.3-py3-none-any.whl", hash = "sha256:65a9576a5b2d58ca44d133c42a241905cc45e34d2c06fd5ba2bafa221e5d7b5e"}, + {file = "importlib_metadata-4.8.3.tar.gz", hash = "sha256:766abffff765960fcc18003801f7044eb6755ffae4521c8e8ce8e83b9c9b0668"}, +] +iniconfig = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, +] +packaging = [ + {file = "packaging-21.3-py3-none-any.whl", hash = 
"sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, +] +pluggy = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] +py = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] +pyparsing = [ + {file = "pyparsing-3.0.6-py3-none-any.whl", hash = "sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4"}, + {file = "pyparsing-3.0.6.tar.gz", hash = "sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81"}, +] +pytest = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] +python-dateutil = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] +rfc3986 = [ + {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, + {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, +] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +sniffio = [ + {file = "sniffio-1.2.0-py3-none-any.whl", hash = "sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663"}, + {file = "sniffio-1.2.0.tar.gz", hash = "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"}, +] +toml = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] +typing-extensions = [ + {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, + {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, +] +zipp = [ + {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, + {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"}, +] diff --git a/integration-tests/pyproject.toml b/integration-tests/pyproject.toml new file mode 100644 index 000000000..e3f321b5e --- /dev/null +++ b/integration-tests/pyproject.toml @@ -0,0 +1,40 @@ +[tool.poetry] +name = "integration-tests" +version = "0.0.1" +description = "A client library for accessing OpenAPI Test Server" +authors = [] +readme = "README.md" +packages = [ + {include = "integration_tests"}, +] +include = ["CHANGELOG.md", 
"open_api_test_server_client/py.typed"] + +[tool.poetry.dependencies] +python = "^3.6" +httpx = ">=0.15.4,<0.22.0" +attrs = ">=20.1.0,<22.0.0" +python-dateutil = "^2.8.0" + +[tool.poetry.dev-dependencies] +pytest = "^6.2.5" + +[build-system] +requires = ["poetry>=1.0"] +build-backend = "poetry.masonry.api" + +[tool.black] +line-length = 120 +target_version = ['py36', 'py37', 'py38'] +exclude = ''' +( + /( + | \.git + | \.venv + | \.mypy_cache + )/ +) +''' + +[tool.isort] +line_length = 120 +profile = "black" \ No newline at end of file diff --git a/integration-tests/tests/test_api/test_body/test_post_body_multipart.py b/integration-tests/tests/test_api/test_body/test_post_body_multipart.py new file mode 100644 index 000000000..b5e453ab6 --- /dev/null +++ b/integration-tests/tests/test_api/test_body/test_post_body_multipart.py @@ -0,0 +1,39 @@ +from io import BytesIO + +from integration_tests import Client +from integration_tests.api.body import post_body_multipart +from integration_tests.models import PostBodyMultipartMultipartData, PostBodyMultipartResponse200 +from integration_tests.types import File + + +def test(): + client = Client("http://localhost:3000") + + a_string = "a test string" + payload = b"some file content" + file_name = "cool_stuff.txt" + mime_type = "application/openapi-python-client" + description = "super descriptive thing" + + response = post_body_multipart.sync_detailed( + client=client, + multipart_data=PostBodyMultipartMultipartData( + a_string=a_string, + file=File( + payload=BytesIO(payload), + file_name=file_name, + mime_type=mime_type, + ), + description=description, + ), + ) + + content = response.parsed + if not isinstance(content, PostBodyMultipartResponse200): + raise AssertionError(f"Received status {response.status_code} from test server with payload: {content!r}") + + assert content.a_string == a_string + assert content.file_name == file_name + assert content.file_content_type == mime_type + assert content.file_data.encode() == payload + assert content.description == description diff --git a/openapi_python_client/__init__.py b/openapi_python_client/__init__.py index 608af8dce..a8860198e 100644 --- a/openapi_python_client/__init__.py +++ b/openapi_python_client/__init__.py @@ -123,7 +123,7 @@ def update(self) -> Sequence[GeneratorError]: """Update an existing project""" if not self.package_dir.is_dir(): - raise FileNotFoundError() + return [GeneratorError(detail=f"Directory {self.package_dir} not found")] print(f"Updating {self.package_name}") shutil.rmtree(self.package_dir) self._create_package() diff --git a/openapi_python_client/templates/model.py.jinja b/openapi_python_client/templates/model.py.jinja index 49246fa31..6f65fb5fa 100644 --- a/openapi_python_client/templates/model.py.jinja +++ b/openapi_python_client/templates/model.py.jinja @@ -68,9 +68,10 @@ class {{ class_name }}: {% for property in model.required_properties + model.optional_properties %} {% if property.template %} {% from "property_templates/" + property.template import transform %} -{{ transform(property, "self." + property.python_name, property.python_name, stringify=multipart) }} +{# Stopped here #} +{{ transform(property, "self." 
+ property.python_name, property.python_name, multipart=multipart) }} {% elif multipart %} -{{ property.python_name }} = self.{{ property.python_name }} if self.{{ property.python_name }} is UNSET else (None, str(self.{{ property.python_name }}), "text/plain") +{{ property.python_name }} = self.{{ property.python_name }} if self.{{ property.python_name }} is UNSET else (None, str(self.{{ property.python_name }}).encode(), "text/plain") {% else %} {{ property.python_name }} = self.{{ property.python_name }} {% endif %} @@ -81,10 +82,10 @@ field_dict: Dict[str, Any] = {} {% if model.additional_properties.template %} {% from "property_templates/" + model.additional_properties.template import transform %} for prop_name, prop in self.additional_properties.items(): - {{ transform(model.additional_properties, "prop", "field_dict[prop_name]", stringify=multipart) | indent(4) }} + {{ transform(model.additional_properties, "prop", "field_dict[prop_name]", multipart=multipart) | indent(4) }} {% elif multipart %} field_dict.update({ - key: (None, str(value), "text/plain") + key: (None, str(value).encode(), "text/plain") for key, value in self.additional_properties.items() }) {% else %} diff --git a/openapi_python_client/templates/property_templates/any_property.py.jinja b/openapi_python_client/templates/property_templates/any_property.py.jinja index 18ccda75a..f2019e4c5 100644 --- a/openapi_python_client/templates/property_templates/any_property.py.jinja +++ b/openapi_python_client/templates/property_templates/any_property.py.jinja @@ -2,6 +2,6 @@ {{ property.python_name }} = {{ source }} {% endmacro %} -{% macro transform(property, source, destination, declare_type=True, stringify=False) %} +{% macro transform(property, source, destination, declare_type=True, multipart=False) %} {{ destination }} = {{ source }} {% endmacro %} diff --git a/openapi_python_client/templates/property_templates/date_property.py.jinja b/openapi_python_client/templates/property_templates/date_property.py.jinja index 7c4cebfbd..c5275bf26 100644 --- a/openapi_python_client/templates/property_templates/date_property.py.jinja +++ b/openapi_python_client/templates/property_templates/date_property.py.jinja @@ -10,16 +10,20 @@ isoparse({{ source }}).date() {% macro check_type_for_construct(property, source) %}isinstance({{ source }}, str){% endmacro %} -{% macro transform(property, source, destination, declare_type=True, stringify=False) %} +{% macro transform(property, source, destination, declare_type=True, multipart=False) %} +{% set transformed = source + ".isoformat()" %} +{% if multipart %} {# Multipart data must be bytes, not str #} +{% set transformed = transformed + ".encode()" %} +{% endif %} {% if property.required %} -{{ destination }} = {{ source }}.isoformat() {% if property.nullable %}if {{ source }} else None {%endif%} +{{ destination }} = {{ transformed }} {% if property.nullable %}if {{ source }} else None {%endif%} {% else %} {{ destination }}{% if declare_type %}: {{ property.get_type_string(json=True) }}{% endif %} = UNSET if not isinstance({{ source }}, Unset): {% if property.nullable %} - {{ destination }} = {{ source }}.isoformat() if {{ source }} else None + {{ destination }} = {{ transformed }} if {{ source }} else None {% else %} - {{ destination }} = {{ source }}.isoformat() + {{ destination }} = {{ transformed }} {% endif %} {% endif %} {% endmacro %} diff --git a/openapi_python_client/templates/property_templates/datetime_property.py.jinja 
diff --git a/openapi_python_client/templates/property_templates/datetime_property.py.jinja b/openapi_python_client/templates/property_templates/datetime_property.py.jinja
index 0984773e0..dd70c0c6f 100644
--- a/openapi_python_client/templates/property_templates/datetime_property.py.jinja
+++ b/openapi_python_client/templates/property_templates/datetime_property.py.jinja
@@ -10,20 +10,24 @@ isoparse({{ source }})
 
 {% macro check_type_for_construct(property, source) %}isinstance({{ source }}, str){% endmacro %}
 
-{% macro transform(property, source, destination, declare_type=True, stringify=False) %}
+{% macro transform(property, source, destination, declare_type=True, multipart=False) %}
+{% set transformed = source + ".isoformat()" %}
+{% if multipart %} {# Multipart data must be bytes, not str #}
+{% set transformed = transformed + ".encode()" %}
+{% endif %}
 {% if property.required %}
 {% if property.nullable %}
-{{ destination }} = {{ source }}.isoformat() if {{ source }} else None
+{{ destination }} = {{ transformed }} if {{ source }} else None
 {% else %}
-{{ destination }} = {{ source }}.isoformat()
+{{ destination }} = {{ transformed }}
 {% endif %}
 {% else %}
 {{ destination }}{% if declare_type %}: {{ property.get_type_string(json=True) }}{% endif %} = UNSET
 if not isinstance({{ source }}, Unset):
 {% if property.nullable %}
-    {{ destination }} = {{ source }}.isoformat() if {{ source }} else None
+    {{ destination }} = {{ transformed }} if {{ source }} else None
 {% else %}
-    {{ destination }} = {{ source }}.isoformat()
+    {{ destination }} = {{ transformed }}
 {% endif %}
 {% endif %}
 {% endmacro %}
diff --git a/openapi_python_client/templates/property_templates/enum_property.py.jinja b/openapi_python_client/templates/property_templates/enum_property.py.jinja
index 340d67359..ffc07dd12 100644
--- a/openapi_python_client/templates/property_templates/enum_property.py.jinja
+++ b/openapi_python_client/templates/property_templates/enum_property.py.jinja
@@ -10,12 +10,12 @@
 
 {% macro check_type_for_construct(property, source) %}isinstance({{ source }}, {{ property.value_type.__name__ }}){% endmacro %}
 
-{% macro transform(property, source, destination, declare_type=True, stringify=False) %}
+{% macro transform(property, source, destination, declare_type=True, multipart=False) %}
 {% set transformed = source + ".value" %}
 {% set type_string = property.get_type_string(json=True) %}
-{% if stringify %}
-    {% set transformed = "(None, str(" + transformed + "), 'text/plain')" %}
-    {% set type_string = "Union[Unset, Tuple[None, str, str]]" %}
+{% if multipart %}
+    {% set transformed = "(None, str(" + transformed + ").encode(), \"text/plain\")" %}
+    {% set type_string = "Union[Unset, Tuple[None, bytes, str]]" %}
 {% endif %}
 {% if property.required %}
 {% if property.nullable %}
diff --git a/openapi_python_client/templates/property_templates/file_property.py.jinja b/openapi_python_client/templates/property_templates/file_property.py.jinja
index e63cac53d..b2fec1fa8 100644
--- a/openapi_python_client/templates/property_templates/file_property.py.jinja
+++ b/openapi_python_client/templates/property_templates/file_property.py.jinja
@@ -12,7 +12,7 @@ File(
 
 {% macro check_type_for_construct(property, source) %}isinstance({{ source }}, bytes){% endmacro %}
 
-{% macro transform(property, source, destination, declare_type=True, stringify=False) %}
+{% macro transform(property, source, destination, declare_type=True, multipart=False) %}
 {% if property.required %}
 {% if property.nullable %}
 {{ destination }} = {{ source }}.to_tuple() if {{ source }} else None
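
For enums, the multipart tuple switches from a str payload to bytes, and the declared type follows suit. A small sketch of the resulting shape with a hypothetical enum:

```python
# Hypothetical enum; only the tuple shape mirrors what the template renders.
from enum import Enum
from typing import Tuple


class Color(str, Enum):
    RED = "red"


a_color = Color.RED
part: Tuple[None, bytes, str] = (None, str(a_color.value).encode(), "text/plain")
assert part == (None, b"red", "text/plain")
```
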
diff --git a/openapi_python_client/templates/property_templates/list_property.py.jinja b/openapi_python_client/templates/property_templates/list_property.py.jinja
index 44dac097e..9c7187837 100644
--- a/openapi_python_client/templates/property_templates/list_property.py.jinja
+++ b/openapi_python_client/templates/property_templates/list_property.py.jinja
@@ -17,10 +17,10 @@ for {{ inner_source }} in (_{{ property.python_name }} or []):
 {% endif %}
 {% endmacro %}
 
-{% macro _transform(property, source, destination, stringify, transform_method) %}
+{% macro _transform(property, source, destination, multipart, transform_method) %}
 {% set inner_property = property.inner_property %}
-{% if stringify %}
-{% set stringified_destination = destination %}
+{% if multipart %}
+{% set multipart_destination = destination %}
 {% set destination = "_temp_" + destination %}
 {% endif %}
 {% if inner_property.template %}
@@ -33,17 +33,17 @@ for {{ inner_source }} in {{ source }}:
 {% else %}
 {{ destination }} = {{ source }}
 {% endif %}
-{% if stringify %}
-{{ stringified_destination }} = (None, json.dumps({{ destination }}), 'application/json')
+{% if multipart %}
+{{ multipart_destination }} = (None, json.dumps({{ destination }}).encode(), 'application/json')
 {% endif %}
 {% endmacro %}
 
 {% macro check_type_for_construct(property, source) %}isinstance({{ source }}, list){% endmacro %}
 
-{% macro transform(property, source, destination, declare_type=True, stringify=False, transform_method="to_dict") %}
+{% macro transform(property, source, destination, declare_type=True, multipart=False, transform_method="to_dict") %}
 {% set inner_property = property.inner_property %}
-{% if stringify %}
-    {% set type_string = "Union[Unset, Tuple[None, str, str]]" %}
+{% if multipart %}
+    {% set type_string = "Union[Unset, Tuple[None, bytes, str]]" %}
 {% else %}
     {% set type_string = property.get_type_string(json=True) %}
 {% endif %}
@@ -52,9 +52,9 @@ for {{ inner_source }} in {{ source }}:
 if {{ source }} is None:
     {{ destination }} = None
 else:
-    {{ _transform(property, source, destination, stringify, transform_method) | indent(4) }}
+    {{ _transform(property, source, destination, multipart, transform_method) | indent(4) }}
 {% else %}
-{{ _transform(property, source, destination, stringify, transform_method) }}
+{{ _transform(property, source, destination, multipart, transform_method) }}
 {% endif %}
 {% else %}
 {{ destination }}{% if declare_type %}: {{ type_string }}{% endif %} = UNSET
@@ -63,9 +63,9 @@ if not isinstance({{ source }}, Unset):
     if {{ source }} is None:
         {{ destination }} = None
     else:
-        {{ _transform(property, source, destination, stringify, transform_method) | indent(8)}}
+        {{ _transform(property, source, destination, multipart, transform_method) | indent(8)}}
 {% else %}
-    {{ _transform(property, source, destination, stringify, transform_method) | indent(4)}}
+    {{ _transform(property, source, destination, multipart, transform_method) | indent(4)}}
 {% endif %}
 {% endif %}
 {% endmacro %}
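
List (and nested model) values in a multipart body are first converted to their JSON-ready form, then serialized and encoded so the part is bytes. A tiny sketch with made-up values:

```python
# Illustrative values only; mirrors the (None, bytes, "application/json") part
# shape the list template now emits for multipart bodies.
import json
from typing import Tuple

_temp_tags = ["red", "green"]  # stands in for the list built by the inner transform
tags: Tuple[None, bytes, str] = (None, json.dumps(_temp_tags).encode(), "application/json")

assert tags[1] == b'["red", "green"]'
```
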
transform_method="to_dict") %} +{% macro transform(property, source, destination, declare_type=True, multipart=False, transform_method="to_dict") %} {% set transformed = source + "." + transform_method + "()" %} -{% if stringify %} - {% set transformed = "(None, json.dumps(" + transformed + "), 'application/json')" %} - {% set type_string = "Union[Unset, Tuple[None, str, str]]" %} +{% if multipart %} + {% set transformed = "(None, json.dumps(" + transformed + ").encode(), 'application/json')" %} + {% set type_string = "Union[Unset, Tuple[None, bytes, str]]" %} {% else %} {% set type_string = property.get_type_string(json=True) %} {% endif %} diff --git a/openapi_python_client/templates/property_templates/union_property.py.jinja b/openapi_python_client/templates/property_templates/union_property.py.jinja index 859207dda..807137c08 100644 --- a/openapi_python_client/templates/property_templates/union_property.py.jinja +++ b/openapi_python_client/templates/property_templates/union_property.py.jinja @@ -35,7 +35,7 @@ def _parse_{{ property.python_name }}(data: object) -> {{ property.get_type_stri {{ property.python_name }} = _parse_{{ property.python_name }}({{ source }}) {% endmacro %} -{% macro transform(property, source, destination, declare_type=True, stringify=False) %} +{% macro transform(property, source, destination, declare_type=True, multipart=False) %} {% if not property.required or property.nullable %} {{ destination }}{% if declare_type %}: {{ property.get_type_string(json=True) }}{% endif %} @@ -61,7 +61,7 @@ elif isinstance({{ source }}, {{ inner_property.get_instance_type_string() }}): else: {% endif %} {% from "property_templates/" + inner_property.template import transform %} - {{ transform(inner_property, source, destination, declare_type=False, stringify=stringify) | indent(4) }} + {{ transform(inner_property, source, destination, declare_type=False, multipart=multipart) | indent(4) }} {% endfor %} {% if property.has_properties_without_templates and (property.inner_properties_with_template() | any or not property.required)%} else: diff --git a/openapi_python_client/templates/types.py.jinja b/openapi_python_client/templates/types.py.jinja index 70daf2af4..bf90d01fd 100644 --- a/openapi_python_client/templates/types.py.jinja +++ b/openapi_python_client/templates/types.py.jinja @@ -1,5 +1,5 @@ """ Contains some shared types for properties """ -from typing import Any, BinaryIO, Generic, MutableMapping, Optional, TextIO, Tuple, TypeVar, Union +from typing import Any, BinaryIO, Generic, MutableMapping, Optional, Tuple, TypeVar import attr @@ -12,14 +12,14 @@ class Unset: UNSET: Unset = Unset() {# Used as `FileProperty._json_type_string` #} -FileJsonType = Tuple[Optional[str], Union[BinaryIO, TextIO], Optional[str]] +FileJsonType = Tuple[Optional[str], BinaryIO, Optional[str]] @attr.s(auto_attribs=True) class File: """ Contains information for file uploads """ - payload: Union[BinaryIO, TextIO] + payload: BinaryIO file_name: Optional[str] = None mime_type: Optional[str] = None diff --git a/pyproject.toml b/pyproject.toml index fe75cb0ce..299d7f0a9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -65,12 +65,17 @@ isort .\ && TASKIPY=true pytest --cov openapi_python_client tests --cov-report=term-missing --basetemp=tests/tmp\ && rm -r tests/tmp\ """ -regen = "python -m end_to_end_tests.regen_golden_record" +regen = """ +task regen_e2e\ +&& task regen_integration\ +""" e2e = "pytest openapi_python_client end_to_end_tests/test_end_to_end.py" re = """ task regen\ && task e2e\ """ 
+regen_e2e = "python -m end_to_end_tests.regen_golden_record" +regen_integration = "openapi-python-client update --url https://raw.githubusercontent.com/openapi-generators/openapi-test-server/main/openapi.json --config integration-tests-config.yaml" docs = "typer openapi_python_client/cli.py utils docs > usage.md" [tool.black] diff --git a/tests/test___init__.py b/tests/test___init__.py index b88c6fec3..85da7b18b 100644 --- a/tests/test___init__.py +++ b/tests/test___init__.py @@ -3,6 +3,7 @@ import httpcore import jinja2 import pytest +from pytest_mock import MockFixture from openapi_python_client import Config, ErrorLevel, GeneratorError, Project @@ -412,15 +413,15 @@ def test_update(self, mocker): project._get_errors.assert_called_once() assert result == project._get_errors.return_value - def test_update_missing_dir(self, mocker): + def test_update_missing_dir(self, mocker: MockFixture): project = make_project() - project.package_dir = mocker.MagicMock() + mocker.patch.object(project, "package_dir") project.package_dir.is_dir.return_value = False - project._build_models = mocker.MagicMock() + mocker.patch.object(project, "_build_models") - with pytest.raises(FileNotFoundError): - project.update() + errs = project.update() + assert len(errs) == 1 project.package_dir.is_dir.assert_called_once() project._build_models.assert_not_called()