diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 52b67100a..f5eb6095e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -54,17 +54,29 @@ If you think that some of the added code is not testable (or testing it would ad 2. If you're modifying the way an existing feature works, make sure an existing test generates the _old_ code in `end_to_end_tests/golden-record`. You'll use this to check for the new code once your changes are complete. 3. If you're improving an error or adding a new error, add a [unit test](#unit-tests) -#### End-to-end tests +#### End-to-end snapshot tests -This project aims to have all "happy paths" (types of code which _can_ be generated) covered by end to end tests (snapshot tests). In order to check code changes against the previous set of snapshots (called a "golden record" here), you can run `pdm e2e`. To regenerate the snapshots, run `pdm regen`. +This project aims to have all "happy paths" (types of code which _can_ be generated) covered by end-to-end tests. There are two types of these: snapshot tests, and unit tests of generated code. -There are 4 types of snapshots generated right now, you may have to update only some or all of these depending on the changes you're making. Within the `end_to_end_tets` directory: +Snapshot tests verify that the generated code is identical to a previously-committed set of snapshots (called a "golden record" here). They are basically regression tests to catch any unintended changes in the generator output. + +In order to check code changes against the previous set of snapshots (called a "golden record" here), you can run `pdm e2e`. To regenerate the snapshots, run `pdm regen`. + +There are 4 types of snapshots generated right now, you may have to update only some or all of these depending on the changes you're making. Within the `end_to_end_tests` directory: 1. `baseline_openapi_3.0.json` creates `golden-record` for testing OpenAPI 3.0 features 2. 
`baseline_openapi_3.1.yaml` is checked against `golden-record` for testing OpenAPI 3.1 features (and ensuring consistency with 3.0) 3. `test_custom_templates` are used with `baseline_openapi_3.0.json` to generate `custom-templates-golden-record` for testing custom templates 4. `3.1_specific.openapi.yaml` is used to generate `test-3-1-golden-record` and test 3.1-specific features (things which do not have a 3.0 equivalent) +#### Unit tests of generated code + +These verify the runtime behavior of the generated code, without making assertions about the exact implementation of the code. For instance, they can verify that JSON data is correctly decoded into model class attributes. + +The tests run the generator against a small API spec (defined inline for each test class), and then import and execute the generated code. This can sometimes identify issues with validation logic, module imports, etc., that might be harder to diagnose via the snapshot tests, especially during development of a new feature. + +See [`end_to_end_tests/generated_code_live_tests`](./end_to_end_tests/generated_code_live_tests). + #### Unit tests > **NOTE**: Several older-style unit tests using mocks exist in this project. These should be phased out rather than updated, as the tests are brittle and difficult to maintain. Only error cases should be tests with unit tests going forward. 
diff --git a/end_to_end_tests/__init__.py b/end_to_end_tests/__init__.py index 1bf33f63f..8c2224e4a 100644 --- a/end_to_end_tests/__init__.py +++ b/end_to_end_tests/__init__.py @@ -1 +1,4 @@ """ Generate a complete client and verify that it is correct """ +import pytest + +pytest.register_assert_rewrite("end_to_end_tests.end_to_end_test_helpers") diff --git a/end_to_end_tests/end_to_end_test_helpers.py b/end_to_end_tests/end_to_end_test_helpers.py new file mode 100644 index 000000000..8a9fe0da5 --- /dev/null +++ b/end_to_end_tests/end_to_end_test_helpers.py @@ -0,0 +1,203 @@ +import importlib +import os +import shutil +from filecmp import cmpfiles, dircmp +from pathlib import Path +import sys +import tempfile +from typing import Any, Callable, Dict, Generator, List, Optional, Set, Tuple + +from attrs import define +import pytest +from click.testing import Result +from typer.testing import CliRunner + +from openapi_python_client.cli import app +from openapi_python_client.utils import snake_case + + +@define +class GeneratedClientContext: + """A context manager with helpers for tests that run against generated client code. + + On entering this context, sys.path is changed to include the root directory of the + generated code, so its modules can be imported. On exit, the original sys.path is + restored, and any modules that were loaded within the context are removed. 
+ """ + + output_path: Path + generator_result: Result + base_module: str + monkeypatch: pytest.MonkeyPatch + old_modules: Optional[Set[str]] = None + + def __enter__(self) -> "GeneratedClientContext": + self.monkeypatch.syspath_prepend(self.output_path) + self.old_modules = set(sys.modules.keys()) + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.monkeypatch.undo() + for module_name in set(sys.modules.keys()) - self.old_modules: + del sys.modules[module_name] + shutil.rmtree(self.output_path, ignore_errors=True) + + def import_module(self, module_path: str) -> Any: + """Attempt to import a module from the generated code.""" + return importlib.import_module(f"{self.base_module}{module_path}") + + +def _run_command( + command: str, + extra_args: Optional[List[str]] = None, + openapi_document: Optional[str] = None, + url: Optional[str] = None, + config_path: Optional[Path] = None, + raise_on_error: bool = True, +) -> Result: + """Generate a client from an OpenAPI document and return the result of the command.""" + runner = CliRunner() + if openapi_document is not None: + openapi_path = Path(__file__).parent / openapi_document + source_arg = f"--path={openapi_path}" + else: + source_arg = f"--url={url}" + config_path = config_path or (Path(__file__).parent / "config.yml") + args = [command, f"--config={config_path}", source_arg] + if extra_args: + args.extend(extra_args) + result = runner.invoke(app, args) + if result.exit_code != 0 and raise_on_error: + raise Exception(result.stdout) + return result + + +def generate_client( + openapi_document: str, + extra_args: List[str] = [], + output_path: str = "my-test-api-client", + base_module: str = "my_test_api_client", + overwrite: bool = True, + raise_on_error: bool = True, +) -> GeneratedClientContext: + """Run the generator and return a GeneratedClientContext for accessing the generated code.""" + full_output_path = Path.cwd() / output_path + if not overwrite: + shutil.rmtree(full_output_path, 
ignore_errors=True) + args = [ + *extra_args, + "--output-path", + str(full_output_path), + ] + if overwrite: + args = [*args, "--overwrite"] + generator_result = _run_command("generate", args, openapi_document, raise_on_error=raise_on_error) + print(generator_result.stdout) + return GeneratedClientContext( + full_output_path, + generator_result, + base_module, + pytest.MonkeyPatch(), + ) + + +def generate_client_from_inline_spec( + openapi_spec: str, + extra_args: List[str] = [], + filename_suffix: Optional[str] = None, + config: str = "", + base_module: str = "testapi_client", + add_openapi_info = True, + raise_on_error: bool = True, +) -> GeneratedClientContext: + """Run the generator on a temporary file created with the specified contents. + + You can also optionally tell it to create a temporary config file. + """ + if add_openapi_info and not openapi_spec.lstrip().startswith("openapi:"): + openapi_spec += """ +openapi: "3.1.0" +info: + title: "testapi" + description: "my test api" + version: "0.0.1" +""" + + output_path = tempfile.mkdtemp() + file = tempfile.NamedTemporaryFile(suffix=filename_suffix, delete=False) + file.write(openapi_spec.encode('utf-8')) + file.close() + + if config: + config_file = tempfile.NamedTemporaryFile(delete=False) + config_file.write(config.encode('utf-8')) + config_file.close() + extra_args = [*extra_args, "--config", config_file.name] + + generated_client = generate_client( + file.name, + extra_args, + output_path, + base_module, + raise_on_error=raise_on_error, + ) + os.unlink(file.name) + if config: + os.unlink(config_file.name) + + return generated_client + + +def with_generated_client_fixture( + openapi_spec: str, + name: str="generated_client", + config: str="", + extra_args: List[str] = [], +): + """Decorator to apply to a test class to create a fixture inside it called 'generated_client'. + + The fixture value will be a GeneratedClientContext created by calling + generate_client_from_inline_spec(). 
+ """ + def _decorator(cls): + def generated_client(self): + with generate_client_from_inline_spec(openapi_spec, extra_args=extra_args, config=config) as g: + yield g + + setattr(cls, name, pytest.fixture(scope="class")(generated_client)) + return cls + + return _decorator + + +def with_generated_code_import(import_path: str, alias: Optional[str] = None): + """Decorator to apply to a test class to create a fixture from a generated code import. + + The 'generated_client' fixture must also be present. + + If import_path is "a.b.c", then the fixture's value is equal to "from a.b import c", and + its name is "c" unless you specify a different name with the alias parameter. + """ + parts = import_path.split(".") + module_name = ".".join(parts[0:-1]) + import_name = parts[-1] + + def _decorator(cls): + nonlocal alias + + def _func(self, generated_client): + module = generated_client.import_module(module_name) + return getattr(module, import_name) + + alias = alias or import_name + _func.__name__ = alias + setattr(cls, alias, pytest.fixture(scope="class")(_func)) + return cls + + return _decorator + + +def assert_model_decode_encode(model_class: Any, json_data: dict, expected_instance: Any): + instance = model_class.from_dict(json_data) + assert instance == expected_instance + assert instance.to_dict() == json_data diff --git a/end_to_end_tests/generated_code_live_tests/README.md b/end_to_end_tests/generated_code_live_tests/README.md new file mode 100644 index 000000000..71580f273 --- /dev/null +++ b/end_to_end_tests/generated_code_live_tests/README.md @@ -0,0 +1,37 @@ +## The `generated_code_live_tests` module + +These are end-to-end tests which run the code generator command, but unlike the other tests in `end_to_end_tests`, they are also unit tests _of the behavior of the generated code_. + +Each test class follows this pattern: + +- Use the decorator `@with_generated_client_fixture`, providing an inline API spec (JSON or YAML) that contains whatever schemas/paths/etc. 
are relevant to this test class. + - The spec can omit the `openapi:` and `info:` blocks, unless those are relevant to the test. + - The decorator creates a temporary file for the inline spec and a temporary directory for the generated code, and runs the client generator. + - It creates a `GeneratedClientContext` object (defined in `end_to_end_test_helpers.py`) to keep track of things like the location of the generated code and the output of the generator command. + - This object is injected into the test class as a fixture called `generated_client`, although most tests will not need to reference the fixture directly. + - `sys.path` is temporarily changed, for the scope of this test class, to allow imports from the generated code. +- Use the decorator `@with_generated_code_import` to make classes or functions from the generated code available to the tests. + - `@with_generated_code_import(".models.MyModel")` would execute `from [client package name].models import MyModel` and inject the imported object into the test class as a fixture called `MyModel`. + - `@with_generated_code_import(".models.MyModel", alias="model1")` would do the same thing, but the fixture would be named `model1`. + - After the test class finishes, these imports are discarded. 
+ +Example: + +```python + +@with_generated_client_fixture( +""" +paths: {} +components: + schemas: + MyModel: + type: object + properties: + stringProp: {"type": "string"} +""") +@with_generated_code_import(".models.MyModel") +class TestSimpleJsonObject: + def test_encoding(self, MyModel): + instance = MyModel(string_prop="abc") + assert instance.to_dict() == {"stringProp": "abc"} +``` diff --git a/end_to_end_tests/generated_code_live_tests/test_docstrings.py b/end_to_end_tests/generated_code_live_tests/test_docstrings.py new file mode 100644 index 000000000..479acb65c --- /dev/null +++ b/end_to_end_tests/generated_code_live_tests/test_docstrings.py @@ -0,0 +1,163 @@ +from typing import Any, List +from end_to_end_tests.end_to_end_test_helpers import ( + with_generated_code_import, + with_generated_client_fixture, +) + + +class DocstringParser: + lines: List[str] + + def __init__(self, item: Any): + self.lines = [line.lstrip() for line in item.__doc__.split("\n")] + + def get_section(self, header_line: str) -> List[str]: + lines = self.lines[self.lines.index(header_line)+1:] + return lines[0:lines.index("")] + + +@with_generated_client_fixture( +""" +paths: {} +components: + schemas: + MyModel: + description: I like this type. + type: object + properties: + reqStr: + type: string + description: This is necessary. + optStr: + type: string + description: This isn't necessary. + undescribedProp: + type: string + required: ["reqStr", "undescribedProp"] +""") +@with_generated_code_import(".models.MyModel") +class TestSchemaDocstrings: + def test_model_description(self, MyModel): + assert DocstringParser(MyModel).lines[0] == "I like this type."
+ + def test_model_properties(self, MyModel): + assert set(DocstringParser(MyModel).get_section("Attributes:")) == { + "req_str (str): This is necessary.", + "opt_str (Union[Unset, str]): This isn't necessary.", + "undescribed_prop (str):", + } + + +@with_generated_client_fixture( +""" +tags: + - name: service1 +paths: + "/simple": + get: + operationId: getSimpleThing + description: Get a simple thing. + responses: + "200": + description: Success! + content: + application/json: + schema: + $ref: "#/components/schemas/GoodResponse" + tags: + - service1 + post: + operationId: postSimpleThing + description: Post a simple thing. + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/Thing" + responses: + "200": + description: Success! + content: + application/json: + schema: + $ref: "#/components/schemas/GoodResponse" + "400": + description: Failure!! + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + tags: + - service1 + "/simple/{id}/{index}": + get: + operationId: getAttributeByIndex + description: Get a simple thing's attribute. + parameters: + - name: id + in: path + required: true + schema: + type: string + description: Which one. + - name: index + in: path + required: true + schema: + type: integer + - name: fries + in: query + required: false + schema: + type: boolean + description: Do you want fries with that? + responses: + "200": + description: Success! + content: + application/json: + schema: + $ref: "#/components/schemas/GoodResponse" + tags: + - service1 + +components: + schemas: + GoodResponse: + type: object + ErrorResponse: + type: object + Thing: + type: object + description: The thing. 
+""") +@with_generated_code_import(".api.service1.get_simple_thing.sync", alias="get_simple_thing_sync") +@with_generated_code_import(".api.service1.post_simple_thing.sync", alias="post_simple_thing_sync") +@with_generated_code_import(".api.service1.get_attribute_by_index.sync", alias="get_attribute_by_index_sync") +class TestEndpointDocstrings: + def test_description(self, get_simple_thing_sync): + assert DocstringParser(get_simple_thing_sync).lines[0] == "Get a simple thing." + + def test_response_single_type(self, get_simple_thing_sync): + assert DocstringParser(get_simple_thing_sync).get_section("Returns:") == [ + "GoodResponse", + ] + + def test_response_union_type(self, post_simple_thing_sync): + returns_line = DocstringParser(post_simple_thing_sync).get_section("Returns:")[0] + assert returns_line in ( + "Union[GoodResponse, ErrorResponse]", + "Union[ErrorResponse, GoodResponse]", + ) + + def test_request_body(self, post_simple_thing_sync): + assert DocstringParser(post_simple_thing_sync).get_section("Args:") == [ + "body (Thing): The thing." 
+ ] + + def test_params(self, get_attribute_by_index_sync): + assert DocstringParser(get_attribute_by_index_sync).get_section("Args:") == [ + "id (str): Which one.", + "index (int):", + "fries (Union[Unset, bool]): Do you want fries with that?", + ] diff --git a/end_to_end_tests/generated_code_live_tests/test_enums.py b/end_to_end_tests/generated_code_live_tests/test_enums.py new file mode 100644 index 000000000..5437e1aee --- /dev/null +++ b/end_to_end_tests/generated_code_live_tests/test_enums.py @@ -0,0 +1,153 @@ + +import pytest +from end_to_end_tests.end_to_end_test_helpers import ( + assert_model_decode_encode, + with_generated_code_import, + with_generated_client_fixture, +) + + +@with_generated_client_fixture( +""" +paths: {} +components: + schemas: + MyEnum: + type: string + enum: ["a", "B"] + MyIntEnum: + type: integer + enum: [2, 3] + MyModel: + properties: + enumProp: {"$ref": "#/components/schemas/MyEnum"} + intEnumProp: {"$ref": "#/components/schemas/MyIntEnum"} + nullableEnumProp: + oneOf: + - {"$ref": "#/components/schemas/MyEnum"} + - type: "null" +""") +@with_generated_code_import(".models.MyEnum") +@with_generated_code_import(".models.MyIntEnum") +@with_generated_code_import(".models.MyModel") +class TestEnumClasses: + def test_enum_classes(self, MyEnum, MyIntEnum): + assert MyEnum.A == MyEnum("a") + assert MyEnum.B == MyEnum("B") + assert MyIntEnum.VALUE_2 == MyIntEnum(2) + assert MyIntEnum.VALUE_3 == MyIntEnum(3) + + def test_enum_prop(self, MyModel, MyEnum, MyIntEnum): + assert_model_decode_encode(MyModel, {"enumProp": "B"}, MyModel(enum_prop=MyEnum.B)) + assert_model_decode_encode(MyModel, {"intEnumProp": 2}, MyModel(int_enum_prop=MyIntEnum.VALUE_2)) + + def test_enum_prop_type(self, MyModel, MyEnum, MyIntEnum): + assert isinstance(MyModel.from_dict({"enumProp": "B"}).enum_prop, MyEnum) + assert isinstance(MyModel.from_dict({"intEnumProp": 2}).int_enum_prop, MyIntEnum) + + def test_nullable_enum_prop(self, MyModel, MyEnum): + 
assert_model_decode_encode( + MyModel, + {"nullableEnumProp": "B"}, + MyModel(nullable_enum_prop=MyEnum.B), + ) + assert_model_decode_encode( + MyModel, + {"nullableEnumProp": None}, + MyModel(nullable_enum_prop=None), + ) + + def test_invalid_values(self, MyModel): + with pytest.raises(ValueError): + MyModel.from_dict({"enumProp": "c"}) + with pytest.raises(ValueError): + MyModel.from_dict({"enumProp": "A"}) + with pytest.raises(ValueError): + MyModel.from_dict({"enumProp": 2}) + with pytest.raises(ValueError): + MyModel.from_dict({"intEnumProp": 0}) + with pytest.raises(ValueError): + MyModel.from_dict({"intEnumProp": "a"}) + + +@with_generated_client_fixture( +""" +paths: {} +components: + schemas: + MyEnum: + type: string + enum: ["a", "A"] + MyIntEnum: + type: integer + enum: [2, 3] + MyModel: + properties: + enumProp: {"$ref": "#/components/schemas/MyEnum"} + intEnumProp: {"$ref": "#/components/schemas/MyIntEnum"} + nullableEnumProp: + oneOf: + - {"$ref": "#/components/schemas/MyEnum"} + - type: "null" +""", + config=""" +literal_enums: true +""", +) +@with_generated_code_import(".models.MyModel") +class TestLiteralEnums: + def test_enum_prop(self, MyModel): + assert_model_decode_encode(MyModel, {"enumProp": "a"}, MyModel(enum_prop="a")) + assert_model_decode_encode(MyModel, {"enumProp": "A"}, MyModel(enum_prop="A")) + assert_model_decode_encode(MyModel, {"intEnumProp": 2}, MyModel(int_enum_prop=2)) + + def test_enum_prop_type(self, MyModel): + assert MyModel.from_dict({"enumProp": "a"}).enum_prop.__class__ is str + assert MyModel.from_dict({"intEnumProp": 2}).int_enum_prop.__class__ is int + + def test_nullable_enum_prop(self, MyModel): + assert_model_decode_encode( + MyModel, + {"nullableEnumProp": "a"}, + MyModel(nullable_enum_prop="a"), + ) + assert_model_decode_encode( + MyModel, + {"nullableEnumProp": None}, + MyModel(nullable_enum_prop=None), + ) + + def test_invalid_values(self, MyModel): + with pytest.raises(TypeError): + 
MyModel.from_dict({"enumProp": "c"}) + with pytest.raises(TypeError): + MyModel.from_dict({"enumProp": 2}) + with pytest.raises(TypeError): + MyModel.from_dict({"intEnumProp": 0}) + with pytest.raises(TypeError): + MyModel.from_dict({"intEnumProp": "a"}) + + +@with_generated_client_fixture( +""" +paths: {} +components: + schemas: + MyModel: + properties: + mustBeErnest: + const: Ernest +""", +) +@with_generated_code_import(".models.MyModel") +class TestConst: + def test_valid_value(self, MyModel): + assert_model_decode_encode( + MyModel, + {"mustBeErnest": "Ernest"}, + MyModel(must_be_ernest="Ernest"), + ) + + def test_invalid_value(self, MyModel): + with pytest.raises(ValueError): + MyModel.from_dict({"mustBeErnest": "Jack"}) diff --git a/end_to_end_tests/generated_code_live_tests/test_property_encoding.py b/end_to_end_tests/generated_code_live_tests/test_property_encoding.py new file mode 100644 index 000000000..b3f27184e --- /dev/null +++ b/end_to_end_tests/generated_code_live_tests/test_property_encoding.py @@ -0,0 +1,154 @@ + +import datetime +import uuid +import pytest +from end_to_end_tests.end_to_end_test_helpers import ( + assert_model_decode_encode, + with_generated_code_import, + with_generated_client_fixture, +) + + +@with_generated_client_fixture( +""" +paths: {} +components: + schemas: + MyModel: + type: object + properties: + req1: {"type": "string"} + req2: {"type": "string"} + opt: {"type": "string"} + required: ["req1", "req2"] + DerivedModel: + allOf: + - $ref: "#/components/schemas/MyModel" + - type: object + properties: + req3: {"type": "string"} + required: ["req3"] +""") +@with_generated_code_import(".models.MyModel") +@with_generated_code_import(".models.DerivedModel") +class TestRequiredAndOptionalProperties: + def test_required_ok(self, MyModel, DerivedModel): + assert_model_decode_encode( + MyModel, + {"req1": "a", "req2": "b"}, + MyModel(req1="a", req2="b"), + ) + assert_model_decode_encode( + DerivedModel, + {"req1": "a", "req2": "b", 
"req3": "c"}, + DerivedModel(req1="a", req2="b", req3="c"), + ) + + def test_required_and_optional(self, MyModel, DerivedModel): + assert_model_decode_encode( + MyModel, + {"req1": "a", "req2": "b", "opt": "c"}, + MyModel(req1="a", req2="b", opt="c"), + ) + assert_model_decode_encode( + DerivedModel, + {"req1": "a", "req2": "b", "req3": "c", "opt": "d"}, + DerivedModel(req1="a", req2="b", req3="c", opt="d"), + ) + + def test_required_missing(self, MyModel, DerivedModel): + with pytest.raises(KeyError): + MyModel.from_dict({"requiredA": "a"}) + with pytest.raises(KeyError): + MyModel.from_dict({"requiredB": "b"}) + with pytest.raises(KeyError): + DerivedModel.from_dict({"requiredA": "a", "requiredB": "b"}) + + +@with_generated_client_fixture( +""" +paths: {} +components: + schemas: + MyModel: + type: object + properties: + booleanProp: {"type": "boolean"} + stringProp: {"type": "string"} + numberProp: {"type": "number"} + intProp: {"type": "integer"} + arrayOfStringsProp: {"type": "array", "items": {"type": "string"}} + anyObjectProp: {"$ref": "#/components/schemas/AnyObject"} + nullProp: {"type": "null"} + AnyObject: + type: object +""") +@with_generated_code_import(".models.MyModel") +@with_generated_code_import(".models.AnyObject") +class TestBasicModelProperties: + def test_decode_encode(self, MyModel, AnyObject): + json_data = { + "booleanProp": True, + "stringProp": "a", + "numberProp": 1.5, + "intProp": 2, + "arrayOfStringsProp": ["b", "c"], + "anyObjectProp": {"d": 3}, + "nullProp": None, + } + expected_any_object = AnyObject() + expected_any_object.additional_properties = {"d": 3} + assert_model_decode_encode( + MyModel, + json_data, + MyModel( + boolean_prop=True, + string_prop="a", + number_prop=1.5, + int_prop=2, + array_of_strings_prop=["b", "c"], + any_object_prop = expected_any_object, + null_prop=None, + ) + ) + + @pytest.mark.parametrize( + "bad_data", + ["a", True, 2, None], + ) + def test_decode_error_not_object(self, bad_data, MyModel): + with 
pytest.raises(Exception): + # Exception is overly broad, but unfortunately in the current implementation, the error + # being raised is AttributeError (because it tries to call bad_data.copy()) which isn't + # very meaningful + MyModel.from_dict(bad_data) + + +@with_generated_client_fixture( +""" +paths: {} +components: + schemas: + MyModel: + type: object + properties: + dateProp: {"type": "string", "format": "date"} + dateTimeProp: {"type": "string", "format": "date-time"} + uuidProp: {"type": "string", "format": "uuid"} +""") +@with_generated_code_import(".models.MyModel") +class TestSpecialStringFormats: + def test_date(self, MyModel): + date_value = datetime.date.today() + json_data = {"dateProp": date_value.isoformat()} + assert_model_decode_encode(MyModel, json_data, MyModel(date_prop=date_value)) + + def test_date_time(self, MyModel): + date_time_value = datetime.datetime.now(datetime.timezone.utc) + json_data = {"dateTimeProp": date_time_value.isoformat()} + assert_model_decode_encode(MyModel, json_data, MyModel(date_time_prop=date_time_value)) + + def test_uuid(self, MyModel): + uuid_value = uuid.uuid1() + json_data = {"uuidProp": str(uuid_value)} + assert_model_decode_encode(MyModel, json_data, MyModel(uuid_prop=uuid_value)) diff --git a/end_to_end_tests/generated_code_live_tests/test_unions.py b/end_to_end_tests/generated_code_live_tests/test_unions.py new file mode 100644 index 000000000..d9df83aaa --- /dev/null +++ b/end_to_end_tests/generated_code_live_tests/test_unions.py @@ -0,0 +1,272 @@ +import pytest + +from end_to_end_tests.end_to_end_test_helpers import ( + assert_model_decode_encode, + with_generated_code_import, + with_generated_client_fixture, +) + + +@with_generated_client_fixture( +""" +paths: {} +components: + schemas: + ThingA: + type: object + properties: + propA: { type: "string" } + required: ["propA"] + ThingB: + type: object + properties: + propB: { type: "string" } + required: ["propB"] + ModelWithUnion: + type: object + 
properties: + thing: + oneOf: + - $ref: "#/components/schemas/ThingA" + - $ref: "#/components/schemas/ThingB" + thingOrString: + oneOf: + - $ref: "#/components/schemas/ThingA" + - type: string +""") +@with_generated_code_import(".models.ThingA") +@with_generated_code_import(".models.ThingB") +@with_generated_code_import(".models.ModelWithUnion") +class TestOneOf: + def test_disambiguate_objects_via_required_properties(self, ThingA, ThingB, ModelWithUnion): + assert_model_decode_encode( + ModelWithUnion, + {"thing": {"propA": "x"}}, + ModelWithUnion(thing=ThingA(prop_a="x")), + ) + assert_model_decode_encode( + ModelWithUnion, + {"thing": {"propB": "x"}}, + ModelWithUnion(thing=ThingB(prop_b="x")), + ) + + def test_disambiguate_object_and_non_object(self, ThingA, ModelWithUnion): + assert_model_decode_encode( + ModelWithUnion, + {"thingOrString": {"propA": "x"}}, + ModelWithUnion(thing_or_string=ThingA(prop_a="x")), + ) + assert_model_decode_encode( + ModelWithUnion, + {"thingOrString": "x"}, + ModelWithUnion(thing_or_string="x"), + ) + + +@with_generated_client_fixture( +""" +paths: {} +components: + schemas: + ThingA: + type: object + properties: + kind: { type: "string" } + name: { type: "string" } + ThingB: + type: object + properties: + kind: { type: "string" } + name: { type: "string" } + ModelWithDiscriminatorImplicitMapping: + type: object + properties: + thing: + oneOf: + - $ref: "#/components/schemas/ThingA" + - $ref: "#/components/schemas/ThingB" + discriminator: + propertyName: kind + ModelWithDiscriminatorExplicitMapping: + type: object + properties: + thing: + oneOf: + - $ref: "#/components/schemas/ThingA" + - $ref: "#/components/schemas/ThingB" + discriminator: + propertyName: kind + mapping: + A: "#/components/schemas/ThingA" + B: "ThingB" + AlsoB: "ThingB" + ModelWithDiscriminatorPartialMapping: + type: object + properties: + thing: + oneOf: + - $ref: "#/components/schemas/ThingA" + - $ref: "#/components/schemas/ThingB" + discriminator: + 
propertyName: kind + mapping: + A: "#/components/schemas/ThingA" + # there's no mapping for ThingB here, so the value for it defaults to "ThingB" +""") +@with_generated_code_import(".models.ThingA") +@with_generated_code_import(".models.ThingB") +@with_generated_code_import(".models.ModelWithDiscriminatorImplicitMapping") +@with_generated_code_import(".models.ModelWithDiscriminatorExplicitMapping") +@with_generated_code_import(".models.ModelWithDiscriminatorPartialMapping") +class TestDiscriminator: + def test_implicit_mapping(self, ThingA, ThingB, ModelWithDiscriminatorImplicitMapping): + assert_model_decode_encode( + ModelWithDiscriminatorImplicitMapping, + {"thing": {"kind": "ThingA", "name": "x"}}, + ModelWithDiscriminatorImplicitMapping(thing=ThingA(kind="ThingA", name="x")), + ) + assert_model_decode_encode( + ModelWithDiscriminatorImplicitMapping, + {"thing": {"kind": "ThingB", "name": "x"}}, + ModelWithDiscriminatorImplicitMapping(thing=ThingB(kind="ThingB", name="x")), + ) + + def test_explicit_mapping(self, ThingA, ThingB, ModelWithDiscriminatorExplicitMapping): + assert_model_decode_encode( + ModelWithDiscriminatorExplicitMapping, + {"thing": {"kind": "A", "name": "x"}}, + ModelWithDiscriminatorExplicitMapping(thing=ThingA(kind="A", name="x")), + ) + assert_model_decode_encode( + ModelWithDiscriminatorExplicitMapping, + {"thing": {"kind": "B", "name": "x"}}, + ModelWithDiscriminatorExplicitMapping(thing=ThingB(kind="B", name="x")), + ) + assert_model_decode_encode( + ModelWithDiscriminatorExplicitMapping, + {"thing": {"kind": "AlsoB", "name": "x"}}, + ModelWithDiscriminatorExplicitMapping(thing=ThingB(kind="AlsoB", name="x")), + ) + + def test_partial_mapping(self, ThingA, ThingB, ModelWithDiscriminatorPartialMapping): + assert_model_decode_encode( + ModelWithDiscriminatorPartialMapping, + {"thing": {"kind": "A", "name": "x"}}, + ModelWithDiscriminatorPartialMapping(thing=ThingA(kind="A", name="x")), + ) + assert_model_decode_encode( + 
ModelWithDiscriminatorPartialMapping, + {"thing": {"kind": "ThingB", "name": "x"}}, + ModelWithDiscriminatorPartialMapping(thing=ThingB(kind="ThingB", name="x")), + ) + + def test_decode_fails_if_property_not_found(self, ModelWithDiscriminatorExplicitMapping): + with pytest.raises(TypeError): + ModelWithDiscriminatorExplicitMapping.from_dict({"thing": {"name": "x"}}) + + def test_decode_fails_if_property_has_unrecognized_value(self, ModelWithDiscriminatorExplicitMapping): + with pytest.raises(TypeError): + ModelWithDiscriminatorExplicitMapping.from_dict({"thing": {"kind": "C", "name": "x"}}) + + +@with_generated_client_fixture( +""" +paths: {} +components: + schemas: + Corgi: + type: object + properties: + dogType: { type: "string" } + name: { type: "string" } + Schnauzer: + type: object + properties: + dogType: { type: "string" } + name: { type: "string" } + Dog: + oneOf: + - $ref: "#/components/schemas/Corgi" + - $ref: "#/components/schemas/Schnauzer" + discriminator: + propertyName: dogType + Condor: + type: object + properties: + birdType: { type: "string" } + name: { type: "string" } + Emu: + type: object + properties: + birdType: { type: "string" } + name: { type: "string" } + Quail: + type: object + properties: + birdType: { type: "string" } + name: { type: "string" } + Sparrow: + type: object + properties: + birdType: { type: "string" } + name: { type: "string" } + BigBird: + oneOf: + - $ref: "#/components/schemas/Condor" + - $ref: "#/components/schemas/Emu" + discriminator: + propertyName: birdType + LittleBird: + oneOf: + - $ref: "#/components/schemas/Quail" + - $ref: "#/components/schemas/Sparrow" + discriminator: + propertyName: birdType + Bird: + oneOf: + - $ref: "#/components/schemas/BigBird" + - $ref: "#/components/schemas/LittleBird" + ModelWithDogOrBird: + type: object + properties: + dogOrBird: + oneOf: + - $ref: "#/components/schemas/Dog" + - $ref: "#/components/schemas/Bird" +""") +@with_generated_code_import(".models.Corgi") 
+@with_generated_code_import(".models.Schnauzer") +@with_generated_code_import(".models.Condor") +@with_generated_code_import(".models.Emu") +@with_generated_code_import(".models.Quail") +@with_generated_code_import(".models.Sparrow") +@with_generated_code_import(".models.ModelWithDogOrBird") +class TestDiscriminatorInNestedUnion: + def test_different_discriminator_properties(self, Schnauzer, Sparrow, ModelWithDogOrBird): + assert_model_decode_encode( + ModelWithDogOrBird, + {"dogOrBird": {"dogType": "Schnauzer", "name": "Fido"}}, + ModelWithDogOrBird(dog_or_bird=Schnauzer(dog_type="Schnauzer", name="Fido")), + ) + assert_model_decode_encode( + ModelWithDogOrBird, + {"dogOrBird": {"birdType": "Sparrow", "name": "Fido"}}, + ModelWithDogOrBird(dog_or_bird=Sparrow(bird_type="Sparrow", name="Fido")), + ) + + def test_same_discriminator_property_in_different_unions(self, Emu, Sparrow, ModelWithDogOrBird): + assert_model_decode_encode( + ModelWithDogOrBird, + {"dogOrBird": {"birdType": "Emu", "name": "Fido"}}, + ModelWithDogOrBird(dog_or_bird=Emu(bird_type="Emu", name="Fido")), + ) + assert_model_decode_encode( + ModelWithDogOrBird, + {"dogOrBird": {"birdType": "Sparrow", "name": "Fido"}}, + ModelWithDogOrBird(dog_or_bird=Sparrow(bird_type="Sparrow", name="Fido")), + ) + assert_model_decode_encode( + ModelWithDogOrBird, + {"dogOrBird": {"birdType": "Sparrow", "name": "Fido"}}, + ModelWithDogOrBird(dog_or_bird=Sparrow(bird_type="Sparrow", name="Fido")), + ) diff --git a/end_to_end_tests/test_end_to_end.py b/end_to_end_tests/test_end_to_end.py index a448a0698..5bc3e4700 100644 --- a/end_to_end_tests/test_end_to_end.py +++ b/end_to_end_tests/test_end_to_end.py @@ -7,6 +7,9 @@ from click.testing import Result from typer.testing import CliRunner +from end_to_end_tests.end_to_end_test_helpers import ( + _run_command, generate_client, generate_client_from_inline_spec, +) from openapi_python_client.cli import app @@ -84,50 +87,24 @@ def run_e2e_test( output_path: str = 
"my-test-api-client", expected_missing: Optional[Set[str]] = None, ) -> Result: - output_path = Path.cwd() / output_path - shutil.rmtree(output_path, ignore_errors=True) - result = generate(extra_args, openapi_document) - gr_path = Path(__file__).parent / golden_record_path - - expected_differences = expected_differences or {} - # Use absolute paths for expected differences for easier comparisons - expected_differences = { - output_path.joinpath(key): value for key, value in expected_differences.items() - } - _compare_directories( - gr_path, output_path, expected_differences=expected_differences, expected_missing=expected_missing - ) - - import mypy.api - - out, err, status = mypy.api.run([str(output_path), "--strict"]) - assert status == 0, f"Type checking client failed: {out}" - - shutil.rmtree(output_path) - return result - + with generate_client(openapi_document, extra_args, output_path) as g: + gr_path = Path(__file__).parent / golden_record_path + + expected_differences = expected_differences or {} + # Use absolute paths for expected differences for easier comparisons + expected_differences = { + g.output_path.joinpath(key): value for key, value in expected_differences.items() + } + _compare_directories( + gr_path, g.output_path, expected_differences=expected_differences, expected_missing=expected_missing + ) -def generate(extra_args: Optional[List[str]], openapi_document: str) -> Result: - """Generate a client from an OpenAPI document and return the path to the generated code""" - _run_command("generate", extra_args, openapi_document) + import mypy.api + out, err, status = mypy.api.run([str(g.output_path), "--strict"]) + assert status == 0, f"Type checking client failed: {out}" -def _run_command(command: str, extra_args: Optional[List[str]] = None, openapi_document: Optional[str] = None, url: Optional[str] = None, config_path: Optional[Path] = None) -> Result: - """Generate a client from an OpenAPI document and return the path to the generated code""" - 
runner = CliRunner() - if openapi_document is not None: - openapi_path = Path(__file__).parent / openapi_document - source_arg = f"--path={openapi_path}" - else: - source_arg = f"--url={url}" - config_path = config_path or (Path(__file__).parent / "config.yml") - args = [command, f"--config={config_path}", source_arg] - if extra_args: - args.extend(extra_args) - result = runner.invoke(app, args) - if result.exit_code != 0: - raise Exception(result.stdout) - return result + return g.generator_result def test_baseline_end_to_end_3_0(): @@ -168,18 +145,17 @@ def test_literal_enums_end_to_end(): ) ) def test_meta(meta: str, generated_file: Optional[str], expected_file: Optional[str]): - output_path = Path.cwd() / "test-3-1-features-client" - shutil.rmtree(output_path, ignore_errors=True) - generate([f"--meta={meta}"], "3.1_specific.openapi.yaml") - - if generated_file and expected_file: - assert (output_path / generated_file).exists() - assert ( - (output_path / generated_file).read_text() == - (Path(__file__).parent / "metadata_snapshots" / expected_file).read_text() - ) - - shutil.rmtree(output_path) + with generate_client( + "3.1_specific.openapi.yaml", + extra_args=[f"--meta={meta}"], + output_path="test-3-1-features-client", + ) as g: + if generated_file and expected_file: + assert (g.output_path / generated_file).exists() + assert ( + (g.output_path / generated_file).read_text() == + (Path(__file__).parent / "metadata_snapshots" / expected_file).read_text() + ) def test_none_meta(): @@ -238,55 +214,61 @@ def test_bad_url(): @pytest.mark.parametrize("document", ERROR_DOCUMENTS, ids=[path.stem for path in ERROR_DOCUMENTS]) def test_documents_with_errors(snapshot, document): - runner = CliRunner() - output_path = Path.cwd() / "test-documents-with-errors" - shutil.rmtree(output_path, ignore_errors=True) - result = runner.invoke(app, ["generate", f"--path={document}", "--fail-on-warning", f"--output-path={output_path}"]) - assert result.exit_code == 1 - assert 
result.stdout.replace(str(output_path), "/test-documents-with-errors") == snapshot - shutil.rmtree(output_path, ignore_errors=True) + with generate_client( + document, + extra_args=["--fail-on-warning"], + output_path="test-documents-with-errors", + raise_on_error=False, + ) as g: + result = g.generator_result + assert result.exit_code == 1 + output = result.stdout.replace(str(g.output_path), "/test-documents-with-errors") + assert output == snapshot def test_custom_post_hooks(): - shutil.rmtree(Path.cwd() / "my-test-api-client", ignore_errors=True) - runner = CliRunner() - openapi_document = Path(__file__).parent / "baseline_openapi_3.0.json" config_path = Path(__file__).parent / "custom_post_hooks.config.yml" - result = runner.invoke(app, ["generate", f"--path={openapi_document}", f"--config={config_path}"]) - assert result.exit_code == 1 - assert "this should fail" in result.stdout - shutil.rmtree(Path.cwd() / "my-test-api-client", ignore_errors=True) + with generate_client( + "baseline_openapi_3.0.json", + [f"--config={config_path}"], + raise_on_error=False, + ) as g: + assert g.generator_result.exit_code == 1 + assert "this should fail" in g.generator_result.stdout def test_generate_dir_already_exists(): project_dir = Path.cwd() / "my-test-api-client" if not project_dir.exists(): project_dir.mkdir() - runner = CliRunner() - openapi_document = Path(__file__).parent / "baseline_openapi_3.0.json" - result = runner.invoke(app, ["generate", f"--path={openapi_document}"]) - assert result.exit_code == 1 - assert "Directory already exists" in result.stdout - shutil.rmtree(Path.cwd() / "my-test-api-client", ignore_errors=True) + try: + runner = CliRunner() + openapi_document = Path(__file__).parent / "baseline_openapi_3.0.json" + result = runner.invoke(app, ["generate", f"--path={openapi_document}"]) + assert result.exit_code == 1 + assert "Directory already exists" in result.stdout + finally: + shutil.rmtree(Path.cwd() / "my-test-api-client", ignore_errors=True) 
@pytest.mark.parametrize( - ("file_name", "content", "expected_error"), + ("suffix", "content", "expected_error"), ( - ("invalid_openapi.yaml", "not a valid openapi document", "Failed to parse OpenAPI document"), - ("invalid_json.json", "Invalid JSON", "Invalid JSON"), - ("invalid_yaml.yaml", "{", "Invalid YAML"), + (".yaml", "not a valid openapi document", "Failed to parse OpenAPI document"), + (".json", "Invalid JSON", "Invalid JSON"), + (".yaml", "{", "Invalid YAML"), ), ids=("invalid_openapi", "invalid_json", "invalid_yaml") ) -def test_invalid_openapi_document(file_name, content, expected_error): - runner = CliRunner() - openapi_document = Path.cwd() / file_name - openapi_document.write_text(content) - result = runner.invoke(app, ["generate", f"--path={openapi_document}"]) - assert result.exit_code == 1 - assert expected_error in result.stdout - openapi_document.unlink() +def test_invalid_openapi_document(suffix, content, expected_error): + with generate_client_from_inline_spec( + content, + filename_suffix=suffix, + add_openapi_info=False, + raise_on_error=False, + ) as g: + assert g.generator_result.exit_code == 1 + assert expected_error in g.generator_result.stdout def test_update_integration_tests(): @@ -294,17 +276,21 @@ def test_update_integration_tests(): source_path = Path(__file__).parent.parent / "integration-tests" temp_dir = Path.cwd() / "test_update_integration_tests" shutil.rmtree(temp_dir, ignore_errors=True) - shutil.copytree(source_path, temp_dir) - config_path = source_path / "config.yaml" - _run_command( - "generate", - extra_args=["--meta=none", "--overwrite", f"--output-path={source_path / 'integration_tests'}"], - url=url, - config_path=config_path - ) - _compare_directories(temp_dir, source_path, expected_differences={}) - import mypy.api - out, err, status = mypy.api.run([str(temp_dir), "--strict"]) - assert status == 0, f"Type checking client failed: {out}" - shutil.rmtree(temp_dir) + try: + shutil.copytree(source_path, temp_dir) + 
config_path = source_path / "config.yaml" + _run_command( + "generate", + extra_args=["--meta=none", "--overwrite", f"--output-path={source_path / 'integration_tests'}"], + url=url, + config_path=config_path + ) + _compare_directories(temp_dir, source_path, expected_differences={}) + import mypy.api + + out, err, status = mypy.api.run([str(temp_dir), "--strict"]) + assert status == 0, f"Type checking client failed: {out}" + + finally: + shutil.rmtree(temp_dir) diff --git a/tests/test_parser/test_properties/test_union.py b/tests/test_parser/test_properties/test_union.py index b3305547b..4e5cccccd 100644 --- a/tests/test_parser/test_properties/test_union.py +++ b/tests/test_parser/test_properties/test_union.py @@ -85,84 +85,6 @@ def _assert_valid_discriminator( ] -def test_discriminator_with_explicit_mapping(config): - from openapi_python_client.parser.properties import Schemas, property_from_data - - schemas = Schemas() - props = {"type": oai.Schema.model_construct(type="string")} - model1, schemas = _make_basic_model("Model1", props, "type", schemas, config) - model2, schemas = _make_basic_model("Model2", props, "type", schemas, config) - data = oai.Schema.model_construct( - oneOf=[ - oai.Reference(ref="#/components/schemas/Model1"), - oai.Reference(ref="#/components/schemas/Model2"), - ], - discriminator=oai.Discriminator.model_construct( - propertyName="type", - mapping={ - # mappings can use either a fully-qualified schema reference or just the schema name - "type1": "#/components/schemas/Model1", - "type2": "Model2", - }, - ), - ) - - p, schemas = property_from_data( - name="MyUnion", required=False, data=data, schemas=schemas, parent_name="parent", config=config - ) - _assert_valid_discriminator(p, [("type", {"type1": model1, "type2": model2})]) - - -def test_discriminator_with_implicit_mapping(config): - from openapi_python_client.parser.properties import Schemas, property_from_data - - schemas = Schemas() - props = {"type": 
oai.Schema.model_construct(type="string")} - model1, schemas = _make_basic_model("Model1", props, "type", schemas, config) - model2, schemas = _make_basic_model("Model2", props, "type", schemas, config) - data = oai.Schema.model_construct( - oneOf=[ - oai.Reference(ref="#/components/schemas/Model1"), - oai.Reference(ref="#/components/schemas/Model2"), - ], - discriminator=oai.Discriminator.model_construct( - propertyName="type", - ), - ) - - p, schemas = property_from_data( - name="MyUnion", required=False, data=data, schemas=schemas, parent_name="parent", config=config - ) - _assert_valid_discriminator(p, [("type", {"Model1": model1, "Model2": model2})]) - - -def test_discriminator_with_partial_explicit_mapping(config): - from openapi_python_client.parser.properties import Schemas, property_from_data - - schemas = Schemas() - props = {"type": oai.Schema.model_construct(type="string")} - model1, schemas = _make_basic_model("Model1", props, "type", schemas, config) - model2, schemas = _make_basic_model("Model2", props, "type", schemas, config) - data = oai.Schema.model_construct( - oneOf=[ - oai.Reference(ref="#/components/schemas/Model1"), - oai.Reference(ref="#/components/schemas/Model2"), - ], - discriminator=oai.Discriminator.model_construct( - propertyName="type", - mapping={ - "type1": "#/components/schemas/Model1", - # no value specified for Model2, so it defaults to just "Model2" - }, - ), - ) - - p, schemas = property_from_data( - name="MyUnion", required=False, data=data, schemas=schemas, parent_name="parent", config=config - ) - _assert_valid_discriminator(p, [("type", {"type1": model1, "Model2": model2})]) - - def test_discriminators_in_nested_unions_same_property(config): from openapi_python_client.parser.properties import Schemas, property_from_data