diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md new file mode 100644 index 0000000..081587c --- /dev/null +++ b/.github/ISSUE_TEMPLATE.md @@ -0,0 +1,15 @@ +* kororaa-graphql-api version: +* Python version: +* Operating System: + +### Description + +Describe what you were trying to get done. +Tell us what happened, what went wrong, and what you expected to happen. + +### What I Did + +``` +Paste the command(s) you ran and the output. +If there was a crash, please include the traceback here. +``` diff --git a/.github/workflows/deploy-to-aws.yaml b/.github/workflows/deploy-to-aws.yaml new file mode 100644 index 0000000..fd5e990 --- /dev/null +++ b/.github/workflows/deploy-to-aws.yaml @@ -0,0 +1,72 @@ +name: deploy-aws-lambda + +on: + push: + branches: + - deploy-test + - main + +jobs: + call-test-workflow: + # https://docs.github.com/en/actions/using-workflows/reusing-workflows#calling-a-reusable-workflow + uses: ./.github/workflows/run-tests.yml + + deploy: + needs: call-test-workflow + runs-on: ubuntu-latest + strategy: + matrix: + node-version: [20] + python-version: ['3.10'] + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install and configure Poetry + uses: GNS-Science/install-poetry@main + with: + virtualenvs-create: true + virtualenvs-in-project: true + installer-parallel: true + + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v3 + with: + node-version: ${{ matrix.node-version }} + check-latest: true + + - name: upgrade NPM + run: npm install --location=global npm@latest + + # - name: Verify NPM + # run: npm doctor + + - name: Install Dependencies + run: npm install + + - name: List packages + run: npm ls + + - name: Serverless Doctor + run: npm run-script sls_doctor + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ env.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: ap-southeast-2 + + - name: Serverless Deploy .... + env: + ENABLE_METRICS: 0 + run: | + if [[ "${{github.base_ref}}" == "main" || "${{github.ref}}" == 'refs/heads/main' ]]; then + STAGE=prod npm run-script deploy + else + STAGE=test npm run-script deploy + fi + diff --git a/.github/workflows/dev.yml b/.github/workflows/dev.yml new file mode 100644 index 0000000..e10ac07 --- /dev/null +++ b/.github/workflows/dev.yml @@ -0,0 +1,13 @@ +name: Dev workflow + +on: + pull_request: + branches: [main, deploy-test] + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +jobs: + call-test-workflow: + #https://docs.github.com/en/actions/using-workflows/reusing-workflows#calling-a-reusable-workflow + uses: ./.github/workflows/run-tests.yml diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..7da57b6 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,89 @@ +# Publish package on main branch if it's tagged with 'v*' + +name: release & publish workflow + +# Controls when the action will run. 
+on: + # Triggers the workflow on push events but only for the master branch + push: + tags: + - 'v*' + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + # This workflow contains a single job called "release" + release: + name: Create Release + runs-on: ubuntu-20.04 + + strategy: + matrix: + python-versions: [3.8] + + # Steps represent a sequence of tasks that will be executed as part of the job + steps: + - name: Get version from tag + id: tag_name + run: | + echo ::set-output name=current_version::${GITHUB_REF#refs/tags/v} + shell: bash + + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - uses: actions/checkout@v2 + + - name: Get Changelog Entry + id: changelog_reader + uses: mindsers/changelog-reader-action@v2 + with: + validation_depth: 10 + version: ${{ steps.tag_name.outputs.current_version }} + path: ./CHANGELOG.md + + - uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-versions }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install poetry + + - name: build documentation + run: | + poetry install -E doc + poetry run mkdocs build + + - name: publish documentation + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./site + + # - name: Build wheels and source tarball + # run: >- + # poetry build + + # - name: show temporary files + # run: >- + # ls -l + + # - name: create github release + # id: create_release + # uses: softprops/action-gh-release@v1 + # env: + # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + # with: + # body: ${{ steps.changelog_reader.outputs.changes }} + # files: dist/*.whl + # draft: false + # prerelease: false + + # - name: publish to PyPI + # uses: pypa/gh-action-pypi-publish@release/v1 + # with: + # user: __token__ + # password: ${{ secrets.PYPI_API_TOKEN }} + # skip_existing: true diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml new file mode 100644 index 0000000..de90bdc --- /dev/null +++ b/.github/workflows/run-tests.yml @@ -0,0 +1,47 @@ +name: Run Tests + +#https://docs.github.com/en/actions/using-workflows/reusing-workflows#calling-a-reusable-workflow + +on: + workflow_call: + +jobs: + run_unit_tests: + runs-on: ubuntu-latest + timeout-minutes: 10 + + strategy: + matrix: + python-versions: ['3.10'] + os: [ubuntu-latest] + + steps: + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-versions }} + + - name: Install and configure Poetry + uses: GNS-Science/install-poetry@main + with: + virtualenvs-create: true + virtualenvs-in-project: true + installer-parallel: true + + - name: Install dependencies w Poetry + run: | + poetry install --no-interaction --no-root --with dev + poetry add tox-gh-actions + + - name: test with tox (uses tox-gh-actions to select correct environment) + run: + poetry run tox + + - name: list files + run: ls -l . 
+ + - uses: codecov/codecov-action@v3 + with: + fail_ci_if_error: false + files: coverage.xml diff --git a/CHANGELOG.md b/CHANGELOG.md index 675d224..202f463 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,16 +1,25 @@ # Changelog -## [0.2.0] 2024-05-30 +## [0.2.0] - 2024-05-30 +### Changed - Complete reset, no more django - all previous code is mothballed + - flake8 config + - new about and version resolvers + +### Added + - Node resolver support for NshmModel + - get_model resolver + - get_models resolver + - source logic tree models and resolvers ## [0.1.3] - 2023-09-04 -## Added +### Added - new about and version resolvers ## [0.1.2] - 2023-09-04 -## Changed +### Changed - configure static_url correctly for both local & AWS ## [0.1.1] - 2023-09-01 diff --git a/nshm_model_graphql_api/nshm_model_graphql_api.py b/nshm_model_graphql_api/nshm_model_graphql_api.py new file mode 100644 index 0000000..7aeebea --- /dev/null +++ b/nshm_model_graphql_api/nshm_model_graphql_api.py @@ -0,0 +1,65 @@ +"""Main module.""" + +import logging +import logging.config +import os + +import yaml +from flask import Flask +from flask_cors import CORS +from flask_graphql import GraphQLView + +# from nshm_model_graphql_api.library_version_check import log_library_info +from nshm_model_graphql_api.schema import schema_root + +LOGGING_CFG = os.getenv("LOGGING_CFG", "nshm_model_graphql_api/logging_aws.yaml") +logger = logging.getLogger(__name__) + +# TESTING = os.getenv('TESTING', False) +# if not TESTING: +# # because in testing, this screws up moto mocking +# log_library_info(['botocore', 'boto3', 'fiona']) + + +def create_app(): + """Function that creates our Flask application.""" + app = Flask(__name__) + CORS(app) + + # app.before_first_request(migrate) + + app.add_url_rule( + "/graphql", + view_func=GraphQLView.as_view( + "graphql", + schema=schema_root, + graphiql=True, + ), + ) + + """ + Setup logging configuration + ref https://fangpenlin.com/posts/2012/08/26/good-logging-practice-in-python/ + """ + if os.path.exists(LOGGING_CFG): + with open(LOGGING_CFG, "rt") as f: + config = yaml.safe_load(f.read()) + logging.config.dictConfig(config) + else: + print("Warning, no logging config found, using basicConfig(INFO)") + logging.basicConfig(level=logging.INFO) + + logger.debug("DEBUG logging enabled") + logger.info("INFO logging enabled") + logger.warning("WARN logging enabled") + logger.error("ERROR logging enabled") + + return app + + +# pragma: no cover +app = create_app() + + +if __name__ == "__main__": + app.run() diff --git a/nshm_model_graphql_api/schema/__init__.py b/nshm_model_graphql_api/schema/__init__.py new file mode 100644 index 0000000..cbe0b7f --- /dev/null +++ b/nshm_model_graphql_api/schema/__init__.py @@ -0,0 +1,3 @@ +"""API schema package.""" + +from .schema_root import schema_root # noqa: F401 diff --git a/nshm_model_graphql_api/schema/nshm_model_schema.py b/nshm_model_graphql_api/schema/nshm_model_schema.py new file mode 100644 index 0000000..29fafda --- /dev/null +++ b/nshm_model_graphql_api/schema/nshm_model_schema.py @@ -0,0 +1,52 @@ +"""Define graphene model for nzshm_model class.""" + +import logging +from typing import Iterator, Optional + +import graphene +import nzshm_model as nm +from graphene import relay + +from .nshm_model_sources_schema import SourceLogicTree + +log = logging.getLogger(__name__) + + +class NshmModel(graphene.ObjectType): + """A custom Node representing an entire model.""" + + class Meta: + interfaces = (relay.Node,) + + version = graphene.String() + title = 
graphene.String() + source_logic_tree = graphene.Field(SourceLogicTree) + + def resolve_id(self, info): + return self.version + + @staticmethod + def resolve_source_logic_tree(root, info, **kwargs): + log.info(f"resolve_source_logic_tree root: {root} kwargs: {kwargs}") + return SourceLogicTree( + model_version=root.version + ) # , branch_sets=get_branch_sets(slt)) + + @classmethod + def get_node(cls, info, version: str): + return get_nshm_model(version) + + +def get_nshm_models() -> Iterator[NshmModel]: + for version in nm.all_model_versions(): + yield NshmModel(version=version) + + +def get_nshm_model(version: Optional[str] = None) -> Optional[NshmModel]: + # model = nm.get_model_version(version) + model = nm.get_model_version(version) if version else nm.get_model_version() + return NshmModel(version=model.version, title=model.title) if model else None + + +def get_current_model_version() -> str: + return nm.CURRENT_VERSION diff --git a/nshm_model_graphql_api/schema/nshm_model_sources_schema.py b/nshm_model_graphql_api/schema/nshm_model_sources_schema.py new file mode 100644 index 0000000..6dc7979 --- /dev/null +++ b/nshm_model_graphql_api/schema/nshm_model_sources_schema.py @@ -0,0 +1,166 @@ +"""Define graphene model for nzshm_model source logic tree classes.""" + +import logging +from functools import lru_cache + +import graphene +import nzshm_model as nm +from graphene import relay +from nzshm_model.logic_tree.source_logic_tree.version2 import logic_tree + +log = logging.getLogger(__name__) + + +@lru_cache +def get_model_by_version(model_version: str) -> nm.NshmModel: + """a caching wrapper for nm""" + return nm.get_model_version(model_version) + + +# TODO: this method belongs on the nzshm-model slt class +@lru_cache +def get_branch_set(model_version, short_name): + slt = get_model_by_version(model_version).source_logic_tree + for bs in slt.branch_sets: + if bs.short_name == short_name: + return bs + assert 0, f"branch set {short_name} was not found" # pragma: no cover + + +# TODO: this method belongs on the nzshm-model slt class +@lru_cache +def get_logic_tree_branch(model_version, short_name, tag): + log.info(f"get_logic_tree_branch: {short_name} tag: {tag}") + branch_set = get_branch_set(model_version, short_name) + for ltb in branch_set.branches: + if ltb.tag == tag: + return ltb + # print(short_name, ltb.tag) + assert 0, f"branch with {tag} was not found" # pragma: no cover + + +class BranchInversionSource(graphene.ObjectType): + nrml_id = graphene.ID() + rupture_set_id = graphene.ID() + inversion_id = graphene.ID() + + +class BranchDistributedSource(graphene.ObjectType): + nrml_id = graphene.ID() + + +class BranchSource(graphene.Union): + class Meta: + types = (BranchInversionSource, BranchDistributedSource) + + +class SourceLogicTreeBranch(graphene.ObjectType): + class Meta: + interfaces = (relay.Node,) + + model_version = graphene.String() + branch_set_short_name = graphene.String() + tag = graphene.String() + weight = graphene.Float() + sources = graphene.List(BranchSource) + + def resolve_id(self, info): + return f"{self.model_version}:{self.branch_set_short_name}:{self.tag}" + + @classmethod + def get_node(cls, info, node_id: str): + model_version, branch_set_short_name, tag = node_id.split(":") + sltb = get_logic_tree_branch(model_version, branch_set_short_name, tag) + return SourceLogicTreeBranch( + model_version=model_version, + branch_set_short_name=branch_set_short_name, + tag=tag, + weight=sltb.weight, + ) + + @staticmethod + def resolve_sources(root, info, **kwargs): 
+ log.info(f"resolve SourceLogicTreeBranch.sources root: {root} kwargs: {kwargs}") + ltb = get_logic_tree_branch( + root.model_version, root.branch_set_short_name, root.tag + ) + for src in ltb.sources: + if isinstance(src, logic_tree.InversionSource): + # print(src) + yield BranchInversionSource( + # model_version=root.model_version, + # branch_set_short_name=root.branch_set_short_name, + nrml_id=src.nrml_id, + rupture_set_id=src.rupture_set_id, + inversion_id=src.inversion_id, + ) + elif isinstance(src, logic_tree.DistributedSource): + yield BranchDistributedSource(nrml_id=src.nrml_id) + else: + raise RuntimeError( + f"got unknown source type :{src}" + ) # pragma: no cover + + +class SourceBranchSet(graphene.ObjectType): + + class Meta: + interfaces = (relay.Node,) + + model_version = graphene.String() + short_name = graphene.String() + long_name = graphene.String() + branches = graphene.List(SourceLogicTreeBranch) + + def resolve_id(self, info): + return f"{self.model_version}:{self.short_name}" + + @classmethod + def get_node(cls, info, node_id: str): + model_version, short_name = node_id.split(":") + bs = get_branch_set(model_version, short_name) + return SourceBranchSet( + model_version=model_version, short_name=short_name, long_name=bs.long_name + ) + + @staticmethod + def resolve_branches(root, info, **kwargs): + log.info(f"resolve_branches root: {root} kwargs: {kwargs}") + bs = get_branch_set(root.model_version, root.short_name) + for ltb in bs.branches: + sltb = SourceLogicTreeBranch( + model_version=root.model_version, + branch_set_short_name=bs.short_name, + weight=ltb.weight, + tag=ltb.tag, + ) + # log.debug(f'sltb {sltb}') + yield sltb + + +class SourceLogicTree(graphene.ObjectType): + """A custom Node representing the source logic tree of a given model.""" + + class Meta: + interfaces = (relay.Node,) + + model_version = graphene.String() + branch_sets = graphene.List(SourceBranchSet) + + def resolve_id(self, info): + return self.model_version + + @classmethod + def get_node(cls, info, model_version: str): + return SourceLogicTree(model_version=model_version) + + @staticmethod + def resolve_branch_sets(root, info, **kwargs): + log.info(f"resolve_branch_sets root: {root} kwargs: {kwargs}") + slt = get_model_by_version(root.model_version).source_logic_tree + for bs in slt.branch_sets: + yield SourceBranchSet( + model_version=root.model_version, + short_name=bs.short_name, + long_name=bs.long_name, + ) diff --git a/nshm_model_graphql_api/schema/schema_root.py b/nshm_model_graphql_api/schema/schema_root.py new file mode 100644 index 0000000..46cfa5a --- /dev/null +++ b/nshm_model_graphql_api/schema/schema_root.py @@ -0,0 +1,49 @@ +"""The main API schema.""" + +import logging +from typing import Optional + +import graphene +from graphene import relay + +from nshm_model_graphql_api import __version__ + +from .nshm_model_schema import ( + NshmModel, + get_current_model_version, + get_nshm_model, + get_nshm_models, +) + +log = logging.getLogger(__name__) + + +class QueryRoot(graphene.ObjectType): + """This is the entry point for all graphql query operations.""" + + node = relay.Node.Field() + about = graphene.String(description="About this API ") + version = graphene.String(description="API version string") + current_model_version = graphene.String() + + get_models = graphene.List(NshmModel) + + def resolve_get_models(root, info, **args): + return get_nshm_models() + + get_model = graphene.Field(NshmModel, version=graphene.String(required=False)) + + def resolve_get_model(root, info, 
version: Optional[str] = None): + return get_nshm_model(version) # if version else get_nshm_model() + + def resolve_current_model_version(root, info, **args): + return get_current_model_version() + + def resolve_about(root, info, **args): + return "Hello, I am nshm_model_graphql_api, version: %s!" % __version__ + + def resolve_version(root, info, **args): + return __version__ + + +schema_root = graphene.Schema(query=QueryRoot, mutation=None, auto_camelcase=False) diff --git a/poetry.lock b/poetry.lock index 0f90c1c..cd4237b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,39 +1,73 @@ # This file is automatically @generated by Poetry 1.8.0 and should not be changed by hand. +[[package]] +name = "aniso8601" +version = "7.0.0" +description = "A library for parsing ISO 8601 strings." +optional = false +python-versions = "*" +files = [ + {file = "aniso8601-7.0.0-py2.py3-none-any.whl", hash = "sha256:d10a4bf949f619f719b227ef5386e31f49a2b6d453004b21f02661ccc8670c7b"}, + {file = "aniso8601-7.0.0.tar.gz", hash = "sha256:513d2b6637b7853806ae79ffaca6f3e8754bdd547048f5ccc1420aec4b714f1e"}, +] + [[package]] name = "black" -version = "22.12.0" +version = "24.4.2" description = "The uncompromising code formatter." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, - {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, - {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, - {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, - {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, - {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, - {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, - {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, - {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, - {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, - {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, - {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, + {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, + {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, + {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, + {file = 
"black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, + {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, + {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, + {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, + {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, + {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, + {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, + {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, + {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, + {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, + {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, + {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, + {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, + {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, + {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, + {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, + {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, + {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, + {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, ] [package.dependencies] click = ">=8.0.0" mypy-extensions = ">=0.4.3" +packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] +[[package]] +name = "blinker" +version = "1.8.2" +description = "Fast, simple object-to-object and broadcast signaling" +optional = false +python-versions = ">=3.8" +files = [ + {file = "blinker-1.8.2-py3-none-any.whl", hash = 
"sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, + {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, +] + [[package]] name = "bump2version" version = "1.0.1" @@ -241,6 +275,57 @@ files = [ flake8 = ">=3" pydocstyle = ">=2.1" +[[package]] +name = "flask" +version = "3.0.3" +description = "A simple framework for building complex web applications." +optional = false +python-versions = ">=3.8" +files = [ + {file = "flask-3.0.3-py3-none-any.whl", hash = "sha256:34e815dfaa43340d1d15a5c3a02b8476004037eb4840b34910c6e21679d288f3"}, + {file = "flask-3.0.3.tar.gz", hash = "sha256:ceb27b0af3823ea2737928a4d99d125a06175b8512c445cbd9a9ce200ef76842"}, +] + +[package.dependencies] +blinker = ">=1.6.2" +click = ">=8.1.3" +itsdangerous = ">=2.1.2" +Jinja2 = ">=3.1.2" +Werkzeug = ">=3.0.0" + +[package.extras] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] + +[[package]] +name = "flask-cors" +version = "4.0.1" +description = "A Flask extension adding a decorator for CORS support" +optional = false +python-versions = "*" +files = [ + {file = "Flask_Cors-4.0.1-py2.py3-none-any.whl", hash = "sha256:f2a704e4458665580c074b714c4627dd5a306b333deb9074d0b1794dfa2fb677"}, + {file = "flask_cors-4.0.1.tar.gz", hash = "sha256:eeb69b342142fdbf4766ad99357a7f3876a2ceb77689dc10ff912aac06c389e4"}, +] + +[package.dependencies] +Flask = ">=0.9" + +[[package]] +name = "flask-graphql" +version = "2.0.1" +description = "Adds GraphQL support to your Flask application" +optional = false +python-versions = "*" +files = [ + {file = "Flask-GraphQL-2.0.1.tar.gz", hash = "sha256:825578c044df436cd74503a38bbd31c919a90acda5e9b6e0e45736964bc5235d"}, +] + +[package.dependencies] +flask = ">=0.7.0" +graphql-core = ">=2.1,<3" +graphql-server-core = ">=1.1,<2" + [[package]] name = "ghp-import" version = "2.1.0" @@ -258,6 +343,81 @@ python-dateutil = ">=2.8.1" [package.extras] dev = ["flake8", "markdown", "twine", "wheel"] +[[package]] +name = "graphene" +version = "2.1.9" +description = "GraphQL Framework for Python" +optional = false +python-versions = "*" +files = [ + {file = "graphene-2.1.9-py2.py3-none-any.whl", hash = "sha256:3d446eb1237c551052bc31155cf1a3a607053e4f58c9172b83a1b597beaa0868"}, + {file = "graphene-2.1.9.tar.gz", hash = "sha256:b9f2850e064eebfee9a3ef4a1f8aa0742848d97652173ab44c82cc8a62b9ed93"}, +] + +[package.dependencies] +aniso8601 = ">=3,<=7" +graphql-core = ">=2.1,<3" +graphql-relay = ">=2,<3" +six = ">=1.10.0,<2" + +[package.extras] +django = ["graphene-django"] +sqlalchemy = ["graphene-sqlalchemy"] +test = ["coveralls", "fastdiff (==0.2.0)", "iso8601", "mock", "promise", "pytest", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytz", "six", "snapshottest"] + +[[package]] +name = "graphql-core" +version = "2.3.2" +description = "GraphQL implementation for Python" +optional = false +python-versions = "*" +files = [ + {file = "graphql-core-2.3.2.tar.gz", hash = "sha256:aac46a9ac524c9855910c14c48fc5d60474def7f99fd10245e76608eba7af746"}, + {file = "graphql_core-2.3.2-py2.py3-none-any.whl", hash = "sha256:44c9bac4514e5e30c5a595fac8e3c76c1975cae14db215e8174c7fe995825bad"}, +] + +[package.dependencies] +promise = ">=2.3,<3" +rx = ">=1.6,<2" +six = ">=1.10.0" + +[package.extras] +gevent = ["gevent (>=1.1)"] +test = ["coveralls (==1.11.1)", "cython (==0.29.17)", "gevent (==1.5.0)", "pyannotate (==1.2.0)", "pytest (==4.6.10)", "pytest-benchmark (==3.2.3)", "pytest-cov (==2.8.1)", "pytest-django (==3.9.0)", "pytest-mock (==2.0.0)", 
"six (==1.14.0)"] + +[[package]] +name = "graphql-relay" +version = "2.0.1" +description = "Relay implementation for Python" +optional = false +python-versions = "*" +files = [ + {file = "graphql-relay-2.0.1.tar.gz", hash = "sha256:870b6b5304123a38a0b215a79eace021acce5a466bf40cd39fa18cb8528afabb"}, + {file = "graphql_relay-2.0.1-py3-none-any.whl", hash = "sha256:ac514cb86db9a43014d7e73511d521137ac12cf0101b2eaa5f0a3da2e10d913d"}, +] + +[package.dependencies] +graphql-core = ">=2.2,<3" +promise = ">=2.2,<3" +six = ">=1.12" + +[[package]] +name = "graphql-server-core" +version = "1.2.0" +description = "GraphQL Server tools for powering your server" +optional = false +python-versions = "*" +files = [ + {file = "graphql-server-core-1.2.0.tar.gz", hash = "sha256:04ee90da0322949f7b49ff6905688e3a21a9efbd5a7d7835997e431a0afdbd11"}, +] + +[package.dependencies] +graphql-core = ">=2.3,<3" +promise = ">=2.3,<3" + +[package.extras] +test = ["pytest (==4.6.9)", "pytest-cov (==2.8.1)"] + [[package]] name = "griffe" version = "0.45.2" @@ -297,6 +457,17 @@ files = [ [package.extras] colors = ["colorama (>=0.4.6)"] +[[package]] +name = "itsdangerous" +version = "2.2.0" +description = "Safely pass data to untrusted environments and back." +optional = false +python-versions = ">=3.8" +files = [ + {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, + {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, +] + [[package]] name = "jinja2" version = "3.1.4" @@ -779,6 +950,22 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "promise" +version = "2.3" +description = "Promises/A+ implementation for Python" +optional = false +python-versions = "*" +files = [ + {file = "promise-2.3.tar.gz", hash = "sha256:dfd18337c523ba4b6a58801c164c1904a9d4d1b1747c7d5dbf45b693a49d93d0"}, +] + +[package.dependencies] +six = "*" + +[package.extras] +test = ["coveralls", "futures", "mock", "pytest (>=2.7.3)", "pytest-benchmark", "pytest-cov"] + [[package]] name = "pycodestyle" version = "2.7.0" @@ -973,6 +1160,16 @@ files = [ [package.dependencies] pyyaml = "*" +[[package]] +name = "rx" +version = "1.6.3" +description = "Reactive Extensions (Rx) for Python" +optional = false +python-versions = "*" +files = [ + {file = "Rx-1.6.3.tar.gz", hash = "sha256:ca71b65d0fc0603a3b5cfaa9e33f5ba81e4aae10a58491133595088d7734b2da"}, +] + [[package]] name = "six" version = "1.16.0" @@ -1044,6 +1241,17 @@ virtualenv = ">=20.25" docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-argparse-cli (>=1.11.1)", "sphinx-autodoc-typehints (>=1.25.2)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.11)"] testing = ["build[virtualenv] (>=1.0.3)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=8.0.2)", "distlib (>=0.3.8)", "flaky (>=3.7)", "hatch-vcs (>=0.4)", "hatchling (>=1.21)", "psutil (>=5.9.7)", "pytest (>=7.4.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-xdist (>=3.5)", "re-assert (>=1.1)", "time-machine (>=2.13)", "wheel (>=0.42)"] +[[package]] +name = "types-pyyaml" +version = "6.0.12.20240311" +description = "Typing stubs for PyYAML" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-PyYAML-6.0.12.20240311.tar.gz", hash = "sha256:a9e0f0f88dc835739b0c1ca51ee90d04ca2a897a71af79de9aec5f38cb0a5342"}, + 
{file = "types_PyYAML-6.0.12.20240311-py3-none-any.whl", hash = "sha256:b845b06a1c7e54b8e5b4c683043de0d9caf205e7434b3edc678ff2411979b8f6"}, +] + [[package]] name = "typing-extensions" version = "4.12.0" @@ -1119,11 +1327,24 @@ files = [ [package.extras] watchmedo = ["PyYAML (>=3.10)"] -[extras] -dev = ["black", "bump2version", "flake8", "isort", "mypy", "pip", "pytest", "tox", "virtualenv"] -test = ["black", "flake8", "flake8-docstrings", "isort", "mypy", "pytest", "pytest-cov"] +[[package]] +name = "werkzeug" +version = "3.0.3" +description = "The comprehensive WSGI web application library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"}, + {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "a57f9d7e971c787847e628a1024f48940243ecc195623d2fe45545baf2b313e5" +content-hash = "a97c919eeca6c319622e5e1b2f1cf5976d4a05f45d0bcfc376ef7fa200e8683f" diff --git a/pyproject.toml b/pyproject.toml index ab56f56..15399f6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,9 +9,16 @@ packages = [{include = "nshm_model_graphql_api"}] [tool.poetry.dependencies] python = "^3.10" +nzshm-model = {git = "https://github.com/GNS-Science/nzshm-model", rev = "pre-release"} +flask = "^3.0.3" +flask-cors = "^4.0.1" +graphene = "<3" +flask-graphql = "^2.0.1" +pyyaml = "^6.0.1" +[tool.poetry.group.dev.dependencies] tox = "^4.2.8" -black = { version = "^22.3"} +black = { version = "^24.3.0"} isort = { version = "^5.8.0"} flake8 = { version = "^3.9.2"} flake8-docstrings = { version = "^1.6.0", optional = true } @@ -19,23 +26,9 @@ pytest = "^7.4.0" pytest-cov = { version = "^2.12.0"} virtualenv = { version = "^20.2.2", optional = true} pip = { version = "^23.2.1", optional = true} - bump2version = {version = "^1.0.1"} mypy = "^1.5.1" -nzshm-model = {git = "https://github.com/GNS-Science/nzshm-model", rev = "pre-release"} - -[tool.poetry.extras] -test = [ - "pytest", - "black", - "isort", - "mypy", - "flake8", - "flake8-docstrings", - "pytest-cov" - ] - -dev = ["tox", "virtualenv", "pip", "mypy", "isort", "black", "pytest", "flake8", "bump2version"] +types-pyyaml = "^6.0.12.20240311" [build-system] requires = ["poetry-core"] diff --git a/serverless.yml b/serverless.yml index b599a30..e629244 100644 --- a/serverless.yml +++ b/serverless.yml @@ -23,14 +23,11 @@ package: - db.sqlite3 - _sqlite3.cpython-310-x86_64-linux-gnu.so - nzshm_model_graphql_api/** - - nshm/** - - pipeline/** - - staticfiles/** custom: #serverless-wsgi settings wsgi: - app: nzshm_model_graphql_api.wsgi.application + app: nshm_model_graphql_api.nshm_model_graphql_api.app packRequirements: false pythonBin: python3 @@ -154,26 +151,3 @@ functions: # - test # - prod -# resources: -# Resources: -# ToshiBucket: -# Type: AWS::S3::Bucket -# Properties: -# BucketName: ${self:custom.s3_bucket} - -# # following on from -# # https://www.serverless.com/blog/build-geosearch-graphql-api-aws-appsync-elasticsearch -# ElasticSearchInstance: -# Type: AWS::Elasticsearch::Domain -# Properties: -# ElasticsearchVersion: 6.2 -# DomainName: "${self:custom.esDomainName}" -# EBSOptions: -# EBSEnabled: true -# VolumeType: gp2 -# VolumeSize: 10 -# ElasticsearchClusterConfig: -# InstanceType: 
t2.small.elasticsearch -# InstanceCount: 1 -# DedicatedMasterEnabled: false -# ZoneAwarenessEnabled: false \ No newline at end of file diff --git a/setup.cfg b/setup.cfg index d2c4aac..64de18e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -5,6 +5,7 @@ ignore = E203, E266, W503 docstring-convention = google per-file-ignores = __init__.py:F401, tests/*.py: D100,D101,D102,D103 + nshm_model_graphql_api/schema/*.py: D102 exclude = .git, __pycache__, setup.py, @@ -32,7 +33,8 @@ ignore_errors = True [coverage:run] # uncomment the following to omit files during running -#omit = +omit = nshm_model_graphql_api/nshm_model_graphql_api.py + [coverage:report] exclude_lines = pragma: no cover @@ -64,7 +66,7 @@ setenv = TESTING = 1 commands = # pytest nshm_model_graphql_api tests - pytest --cov=pipeline --cov=nshm --cov-branch --cov-report=xml --cov-report=term-missing + pytest --cov=pipeline --cov=nshm_model_graphql_api --cov-branch --cov-report=xml --cov-report=term-missing [testenv:format] allowlist_externals = diff --git a/tests/test_nzshm_model_import.py b/tests/test_nzshm_model_import.py index 13f327b..054bae5 100644 --- a/tests/test_nzshm_model_import.py +++ b/tests/test_nzshm_model_import.py @@ -16,26 +16,10 @@ def test_available_versions(): [("NSHM_v1.0.0", "NSHM_v1.0.0"), ("NSHM_v1.0.4", "NSHM_v1.0.4")], ) def test_version_config(model, model_version): - mod = nzshm_model.NshmModel.get_model_version(model) + mod = nzshm_model.get_model_version(model) # mod = nzshm_model.get_model_version(model) assert mod.version == model_version assert mod.slt_config is not None assert mod.slt_config.logic_tree_permutations is not None - - -def test_get_model_version_unknown(): - with pytest.raises(ValueError, match="XXX is not a valid model version."): - nzshm_model.get_model_version("XXX") - - -@pytest.mark.parametrize("model_version", [ver for ver in nzshm_model.versions.keys()]) -def test_source_logic_tree_in_all_models(model_version): - current_model = nzshm_model.get_model_version(model_version) - slt = current_model.source_logic_tree - - old_api_fslt = slt.fault_system_lts[0] - new_api_fslt = slt.branch_sets[0] - - assert old_api_fslt is new_api_fslt diff --git a/tests/test_schema_info.py b/tests/test_schema_info.py new file mode 100644 index 0000000..2184939 --- /dev/null +++ b/tests/test_schema_info.py @@ -0,0 +1,43 @@ +import pytest +from graphene.test import Client + +from nshm_model_graphql_api import __version__, schema + + +@pytest.fixture(scope="module") +def client(): + return Client(schema.schema_root) + + +def test_get_about(client): + QUERY = """ + query { + about + } + """ + executed = client.execute(QUERY) + print(executed) + assert "nshm_model_graphql_api" in executed["data"]["about"] + assert __version__ in executed["data"]["about"] + + +def test_get_version(client): + QUERY = """ + query { + version + } + """ + executed = client.execute(QUERY) + print(executed) + assert __version__ in executed["data"]["version"] + + +def test_get_current_model_version(client): + QUERY = """ + query { + current_model_version + } + """ + executed = client.execute(QUERY) + print(executed) + assert executed["data"]["current_model_version"] == "NSHM_v1.0.4" diff --git a/tests/test_schema_model_source_logic_tree.py b/tests/test_schema_model_source_logic_tree.py new file mode 100644 index 0000000..7792a3c --- /dev/null +++ b/tests/test_schema_model_source_logic_tree.py @@ -0,0 +1,88 @@ +import pytest +from graphene.test import Client + +from nshm_model_graphql_api import schema + +# from graphql_relay import 
to_global_id + + +@pytest.fixture(scope="module") +def client(): + return Client(schema.schema_root) + + +@pytest.mark.parametrize( + "model_version", + ["NSHM_v1.0.0", "NSHM_v1.0.4"], +) +def test_get_model_and_branch_sets(client, model_version): + QUERY = ( + """ + query { + get_model(version: "%s") + { + source_logic_tree { + branch_sets { + __typename + model_version + short_name + long_name + } + } + ... on Node { + id + } + } + } + """ + % model_version + ) + executed = client.execute(QUERY) + print(executed) + branch_sets = executed["data"]["get_model"]["source_logic_tree"]["branch_sets"] + assert branch_sets[0]["__typename"] == "SourceBranchSet" + assert branch_sets[0]["model_version"] == model_version + assert branch_sets[0]["short_name"] == "PUY" + + +@pytest.mark.parametrize( + "model_version", + ["NSHM_v1.0.0", "NSHM_v1.0.4"], +) +def test_get_model_and_branch_set_branches(client, model_version): + QUERY = ( + """ + query { + get_model(version: "%s") + { + source_logic_tree { + branch_sets { + short_name + branches { + __typename + branch_set_short_name + tag + model_version + weight + } + } + } + ... on Node { + id + } + } + } + """ + % model_version + ) + executed = client.execute(QUERY) + print(executed) + branch_sets = executed["data"]["get_model"]["source_logic_tree"]["branch_sets"] + assert branch_sets[0]["short_name"] == "PUY" + assert branch_sets[0]["branches"][0]["weight"] <= 1.0 + assert ( + branch_sets[0]["branches"][0]["tag"] == "[dm0.7, bN[0.902, 4.6], C4.0, s0.28]" + ) + assert branch_sets[0]["branches"][0]["__typename"] == "SourceLogicTreeBranch" + assert branch_sets[0]["branches"][0]["model_version"] == model_version + assert branch_sets[0]["branches"][0]["branch_set_short_name"] == "PUY" diff --git a/tests/test_schema_models.py b/tests/test_schema_models.py new file mode 100644 index 0000000..f02d1ce --- /dev/null +++ b/tests/test_schema_models.py @@ -0,0 +1,62 @@ +import pytest +from graphene.test import Client + +from nshm_model_graphql_api import schema + + +@pytest.fixture(scope="module") +def client(): + return Client(schema.schema_root) + + +def test_get_models(client): + QUERY = """ + query { + get_models { + version + } + } + """ + executed = client.execute(QUERY) + print(executed) + assert executed["data"]["get_models"][0]["version"] == "NSHM_v1.0.0" + + +def test_get_model_default(client): + QUERY = """ + query { + get_model + { + version + } + } + """ + executed = client.execute(QUERY) + print(executed) + assert executed["data"]["get_model"]["version"] == "NSHM_v1.0.4" + + +@pytest.mark.parametrize( + "model_version", + ["NSHM_v1.0.0", "NSHM_v1.0.4"], +) +def test_get_model(client, model_version): + QUERY = ( + """ + query { + get_model(version: "%s") + { + __typename + version + ... 
on Node { + id + } + } + } + """ + % model_version + ) + executed = client.execute(QUERY) + print(executed) + assert executed["data"]["get_model"]["version"] == model_version + assert executed["data"]["get_model"]["__typename"] == "NshmModel" diff --git a/tests/test_schema_models_as_relay_node.py b/tests/test_schema_models_as_relay_node.py new file mode 100644 index 0000000..581c97f --- /dev/null +++ b/tests/test_schema_models_as_relay_node.py @@ -0,0 +1,241 @@ +import pytest +from graphene.test import Client +from graphql_relay import to_global_id + +from nshm_model_graphql_api import schema + + +@pytest.fixture(scope="module") +def client(): + return Client(schema.schema_root) + + +@pytest.mark.parametrize( + "version", + ["NSHM_v1.0.0", "NSHM_v1.0.4"], +) +def test_get_model_as_node(client, version): + QUERY = """ + query { + node(id: "%s") + { + ... on Node { + id + } + ... on NshmModel { + version + title + } + } + } + """ % to_global_id( + "NshmModel", version + ) + executed = client.execute(QUERY) + print(executed) + assert executed["data"]["node"]["version"] == version + assert executed["data"]["node"]["title"] is not None + assert executed["data"]["node"]["id"] == to_global_id("NshmModel", version) + + +@pytest.mark.parametrize( + "model_version", + ["NSHM_v1.0.0", "NSHM_v1.0.4"], +) +def test_get_model_SourceLogicTree_as_node(client, model_version): + QUERY = """ + query { + node(id: "%s") + { + ... on Node { + id + } + ... on SourceLogicTree { + model_version + } + } + } + """ % to_global_id( + "SourceLogicTree", model_version + ) + print(QUERY) + executed = client.execute(QUERY) + print(executed) + assert executed["data"]["node"]["model_version"] == model_version + assert executed["data"]["node"]["id"] == to_global_id( + "SourceLogicTree", model_version + ) + + +@pytest.mark.parametrize( + "model_version, short_name, long_name", + [ + ("NSHM_v1.0.0", "CRU", "Crustal"), + ("NSHM_v1.0.0", "PUY", "Puysegur"), + ("NSHM_v1.0.4", "CRU", "Crustal"), + ("NSHM_v1.0.4", "PUY", "Puysegur"), + ], +) +def test_get_model_SourceBranchSet_as_node( + client, model_version, short_name, long_name +): + QUERY = """ + query { + node(id: "%s") + { + ... on Node { + id + } + ... on SourceBranchSet { + model_version + short_name + long_name + } + + } + } + """ % to_global_id( + "SourceBranchSet", f"{model_version}:{short_name}" + ) + executed = client.execute(QUERY) + print(executed) + assert executed["data"]["node"]["model_version"] == model_version + assert executed["data"]["node"]["short_name"] == short_name + assert executed["data"]["node"]["long_name"] == long_name + assert executed["data"]["node"]["id"] == to_global_id( + "SourceBranchSet", f"{model_version}:{short_name}" + ) + + +@pytest.mark.parametrize( + "model_version, branch_set_short_name, tag, weight", + [ + ( + "NSHM_v1.0.0", + "CRU", + "[dmgeologic, tdFalse, bN[1.089, 4.6], C4.2, s1.0]", + 0.00541000379473566, + ), + ("NSHM_v1.0.0", "PUY", "[dm0.7, bN[0.902, 4.6], C4.0, s0.28]", 0.21), + ( + "NSHM_v1.0.4", + "CRU", + "[dmgeologic, tdFalse, bN[1.089, 4.6], C4.2, s1.41]", + 0.00286782725429677, + ), + ("NSHM_v1.0.4", "PUY", "[dm0.7, bN[0.902, 4.6], C4.0, s0.28]", 0.21), + ], +) +def test_get_model_SourceLogicTreeBranch_as_node( + client, model_version, branch_set_short_name, tag, weight +): + QUERY = """ + query { + node(id: "%s") + { + ... on Node { + id + } + ... on SourceLogicTreeBranch { + model_version + branch_set_short_name + tag + weight + sources { + ... 
on BranchInversionSource { + nrml_id + } + } + } + + } + } + """ % to_global_id( + "SourceLogicTreeBranch", f"{model_version}:{branch_set_short_name}:{tag}" + ) + executed = client.execute(QUERY) + print(executed) + assert executed["data"]["node"]["id"] == to_global_id( + "SourceLogicTreeBranch", f"{model_version}:{branch_set_short_name}:{tag}" + ) + + assert executed["data"]["node"]["model_version"] == model_version + assert executed["data"]["node"]["branch_set_short_name"] == branch_set_short_name + assert executed["data"]["node"]["tag"] == tag + assert executed["data"]["node"]["weight"] == weight + + +# @pytest.mark.parametrize( +# "model_version, branch_set_short_name, nrml_id, rupture_set_id, error", +# [ +# ( +# "NSHM_v1.0.0", +# "CRU", +# "SW52ZXJzaW9uU29sdXRpb25Ocm1sOjEyMDg5Mg==", +# "RmlsZToxMDAwODc=", +# None, +# ), +# ( +# "NSHM_v1.0.0", +# "PUY", +# "SW52ZXJzaW9uU29sdXRpb25Ocm1sOjExODcxNw==", +# "RmlsZToxNzU3My4wQUYzU1o=", +# None, +# ), +# ( +# "NSHM_v1.0.4", +# "CRU", +# "SW52ZXJzaW9uU29sdXRpb25Ocm1sOjEyOTE0ODY=", +# "RmlsZToxMDAwODc=", +# None, +# ), +# ( +# "NSHM_v1.0.0", +# "CRU", +# "SW52ZXJzaW9uU29sdXRpb25Ocm1sOjEyOTE0ODY=", +# "", +# "`NSHM_v1.0.0:CRU:1291486` was not found", +# ), +# ], +# ) +# def test_get_model_BranchInversionSource_as_node( +# client, model_version, branch_set_short_name, nrml_id, rupture_set_id, error +# ): +# QUERY = """ +# query { +# node(id: "%s") +# { +# ... on Node { +# id +# } +# ... on BranchInversionSource { +# model_version +# branch_set_short_name +# nrml_id +# rupture_set_id +# } + +# } +# } +# """ % to_global_id( +# "BranchInversionSource", +# f"{model_version}:{branch_set_short_name}:{from_global_id(nrml_id)[1]}", +# ) + +# print(QUERY) +# executed = client.execute(QUERY) +# print(executed) + +# if error: +# assert error in executed["errors"][0]["message"] +# else: +# assert executed["data"]["node"]["id"] == to_global_id( +# "BranchInversionSource", +# f"{model_version}:{branch_set_short_name}:{from_global_id(nrml_id)[1]}", +# ) +# assert executed["data"]["node"]["model_version"] == model_version +# assert ( +# executed["data"]["node"]["branch_set_short_name"] == branch_set_short_name +# ) +# assert executed["data"]["node"]["nrml_id"] == nrml_id +# assert executed["data"]["node"]["rupture_set_id"] == rupture_set_id
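
Below is a minimal usage sketch (not part of the diff above) showing how the new schema added under `nshm_model_graphql_api/schema/` can be exercised from a standalone script. It reuses the same `graphene.test.Client` that the unit tests use, and the query shape mirrors `tests/test_schema_model_source_logic_tree.py`; field names stay snake_case because `schema_root` is built with `auto_camelcase=False`. The model version string is just an example value taken from the tests.

```
# Usage sketch: query the new NshmModel / SourceLogicTree schema directly,
# without going through the Flask app. Assumes the package and nzshm-model
# are installed as declared in pyproject.toml.
from graphene.test import Client

from nshm_model_graphql_api import schema

client = Client(schema.schema_root)

QUERY = """
query {
  get_model(version: "NSHM_v1.0.4") {
    version
    title
    source_logic_tree {
      branch_sets {
        short_name
        long_name
      }
    }
  }
}
"""

result = client.execute(QUERY)
# Each branch set (e.g. PUY, CRU) is resolved lazily from the nzshm-model
# source logic tree by SourceLogicTree.resolve_branch_sets.
for branch_set in result["data"]["get_model"]["source_logic_tree"]["branch_sets"]:
    print(branch_set["short_name"], "-", branch_set["long_name"])
```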