diff --git a/.github/dco.yml b/.github/dco.yml new file mode 100644 index 000000000..ec724591a --- /dev/null +++ b/.github/dco.yml @@ -0,0 +1,3 @@ +--- +require: + members: false diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 000000000..21ee444c8 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,22 @@ +--- +version: 2 +updates: + + # Maintain GitHub Actions dependencies + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + groups: + actions: + patterns: + - "actions/*" + open-pull-requests-limit: 5 + labels: + - "bot" + - "automation 🦾" # Required for enforce-change-type-label.yaml check + commit-message: + prefix: "[CHORE](deps)" + include: "scope" + cooldown: + default-days: 7 diff --git a/.github/workflows/check-python-code.yaml b/.github/workflows/check-python-code.yaml index 97d37eb4b..b3edac612 100644 --- a/.github/workflows/check-python-code.yaml +++ b/.github/workflows/check-python-code.yaml @@ -1,7 +1,7 @@ name: Check Python package code on: - pull_request_target: + pull_request: paths: - 'packages/**' - 'pyproject.toml' @@ -13,25 +13,59 @@ on: - 'pyproject.toml' - 'uv.lock' +permissions: + contents: read + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + jobs: check: + name: Check (${{ matrix.resolution }}, py${{ matrix.python }}) runs-on: ubuntu-latest - if: github.event.pull_request.head.repo.full_name == github.repository + + strategy: + fail-fast: false + matrix: + # Default resolution exercises the committed lock against every + # supported Python minor version. The lowest-direct cell pins each + # direct dependency to its declared floor (see UV_RESOLUTION below) + # and runs only on the Python floor. 
+ python: ["3.10", "3.11", "3.12", "3.13", "3.14"] + resolution: [default] + include: + - python: "3.10" + resolution: lowest-direct + + permissions: + contents: read steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: - ref: ${{ github.event.pull_request.head.sha }} + persist-credentials: false - name: Install uv - uses: astral-sh/setup-uv@v4 + uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0 with: version: "latest" - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0 with: - python-version: "3.10" + python-version: ${{ matrix.python }} + + # UV_RESOLUTION=lowest-direct makes `uv sync` re-resolve every direct + # dependency to the lowest version permitted by pyproject.toml. This + # exercises the declared floor (e.g. pydantic==2.12.0) instead of + # whatever the committed lock happens to point at. Failures here mean + # a direct dep's minimum needs to be bumped. Set via GITHUB_ENV only + # in the relevant cell so default cells run with no UV_RESOLUTION at + # all -- otherwise an empty value is rejected by uv. 
+ - name: Configure resolution + if: matrix.resolution == 'lowest-direct' + run: echo "UV_RESOLUTION=lowest-direct" >> "$GITHUB_ENV" - name: Run make check run: make check diff --git a/.github/workflows/check-python-package-versions.yaml b/.github/workflows/check-python-package-versions.yaml index c8e4f2449..0ea47f1d3 100644 --- a/.github/workflows/check-python-package-versions.yaml +++ b/.github/workflows/check-python-package-versions.yaml @@ -1,19 +1,24 @@ name: Check Python package version numbers on: - pull_request_target: + pull_request: paths: - '**/pyproject.toml' - 'packages/**/__about__.py' permissions: - id-token: write contents: read +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + jobs: check: - if: github.event.pull_request.head.repo.full_name == github.repository uses: ./.github/workflows/reusable-check-python-package-versions.yaml + permissions: + id-token: write # Required for AWS CodeArtifact OIDC authentication + contents: read with: before_commit: ${{ github.event.pull_request.base.sha }} after_commit: ${{ github.event.pull_request.head.sha }} diff --git a/.github/workflows/copy-latest-docs-to-staging.yaml b/.github/workflows/copy-latest-docs-to-staging.yaml deleted file mode 100644 index 5b7789032..000000000 --- a/.github/workflows/copy-latest-docs-to-staging.yaml +++ /dev/null @@ -1,66 +0,0 @@ ---- -name: Publish docs to staging website (/latest/) - -on: - push: - branches: [dev] - - # Allow running from the actions tab - workflow_dispatch: - -permissions: - id-token: write - contents: read - -jobs: - publish: - runs-on: ubuntu-latest - steps: - - name: Check out the schema repository - uses: actions/checkout@v4 - - - name: Set up Node.js - uses: actions/setup-node@v4 - with: - node-version: "20" - - - name: Checkout Documentation - uses: actions/checkout@v4 - with: - repository: OvertureMaps/docs - ref: main - path: docusaurus - - - name: Copy Examples, Schema, & 
Docs - run: | - mkdir -p docusaurus/docs/_examples - cp -R examples/* docusaurus/docs/_examples/ - mkdir -p docusaurus/docs/_schema - cp -R schema/* docusaurus/docs/_schema/ - cp -R docs/schema docusaurus/docs/ - - - name: Build Docusaurus website - env: - DOCUSAURUS_URL: https://dfhx9f55j8eg5.cloudfront.net/ - DOCUSAURUS_BASE_URL: /latest/ - run: | - cd docusaurus - npm install --prefer-dedupe - npm run docusaurus build - - - name: Fetch AWS credentials - uses: aws-actions/configure-aws-credentials@v4 - with: - role-to-assume: arn:aws:iam::207370808101:role/Overture_GitHub_schema_Publish_Docs_Staging - aws-region: us-east-2 - - - name: Copy Docusaurus website to staging bucket /latest/ - run: | - aws --region us-east-2 s3 sync --delete --storage-class INTELLIGENT_TIERING docusaurus/build s3://overture-schema-docs-static-staging-us-east-2/latest - - - name: Purge CDN cache - run: | - aws cloudfront create-invalidation --distribution-id E3L106P8HVBE9L --paths "/latest/*" - - - name: Publish URL - run: echo "### [https://dfhx9f55j8eg5.cloudfront.net/latest/schema/](https://dfhx9f55j8eg5.cloudfront.net/latest/schema/)" >> $GITHUB_STEP_SUMMARY diff --git a/.github/workflows/copy-pr-docs-to-staging.yaml b/.github/workflows/copy-pr-docs-to-staging.yaml deleted file mode 100644 index ec11fa978..000000000 --- a/.github/workflows/copy-pr-docs-to-staging.yaml +++ /dev/null @@ -1,65 +0,0 @@ ---- -name: Publish docs to staging website (for PR) -run-name: Publish schema documentation to staging website - -on: [pull_request] - -permissions: - id-token: write - contents: read - -jobs: - publish: - environment: - name: staging - url: https://dfhx9f55j8eg5.cloudfront.net/pr/${{github.event.number}}/schema - runs-on: ubuntu-latest - steps: - - name: Check out the schema repository - uses: actions/checkout@v4 - - - name: Set up Node.js - uses: actions/setup-node@v4 - with: - node-version: "20" - - - name: Checkout Documentation - uses: actions/checkout@v4 - with: - repository: 
OvertureMaps/docs - ref: main - path: docusaurus - - - name: Copy Examples, Schema, & Docs - run: | - mkdir -p docusaurus/docs/_examples - cp -R examples/* docusaurus/docs/_examples/ - mkdir -p docusaurus/docs/_schema - cp -R schema/* docusaurus/docs/_schema/ - cp -R docs/schema docusaurus/docs/ - - - name: Build Docusaurus website - env: - DOCUSAURUS_URL: https://dfhx9f55j8eg5.cloudfront.net/ - DOCUSAURUS_BASE_URL: /pr/${{github.event.number}}/ - run: | - cd docusaurus - npm install --prefer-dedupe - npm run docusaurus build - - - name: Fetch AWS credentials - uses: aws-actions/configure-aws-credentials@v4 - with: - role-to-assume: arn:aws:iam::207370808101:role/Overture_GitHub_schema_Publish_Docs_Staging - aws-region: us-east-2 - - - name: Copy Docusaurus website to staging bucket /pr/${{github.event.number}}/ - run: | - aws --region us-east-2 s3 sync --delete --storage-class INTELLIGENT_TIERING docusaurus/build s3://overture-schema-docs-static-staging-us-east-2/pr/${{github.event.number}} - - - name: Purge CDN cache - run: | - aws cloudfront create-invalidation --distribution-id E3L106P8HVBE9L --paths "/pr/${{github.event.number}}/*" - - - name: Publish URL - run: echo "View preview page at [https://dfhx9f55j8eg5.cloudfront.net/pr/${{github.event.number}}/](https://dfhx9f55j8eg5.cloudfront.net/pr/${{github.event.number}}/schema)" >> $GITHUB_STEP_SUMMARY diff --git a/.github/workflows/enforce-change-type-label.yaml b/.github/workflows/enforce-change-type-label.yaml index e76cc5b9d..9f476feec 100644 --- a/.github/workflows/enforce-change-type-label.yaml +++ b/.github/workflows/enforce-change-type-label.yaml @@ -4,12 +4,24 @@ on: pull_request: types: [opened, edited, labeled, unlabeled, synchronize] +permissions: + contents: read + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number }} + cancel-in-progress: true + jobs: check-label: + name: Check label runs-on: ubuntu-latest + + permissions: + contents: read # Required for reading PR 
labels + steps: - name: Require exactly one change type label - uses: actions/github-script@v6 + uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0 with: script: | const allChangeTypeLabels = new Set([ @@ -18,12 +30,13 @@ jobs: 'change type - documentation - member 📝', 'change type - major 🚨', 'change type - minor 🤏', + 'automation 🦾', ]); const prLabels = context.payload.pull_request.labels.map(label => label.name); const appliedChangeTypeLabels = prLabels.filter(prLabel => allChangeTypeLabels.has(prLabel)); if (appliedChangeTypeLabels.length !== 1) { const baseMessage = `The PR must have EXACTLY one of the following CHANGE TYPE labels: ${Array.from(allChangeTypeLabels).sort().join(', ')}. ` - const n = appliedChangeTypeLabels.length; + const n = appliedChangeTypeLabels.length; let contextualMessage; if (n === 0) { contextualMessage = 'It currently has no change type label. Please ➕ add one label. 🙏' diff --git a/.github/workflows/publish-docs-gh-pages.yaml b/.github/workflows/publish-docs-gh-pages.yaml deleted file mode 100644 index bde9b6f5a..000000000 --- a/.github/workflows/publish-docs-gh-pages.yaml +++ /dev/null @@ -1,25 +0,0 @@ ---- -name: Publish the `schema` folder to schema.overturemaps.org - -on: - push: - branches: [main] - - # Allow running from the actions tab - workflow_dispatch: - -permissions: - contents: write - -jobs: - publish: - if: github.repository == 'overturemaps/schema' - runs-on: ubuntu-latest - steps: - - name: Check out the schema repository - uses: actions/checkout@v3 - - - name: Deploy 🚀 - uses: JamesIves/github-pages-deploy-action@v4 - with: - folder: schema diff --git a/.github/workflows/publish-python-packages.yaml b/.github/workflows/publish-python-packages.yaml new file mode 100644 index 000000000..fd40150dc --- /dev/null +++ b/.github/workflows/publish-python-packages.yaml @@ -0,0 +1,111 @@ +name: Publish Python packages to PyPI + +on: + push: + branches: [main] + paths: + - '**/pyproject.toml' + - 
'packages/**/__about__.py' + workflow_dispatch: + inputs: + aws_iam_role_name: + description: The name of the IAM role to assume for accessing CodeArtifact + type: string + required: false + default: GithubActions_Schema_CodeArtifact_Publish + domain: + description: The CodeArtifact domain name + type: string + required: false + default: overture-pypi + repository: + description: The CodeArtifact repository name + type: string + required: false + default: overture + +permissions: + contents: read + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + check: + name: Check for changes + if: github.event.repository.full_name == github.repository + uses: ./.github/workflows/reusable-check-python-package-versions.yaml + permissions: + id-token: write # Required for OIDC in reusable workflow to check AWS CodeArtifact + with: + before_commit: ${{ github.event.before }} + after_commit: ${{ github.event.after }} + + publish: + name: Publish + needs: [check] + if: github.event.repository.full_name == github.repository && needs.check.outputs.num_changed_packages > 0 + runs-on: ubuntu-latest + permissions: + contents: read + id-token: write # Required for OIDC authentication to AWS + + strategy: + matrix: + include: ${{ fromJson(needs.check.outputs.changed_packages) }} + steps: + - name: Install uv + uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0 + with: + version: latest + + - name: Check out code + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + persist-credentials: false + + - name: Sync code to make packages visible to Python + run: uv sync --all-packages + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@ec61189d14ec14c8efccab744f656cffd0e33f37 # v6.1.0 + with: + aws-region: us-west-2 + role-to-assume: arn:aws:iam::505071440022:role/GithubActions_Schema_CodeArtifact_Publish + role-session-name: 
GitHubActions_${{github.job}}_${{github.run_id}} + + - name: Get CodeArtifact publish URL + id: get-code-artifact-params + run: | + token=$(./.github/workflows/scripts/code-artifact.sh token \ + 505071440022 us-west-2 overture-pypi) + echo "::add-mask::${token}" + echo "token=${token}" >> $GITHUB_OUTPUT + publish_url=$(./.github/workflows/scripts/code-artifact.sh publish-url \ + 505071440022 us-west-2 overture-pypi overture) + echo "publish_url=${publish_url}" >> $GITHUB_OUTPUT + + - name: Publish package ${{ matrix.package }} version ${{ matrix.after }} to PyPI + env: + CA_TOKEN: ${{ steps.get-code-artifact-params.outputs.token }} + CA_PUBLISH_URL: ${{ steps.get-code-artifact-params.outputs.publish_url }} + PACKAGE: ${{ matrix.package }} # zizmor: ignore[template-injection] + BEFORE: ${{ matrix.before }} # zizmor: ignore[template-injection] + AFTER: ${{ matrix.after }} # zizmor: ignore[template-injection] + run: | + printf 'Publishing package %s version %s to PyPI (previous version %s)...\n' "$PACKAGE" "$AFTER" "$BEFORE" + uv build --package "$PACKAGE" + wheel="dist/${PACKAGE//-/_}-${AFTER}-py3-none-any.whl" + if [ ! -f "$wheel" ]; then + echo " Wheel file [$wheel] not found. Aborting!" + exit 1 + fi + tarball="dist/${PACKAGE//-/_}-${AFTER}.tar.gz" + if [ ! -f "$tarball" ]; then + echo " Source tarball file [$tarball] not found. Aborting!" + exit 1 + fi + uv publish "$wheel" "$tarball" \ + -t "${CA_TOKEN}" \ + --publish-url "${CA_PUBLISH_URL}" diff --git a/.github/workflows/reusable-check-python-package-versions.yaml b/.github/workflows/reusable-check-python-package-versions.yaml index 87b617cb9..379ab5dc3 100644 --- a/.github/workflows/reusable-check-python-package-versions.yaml +++ b/.github/workflows/reusable-check-python-package-versions.yaml @@ -15,32 +15,76 @@ on: PR or the latest commit in a push. 
type: string required: true + aws_account_id: + description: The AWS account ID that owns the CodeArtifact domain + type: string + required: false + default: '505071440022' + aws_region: + description: The AWS region where the CodeArtifact repository is hosted + type: string + required: false + default: us-west-2 + aws_iam_role_name: + description: The name of the IAM role to assume for accessing CodeArtifact + type: string + required: false + default: GithubActions_Schema_CodeArtifact_ReadOnly + domain: + description: The CodeArtifact domain name + type: string + required: false + default: overture-pypi + repository: + description: The CodeArtifact repository name + type: string + required: false + default: overture + outputs: + changed_packages: + description: >- + A JSON array of packages with changed versions, including package name, old version, and + new version, in the format: `[ {"package": "p1", "before": "v1", "after": "v2"}, ... ]` + value: ${{ jobs.check-python-package-versions.outputs.changed_packages }} + num_changed_packages: + description: The number of packages with changed versions + value: ${{ jobs.check-python-package-versions.outputs.num_changed_packages }} + + +permissions: + contents: read -jobs: - get-index-url: - uses: ./.github/workflows/reusable-get-code-artifact-index-url.yaml +jobs: check-python-package-versions: - needs: get-index-url + name: Check Python package versions runs-on: ubuntu-latest + permissions: + contents: read + id-token: write # Required for OIDC authentication to AWS to check CodeArtifact + + outputs: + changed_packages: ${{ steps.save-changes.outputs.changed_packages }} + num_changed_packages: ${{ steps.save-changes.outputs.num_changed_packages }} steps: - name: Install jq run: sudo apt-get update && sudo apt-get install -y jq - name: Install uv - uses: astral-sh/setup-uv@v4 + uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0 with: - version: "latest" - - - name: Set up Python - uses: 
actions/setup-python@v5 - with: - python-version: "3.10" + version: latest - name: Check out code before change - uses: actions/checkout@v4 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: ref: ${{ inputs.before_commit }} + persist-credentials: false + + - name: Set up Python + uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0 + with: + python-version-file: .python-version - name: Sync code before change to make packages visible to Python run: uv sync --all-packages @@ -49,9 +93,10 @@ jobs: run: uv run python ./.github/workflows/scripts/package-versions.py collect > /tmp/package-versions-before.json - name: Check out code after change - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: ref: ${{ inputs.after_commit }} + persist-credentials: false - name: Sync code after change to make packages visible to Python run: uv sync --all-packages --refresh @@ -69,13 +114,48 @@ jobs: - name: Print changed versions run: cat /tmp/package-version-diff.json + - name: Save changed versions as output + id: save-changes + run: | + echo 'changed_packages<<EOF' >> $GITHUB_OUTPUT + cat /tmp/package-version-diff.json >> $GITHUB_OUTPUT + echo EOF >> $GITHUB_OUTPUT + printf 'num_changed_packages=%s\n' "$(jq -c '. 
| length' /tmp/package-version-diff.json)" >> $GITHUB_OUTPUT + + - name: Configure AWS credentials + if: steps.save-changes.outputs.num_changed_packages > 0 + uses: aws-actions/configure-aws-credentials@ec61189d14ec14c8efccab744f656cffd0e33f37 # v6.1.0 + with: + aws-region: ${{ inputs.aws_region }} + role-to-assume: arn:aws:iam::${{ inputs.aws_account_id }}:role/${{ inputs.aws_iam_role_name }} + role-session-name: GitHubActions_${{github.job}}_${{github.run_id}} + + - name: Get CodeArtifact index URL + id: get-code-artifact-index-url + if: steps.save-changes.outputs.num_changed_packages > 0 + env: + AWS_ACCOUNT_ID: ${{ inputs.aws_account_id }} + AWS_REGION: ${{ inputs.aws_region }} + DOMAIN: ${{ inputs.domain }} + REPOSITORY: ${{ inputs.repository }} + run: | + index_url=$(./.github/workflows/scripts/code-artifact.sh index-url \ + "$AWS_ACCOUNT_ID" "$AWS_REGION" \ + "$DOMAIN" "$REPOSITORY") + echo "::add-mask::${index_url}" + echo "index_url=${index_url}" >> $GITHUB_OUTPUT + - name: Fail if any of the new versions already exist in the repo + if: steps.save-changes.outputs.num_changed_packages > 0 + env: + INDEX_URL: ${{ steps.get-code-artifact-index-url.outputs.index_url }} + # zizmor: ignore[template-injection] run: | jq -c '.[]' /tmp/package-version-diff.json | while read -r entry; do package=$(echo "$entry" | jq -r '.package') after=$(echo "$entry" | jq -r '.after') exit_code=0 - output=$(uv run pip download "${package}==${after}" --index-url "${{ needs.get-index-url.outputs.index_url }}simple/" --no-deps -d /tmp --quiet 2>&1) || exit_code=$? + output=$(uv run pip download "${package}==${after}" --index-url "${INDEX_URL}" --no-deps -d /tmp --quiet 2>&1) || exit_code=$? 
if [[ $exit_code -eq 0 || ( "${output,,}" != *"could not find a version"* && "${output,,}" != *"no matching distributions"* diff --git a/.github/workflows/reusable-get-code-artifact-index-url.yaml b/.github/workflows/reusable-get-code-artifact-index-url.yaml deleted file mode 100644 index 4721dd427..000000000 --- a/.github/workflows/reusable-get-code-artifact-index-url.yaml +++ /dev/null @@ -1,57 +0,0 @@ -name: "[REUSABLE] Get CodeArtifact Python package index URL" - -on: - workflow_call: - inputs: - account_id: - description: The AWS account ID that owns the CodeArtifact domain - type: string - required: false - default: 505071440022 - aws_region: - description: The AWS region where the CodeArtifact repository is hosted - type: string - required: false - default: us-west-2 - role_name: - description: The name of the IAM role to assume for accessing CodeArtifact - type: string - required: false - default: GithubActions_Schema_CodeArtifact_ReadOnly - domain: - description: The CodeArtifact domain name - type: string - required: false - default: overture-pypi - repository: - description: The CodeArtifact repository name - type: string - required: false - default: overture - outputs: - index_url: - description: The CodeArtifact Python index URL - value: ${{ jobs.get-code-artifact-index-url.outputs.index_url }} - -jobs: - get-code-artifact-index-url: - runs-on: ubuntu-latest - outputs: - index_url: ${{ steps.get-code-artifact-index-url.outputs.index_url }} - steps: - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v4 - with: - aws-region: ${{ inputs.aws_region }} - role-to-assume: arn:aws:iam::${{ inputs.account_id }}:role/${{ inputs.role_name }} - role-session-name: GitHubActions_${{github.job}}_${{github.run_id}} - - - name: Get CodeArtifact authorization token - id: get-code-artifact-auth-token - run: | - AUTH_TOKEN=$(aws codeartifact get-authorization-token \ - --domain ${{ inputs.domain }} \ - --domain-owner ${{ inputs.account_id }} 
\ - --query authorizationToken \ - --output text) - echo "index_url=https://aws:${AUTH_TOKEN}@$${{ inputs.domain }}-${{ inputs.account_id }}.d.codeartifact.${{ inputs.aws_region }}.amazonaws.com/" >> $GITHUB_OUTPUT diff --git a/.github/workflows/schema-pr-preview-cleanup.yml b/.github/workflows/schema-pr-preview-cleanup.yml new file mode 100644 index 000000000..9f7a9f66e --- /dev/null +++ b/.github/workflows/schema-pr-preview-cleanup.yml @@ -0,0 +1,48 @@ +--- +name: Schema PR Preview Cleanup +run-name: Cleanup schema docs preview for PR #${{ github.event.number }} + +on: + pull_request: + types: [closed] + +permissions: + contents: read + +concurrency: + group: schema-pr-preview-${{ github.event.number }} + cancel-in-progress: false + +jobs: + cleanup: + name: Cleanup + if: github.event.pull_request.head.repo.full_name == github.repository + runs-on: ubuntu-latest + permissions: + id-token: write # Required for OIDC authentication to AWS + env: + AWS_ROLE_ARN: arn:aws:iam::763944545891:role/pages-staging-oidc-overturemaps + AWS_REGION: us-west-2 + + steps: + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@ec61189d14ec14c8efccab744f656cffd0e33f37 # v6.1.0 + with: + role-to-assume: ${{ env.AWS_ROLE_ARN }} + aws-region: ${{ env.AWS_REGION }} + + # No flags to ignore "not found" errors, so we use "|| true" to prevent failure if the path doesn't exist + - name: Delete from S3 + run: | + aws s3 rm --recursive --quiet \ + s3://overture-managed-staging-usw2/gh-pages/schema/pr/$PR_NUMBER/ || true + env: + PR_NUMBER: ${{ github.event.number }} # zizmor: ignore[template-injection] + + - name: Bust the cache + run: | + aws cloudfront create-invalidation \ + --distribution-id E1KP2IN0H2RGGT \ + --paths "/schema/pr/$PR_NUMBER/*" || true + env: + PR_NUMBER: ${{ github.event.number }} # zizmor: ignore[template-injection] diff --git a/.github/workflows/schema-pr-preview.yml b/.github/workflows/schema-pr-preview.yml new file mode 100644 index 
000000000..ba4d5145f --- /dev/null +++ b/.github/workflows/schema-pr-preview.yml @@ -0,0 +1,145 @@ +--- +name: Schema PR Preview +run-name: Deploy schema docs preview for PR #${{ github.event.number }} + +on: + pull_request: + types: [opened, synchronize, reopened] + +concurrency: + group: schema-pr-preview-${{ github.event.number }} + cancel-in-progress: true + +permissions: + contents: read + +jobs: + check-fork: + name: Check fork + runs-on: ubuntu-slim + steps: + - name: Staging deploy is not supported for fork PRs + if: github.event.pull_request.head.repo.full_name != github.repository + run: | + echo "::warning title=Staging deploy not supported for fork PRs::Staging previews are only available for PRs from branches within this repository, not forks. Please open your PR from a branch in OvertureMaps/schema instead." + build: + name: Build + if: github.event.pull_request.head.repo.full_name == github.repository + runs-on: ubuntu-latest + needs: check-fork + env: + DOCS_PATH: _docs + STAGING_URL: https://staging.overturemaps.org + SCHEMA_PREVIEW: 'true' + steps: + - name: Check out schema repo + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + persist-credentials: false + + - name: Check out docs repo + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + repository: OvertureMaps/docs + path: ${{ env.DOCS_PATH }} + persist-credentials: false + + - name: Set up Node.js + uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0 + with: + node-version-file: ${{ env.DOCS_PATH }}/package.json + + # Configure npm for faster CI context installs + - uses: lowlydba/sustainable-npm@31d51025884f424f58f22e4e6578178bb4e79632 # v3.0.0 + + - name: Install NPM dependencies + working-directory: ${{ env.DOCS_PATH }} + run: npm ci --omit=dev + + - name: Generate schema markdown docs + uses: OvertureMaps/workflows/.github/actions/generate-schema-docs@main # zizmor: 
ignore[ref-version-mismatch,unpinned-uses] Internal action, always use main + with: + output-dir: ${{ github.workspace }}/${{ env.DOCS_PATH }}/docs/schema/reference + schema-path: . + skip-checkout: 'true' # Repo is already checked out at root, and action defaults to looking there + + - name: Build Docusaurus site + working-directory: ${{ env.DOCS_PATH }} + run: npm run build + env: + DOCUSAURUS_URL: ${{ env.STAGING_URL }}/ + DOCUSAURUS_BASE_URL: /schema/pr/${{ github.event.number }}/ + SCHEMA_PREVIEW: ${{ env.SCHEMA_PREVIEW }} + + - name: Upload build artifact + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: build-artifact + path: ${{ env.DOCS_PATH }}/build + + deploy: + name: Deploy + runs-on: ubuntu-slim + needs: [check-fork, build] + permissions: + id-token: write # Required for OIDC authentication to AWS + pull-requests: write # Required for commenting on PRs + env: + AWS_ROLE_ARN: arn:aws:iam::763944545891:role/pages-staging-oidc-overturemaps + AWS_REGION: us-west-2 + STAGING_URL: https://staging.overturemaps.org + environment: + name: staging + url: https://staging.overturemaps.org/schema/pr/${{ github.event.number }}/schema/index.html + + steps: + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@ec61189d14ec14c8efccab744f656cffd0e33f37 # v6.1.0 + with: + role-to-assume: ${{ env.AWS_ROLE_ARN }} + aws-region: ${{ env.AWS_REGION }} + + - name: Download build artifact + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 + with: + name: build-artifact + path: build + + - name: Copy to S3 + run: | + aws s3 sync --delete --quiet build \ + s3://overture-managed-staging-usw2/gh-pages/schema/pr/$PR_NUMBER/ + env: + PR_NUMBER: ${{ github.event.number }} # zizmor: ignore[template-injection] + + - name: Bust the cache + run: | + aws cloudfront create-invalidation \ + --distribution-id E1KP2IN0H2RGGT \ + --paths "/schema/pr/$PR_NUMBER/*" + env: + PR_NUMBER: ${{ 
github.event.number }} # zizmor: ignore[template-injection] + + - name: Gather metadata for PR comment + id: deploy-metadata + run: | + echo "time=$(date -u +'%b %d, %Y %H:%M UTC')" >> $GITHUB_OUTPUT + echo "short-sha=$(echo "$PR_HEAD_SHA" | cut -c1-7)" >> $GITHUB_OUTPUT + env: + PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }} + + - name: Comment on PR + uses: marocchino/sticky-pull-request-comment@0ea0beb66eb9baf113663a64ec522f60e49231c0 # v3.0.4 + with: + message: | + ## 🗺️ Schema reference docs preview is live! + + | | | + |------------------|--------------------------------------------------------------------| + | 🌍 **Preview** | https://staging.overturemaps.org/schema/pr/${{ github.event.number }}/schema/index.html | + | 🕐 **Updated** | ${{ steps.deploy-metadata.outputs.time }} | + | 📝 **Commit** | [${{ steps.deploy-metadata.outputs.short-sha }}](${{ github.server_url }}/${{ github.repository }}/commit/${{ github.event.pull_request.head.sha }}) | + | 🔧 **env SCHEMA_PREVIEW** | `true` | + + > [!NOTE] + > ♻️ This preview updates automatically with each push to this PR. 
diff --git a/.github/workflows/scripts/code-artifact.sh b/.github/workflows/scripts/code-artifact.sh new file mode 100755 index 000000000..488bdc5b0 --- /dev/null +++ b/.github/workflows/scripts/code-artifact.sh @@ -0,0 +1,60 @@ +#!/usr/bin/env bash + +set -euo pipefail + +readonly subcommand="$1" + +function token() { + local -r aws_account_id="$1" + local -r aws_region="$2" + local -r domain="$3" + + aws codeartifact get-authorization-token \ + --region "$aws_region" \ + --domain "$domain" \ + --domain-owner "$aws_account_id" \ + --query authorizationToken \ + --output text +} + +function repo_url() { + local -r token="$1" + local -r credentials="${token:+aws:$token@}" + local -r aws_account_id="$2" + local -r aws_region="$3" + local -r domain="$4" + local -r repository="$5" + local -r suffix="$6" + + printf "https://%s%s-%s.d.codeartifact.%s.amazonaws.com/pypi/%s%s\n" \ + "$credentials" "$domain" "$aws_account_id" "$aws_region" "$repository" "$suffix" +} + +case "$subcommand" in + token) + if [ $# -ne 4 ]; then + >&2 echo "Usage: $0 token " + exit 1 + fi + token "$2" "$3" "$4" + ;; + + index-url|publish-url) + if [ $# -ne 5 ]; then + >&2 echo "Usage: $0 $subcommand " + exit 1 + fi + + if [ "$subcommand" = "index-url" ]; then + repo_url "$(token "$2" "$3" "$4")" "$2" "$3" "$4" "$5" "/simple/" + else + repo_url "" "$2" "$3" "$4" "$5" "" + fi + ;; + + *) + >&2 echo "Unknown subcommand: ${subcommand:-}" + >&2 echo "Valid subcommands: token | index-url | publish-url" + exit 1 + ;; +esac diff --git a/.github/workflows/scripts/package-versions.py b/.github/workflows/scripts/package-versions.py index 4507d2672..63acba40d 100755 --- a/.github/workflows/scripts/package-versions.py +++ b/.github/workflows/scripts/package-versions.py @@ -3,6 +3,7 @@ from importlib import metadata from pathlib import Path import json +import re import sys @@ -35,6 +36,9 @@ def compare(before_file: str, after_file: str): Compare two JSON files containing package versions and print the 
packages that have a version number change as a JSON array. + The output JSON array is sorted in topological order by package name, so those changed packages + that do not depend on other changed packages appear first. + Form of the JSON array: [ {"package": "p1", "before": "v1", "after": "v2"}, ... ] @@ -48,7 +52,23 @@ def compare(before_file: str, after_file: str): before_dict = {item["package"]: item["version"] for item in before_array} after_dict = {item["package"]: item["version"] for item in after_array} - combined_keys = sorted(list(set(before_dict.keys()) | set(after_dict.keys()))) + def level(package: str) -> int: + """ + Return the level of a package for topological sorting. + + This is brittle and hard to keep in sync, so we should replace it with a version that + dynamically computes dependencies in the future. + """ + if package == "overture-schema-system": + return 0 + elif package in ["overture-schema-core"]: + return 1 + elif re.fullmatch(r'overture-schema-.*-theme', package) or package in ["overture-schema", "overture-schema-cli", "overture-schema-codegen", "overture-schema-annex"]: + return 2 + else: + raise ValueError(f"Unknown package for level computation: {package}") + + combined_keys = sorted(list(set(before_dict.keys()) | set(after_dict.keys())), key=level) changed_packages = [] for package in combined_keys: diff --git a/.github/workflows/test-schema.yaml b/.github/workflows/test-schema.yaml index d23d2b315..f90978da3 100644 --- a/.github/workflows/test-schema.yaml +++ b/.github/workflows/test-schema.yaml @@ -10,16 +10,32 @@ on: - 'examples/**' - 'counterexamples/**' +permissions: + contents: read + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + jobs: build: + name: Build runs-on: ubuntu-latest + permissions: + contents: read + steps: - - uses: actions/checkout@v3 - - name: Set up Go - uses: actions/setup-go@v5 - with: - go-version: 1.22 - - name: Install dependencies - run: go install 
github.com/santhosh-tekuri/jsonschema/cmd/jv@latest - - name: Validate - run: ./test.sh + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + persist-credentials: false + + - name: Set up Go + uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0 + with: + go-version: 1.22 + + - name: Install dependencies + run: go install github.com/santhosh-tekuri/jsonschema/cmd/jv@latest + + - name: Validate + run: ./test.sh diff --git a/CODEOWNERS b/CODEOWNERS new file mode 100644 index 000000000..0fe93f496 --- /dev/null +++ b/CODEOWNERS @@ -0,0 +1,3 @@ +# Devops + +/.github @lowlydba @overturemaps/omf-public-reviewers diff --git a/Makefile b/Makefile index dbc3b4340..367874a10 100644 --- a/Makefile +++ b/Makefile @@ -1,20 +1,23 @@ -.PHONY: default uv-sync check test-all test docformat doctest mypy reset-baseline-schemas +.PHONY: default uv-sync check test-all test test-only docformat doctest doctest-only mypy mypy-only lint-only reset-baseline-schemas default: test-all +install: uv-sync + uv-sync: @uv sync --all-packages 2> /dev/null -check: test doctest - @uv run ruff check -q packages/ - @$(MAKE) mypy - @uv run ruff format --check packages/ +check: uv-sync + @$(MAKE) -j test-only doctest-only lint-only mypy-only test-all: uv-sync @uv run pytest -W error packages/ test: uv-sync - @uv run pytest -W error packages/ -x + @uv run pytest -W error packages/ -x -q --tb=short + +test-only: + @uv run pytest -W error packages/ -x -q --tb=short coverage: uv-sync @uv run pytest packages/ --cov overture.schema --cov-report=term --cov-report=html && open htmlcov/index.html @@ -23,7 +26,9 @@ docformat: @find packages/*/src -name "*.py" -type f -not -name "__*" \ | xargs uv run pydocstyle --convention=numpy --add-ignore=D102,D105,D200,D205,D400 -doctest: uv-sync +doctest: uv-sync doctest-only + +doctest-only: @# $$ escapes $ for make - sed needs literal $ for end-of-line anchor @find packages/*/src -name "*.py" -type f \ | sed 
's|^packages/[^/]*/src/||' \ @@ -35,7 +40,9 @@ doctest: uv-sync | xargs uv run python -c 'import doctest, importlib, sys; sys.exit(any(doctest.testmod(importlib.import_module(m)).failed for m in sys.argv[1:]))' # mypy type checking with namespace package support -mypy: uv-sync +mypy: uv-sync mypy-only + +mypy-only: @# $$ escapes $ for make - sed needs literal $ for end-of-line anchor @find packages -maxdepth 1 -type d -name "overture-schema*" \ | sort \ @@ -43,7 +50,11 @@ mypy: uv-sync | tr - . \ | sed 's|^packages/|-p |' \ | xargs uv run mypy --no-error-summary - @uv run mypy --no-error-summary packages/*/tests/*.py + @for d in packages/*/tests; do find "$$d" -name "*.py" | sort | xargs uv run mypy --no-error-summary || exit 1; done + +lint-only: + @uv run ruff check -q packages/ + @uv run ruff format --check packages/ reset-baseline-schemas: @find . -name \*_baseline_schema.json -delete diff --git a/PYDANTIC_GUIDE.md b/PYDANTIC_GUIDE.md index 3f502c9e5..ceda6ec01 100644 --- a/PYDANTIC_GUIDE.md +++ b/PYDANTIC_GUIDE.md @@ -54,10 +54,8 @@ from overture.schema.system.string import ( NoWhitespaceString, StrippedString, ) -from overture.schema.core.types import ( - ConfidenceScore, - LanguageTag, -) +from overture.schema.core.types import ConfidenceScore +from overture.schema.system.string import LanguageTag # Numeric primitives (use these instead of int/float) from overture.schema.system.primitive import ( @@ -182,7 +180,7 @@ from typing import Literal from overture.schema.core import OvertureFeature from overture.schema.core.models import Stacked from overture.schema.core.names import Named -from overture.schema.system.primitives import float64 +from overture.schema.system.primitive import float64 class Building(OvertureFeature[Literal["buildings"], Literal["building"]], Named, Stacked): # Gets fields from Feature: id, theme, type, geometry, etc. @@ -506,18 +504,34 @@ class Building(OvertureFeature): Add documentation to describe what the enum and its values mean. 
In Python, you do this with **docstrings** - text enclosed in triple quotes `"""` that describes what something does: -TODO: DocumentedEnum +Use `DocumentedEnum` from `overture.schema.system.doc` when enum members need their own descriptions for code generation and documentation tooling. Each member takes a `(value, description)` tuple: ```python -class VehicleType(str, Enum): +from overture.schema.system.doc import DocumentedEnum + +class VehicleType(str, DocumentedEnum): """Types of vehicles for transportation.""" - CAR = "car" # Standard passenger vehicle - TRUCK = "truck" # Commercial freight vehicle - BICYCLE = "bicycle" # Human-powered two-wheeler - MOTORCYCLE = "motorcycle" # Motorized two-wheeler + CAR = ("car", "Standard passenger vehicle") + TRUCK = ("truck", "Commercial freight vehicle") + BICYCLE = ("bicycle", "Human-powered two-wheeler") + MOTORCYCLE = ("motorcycle", "Motorized two-wheeler") ``` +Members without descriptions use the plain value form -- documentation is optional per-member: + +```python +class ConnectionState(str, DocumentedEnum): + CONNECTED = "connected" + DISCONNECTED = "disconnected" + QUIESCING = ( + "quiescing", + "Gracefully shutting down, rejecting new requests but completing existing ones", + ) +``` + +Use `DocumentedEnum` over plain `str, Enum` when the enum members' semantics aren't obvious from their names and downstream tools (code generators, documentation renderers) need access to member-level descriptions. Use plain `str, Enum` for self-explanatory values. + #### Why str, Enum? Inheriting from `str, Enum` makes enum values work as both enums and strings, which is useful for JSON serialization and compatibility. 
@@ -560,7 +574,7 @@ class DivisionArea(OvertureFeature[Literal["divisions"], Literal["division_area" ] = None ``` -**Available relationship types (see [Relationship](packages/overture-schema-core/src/overture/schema/core/ref.py)):** +**Available relationship types (see [Relationship](packages/overture-schema-system/src/overture/schema/system/ref/ref.py)):** - **`BELONGS_TO`**: The referencing feature belongs to the referenced feature (division area belongs to division) - **`CONNECTS_TO`**: The referencing feature connects to the referenced feature (segment connects to connector) @@ -627,7 +641,7 @@ class Building(OvertureFeature[Literal["buildings"], Literal["building"]]): #### Best Practices -**1. Always Use Reference Annotations** +##### Always Use Reference Annotations Include `Reference` annotations for semantic clarity and documentation: @@ -643,7 +657,7 @@ division_id: Annotated[ division_id: Id ``` -**2. Choose the Right Pattern** +##### Choose the Right Pattern - **Simple relationships** → Direct references (foreign keys) - **Relationships with metadata** → Separate association features @@ -938,7 +952,7 @@ Organize code by scope and avoid circular imports: **Cross-theme shared**: `overture-schema-core` package -- Used by multiple themes (e.g., `LanguageTag`, `CountryCode`, `OvertureFeature`) +- Used by multiple themes (e.g., `OvertureFeature`, `Names`, `Sources`, `Scope`) **Theme-level shared**: Theme package root (e.g., `overture-schema-transportation-theme/src/overture/schema/transportation/`) @@ -1110,7 +1124,7 @@ JSON Schema containers become **mixin classes** in Pydantic that you inherit fro ```python models.py from typing import Annotated from pydantic import BaseModel, Field -from overture.schema.model_constraints import no_extra +from overture.schema.system.model_constraint import no_extra_fields from overture.schema.system.primitive import int8, float64 @no_extra_fields diff --git a/README.pydantic.md b/README.pydantic.md index 7bae5a7ff..0655c46d9 
100644 --- a/README.pydantic.md +++ b/README.pydantic.md @@ -88,9 +88,11 @@ This workspace contains the following packages: - **`overture-schema`** - Main entrypoint package that aggregates all types for convenient usage -- **`overture-schema-core`** - Base classes, geometry models, and common structures - shared across all themes -- **`overture-schema-system`** - Foundational system of primitive types and constraints +- **`overture-schema-core`** - Overture-specific models shared across themes: base + feature class, scoping framework, names, sources, and cartographic hints +- **`overture-schema-system`** - Portable primitive types, constraints, and a + GeoJSON-aware base model for building Pydantic schemas that serialize to + JSON, Parquet, and Spark ### Theme Packages diff --git a/docs/schema/concepts/by-theme/transportation/index.mdx b/docs/schema/concepts/by-theme/transportation/index.mdx index fd9686d7c..7c17c1595 100644 --- a/docs/schema/concepts/by-theme/transportation/index.mdx +++ b/docs/schema/concepts/by-theme/transportation/index.mdx @@ -60,8 +60,8 @@ properties: ### Destinations The destination property in the `segment` feature type supports routing use cases. It describes the transitions from one segment to another on the way to a specified location. In turn-by-turn routing applications, this is what is known as “follow signs for” — the human-readable directions and signposts along a road, highway, or interstate that get us from point A to point Z, by way of any number of paths in between. We designed the `destinations` property with a flexible schema that will allow us to capture and model navigation data from many different sources. -### Linear referencing -The `segment` feature type uses linear referencing to describe the properties of specific sections of a road along a road segment. 
To avoid splitting road segments at any and every property change, we added linear referencing, which defines how some properties apply to portions of a segment can change along a segment that is generally understood to be the same 'road'. Segment splits are then reserved for more significant intersections so that we don't have to version the entire road any time any piece of the road changes. Other than some expected challenges learning how Linear Referencing worked, we noticed that the main difficulty really arises is when people want to convert the transportation data into a routing graph. Many routing engines want the data to be split at every 'decision point' where each decision is what amounts to a connector between segments the routing engine would consider routing on (e.g. vehicle routing would eliminate sidewalks). However that decision of what segments would be considered for routing someone varies significantly by application, even within similar 'types' of routing, so we could not identify a common subset of splitting rules that would meet all or even most of the various use cases of the members, much less the community at large. +### [Linear referencing](/schema/concepts/by-theme/transportation/linear-referencing) +The `segment` feature type uses linear referencing to describe the properties of specific sections of a road along a road segment. To avoid splitting road segments at any and every property change, we added linear referencing, which defines how properties that apply to portions of a segment can vary along that segment while it is generally understood to be the same 'road'. Segment splits are then reserved for more significant intersections so that we don't have to version the entire road any time any piece of the road changes. Other than some expected challenges learning how linear referencing worked, we noticed that the main difficulty really arises when people want to convert the transportation data into a routing graph. 
Many routing engines want the data to be split at every 'decision point' where each decision is what amounts to a connector between segments the routing engine would consider routing on (e.g. vehicle routing would eliminate sidewalks). However that decision of what segments would be considered for routing varies significantly by application, even within similar 'types' of routing, so we could not identify a common subset of splitting rules that would meet all or even most of the various use cases of the members, much less the community at large. ### [Scoped and rule-based properties](/schema/concepts/scoping-rules) The schema allows the values of properties to be specified at the sub-feature level. For example: diff --git a/docs/schema/concepts/by-theme/transportation/linear-referencing.mdx b/docs/schema/concepts/by-theme/transportation/linear-referencing.mdx new file mode 100644 index 000000000..5ecc1a6e7 --- /dev/null +++ b/docs/schema/concepts/by-theme/transportation/linear-referencing.mdx @@ -0,0 +1,62 @@ +--- +title: Linear referencing +--- + +Linear referencing allows properties to apply to portions of a segment without splitting the geometry. This promotes shape stability and reduces versioning when only part of a road changes. + +## Linear reference values + +A linear reference is a **normalized position** from `0.0` (start of segment) to `1.0` (end of segment). + +## `at` vs `between` + +| Property | Purpose | Example | +|----------|---------|---------| +| `at` | Single point location | `at: 0.3` — 30% along segment | +| `between` | Range along segment | `between: [0.2, 0.7]` — 20% to 70% | + +When `between` is not provided (or is null), the attribute applies to the full segment. 
+ +## Calculation method + +Overture computes linear references using **WGS84 geodetic distance** in meters: + +``` +linear_ref = geodetic_distance_along_segment_from_start / total_geodetic_length +``` + +Both distances must be computed on the WGS84 ellipsoid—not planar distance on raw lon/lat coordinates. Other approaches exist (e.g., projected coordinates), but geodetic distance provides consistent accuracy globally. + +### Examples + +**Apache Sedona (SQL):** + +```sql +SELECT ST_LENGTHSPHEROID(ST_GEOMFROMWKB(geometry)) AS segment_length_m +FROM segments; +``` + +**pyproj (Python):** + +```python +from pyproj import Geod +from shapely import wkb + +geod = Geod(ellps="WGS84") +line_geometry = wkb.loads(geometry) # geometry is WKB bytes +segment_length = geod.geometry_length(line_geometry) # meters +``` + +See the [transportation-splitter](https://github.com/OvertureMaps/transportation-splitter) for a complete implementation. + +**Warning:** Functions like `ST_LINELOCATEPOINT` can produce incorrect results on geometries that cross over or near themselves in 2D (curved on-ramps, mountain switchbacks, cul-de-sacs). These functions may pick the wrong location when the line passes over or close to itself—even though the geometry is valid because crossings occur at different elevations or positions along the segment. Note that Overture [disallows self-intersecting segments](/schema/concepts/by-theme/transportation/shape-connectivity#loops) in its own data. + +## Why geodetic distance matters + +Using planar distance (treating lon/lat as Cartesian x/y) can produce incorrect linear references, especially at high latitudes or for long segments. For a 10 km east-west segment at 60°N latitude, planar calculations can underestimate length by ~50%. Some map projections (e.g., EPSG:3857) yield reasonable results for short, straight segments, but accuracy degrades with segment length and curvature. 
+ +If a consumer calculates linear references differently than Overture, attribution or connector positions may be misaligned—potentially causing visual discrepancies on rendered maps or routing failures. + +## Edge cases + +For very short segments (< 1m), floating-point precision may be limited. Treat `at ≈ 0.0` or `at ≈ 1.0` as equivalent to endpoints. When a connector doesn't fall exactly on the geometry, the linear reference corresponds to the closest point on the segment. diff --git a/packages/overture-schema-addresses-theme/pyproject.toml b/packages/overture-schema-addresses-theme/pyproject.toml index b8ab65afa..3a3ae3127 100644 --- a/packages/overture-schema-addresses-theme/pyproject.toml +++ b/packages/overture-schema-addresses-theme/pyproject.toml @@ -1,7 +1,10 @@ [project] +maintainers = [ + {name = "Overture Maps Schema Working Group"}, +] dependencies = [ "overture-schema-core", - "pydantic>=2.0", + "pydantic>=2.12.0", ] description = "Overture Maps addresses theme models and structures" dynamic = ["version"] @@ -10,6 +13,11 @@ name = "overture-schema-addresses-theme" readme = "README.md" requires-python = ">=3.10" +[project.urls] +Homepage = "https://overturemaps.org" +Source = "https://github.com/OvertureMaps/schema" +Issues = "https://github.com/OvertureMaps/schema/issues" + [tool.uv.sources] overture-schema-core = { workspace = true } @@ -30,3 +38,29 @@ testpaths = ["tests"] [project.entry-points."overture.models"] "overture:addresses:address" = "overture.schema.addresses:Address" + +[[examples.Address]] +id = "416ab01c-d836-4c4f-aedc-2f30941ce94d" +geometry = "POINT (-176.5637854 -43.9471955)" +country = "NZ" +street = "Tikitiki Hill Road" +number = "54" +version = 1 +theme = "addresses" +type = "address" + +[examples.Address.bbox] +xmin = -176.56381225585938 +xmax = -176.56378173828125 +ymin = -43.94719696044922 +ymax = -43.94718933105469 + +[[examples.Address.address_levels]] +value = "Chatham Islands" + +[[examples.Address.address_levels]] 
+value = "Chatham Island" + +[[examples.Address.sources]] +property = "" +dataset = "OpenAddresses/LINZ" diff --git a/packages/overture-schema-addresses-theme/src/overture/schema/addresses/__about__.py b/packages/overture-schema-addresses-theme/src/overture/schema/addresses/__about__.py index 3dc1f76bc..5e3bb11ca 100644 --- a/packages/overture-schema-addresses-theme/src/overture/schema/addresses/__about__.py +++ b/packages/overture-schema-addresses-theme/src/overture/schema/addresses/__about__.py @@ -1 +1 @@ -__version__ = "0.1.0" +__version__ = "0.1.1.dev1" diff --git a/packages/overture-schema-addresses-theme/tests/address_baseline_schema.json b/packages/overture-schema-addresses-theme/tests/address_baseline_schema.json index 46d07f176..1ae5a6ab6 100644 --- a/packages/overture-schema-addresses-theme/tests/address_baseline_schema.json +++ b/packages/overture-schema-addresses-theme/tests/address_baseline_schema.json @@ -7,7 +7,7 @@ "value": { "description": "String with no leading/trailing whitespace", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Value", "type": "string" } @@ -45,7 +45,7 @@ }, "license": { "description": "Source data license name.\n\nThis should be a valid SPDX license identifier when available.\n\nIf omitted, contact the data provider for more license information.", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "License", "type": "string" }, @@ -160,21 +160,21 @@ "number": { "description": "The house number.\n\nThis field does not necessarily contain an integer or even a number. 
Values such as\n\"74B\", \"189 1/2\", and \"208.5\", where the non-integer or non-number part is part of\nthe house number, not a unit number, are in common use.", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Number", "type": "string" }, "postal_city": { "description": "The postal authority designated city name, if applicable.\n\nIn some countries or regions, a mailing address may need to specify a different city\nname than the city that actually contains the address coordinates. This optional\nfield can be used to specify the alternate city name to use.\n\nFor example:\n\n- The postal city for the US address *716 East County Road, Winchester, Indiana*\n is Ridgeville.\n- The postal city for the Slovenian address *Tomaj 71, 6221 Tomaj, Slovenia* is\n Dutovlje.", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Postal City", "type": "string" }, "postcode": { "description": "The postal code.", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Postcode", "type": "string" }, @@ -191,7 +191,7 @@ "street": { "description": "The street name.\n\nThe street name can include a type (*e.g.*, \"Street\" or \"St\", \"Boulevard\" or \"Blvd\",\n*etc.*) and a directional (*e.g.*, \"NW\" or \"Northwest\", \"S\" or \"Sud\"). 
Both type and\ndirectional, if present, may be either a prefix or a suffix to the primary name.\nThey may either be fully spelled-out or abbreviated.", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Street", "type": "string" }, @@ -208,7 +208,7 @@ "unit": { "description": "The secondary address unit designator.\n\nIn the case where the primary street address is divided into secondary units, which\nmay be apartments, floors, or even buildings if the primary street address is a\ncampus, this field names the specific secondary unit being addressed.", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Unit", "type": "string" }, diff --git a/packages/overture-schema-annex/pyproject.toml b/packages/overture-schema-annex/pyproject.toml index b4db9c1b0..9ef990a8d 100644 --- a/packages/overture-schema-annex/pyproject.toml +++ b/packages/overture-schema-annex/pyproject.toml @@ -1,5 +1,8 @@ [project] -dependencies = ["overture-schema-core", "pydantic>=2.0"] +maintainers = [ + {name = "Overture Maps Schema Working Group"}, +] +dependencies = ["overture-schema-core", "pydantic>=2.12.0"] description = "Add your description here" dynamic = ["version"] license = "MIT" @@ -10,6 +13,11 @@ requires-python = ">=3.10" [tool.uv.sources] overture-schema-core = { workspace = true } +[project.urls] +Homepage = "https://overturemaps.org" +Source = "https://github.com/OvertureMaps/schema" +Issues = "https://github.com/OvertureMaps/schema/issues" + [build-system] build-backend = "hatchling.build" requires = ["hatchling"] diff --git a/packages/overture-schema-annex/src/overture/schema/__about__.py b/packages/overture-schema-annex/src/overture/schema/__about__.py index 3dc1f76bc..5e3bb11ca 100644 --- a/packages/overture-schema-annex/src/overture/schema/__about__.py +++ b/packages/overture-schema-annex/src/overture/schema/__about__.py @@ -1 +1 @@ -__version__ = "0.1.0" +__version__ = "0.1.1.dev1" diff --git 
a/packages/overture-schema-base-theme/pyproject.toml b/packages/overture-schema-base-theme/pyproject.toml index d71b20f82..fdf146adf 100644 --- a/packages/overture-schema-base-theme/pyproject.toml +++ b/packages/overture-schema-base-theme/pyproject.toml @@ -1,7 +1,11 @@ [project] +maintainers = [ + {name = "Overture Maps Schema Working Group"}, +] dependencies = [ "overture-schema-core", - "pydantic>=2.0", + "overture-schema-system", + "pydantic>=2.12.0", ] description = "Overture Maps base theme shared structures and models (bathymetry, infrastructure, land, land_cover, land_use, water)" dynamic = ["version"] @@ -10,8 +14,14 @@ name = "overture-schema-base-theme" readme = "README.md" requires-python = ">=3.10" +[project.urls] +Homepage = "https://overturemaps.org" +Source = "https://github.com/OvertureMaps/schema" +Issues = "https://github.com/OvertureMaps/schema/issues" + [tool.uv.sources] overture-schema-core = { workspace = true } +overture-schema-system = { workspace = true } [build-system] @@ -31,3 +41,171 @@ packages = ["src/overture"] "overture:base:land_cover" = "overture.schema.base:LandCover" "overture:base:land_use" = "overture.schema.base:LandUse" "overture:base:water" = "overture.schema.base:Water" + +[[examples.Bathymetry]] +id = "5d40bd6c-db14-5492-b29f-5e25a59032bc" +geometry = "MULTIPOLYGON (((-170.71296928 -76.744313428, -170.719841483 -76.757076376, -170.731061124 -76.761566192, -170.775652756 -76.76338726, -170.853616381 -76.76253958, -170.918562293 -76.755380155, -170.970490492 -76.741908984, -170.998699301 -76.729180777, -171.003188718 -76.717195533, -170.990421551 -76.703765214, -170.960397802 -76.68888982, -170.940748072 -76.674697941, -170.931472364 -76.661189576, -170.927114414 -76.637296658, -170.927674224 -76.603019188, -170.939335393 -76.574637428, -170.962097922 -76.552151379, -170.999015387 -76.535715361, -171.050087788 -76.525329373, -171.079133298 -76.50751024, -171.086151917 -76.482257963, -171.098653755 -76.462747286, 
-171.11663881 -76.448978211, -171.146691397 -76.437601179, -171.188811514 -76.428616191, -171.296181785 -76.4228609, -171.468802209 -76.420335306, -171.566055241 -76.41501101, -171.587940879 -76.406888013, -171.59004284 -76.387987744, -171.572361122 -76.358310204, -171.549343725 -76.334488281, -171.520990649 -76.316521976, -171.453759127 -76.301763636, -171.347649159 -76.290213262, -171.30597166 -76.267707269, -171.328726628 -76.234245658, -171.36676019 -76.195627518, -171.420072345 -76.151852851, -171.444766298 -76.12494912, -171.44084205 -76.114916326, -171.378107286 -76.099627787, -171.256562007 -76.079083503, -171.228218647 -76.058825682, -171.293077208 -76.038854322, -171.421365419 -76.023534207, -171.613083278 -76.012865337, -171.76411833 -75.99938969, -171.874470572 -75.983107266, -172.121928361 -75.958403596, -172.506491695 -75.925278679, -172.744527804 -75.899736153, -172.836036689 -75.88177602, -172.904681746 -75.862406785, -172.950462974 -75.841628448, -173.000855857 -75.830396498, -173.055860393 -75.828710933, -173.177561398 -75.810743709, -173.365958872 -75.776494827, -173.493573084 -75.759370386, -173.560404033 -75.759370386, -173.620925776 -75.77158365, -173.675138312 -75.796010178, -173.733786206 -75.808642966, -173.796869456 -75.809482015, -173.847216433 -75.805553449, -173.884827135 -75.79685727, -173.90475244 -75.789177124, -173.906992347 -75.782513013, -173.881736947 -75.76894365, -173.828986239 -75.748469035, -173.797974615 -75.732298475, -173.788702075 -75.72043197, -173.82491541 -75.701013882, -173.90661462 -75.674044211, -173.977087913 -75.656066882, -174.03633529 -75.647081894, -174.150190099 -75.643010485, -174.31865234 -75.643852655, -174.444433211 -75.652836726, -174.527532713 -75.669962696, -174.581709229 -75.687086831, -174.606962758 -75.704209131, -174.631095834 -75.708279163, -174.654108458 -75.699296928, -174.688637451 -75.699296928, -174.734682816 -75.708279163, -174.797846917 -75.708699866, -174.878129754 -75.700559037, 
-174.939903816 -75.70870181, -174.9831691 -75.733128185, -175.025841122 -75.746602837, -175.06791988 -75.749125768, -175.09922327 -75.755318987, -175.119751293 -75.765182495, -175.127900229 -75.775197415, -175.123670077 -75.785363749, -175.111718372 -75.791289392, -175.092045112 -75.792974345, -175.049907399 -75.780622976, -174.985305232 -75.754235285, -174.935355308 -75.74552996, -174.900057628 -75.754507001, -174.886060973 -75.766815613, -174.893365345 -75.782455795, -174.907537393 -75.791536245, -174.928577117 -75.794056963, -174.971105378 -75.818213107, -175.035122174 -75.864004677, -175.060941949 -75.892403254, -175.048564703 -75.903408839, -175.020469049 -75.909193043, -174.976654988 -75.909755867, -174.944760829 -75.90482541, -174.924786572 -75.894401673, -174.92111336 -75.881479168, -174.933741192 -75.866057897, -174.900484967 -75.857513625, -174.821344686 -75.855846351, -174.752433709 -75.839289534, -174.693752038 -75.807843172, -174.652894268 -75.780747792, -174.629860399 -75.758003392, -174.571227588 -75.745793709, -174.476995837 -75.744118743, -174.398722205 -75.751841803, -174.336406693 -75.768962888, -174.300477946 -75.783262828, -174.290935964 -75.794741623, -174.28812912 -75.812412878, -174.292057414 -75.836276591, -174.289237223 -75.852155302, -174.279668547 -75.860049012, -174.205113931 -75.879998026, -174.065573375 -75.912002343, -173.957779122 -75.924071248, -173.881731171 -75.916204739, -173.846521251 -75.926706189, -173.852149361 -75.955575598, -173.845408416 -75.979439305, -173.826298414 -75.99829731, -173.76424232 -76.018956172, -173.659240133 -76.041415889, -173.560434089 -76.057698465, -173.467824188 -76.067803901, -173.404678836 -76.077625909, -173.370998032 -76.087164489, -173.332530272 -76.106814524, -173.289275555 -76.136576014, -173.231864101 -76.154545405, -173.160295911 -76.1607227, -173.093917454 -76.17278471, -173.032728732 -76.190731436, -173.009710709 -76.205560908, -173.024863387 -76.217273124, -173.048718935 -76.225374126, 
-173.081277354 -76.229863912, -173.219658797 -76.237442552, -173.463863265 -76.248110046, -173.60352174 -76.25793895, -173.638634223 -76.266929265, -173.658723482 -76.274676093, -173.663789516 -76.281179435, -173.661403366 -76.289363255, -173.651565032 -76.299227554, -173.627282775 -76.313843189, -173.588556596 -76.33321016, -173.575369172 -76.355231445, -173.587720504 -76.379907046, -173.573965869 -76.402499893, -173.53410527 -76.423009985, -173.518376226 -76.437156259, -173.526778738 -76.444938715, -173.559015515 -76.446303683, -173.615086557 -76.441251162, -173.686785609 -76.421600788, -173.774112673 -76.387352563, -173.854573513 -76.372333877, -173.928168128 -76.37654473, -173.968906731 -76.383732772, -173.97678932 -76.393898005, -173.979325549 -76.410884215, -173.976515417 -76.434691403, -174.000646474 -76.454452818, -174.051718722 -76.470168462, -174.08231827 -76.482963711, -174.092445119 -76.492838563, -174.075053216 -76.514344245, -174.030142562 -76.547480757, -174.016669929 -76.575274601, -174.034635317 -76.597725777, -174.037021169 -76.62030279, -174.023827484 -76.64300564, -174.034634583 -76.661942018, -174.069442464 -76.677111923, -174.086843964 -76.690616859, -174.086839082 -76.702456825, -174.080513222 -76.712456309, -174.067866385 -76.72061531, -174.036259441 -76.725116584, -173.98569239 -76.725960131, -173.93723318 -76.720486558, -173.89088181 -76.708695864, -173.780274695 -76.695221211, -173.605411835 -76.6800626, -173.487930602 -76.662096294, -173.427830996 -76.641322294, -173.370307559 -76.630935294, -173.315360292 -76.630935294, -173.249406002 -76.637251344, -173.17244469 -76.649883444, -173.110795196 -76.653532162, -173.06445752 -76.648197497, -173.029349452 -76.637355272, -173.005470993 -76.621005486, -173.01753216 -76.605236858, -173.065532955 -76.590049388, -173.096548505 -76.576599032, -173.11057881 -76.564885791, -173.108053605 -76.552301955, -173.08897289 -76.538847523, -173.051362225 -76.527628807, -172.99522161 -76.518645807, 
-172.891534181 -76.516119525, -172.740299938 -76.52004996, -172.648684331 -76.524540794, -172.61668736 -76.529592027, -172.584268588 -76.541098757, -172.551428016 -76.559060982, -172.533042741 -76.576141146, -172.529112765 -76.592339249, -172.540195073 -76.604524646, -172.566289666 -76.612697339, -172.576243291 -76.621303431, -172.570055947 -76.630342924, -172.555183534 -76.636123529, -172.531626051 -76.638645245, -172.517040304 -76.643518276, -172.511426292 -76.650742621, -172.551848294 -76.672312544, -172.63830631 -76.708228042, -172.701431121 -76.728711408, -172.741222726 -76.733762641, -172.81460886 -76.72534004, -172.921589524 -76.703443605, -173.006960733 -76.697273314, -173.070722487 -76.706829166, -173.101615682 -76.719791531, -173.099640316 -76.736160408, -173.033958817 -76.759064999, -172.904571183 -76.788505304, -172.847033841 -76.810916113, -172.861346791 -76.826297424, -172.924787296 -76.856444925, -173.037355356 -76.901358615, -173.149640378 -76.935043659, -173.26164236 -76.957500057, -173.354942309 -76.968728255, -173.429540223 -76.968728255, -173.487771718 -76.964657535, -173.529636796 -76.956516094, -173.572768938 -76.955559014, -173.617168145 -76.961786296, -173.614655836 -76.97446809, -173.565232013 -76.993604396, -173.461502424 -77.006682128, -173.303467069 -77.013701287, -173.163373388 -77.02787859, -173.041221382 -77.049214037, -172.918094542 -77.059179951, -172.793992869 -77.057776334, -172.720418717 -77.044861043, -172.697372088 -77.020434079, -172.675885915 -77.003730799, -172.655960197 -76.994751205, -172.60882792 -76.987594764, -172.534489083 -76.982261476, -172.480072837 -76.983094424, -172.445579184 -76.990093609, -172.428332542 -76.998610734, -172.428332911 -77.008645799, -172.435068344 -77.018150822, -172.448538839 -77.027125803, -172.490777829 -77.039613708, -172.561785312 -77.055614535, -172.628175119 -77.080598263, -172.68994725 -77.114564892, -172.751818039 -77.133793765, -172.813787485 -77.138284883, -172.900229764 -77.131828165, 
-173.011144875 -77.114423613, -173.119679588 -77.128474884, -173.2258339 -77.17398198, -173.273849553 -77.202664633, -173.263726547 -77.214522842, -173.165895559 -77.239681117, -172.980356589 -77.278139457, -172.880291531 -77.312658914, -172.865700386 -77.343239487, -172.867667457 -77.371126102, -172.886192744 -77.39631876, -172.999732531 -77.429966955, -173.208286817 -77.472070689, -173.335454668 -77.509278677, -173.381236082 -77.541590921, -173.403703936 -77.570407724, -173.40285823 -77.595729086, -173.378288408 -77.634921, -173.329994472 -77.687983467, -173.241287742 -77.735563094, -173.112168219 -77.777659882, -173.054064387 -77.81089869, -173.066976248 -77.835279519, -173.063736051 -77.854657976, -173.044343797 -77.869034061, -172.890349983 -77.896435115, -172.60175461 -77.936861139, -172.376181212 -77.961986812, -172.213629791 -77.971812135, -172.023427102 -77.967320559, -171.805573145 -77.948512083, -171.581263004 -77.918894833, -171.350496677 -77.87846881, -171.217147208 -77.851799157, -171.181214596 -77.838885875, -171.160572341 -77.826074082, -171.155220441 -77.813363779, -171.178789134 -77.790158543, -171.231278422 -77.756458375, -171.27338337 -77.70988804, -171.305103978 -77.65044754, -171.293875473 -77.602346602, -171.239697854 -77.565585227, -171.168401509 -77.532887375, -171.079986438 -77.504253044, -171.028614514 -77.483042244, -171.014285737 -77.469254974, -171.016677114 -77.456576914, -171.035788644 -77.445008064, -171.086879845 -77.431646501, -171.169950715 -77.416492226, -171.216537864 -77.403175691, -171.226641293 -77.391696895, -171.228607057 -77.378968685, -171.222435157 -77.364991059, -171.168824693 -77.334840949, -171.067775664 -77.288518355, -171.000402018 -77.24121644, -170.966703754 -77.192935206, -170.894838531 -77.157002595, -170.784806349 -77.133418606, -170.725150821 -77.11627156, -170.715871945 -77.105561456, -170.710674146 -77.077210652, -170.709557424 -77.031219147, -170.697909144 -76.992502178, -170.675729304 -76.961059744, 
-170.654536164 -76.940848729, -170.634329723 -76.931869135, -170.581564681 -76.922044903, -170.496241038 -76.911376032, -170.429709562 -76.893409727, -170.381970254 -76.868145986, -170.285260999 -76.838950739, -170.139581798 -76.805823986, -170.061542334 -76.78431495, -170.051142608 -76.77442363, -170.076677284 -76.763148845, -170.138146365 -76.750490597, -170.192753568 -76.731526593, -170.240498896 -76.706256833, -170.315896371 -76.686462585, -170.418945993 -76.67214385, -170.498267121 -76.665405567, -170.553859754 -76.666247738, -170.609039198 -76.673409769, -170.663805452 -76.68689166, -170.695686968 -76.698414281, -170.704683743 -76.70797763, -170.710444514 -76.723277346, -170.71296928 -76.744313428), (-172.46185717 -77.485683162, -172.491725041 -77.49003391, -172.535448064 -77.490594163, -172.566986057 -77.488349711, -172.586339021 -77.483300552, -172.598540475 -77.476173053, -172.60359042 -77.466967216, -172.601627836 -77.458872071, -172.592652724 -77.451887618, -172.556765055 -77.448396429, -172.49396483 -77.448398503, -172.453726685 -77.452881992, -172.436050621 -77.461846897, -172.429868964 -77.468114837, -172.435181715 -77.47168581, -172.44584445 -77.477541919, -172.46185717 -77.485683162), (-172.812798475 -76.363628771, -172.855573928 -76.365453015, -172.885037626 -76.36040045, -172.90720433 -76.351027386, -172.92207404 -76.337333821, -172.9168827 -76.324750727, -172.89163031 -76.313278104, -172.862193885 -76.307261221, -172.828573425 -76.30670008, -172.792121028 -76.311189877, -172.752836694 -76.320730613, -172.732062811 -76.331770033, -172.729799379 -76.344308139, -172.756711267 -76.354927718, -172.812798475 -76.363628771), (-171.932998671 -76.183124002, -172.010021088 -76.180457336, -172.070931389 -76.166984091, -172.113033554 -76.150312062, -172.136327583 -76.130441248, -172.133522137 -76.111120124, -172.104617217 -76.092348689, -172.06028165 -76.080296327, -172.000515436 -76.074963039, -171.918725408 -76.076928027, -171.814911566 -76.086191292, 
-171.745182124 -76.097695899, -171.709537083 -76.111441849, -171.696346087 -76.126554541, -171.705609136 -76.143033974, -171.731004713 -76.156183802, -171.77253282 -76.166004024, -171.83986414 -76.174984091, -171.932998671 -76.183124002), (-173.16885937 -76.066345013, -173.199147981 -76.070696107, -173.23950163 -76.071257052, -173.269213382 -76.065813298, -173.288283234 -76.054364845, -173.2799961 -76.038973879, -173.244351978 -76.0196404, -173.207608446 -76.007588038, -173.169765504 -76.002816794, -173.139490241 -76.003094691, -173.116782658 -76.008421729, -173.104589039 -76.016938854, -173.102909386 -76.028646065, -173.111183172 -76.03940804, -173.129410398 -76.049224779, -173.148635798 -76.05820377, -173.16885937 -76.066345013)))" +version = 0 +depth = 500 +theme = "base" +type = "bathymetry" + +[examples.Bathymetry.bbox] +xmin = -175.12791442871094 +xmax = -170.05111694335938 +ymin = -77.9718246459961 +ymax = -75.64299774169922 + +[[examples.Bathymetry.sources]] +property = "" +dataset = "ETOPO/GLOBathy" +record_id = "2024-12-09T00:00:00.000Z" + +[examples.Bathymetry.cartography] +sort_key = 12 + +[[examples.Infrastructure]] +id = "e9e3d506-89c0-3473-8cee-5e5ac6596d6c" +geometry = "POINT (-179.9999994 -82.42408)" +version = 0 +subtype = "pedestrian" +class = "information" +wikidata = "Q800558" +theme = "base" +type = "infrastructure" + +[examples.Infrastructure.bbox] +xmin = -180.0 +xmax = -179.99998474121094 +ymin = -82.42408752441406 +ymax = -82.42407989501953 + +[[examples.Infrastructure.sources]] +property = "" +dataset = "OpenStreetMap" +record_id = "n7674174803@2" +update_time = "2023-04-07T17:37:48.000Z" + +[examples.Infrastructure.names] +primary = "1306 km to South Pole" + +[examples.Infrastructure.source_tags] +description = "1036 km to South Pole." 
+information = "route_marker" +note = "The road continue in west side of the map" +start_date = "2007" +tourism = "information" +wikipedia = "en:South Pole Traverse" + +[[examples.Land]] +id = "70fc3596-a987-3fea-820c-c016c0a2f0da" +geometry = "POINT (-178.7 -85.45)" +version = 0 +subtype = "physical" +class = "cliff" +wikidata = "Q5282342" +theme = "base" +type = "land" + +[examples.Land.bbox] +xmin = -178.7000274658203 +xmax = -178.6999969482422 +ymin = -85.45001220703125 +ymax = -85.44999694824219 + +[[examples.Land.sources]] +property = "" +dataset = "OpenStreetMap" +record_id = "n11693475112@1" +update_time = "2024-03-05T09:23:39.000Z" + +[examples.Land.names] +primary = "Dismal Buttress" + +[examples.Land.source_tags] +natural = "cliff" +"ref:linz:place_id" = "12318" +wikipedia = "en:Dismal Buttress" + +[[examples.LandCover]] +id = "c347312d-012b-5e73-8bd3-a10d04b2981d" +geometry = "POLYGON ((-179.99877531181616 65.95172539425603, -179.99740705536922 65.95265577758867, -179.99751722434937 65.9532545912543, -179.9974078443441 65.9541507615366, -179.9965398649702 65.95451215813897, -179.99644396804533 65.95493010632842, -179.99602533095998 65.95502533095993, -179.99468737767813 65.95677071067811, -179.9933586639601 65.9576086639598, -179.99313729490444 65.95812767174695, -179.99314866836227 65.95857649030111, -179.99370507548738 65.95907781410224, -179.99372539425596 65.95947468818369, -179.99395850935272 65.95967260714353, -179.99410866395988 65.96030800303998, -179.99534017576838 65.96101799736452, -179.99575621846904 65.96104928900519, -179.9959057775888 65.96150961146397, -179.9965950523775 65.96161426988128, -179.99663895270027 65.96216619349144, -179.99807649030126 65.96218466463768, -179.99819891654494 65.96189707483568, -179.99799883949768 65.96160842248709, -179.99825961146388 65.96142755541139, -179.99830761159433 65.9610635173197, -179.99936104612706 65.9609995273612, -179.9993797906372 65.96051410937864, -179.99964133604004 65.96039133604008, 
-179.9997 65.96016912258357, -179.99936104729989 65.95958380650865, -179.99900447103303 65.95954329910117, -179.9987608894112 65.95924038853603, -179.99806463264497 65.95902716440592, -179.99798856507215 65.95838313921075, -179.99834294463088 65.95801088941111, -179.9983374593203 65.9575910941953, -179.99855761159426 65.95723018431977, -179.99921013502978 65.95698784186104, -179.99931463264488 65.95663950159415, -179.99990450886096 65.95637680202988, -179.99997427859432 65.9560635173197, -180.00019127274402 65.9558913550169, -180.00019127274405 65.95544197881631, -180.0000389948438 65.9553039610106, -179.9996246090062 65.9553159274193, -179.99935793918766 65.95327531026125, -179.9988434361254 65.95288259953995, -179.99885243016726 65.95244253241113, -179.9991661934914 65.95227771429981, -179.9991960389287 65.95187767174694, -179.99877531181616 65.95172539425603))" +version = 0 +subtype = "barren" +theme = "base" +type = "land_cover" + +[examples.LandCover.bbox] +xmin = -180.0001983642578 +xmax = -179.99313354492188 +ymin = 65.95172119140625 +ymax = 65.96218872070312 + +[[examples.LandCover.sources]] +property = "" +dataset = "ESA WorldCover" +update_time = "2024-11-07T00:00:00.000Z" + +[examples.LandCover.cartography] +min_zoom = 8 +max_zoom = 15 +sort_key = 3 + +[[examples.LandUse]] +id = "1e1f6095-5bd2-3fdb-a422-41351b848e9d" +geometry = "POLYGON ((-176.5623454 -43.9567812, -176.5627644 -43.9561272, -176.5626898 -43.9557432, -176.5624297 -43.9553592, -176.562679 -43.9551603, -176.5629058 -43.9552064, -176.5631441 -43.9551769, -176.5632428 -43.9550676, -176.5633066 -43.9548702, -176.5634402 -43.9548071, -176.5639052 -43.9546682, -176.5642479 -43.9544118, -176.5647302 -43.9542142, -176.5651547 -43.954277, -176.5658293 -43.9545243, -176.5659454 -43.9543521, -176.566934 -43.9547987, -176.5669179 -43.955018, -176.5682465 -43.9553205, -176.5671004 -43.9579593, -176.5662034 -43.9600044, -176.5655366 -43.9597247, -176.5646109 -43.9595326, -176.564467 -43.9592563, 
-176.5639885 -43.9589226, -176.5637013 -43.9586925, -176.563223 -43.9586237, -176.5623454 -43.9567812))" +version = 0 +subtype = "golf" +class = "golf_course" +theme = "base" +type = "land_use" + +[examples.LandUse.bbox] +xmin = -176.56825256347656 +xmax = -176.56231689453125 +ymin = -43.96001052856445 +ymax = -43.95420837402344 + +[[examples.LandUse.sources]] +property = "" +dataset = "OpenStreetMap" +record_id = "w56117029@3" +update_time = "2010-04-24T22:35:13.000Z" + +[examples.LandUse.names] +primary = "Chatham Islands Golf Club" + +[examples.LandUse.source_tags] +"LINZ:source_version" = "V16" +attribution = "http://wiki.osm.org/wiki/Attribution#LINZ" +leisure = "golf_course" +source_ref = "http://www.linz.govt.nz/topography/topo-maps/" + +[[examples.Water]] +id = "6bbb5fe5-bf26-3efa-b120-0a7079b60840" +geometry = "POINT (-177.031799 -84.934793)" +version = 0 +subtype = "physical" +class = "cape" +wikidata = "Q33140589" +theme = "base" +type = "water" + +[examples.Water.bbox] +xmin = -177.03179931640625 +xmax = -177.0317840576172 +ymin = -84.93480682373047 +ymax = -84.9347915649414 + +[[examples.Water.sources]] +property = "" +dataset = "OpenStreetMap" +record_id = "n11109190647@2" +update_time = "2024-02-11T05:52:05.000Z" + +[examples.Water.names] +primary = "Thanksgiving Point" + +[examples.Water.source_tags] +natural = "cape" +"ref:linz:place_id" = "13433" diff --git a/packages/overture-schema-base-theme/src/overture/schema/base/__about__.py b/packages/overture-schema-base-theme/src/overture/schema/base/__about__.py index 3dc1f76bc..5e3bb11ca 100644 --- a/packages/overture-schema-base-theme/src/overture/schema/base/__about__.py +++ b/packages/overture-schema-base-theme/src/overture/schema/base/__about__.py @@ -1 +1 @@ -__version__ = "0.1.0" +__version__ = "0.1.1.dev1" diff --git a/packages/overture-schema-base-theme/src/overture/schema/base/__init__.py b/packages/overture-schema-base-theme/src/overture/schema/base/__init__.py index 186016dbf..8034c6484 
100644 --- a/packages/overture-schema-base-theme/src/overture/schema/base/__init__.py +++ b/packages/overture-schema-base-theme/src/overture/schema/base/__init__.py @@ -11,7 +11,6 @@ Depth, Elevation, Height, - SourcedFromOpenStreetMap, SourceTags, SurfaceMaterial, ) @@ -22,7 +21,6 @@ from .land_use import LandUse, LandUseClass, LandUseSubtype from .water import Water, WaterClass, WaterSubtype -# Only the theme's feature type classes should be available for `import *`. __all__ = [ "Bathymetry", "Depth", @@ -30,14 +28,15 @@ "Height", "Infrastructure", "InfrastructureClass", - "InfrastructureSubType", + "InfrastructureSubtype", "Land", "LandClass", "LandCover", "LandCoverSubtype", "LandSubtype", "LandUse", - "SourcedFromOpenStreetMap", + "LandUseClass", + "LandUseSubtype", "SourceTags", "SurfaceMaterial", "Water", diff --git a/packages/overture-schema-base-theme/tests/bathymetry_baseline_schema.json b/packages/overture-schema-base-theme/tests/bathymetry_baseline_schema.json index 41fa38d73..c7b14a349 100644 --- a/packages/overture-schema-base-theme/tests/bathymetry_baseline_schema.json +++ b/packages/overture-schema-base-theme/tests/bathymetry_baseline_schema.json @@ -27,8 +27,8 @@ }, "sort_key": { "description": "Integer indicating the recommended order in which to draw features.\n\nFeatures with a lower number should be drawn \"in front\" of features with a higher\nnumber.", - "maximum": 255, - "minimum": 0, + "maximum": 2147483647, + "minimum": -2147483648, "title": "Sort Key", "type": "integer" } @@ -66,7 +66,7 @@ }, "license": { "description": "Source data license name.\n\nThis should be a valid SPDX license identifier when available.\n\nIf omitted, contact the data provider for more license information.", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "License", "type": "string" }, @@ -214,7 +214,7 @@ "properties": { "cartography": { "$ref": "#/$defs/CartographicHints", - "title": "cartography" + "description": "Cartographic hints 
useful when including the feature in maps" }, "depth": { "description": "Depth below surface level of the feature in meters.", diff --git a/packages/overture-schema-base-theme/tests/infrastructure_baseline_schema.json b/packages/overture-schema-base-theme/tests/infrastructure_baseline_schema.json index 1deda233e..c53570380 100644 --- a/packages/overture-schema-base-theme/tests/infrastructure_baseline_schema.json +++ b/packages/overture-schema-base-theme/tests/infrastructure_baseline_schema.json @@ -229,7 +229,7 @@ "value": { "description": "The actual name value.", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Value", "type": "string" }, @@ -265,7 +265,7 @@ "patternProperties": { "^(?:(?:[A-Za-z]{2,3}(?:-[A-Za-z]{3}){0,3}?)|(?:[A-Za-z]{4,8}))(?:-[A-Za-z]{4})?(?:-[A-Za-z]{2}|[0-9]{3})?(?:-(?:[A-Za-z0-9]{5,8}|[0-9][A-Za-z0-9]{3}))*(?:-[A-WY-Za-wy-z0-9](?:-[A-Za-z0-9]{2,8})+)*$": { "description": "String with no leading/trailing whitespace", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "type": "string" } }, @@ -278,7 +278,7 @@ "primary": { "description": "The most commonly used name.", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Primary", "type": "string" }, @@ -375,7 +375,7 @@ }, "license": { "description": "Source data license name.\n\nThis should be a valid SPDX license identifier when available.\n\nIf omitted, contact the data provider for more license information.", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "License", "type": "string" }, @@ -624,13 +624,14 @@ "level": { "default": 0, "description": "Z-order of the feature where 0 is visual level", - "maximum": 32767, - "minimum": -32768, + "maximum": 2147483647, + "minimum": -2147483648, "title": "Level", "type": "integer" }, "names": { - "$ref": "#/$defs/Names" + "$ref": "#/$defs/Names", + "description": "All known names by which the feature is called" }, "source_tags": { 
"additionalProperties": true, diff --git a/packages/overture-schema-base-theme/tests/land_baseline_schema.json b/packages/overture-schema-base-theme/tests/land_baseline_schema.json index 35c904003..be1f6d780 100644 --- a/packages/overture-schema-base-theme/tests/land_baseline_schema.json +++ b/packages/overture-schema-base-theme/tests/land_baseline_schema.json @@ -102,7 +102,7 @@ "value": { "description": "The actual name value.", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Value", "type": "string" }, @@ -138,7 +138,7 @@ "patternProperties": { "^(?:(?:[A-Za-z]{2,3}(?:-[A-Za-z]{3}){0,3}?)|(?:[A-Za-z]{4,8}))(?:-[A-Za-z]{4})?(?:-[A-Za-z]{2}|[0-9]{3})?(?:-(?:[A-Za-z0-9]{5,8}|[0-9][A-Za-z0-9]{3}))*(?:-[A-WY-Za-wy-z0-9](?:-[A-Za-z0-9]{2,8})+)*$": { "description": "String with no leading/trailing whitespace", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "type": "string" } }, @@ -151,7 +151,7 @@ "primary": { "description": "The most commonly used name.", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Primary", "type": "string" }, @@ -248,7 +248,7 @@ }, "license": { "description": "Source data license name.\n\nThis should be a valid SPDX license identifier when available.\n\nIf omitted, contact the data provider for more license information.", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "License", "type": "string" }, @@ -499,13 +499,14 @@ "level": { "default": 0, "description": "Z-order of the feature where 0 is visual level", - "maximum": 32767, - "minimum": -32768, + "maximum": 2147483647, + "minimum": -2147483648, "title": "Level", "type": "integer" }, "names": { - "$ref": "#/$defs/Names" + "$ref": "#/$defs/Names", + "description": "All known names by which the feature is called" }, "source_tags": { "additionalProperties": true, diff --git a/packages/overture-schema-base-theme/tests/land_cover_baseline_schema.json 
b/packages/overture-schema-base-theme/tests/land_cover_baseline_schema.json index 84fb66df4..a104da44a 100644 --- a/packages/overture-schema-base-theme/tests/land_cover_baseline_schema.json +++ b/packages/overture-schema-base-theme/tests/land_cover_baseline_schema.json @@ -27,8 +27,8 @@ }, "sort_key": { "description": "Integer indicating the recommended order in which to draw features.\n\nFeatures with a lower number should be drawn \"in front\" of features with a higher\nnumber.", - "maximum": 255, - "minimum": 0, + "maximum": 2147483647, + "minimum": -2147483648, "title": "Sort Key", "type": "integer" } @@ -83,7 +83,7 @@ }, "license": { "description": "Source data license name.\n\nThis should be a valid SPDX license identifier when available.\n\nIf omitted, contact the data provider for more license information.", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "License", "type": "string" }, @@ -231,7 +231,7 @@ "properties": { "cartography": { "$ref": "#/$defs/CartographicHints", - "title": "cartography" + "description": "Cartographic hints useful when including the feature in maps" }, "sources": { "description": "Information about the source data used to assemble the feature.", diff --git a/packages/overture-schema-base-theme/tests/land_use_baseline_schema.json b/packages/overture-schema-base-theme/tests/land_use_baseline_schema.json index 161673bbb..c1ad6e97f 100644 --- a/packages/overture-schema-base-theme/tests/land_use_baseline_schema.json +++ b/packages/overture-schema-base-theme/tests/land_use_baseline_schema.json @@ -180,7 +180,7 @@ "value": { "description": "The actual name value.", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Value", "type": "string" }, @@ -216,7 +216,7 @@ "patternProperties": { 
"^(?:(?:[A-Za-z]{2,3}(?:-[A-Za-z]{3}){0,3}?)|(?:[A-Za-z]{4,8}))(?:-[A-Za-z]{4})?(?:-[A-Za-z]{2}|[0-9]{3})?(?:-(?:[A-Za-z0-9]{5,8}|[0-9][A-Za-z0-9]{3}))*(?:-[A-WY-Za-wy-z0-9](?:-[A-Za-z0-9]{2,8})+)*$": { "description": "String with no leading/trailing whitespace", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "type": "string" } }, @@ -229,7 +229,7 @@ "primary": { "description": "The most commonly used name.", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Primary", "type": "string" }, @@ -326,7 +326,7 @@ }, "license": { "description": "Source data license name.\n\nThis should be a valid SPDX license identifier when available.\n\nIf omitted, contact the data provider for more license information.", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "License", "type": "string" }, @@ -576,13 +576,14 @@ "level": { "default": 0, "description": "Z-order of the feature where 0 is visual level", - "maximum": 32767, - "minimum": -32768, + "maximum": 2147483647, + "minimum": -2147483648, "title": "Level", "type": "integer" }, "names": { - "$ref": "#/$defs/Names" + "$ref": "#/$defs/Names", + "description": "All known names by which the feature is called" }, "source_tags": { "additionalProperties": true, diff --git a/packages/overture-schema-base-theme/tests/water_baseline_schema.json b/packages/overture-schema-base-theme/tests/water_baseline_schema.json index 348e65aea..fd78c6fb9 100644 --- a/packages/overture-schema-base-theme/tests/water_baseline_schema.json +++ b/packages/overture-schema-base-theme/tests/water_baseline_schema.json @@ -33,7 +33,7 @@ "value": { "description": "The actual name value.", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Value", "type": "string" }, @@ -69,7 +69,7 @@ "patternProperties": { 
"^(?:(?:[A-Za-z]{2,3}(?:-[A-Za-z]{3}){0,3}?)|(?:[A-Za-z]{4,8}))(?:-[A-Za-z]{4})?(?:-[A-Za-z]{2}|[0-9]{3})?(?:-(?:[A-Za-z0-9]{5,8}|[0-9][A-Za-z0-9]{3}))*(?:-[A-WY-Za-wy-z0-9](?:-[A-Za-z0-9]{2,8})+)*$": { "description": "String with no leading/trailing whitespace", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "type": "string" } }, @@ -82,7 +82,7 @@ "primary": { "description": "The most commonly used name.", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Primary", "type": "string" }, @@ -179,7 +179,7 @@ }, "license": { "description": "Source data license name.\n\nThis should be a valid SPDX license identifier when available.\n\nIf omitted, contact the data provider for more license information.", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "License", "type": "string" }, @@ -463,13 +463,14 @@ "level": { "default": 0, "description": "Z-order of the feature where 0 is visual level", - "maximum": 32767, - "minimum": -32768, + "maximum": 2147483647, + "minimum": -2147483648, "title": "Level", "type": "integer" }, "names": { - "$ref": "#/$defs/Names" + "$ref": "#/$defs/Names", + "description": "All known names by which the feature is called" }, "source_tags": { "additionalProperties": true, diff --git a/packages/overture-schema-buildings-theme/pyproject.toml b/packages/overture-schema-buildings-theme/pyproject.toml index e9766fc61..58665a55b 100644 --- a/packages/overture-schema-buildings-theme/pyproject.toml +++ b/packages/overture-schema-buildings-theme/pyproject.toml @@ -1,7 +1,11 @@ [project] +maintainers = [ + {name = "Overture Maps Schema Working Group"}, +] dependencies = [ "overture-schema-core", - "pydantic>=2.0", + "overture-schema-system", + "pydantic>=2.12.0", ] description = "Overture Maps buildings theme shared structures, building types, and building part types" dynamic = ["version"] @@ -10,8 +14,14 @@ name = "overture-schema-buildings-theme" readme = "README.md" 
requires-python = ">=3.10" +[project.urls] +Homepage = "https://overturemaps.org" +Source = "https://github.com/OvertureMaps/schema" +Issues = "https://github.com/OvertureMaps/schema/issues" + [tool.uv.sources] overture-schema-core = { workspace = true } +overture-schema-system = { workspace = true } [build-system] @@ -27,3 +37,46 @@ packages = ["src/overture"] [project.entry-points."overture.models"] "overture:buildings:building" = "overture.schema.buildings:Building" "overture:buildings:building_part" = "overture.schema.buildings:BuildingPart" + +[[examples.Building]] +id = "148f35b1-7bc1-4180-9280-10d39b13883b" +geometry = "POLYGON ((-176.6435004 -43.9938042, -176.6435738 -43.9937107, -176.6437726 -43.9937913, -176.6436992 -43.9938849, -176.6435004 -43.9938042))" +version = 1 +has_parts = false +is_underground = false +theme = "buildings" +type = "building" + +[examples.Building.bbox] +xmin = -176.643798828125 +xmax = -176.64349365234375 +ymin = -43.9938850402832 +ymax = -43.993709564208984 + +[[examples.Building.sources]] +property = "" +dataset = "OpenStreetMap" +record_id = "w519166507@1" +update_time = "2017-08-27T21:39:50.000Z" + +[[examples.BuildingPart]] +id = "19412d64-51ac-3d6a-ac2f-8a8c8b91bb60" +geometry = "POLYGON ((-73.2462509 -39.8108937, -73.2462755 -39.8109047, -73.246291 -39.8109182, -73.2463022 -39.8109382, -73.2463039 -39.810959, -73.2462962 -39.81098, -73.2462796 -39.8109977, -73.2462674 -39.8110052, -73.2462281 -39.8110153, -73.2461998 -39.811013, -73.2461743 -39.8110034, -73.2461566 -39.8109898, -73.246144 -39.8109702, -73.2461418 -39.8109427, -73.2461511 -39.8109221, -73.2461669 -39.8109066, -73.2461908 -39.8108947, -73.2462184 -39.8108898, -73.2462509 -39.8108937))" +version = 0 +level = 3 +is_underground = false +building_id = "bd663bd4-1844-4d7d-a400-114de051cf49" +theme = "buildings" +type = "building_part" + +[examples.BuildingPart.bbox] +xmin = -73.24630737304688 +xmax = -73.24613952636719 +ymin = -39.81101608276367 +ymax = 
-39.81088638305664 + +[[examples.BuildingPart.sources]] +property = "" +dataset = "OpenStreetMap" +record_id = "w223076787@2" +update_time = "2014-10-31T22:55:36.000Z" diff --git a/packages/overture-schema-buildings-theme/src/overture/schema/buildings/__about__.py b/packages/overture-schema-buildings-theme/src/overture/schema/buildings/__about__.py index 3dc1f76bc..5e3bb11ca 100644 --- a/packages/overture-schema-buildings-theme/src/overture/schema/buildings/__about__.py +++ b/packages/overture-schema-buildings-theme/src/overture/schema/buildings/__about__.py @@ -1 +1 @@ -__version__ = "0.1.0" +__version__ = "0.1.1.dev1" diff --git a/packages/overture-schema-buildings-theme/src/overture/schema/buildings/building.py b/packages/overture-schema-buildings-theme/src/overture/schema/buildings/building.py index 3af49134a..8c2d762bb 100644 --- a/packages/overture-schema-buildings-theme/src/overture/schema/buildings/building.py +++ b/packages/overture-schema-buildings-theme/src/overture/schema/buildings/building.py @@ -144,7 +144,7 @@ class Building( Appearance, ): """ - Buildings are man-made structures with roofs that exists permanently in one place. + Buildings are man-made structures with roofs that exist permanently in one place. A building's geometry represents the two-dimensional footprint of the building as viewed from directly above, looking down. 
Fields such as `height` and `num_floors` allow the diff --git a/packages/overture-schema-buildings-theme/tests/building_baseline_schema.json b/packages/overture-schema-buildings-theme/tests/building_baseline_schema.json index d54ea0185..31d563883 100644 --- a/packages/overture-schema-buildings-theme/tests/building_baseline_schema.json +++ b/packages/overture-schema-buildings-theme/tests/building_baseline_schema.json @@ -165,7 +165,7 @@ "value": { "description": "The actual name value.", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Value", "type": "string" }, @@ -201,7 +201,7 @@ "patternProperties": { "^(?:(?:[A-Za-z]{2,3}(?:-[A-Za-z]{3}){0,3}?)|(?:[A-Za-z]{4,8}))(?:-[A-Za-z]{4})?(?:-[A-Za-z]{2}|[0-9]{3})?(?:-(?:[A-Za-z0-9]{5,8}|[0-9][A-Za-z0-9]{3}))*(?:-[A-WY-Za-wy-z0-9](?:-[A-Za-z0-9]{2,8})+)*$": { "description": "String with no leading/trailing whitespace", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "type": "string" } }, @@ -214,7 +214,7 @@ "primary": { "description": "The most commonly used name.", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Primary", "type": "string" }, @@ -362,7 +362,7 @@ }, "license": { "description": "Source data license name.\n\nThis should be a valid SPDX license identifier when available.\n\nIf omitted, contact the data provider for more license information.", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "License", "type": "string" }, @@ -392,7 +392,7 @@ } }, "additionalProperties": false, - "description": "Buildings are man-made structures with roofs that exists permanently in one place.\n\nA building's geometry represents the two-dimensional footprint of the building as viewed from\ndirectly above, looking down. Fields such as `height` and `num_floors` allow the\nthree-dimensional shape to be approximated. 
Some buildings, identified by the `has_parts` field,\nhave associated `BuildingPart` features which can be used to generate a more representative 3D\nmodel of the building.", + "description": "Buildings are man-made structures with roofs that exist permanently in one place.\n\nA building's geometry represents the two-dimensional footprint of the building as viewed from\ndirectly above, looking down. Fields such as `height` and `num_floors` allow the\nthree-dimensional shape to be approximated. Some buildings, identified by the `has_parts` field,\nhave associated `BuildingPart` features which can be used to generate a more representative 3D\nmodel of the building.", "properties": { "bbox": { "description": "An optional bounding box for the feature", @@ -541,8 +541,8 @@ "level": { "default": 0, "description": "Z-order of the feature where 0 is visual level", - "maximum": 32767, - "minimum": -32768, + "maximum": 2147483647, + "minimum": -2147483648, "title": "Level", "type": "integer" }, @@ -560,7 +560,8 @@ "type": "number" }, "names": { - "$ref": "#/$defs/Names" + "$ref": "#/$defs/Names", + "description": "All known names by which the feature is called" }, "num_floors": { "description": "Number of above-ground floors of the building or part.", diff --git a/packages/overture-schema-buildings-theme/tests/building_part_baseline_schema.json b/packages/overture-schema-buildings-theme/tests/building_part_baseline_schema.json index 1962ff3c6..760f53efd 100644 --- a/packages/overture-schema-buildings-theme/tests/building_part_baseline_schema.json +++ b/packages/overture-schema-buildings-theme/tests/building_part_baseline_schema.json @@ -51,7 +51,7 @@ "value": { "description": "The actual name value.", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Value", "type": "string" }, @@ -87,7 +87,7 @@ "patternProperties": { 
"^(?:(?:[A-Za-z]{2,3}(?:-[A-Za-z]{3}){0,3}?)|(?:[A-Za-z]{4,8}))(?:-[A-Za-z]{4})?(?:-[A-Za-z]{2}|[0-9]{3})?(?:-(?:[A-Za-z0-9]{5,8}|[0-9][A-Za-z0-9]{3}))*(?:-[A-WY-Za-wy-z0-9](?:-[A-Za-z0-9]{2,8})+)*$": { "description": "String with no leading/trailing whitespace", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "type": "string" } }, @@ -100,7 +100,7 @@ "primary": { "description": "The most commonly used name.", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Primary", "type": "string" }, @@ -248,7 +248,7 @@ }, "license": { "description": "Source data license name.\n\nThis should be a valid SPDX license identifier when available.\n\nIf omitted, contact the data provider for more license information.", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "License", "type": "string" }, @@ -425,8 +425,8 @@ "level": { "default": 0, "description": "Z-order of the feature where 0 is visual level", - "maximum": 32767, - "minimum": -32768, + "maximum": 2147483647, + "minimum": -2147483648, "title": "Level", "type": "integer" }, @@ -444,7 +444,8 @@ "type": "number" }, "names": { - "$ref": "#/$defs/Names" + "$ref": "#/$defs/Names", + "description": "All known names by which the feature is called" }, "num_floors": { "description": "Number of above-ground floors of the building or part.", diff --git a/packages/overture-schema-cli/pyproject.toml b/packages/overture-schema-cli/pyproject.toml index 62ed4c3e3..590121005 100644 --- a/packages/overture-schema-cli/pyproject.toml +++ b/packages/overture-schema-cli/pyproject.toml @@ -1,7 +1,11 @@ [project] +maintainers = [ + {name = "Overture Maps Schema Working Group"}, +] dependencies = [ "overture-schema-core", - "pydantic>=2.0", + "overture-schema-system", + "pydantic>=2.12.0", "pyyaml>=6.0.2", "click>=8.0", "rich>=13.0", @@ -14,8 +18,14 @@ name = "overture-schema-cli" readme = "README.md" requires-python = ">=3.10" +[project.urls] +Homepage = 
"https://overturemaps.org" +Source = "https://github.com/OvertureMaps/schema" +Issues = "https://github.com/OvertureMaps/schema/issues" + [tool.uv.sources] overture-schema-core = { workspace = true } +overture-schema-system = { workspace = true } [build-system] build-backend = "hatchling.build" @@ -23,9 +33,9 @@ requires = ["hatchling"] [dependency-groups] dev = [ - "pytest>=7.0", - "ruff", - "mypy", + "pytest>=9.0.0", + "ruff>=0.13.0", + "mypy>=1.17.0", ] [tool.hatch.version] diff --git a/packages/overture-schema-cli/src/overture/schema/cli/__about__.py b/packages/overture-schema-cli/src/overture/schema/cli/__about__.py index 3dc1f76bc..5e3bb11ca 100644 --- a/packages/overture-schema-cli/src/overture/schema/cli/__about__.py +++ b/packages/overture-schema-cli/src/overture/schema/cli/__about__.py @@ -1 +1 @@ -__version__ = "0.1.0" +__version__ = "0.1.1.dev1" diff --git a/packages/overture-schema-cli/src/overture/schema/cli/commands.py b/packages/overture-schema-cli/src/overture/schema/cli/commands.py index 8fdd8bdf4..74c7cbae4 100644 --- a/packages/overture-schema-cli/src/overture/schema/cli/commands.py +++ b/packages/overture-schema-cli/src/overture/schema/cli/commands.py @@ -1,6 +1,7 @@ """Click-based CLI for overture-schema package.""" import builtins +import io import json import sys from collections import Counter, defaultdict @@ -322,8 +323,6 @@ def load_input(filename: Path) -> tuple[dict | list, str]: pass # Parse as single YAML/JSON document - import io - data = yaml.load(io.StringIO(content), Loader=CoreLoader) return data, "" @@ -798,7 +797,7 @@ def dump_namespace( sorted_types = sorted(theme_types[theme], key=lambda x: x[0].type) for key, model_class in sorted_types: stdout.print( - f" [bright_black]→[/bright_black] [bold cyan]{key.type}[/bold cyan] [dim magenta]({key.class_name})[/dim magenta]" + f" [bright_black]→[/bright_black] [bold cyan]{key.type}[/bold cyan] [dim magenta]({key.entry_point})[/dim magenta]" ) docstring = 
get_model_docstring(model_class) if docstring: diff --git a/packages/overture-schema-cli/src/overture/schema/cli/data_display.py b/packages/overture-schema-cli/src/overture/schema/cli/data_display.py index 5dc2f28a7..0b916fca2 100644 --- a/packages/overture-schema-cli/src/overture/schema/cli/data_display.py +++ b/packages/overture-schema-cli/src/overture/schema/cli/data_display.py @@ -2,6 +2,7 @@ from typing import Any +from rich import box from rich.panel import Panel from rich.table import Table @@ -670,8 +671,6 @@ def create_feature_display( table.add_row(field_name_styled, value_styled, "", "") # Wrap table in a Panel with rounded borders - from rich import box - # Add title: "Validation Failed" for single features, or item info for collections if item_index is not None: if item_type: diff --git a/packages/overture-schema-cli/src/overture/schema/cli/type_analysis.py b/packages/overture-schema-cli/src/overture/schema/cli/type_analysis.py index 9316c80c0..4362d7f76 100644 --- a/packages/overture-schema-cli/src/overture/schema/cli/type_analysis.py +++ b/packages/overture-schema-cli/src/overture/schema/cli/type_analysis.py @@ -8,6 +8,8 @@ from pydantic import BaseModel from pydantic.fields import FieldInfo +from overture.schema.system.feature import resolve_discriminator_field_name + from .types import ErrorLocation, ValidationErrorDict # Type aliases for structural tuple elements @@ -29,11 +31,23 @@ class UnionMetadata: nested_unions: dict[str, "UnionMetadata"] +def _extract_literal_value(model: type[BaseModel], field_name: str) -> str | None: + """Extract the single Literal value from a model field as a string, if present.""" + field_info = model.model_fields.get(field_name) + if field_info is None or field_info.annotation is None: + return None + if get_origin(field_info.annotation) is Literal: + args = get_args(field_info.annotation) + return str(args[0]) if args else None + return None + + def _process_union_member( member: Any, # noqa: ANN401 
discriminator_to_model: dict[str, type[BaseModel]], model_name_to_model: dict[str, type[BaseModel]], nested_unions: dict[str, UnionMetadata], + discriminator_field: str | None = None, ) -> None: """Process a single union member, handling nesting recursively. @@ -43,6 +57,7 @@ def _process_union_member( discriminator_to_model: Dict to populate with discriminator value mappings model_name_to_model: Dict to populate with model name mappings nested_unions: Dict to populate with nested union metadata + discriminator_field: The discriminator field name from the parent union annotation """ member_origin = get_origin(member) @@ -63,12 +78,24 @@ def _process_union_member( nested_metadata = introspect_union(member) nested_unions[str(member)] = nested_metadata discriminator_to_model.update(nested_metadata.discriminator_to_model) + # The nested union's discriminator_to_model uses the nested discriminator + # field (e.g. "subtype"). Re-extract using the parent discriminator field + # (e.g. "type") so leaf models are also reachable by the parent's values. 
+ if discriminator_field is not None: + for model in nested_metadata.model_name_to_model.values(): + value = _extract_literal_value(model, discriminator_field) + if value is not None: + discriminator_to_model[value] = model return # Unwrap Annotated to get the actual type (e.g., Annotated[Building, Tag('building')]) # and process it recursively _process_union_member( - member_args[0], discriminator_to_model, model_name_to_model, nested_unions + member_args[0], + discriminator_to_model, + model_name_to_model, + nested_unions, + discriminator_field, ) return @@ -76,17 +103,10 @@ def _process_union_member( if inspect.isclass(member) and issubclass(member, BaseModel): model_name_to_model[member.__name__] = member - # Extract discriminator values from known discriminator fields only - # Restrict to known discriminator names to avoid false positives from other Literal fields - discriminator_fields = ("type", "theme", "subtype") - for field_name, field_info in member.model_fields.items(): - if field_name not in discriminator_fields: - continue - annotation = field_info.annotation - if get_origin(annotation) is Literal: - literal_args = get_args(annotation) - if literal_args: - discriminator_to_model[literal_args[0]] = member + if discriminator_field is not None: + value = _extract_literal_value(member, discriminator_field) + if value is not None: + discriminator_to_model[value] = member def introspect_union(union_type: Any) -> UnionMetadata: # noqa: ANN401 @@ -163,9 +183,9 @@ def introspect_union(union_type: Any) -> UnionMetadata: # noqa: ANN401 if isinstance(metadata, FieldInfo) and hasattr( metadata, "discriminator" ): - disc = metadata.discriminator - # discriminator can be a string or Discriminator object - discriminator_field = str(disc) if disc is not None else None + discriminator_field = resolve_discriminator_field_name( + metadata.discriminator + ) break # Get union members @@ -183,7 +203,11 @@ def introspect_union(union_type: Any) -> UnionMetadata: # noqa: 
ANN401 # Process each union member for member in union_members: _process_union_member( - member, discriminator_to_model, model_name_to_model, nested_unions + member, + discriminator_to_model, + model_name_to_model, + nested_unions, + discriminator_field, ) return UnionMetadata( diff --git a/packages/overture-schema-cli/tests/test_cli_functions.py b/packages/overture-schema-cli/tests/test_cli_functions.py index e3de0fdbd..4218541f5 100644 --- a/packages/overture-schema-cli/tests/test_cli_functions.py +++ b/packages/overture-schema-cli/tests/test_cli_functions.py @@ -1,5 +1,6 @@ """Tests for CLI helper functions (load_input, perform_validation).""" +import io import json from pathlib import Path @@ -175,8 +176,6 @@ def test_load_input_jsonl_from_stdin( JSONL format is commonly used for streaming GeoJSON features where each line is a complete JSON object/feature. """ - import io - feature1 = build_feature(id="test1") feature2 = build_feature(id="test2") jsonl_input = f"{json.dumps(feature1)}\n{json.dumps(feature2)}\n" diff --git a/packages/overture-schema-cli/tests/test_data_display.py b/packages/overture-schema-cli/tests/test_data_display.py index dd0bfd562..218872cbc 100644 --- a/packages/overture-schema-cli/tests/test_data_display.py +++ b/packages/overture-schema-cli/tests/test_data_display.py @@ -10,6 +10,7 @@ select_context_fields, ) from rich.console import Console +from rich.panel import Panel class TestExtractFeatureData: @@ -291,8 +292,6 @@ def test_creates_panel_with_table(self) -> None: result = create_feature_display(fields, errors) # Verify result is a Panel - from rich.panel import Panel - assert isinstance(result, Panel) def test_includes_error_annotation(self) -> None: diff --git a/packages/overture-schema-cli/tests/test_resolve_types.py b/packages/overture-schema-cli/tests/test_resolve_types.py index 2fa226802..94231a1fe 100644 --- a/packages/overture-schema-cli/tests/test_resolve_types.py +++ 
b/packages/overture-schema-cli/tests/test_resolve_types.py @@ -2,6 +2,7 @@ import pytest from overture.schema.cli.commands import resolve_types +from overture.schema.core.discovery import discover_models class TestResolveTypes: @@ -124,8 +125,6 @@ def test_resolve_types_returns_expected_themes( expected_themes: set[str], ) -> None: """Test that resolve_types returns models from expected themes.""" - from overture.schema.core.discovery import discover_models - models = discover_models(namespace=namespace) actual_themes = {key.theme for key in models.keys()} diff --git a/packages/overture-schema-cli/tests/test_type_analysis.py b/packages/overture-schema-cli/tests/test_type_analysis.py index 12fb10e75..d6e9d930b 100644 --- a/packages/overture-schema-cli/tests/test_type_analysis.py +++ b/packages/overture-schema-cli/tests/test_type_analysis.py @@ -10,7 +10,7 @@ get_or_create_structural_tuple, introspect_union, ) -from pydantic import BaseModel, Field +from pydantic import BaseModel, Discriminator, Field class TestStructuralTuples: @@ -29,14 +29,9 @@ class ModelB(BaseModel): UnionType = Annotated[ModelA | ModelB, Field(discriminator="type")] - # Test simple discriminated union error path loc = ("a", "required_a") metadata = introspect_union(UnionType) structural = create_structural_tuple(loc, metadata) - print(f"\nloc: {loc}") - print(f"structural: {structural}") - assert len(structural) == len(loc) - # First element should be discriminator, second should be field assert structural == ("discriminator", "field") def test_mixed_union_structural_tuple(self) -> None: @@ -56,17 +51,11 @@ class Sources(BaseModel): # Test discriminated side loc1 = ("tagged-union[ModelA]", "a", "required_a") structural1 = create_structural_tuple(loc1, metadata) - print("\nDiscriminated side:") - print(f"loc: {loc1}") - print(f"structural: {structural1}") assert structural1 == ("union", "discriminator", "field") # Test non-discriminated side loc2 = ("Sources", "datasets") structural2 = 
create_structural_tuple(loc2, metadata) - print("\nNon-discriminated side:") - print(f"loc: {loc2}") - print(f"structural: {structural2}") assert structural2 == ("model", "field") def test_list_context_structural_tuple(self) -> None: @@ -78,13 +67,9 @@ class ModelA(BaseModel): UnionType = Annotated[ModelA, Field(discriminator="type")] - # Test list context loc = (1, "a", "required_a") metadata = introspect_union(list[UnionType]) structural = create_structural_tuple(loc, metadata) - print("\nList context:") - print(f"loc: {loc}") - print(f"structural: {structural}") assert structural == ("list_index", "discriminator", "field") def test_nested_discriminated_structural_tuple(self) -> None: @@ -114,13 +99,9 @@ class Sources(BaseModel): FeatureUnion = Annotated[Building | SegmentUnion, Field(discriminator="type")] MixedUnion = FeatureUnion | Sources - # Test nested discriminator path (type=segment, subtype=road) loc = ("tagged-union[SegmentUnion]", "segment", "road", "road_class") metadata = introspect_union(MixedUnion) structural = create_structural_tuple(loc, metadata) - print("\nNested discriminated:") - print(f"loc: {loc}") - print(f"structural: {structural}") assert structural == ("union", "discriminator", "discriminator", "field") @@ -253,34 +234,70 @@ class ModelA(BaseModel): assert metadata.discriminator_field == "type" assert "a" in metadata.discriminator_to_model - @pytest.mark.parametrize( - "literal_value,expected_in_mapping", - [ - pytest.param("building", True, id="literal_building"), - pytest.param("place", True, id="literal_place"), - pytest.param("nonexistent", False, id="not_present"), - ], - ) - def test_introspect_extracts_all_literals( - self, literal_value: str, expected_in_mapping: bool - ) -> None: - """Test that introspect_union extracts all Literal field values.""" + +class TestDiscriminatorDiscovery: + """Tests for runtime discriminator field discovery (not hardcoded).""" + + def test_nonstandard_discriminator_field_name(self) -> None: + 
"""Discriminator field not named type/theme/subtype is discovered at runtime.""" + + class Cat(BaseModel): + kind: Literal["cat"] + indoor: bool + + class Dog(BaseModel): + kind: Literal["dog"] + breed: str + + UnionType = Annotated[Cat | Dog, Field(discriminator="kind")] + metadata = introspect_union(UnionType) + + assert metadata.is_discriminated is True + assert metadata.discriminator_field == "kind" + assert metadata.discriminator_to_model["cat"] == Cat + assert metadata.discriminator_to_model["dog"] == Dog + + def test_non_discriminator_literal_fields_excluded(self) -> None: + """Literal fields that aren't the discriminator are not in the mapping.""" class Building(BaseModel): type: Literal["building"] - subtype: Literal["residential"] + status: Literal["active"] class Place(BaseModel): type: Literal["place"] - category: Literal["restaurant"] + status: Literal["active"] UnionType = Annotated[Building | Place, Field(discriminator="type")] metadata = introspect_union(UnionType) - if expected_in_mapping: - assert literal_value in metadata.discriminator_to_model - else: - assert literal_value not in metadata.discriminator_to_model + assert "building" in metadata.discriminator_to_model + assert "place" in metadata.discriminator_to_model + assert "active" not in metadata.discriminator_to_model + + def test_callable_discriminator_extracts_field_name(self) -> None: + """Callable discriminators (Feature.field_discriminator) are supported.""" + + class ModelA(BaseModel): + kind: Literal["a"] + + class ModelB(BaseModel): + kind: Literal["b"] + + def get_kind(data: object) -> str | None: + return data.get("kind") if isinstance(data, dict) else None + + get_kind._field_name = "kind" # type: ignore[attr-defined] + + UnionType = Annotated[ + ModelA | ModelB, Field(discriminator=Discriminator(get_kind)) + ] + metadata = introspect_union(UnionType) + + assert metadata.is_discriminated is True + assert metadata.discriminator_field == "kind" + assert 
metadata.discriminator_to_model["a"] == ModelA + assert metadata.discriminator_to_model["b"] == ModelB class TestStructuralTupleCaching: diff --git a/packages/overture-schema-codegen/README.md b/packages/overture-schema-codegen/README.md new file mode 100644 index 000000000..92a4d8fbe --- /dev/null +++ b/packages/overture-schema-codegen/README.md @@ -0,0 +1,118 @@ +# Overture Schema Codegen + +Generates documentation from Overture Maps Pydantic schema definitions. + +Pydantic's `model_json_schema()` flattens the schema's domain vocabulary into JSON +Schema primitives. NewType names disappear, constraint provenance is lost (which NewType +contributed which bound), custom constraint classes lose their identity (a +`GeometryTypeConstraint` becomes an anonymous `enum` array), and discriminated union +structure collapses into `anyOf` arrays with duplicated fields. + +Navigating Python's type annotation machinery -- NewType chains, nested `Annotated` +wrappers, union filtering, generic resolution -- is complex. The codegen does it once. +`analyze_type()` unwraps annotations into `TypeInfo`, a flat target-independent +representation. Extractors build specs from `TypeInfo`. Renderers consume specs without +touching the type system. New output targets (Arrow schemas, PySpark expressions) add +renderers, not extraction logic. + +## Usage + +```bash +# Generate markdown documentation for all themes +overture-codegen generate --format markdown --output-dir docs/schema/reference + +# Generate for a single theme +overture-codegen generate --format markdown --theme buildings --output-dir out/ + +# List discovered models +overture-codegen list +``` + +The generator discovers models via `overture.models` entry points (provided by theme +packages like `overture-schema-buildings-theme`), extracts type information, and renders +output pages with cross-page links, constraint descriptions, and validated examples. 
+ +## Architecture + +Four layers with strict downward imports -- no layer references the one above it: + +```text +Rendering Output formatting, all presentation decisions + ^ +Output Layout What to generate, where it goes, how outputs link + ^ +Extraction TypeInfo, FieldSpec, ModelSpec, UnionSpec + ^ +Discovery discover_models() from overture-schema-core +``` + +**Discovery** loads registered Pydantic models via entry points. The return dict +includes both concrete `BaseModel` subclasses (like `Building`) and discriminated union +type aliases (like `Segment`). Both satisfy the `FeatureSpec` protocol and flow through +the same pipeline. + +**Extraction** unwraps type annotations into specs. `analyze_type()` is the central +function -- a single iterative loop that peels NewType, Annotated, Union, and container +wrappers, accumulating constraints tagged with the NewType that contributed them. +Domain-specific extractors (`model_extraction`, `union_extraction`, `enum_extraction`, +`newtype_extraction`, `numeric_extraction`) call `analyze_type()` for field types and +produce spec dataclasses. + +**Output Layout** determines what artifacts to generate and where they go. Supplementary +type collection walks expanded feature trees to find referenced enums, NewTypes, and +sub-models. Path assignment maps every type to an output file path mirroring the Python +module structure. Link computation and reverse references enable cross-page navigation. + +**Rendering** consumes specs and owns all presentation decisions. Markdown output uses +Jinja2 templates for feature pages (with field tables, constraint sections, and +examples), enum pages, NewType pages, and aggregate primitive/geometry reference pages. + +`markdown/pipeline.py` orchestrates the full pipeline without I/O, returning +`list[RenderedPage]`. The CLI writes files to disk with Docusaurus frontmatter. 
+ +## Programmatic use + +```python +from overture.schema.codegen.extraction.type_analyzer import analyze_type, TypeKind + +info = analyze_type(some_annotation) +assert info.kind == TypeKind.PRIMITIVE +assert info.base_type == "int32" +assert info.newtype_name == "FeatureVersion" +# Constraints carry provenance: +for cs in info.constraints: + print(f"{cs.constraint} from {cs.source}") +``` + +## Fetching sample data + +Theme packages include example records in their `pyproject.toml` files under +`[[examples.<type>]]` sections. The codegen validates these against Pydantic +models and renders them in feature pages. + +To fetch a fresh sample from the latest Overture release using DuckDB: + +```bash +duckdb -json \ + -c "load spatial" \ + -c "attach 'http://labs.overturemaps.org/data/latest.ddb' as overture" \ + -c "select to_json(columns(*)) + from ( + select * REPLACE ST_AsText(geometry) as geometry + from overture.place + USING SAMPLE 1 + )" \ + | jq . +``` + +The `latest.ddb` database always points to the current release. Tables use +the type name directly (`overture.place`, `overture.segment`, +`overture.building`, etc.). Convert the JSON output to TOML for inclusion in +the theme's `pyproject.toml`. + +## Further reading + +- [Design document](docs/design.md) -- architecture, extension points, data flow + diagrams +- [Walkthrough](docs/walkthrough.md) -- module-by-module narrative tracing Segment + through the full pipeline diff --git a/packages/overture-schema-codegen/docs/design.md b/packages/overture-schema-codegen/docs/design.md new file mode 100644 index 000000000..f29b3ce87 --- /dev/null +++ b/packages/overture-schema-codegen/docs/design.md @@ -0,0 +1,263 @@ +# Code Generator Design + +Code generator that produces documentation and code from Overture Maps Pydantic schema +definitions. + +## Problem + +Overture Maps schema definitions live in Pydantic models across theme packages. 
Each +model carries type annotations, field constraints, docstrings, and relationships +(inheritance, composition, discriminated unions). Generating documentation or code from +these models requires introspecting all of that structure and rendering it into output +formats. + +Pydantic's internal representation is JSON-schema-oriented and discards the vocabulary +the code generator needs to preserve. `model_json_schema()` flattens `FeatureVersion` (a +NewType wrapping `int32` wrapping `Annotated[int, Field(ge=0, le=2^31-1)]`) to `{"type": +"integer", "minimum": 0}` -- the NewType names `FeatureVersion` and `int32` are gone, +custom constraint classes (`GeometryTypeConstraint`, `UniqueItemsConstraint`) are gone, +Python class references are gone, and constraint provenance (which NewType contributed +which bound) is gone. `FieldInfo.annotation` gives the raw annotation, but Pydantic does +not unwrap NewType chains or track multi-depth constraint provenance. + +The schema's domain language -- custom primitives (`int32`, `float64`), semantic +NewTypes (`FeatureVersion`, `Sources`), and custom constraint classes -- needs to +survive extraction intact. A single field annotation like `NewType("Foo", +Annotated[list[SomeModel] | None, Field(ge=0)])` encodes optionality, collection type, +element type, constraints, and semantic naming in nested Python typing constructs. Type +definitions regularly nest `Annotated` inside `NewType` inside `Annotated` -- +`FeatureVersion = NewType("FeatureVersion", int32)` where `int32 = NewType("int32", +Annotated[int, Field(ge=...)])` -- and constraints at each depth need to be tagged with +the NewType that contributed them. + +The code generator solves this by extracting type information once into a flat, +navigable representation (`TypeInfo`), then passing that to renderers that produce +output without touching Python's type system. 
+ +## Inputs and Outputs + +**Inputs**: Pydantic `BaseModel` subclasses discovered via `overture.models` entry +points, plus example data from theme `pyproject.toml` files. Examples serve two +purposes: rendered examples in documentation pages, and a starting point for generating +tests that verify behavior of generated code. + +**Current Outputs**: Markdown documentation pages with field tables, cross-page links, +constraint descriptions, and examples. + +**Planned outputs**: Arrow schemas, PySpark expressions. + +## Architecture + +Four layers with strict downward imports -- no layer references the one above it: + +```text +Rendering Output formatting, all presentation decisions + ^ +Output Layout What to generate, where it goes, how outputs link + ^ +Extraction TypeInfo, FieldSpec, ModelSpec, EnumSpec, ... + ^ +Discovery discover_models() from overture-schema-core +``` + +`markdown/pipeline.py` orchestrates the pipeline without I/O: it expands feature trees, +collects supplementary types, builds placement registries, computes reverse references, +and calls renderers -- returning `RenderedPage` objects. The CLI (`cli.py`) is a thin +Click wrapper that calls `generate_markdown_pages()` and writes files to disk. 
+ +```mermaid +graph TD + subgraph Discovery + DM["discover_models()"] + end + + DM -->|"dict[ModelKey, type]"| EX + + subgraph Extraction + EX["extraction/type_analyzer / extractors"] + EX -->|"ModelSpec, UnionSpec"| TREE["expand_model_tree()"] + end + + TREE -->|"FeatureSpec[]"| OL + + subgraph "Output Layout" + OL["layout/type_collection"] + OL -->|"SupplementarySpec{}"| PA["markdown/path_assignment"] + PA -->|"dict[str, Path]"| LC["markdown/link_computation"] + RR["markdown/reverse_references"] + end + + subgraph Rendering + R["markdown/renderer"] + TR["extraction/type_registry"] -.->|"type name resolution"| R + end + + subgraph Orchestration + MP["markdown/pipeline"] + end + + OL --> MP + LC --> MP + RR --> MP + MP --> R + R -->|"RenderedPage[]"| MP + MP -->|"list[RenderedPage]"| CLI["cli.py → disk"] +``` + +## Extraction + +### `analyze_type` -- iterative type unwrapping + +`analyze_type(annotation)` is a single iterative function that peels type annotation +layers in a fixed order, accumulating information into an `_UnwrapState`: + +1. **NewType**: Records the outermost name (user-facing semantic identity, e.g. + `FeatureVersion`) and updates the "current" name (used for constraint provenance and + as `base_type` at terminal) +2. **Annotated**: Collects constraints from metadata, each tagged with whichever NewType + was most recently entered. Extracts `Field.description` when present +3. **Union**: Filters out `None` (marks optional), `Sentinel`, and `Literal` sentinel + arms. If multiple concrete `BaseModel` arms remain, classifies as `UNION`; otherwise + continues with the single remaining arm +4. **list / dict**: Increments `list_depth` for each `list[...]` layer, sets dict flags, + continues into element types +5. 
**Terminal**: Classifies as `PRIMITIVE`, `LITERAL`, `ENUM`, `MODEL`, or `UNION` + +The result is `TypeInfo` -- a flat dataclass that fully describes the unwrapped type: +classification (`TypeKind`), optional/dict flags, `list_depth` (count of `list[...]` +layers), `newtype_outer_list_depth` (list layers outside the outermost NewType boundary), +accumulated constraints with provenance, NewType names, source type, literal values, and +(for UNION kind) the tuple of concrete `BaseModel` member types. Dict types carry +recursively analyzed `TypeInfo` for their key and value types. + +Multi-depth `Annotated` layers (common in practice, since NewTypes wrap `Annotated` +types that wrap further NewTypes) are handled naturally by the loop -- each iteration +processes the next wrapper. Constraints from each `Annotated` layer are tagged with the +NewType active at that depth. + +### Extractors by domain + +Extraction is split by entity kind: + +- `extraction/model_extraction.py`: Pydantic model -> `ModelSpec` (fields in MRO-aware + documentation order, alias-resolved names, model-level constraints) +- `extraction/enum_extraction.py`: Enum class -> `EnumSpec` +- `extraction/newtype_extraction.py`: NewType -> `NewTypeSpec` +- `extraction/union_extraction.py`: Discriminated union alias -> `UnionSpec` +- `extraction/numeric_extraction.py`: Numeric types -> `NumericSpec` + +Each calls `analyze_type()` for field types. Tree expansion (`expand_model_tree()`) +walks MODEL-kind fields to populate nested model references, with a shared cache and +cycle detection (`starts_cycle=True`). + +### Unions and the FeatureSpec protocol + +Discriminated unions (e.g. `Segment = Annotated[Union[RoadSegment, ...], +Discriminator(...)]`) are type aliases, not classes. `UnionSpec` captures the union +structure: member types, discriminator field and value mapping, and a merged field list. 
+Fields shared across all variants appear once; fields present in some variants are +wrapped in `AnnotatedField` with `variant_sources` indicating which members contribute +them. The common base class is identified so shared fields can be deduplicated. + +`FeatureSpec` is a `Protocol` satisfied by both `ModelSpec` and `UnionSpec`. Code that +operates on "any top-level feature" -- tree expansion, supplementary type collection, +rendering dispatch -- uses `FeatureSpec` rather than a concrete type, so union and model +features flow through the same pipeline. + +### Constraints + +Field-level constraints come from `Annotated` metadata -- `Ge`, `Le`, `Interval`, custom +constraint classes. Each is tagged with the NewType that contributed it via +`ConstraintSource`. + +Model-level constraints come from decorators (`@require_any_of`, `@require_if`, +`@forbid_if`) and are extracted via `ModelConstraint.get_model_constraints()`. + +## Output Layout + +Determines the full set of artifacts to generate, where each lives on disk, and how they +reference each other. + +### Supplementary type collection + +`collect_all_supplementary_types()` walks the expanded field trees of all feature specs, +extracting enums, semantic NewTypes, and sub-models that need their own output. Returns +`dict[str, SupplementarySpec]`. + +### Module-mirrored output paths + +Output paths derive from the source Python module path relative to a computed schema +root (`compute_schema_root()` finds the longest common prefix of all entry point module +paths). `compute_output_dir()` maps a Python module to an output directory. Feature +models land in their module-derived directory. Supplementary types land at their own +module-derived path, with a `types/` segment inserted when they fall under a feature +directory. + +### Link computation + +`LinkContext` carries the current output's path and the full type-to-path registry. 
When +a renderer formats a type reference, it looks up the target in the registry and computes +a relative path. Links exist only for types with registry entries, avoiding broken +references to ungenerated outputs. + +### Reverse references + +`compute_reverse_references()` walks feature specs to build `dict[type_name, +list[UsedByEntry]]` for "Used By" sections. + +## Rendering + +Renderers consume specs and own all presentation decisions -- formatting, casing, link +syntax. Extraction and the type registry carry no presentation logic. + +### Type registry + +`extraction/type_registry.py` maps type names to per-target string representations via +`TypeMapping`. `resolve_type_name()` looks up the registry and returns the display +string for a given target. `is_semantic_newtype()` distinguishes NewTypes that deserve +their own identity (like `FeatureVersion` wrapping `int32`) from pass-through aliases +to registered primitives. + +### Markdown renderer + +Jinja2 templates for feature, enum, NewType, primitives, and geometry pages. +`render_feature()` expands MODEL-kind fields inline with dot-notation (e.g., +`sources[].dataset`), stopping at cycle boundaries. `format_type()` in +`markdown/type_format.py` converts `TypeInfo` into link-aware display strings using +`LinkContext`. + +### Constraint prose + +`extraction/field_constraints.py` and `extraction/model_constraints.py` convert +constraint objects into human-readable descriptions. Field constraints produce inline +text. Model constraints produce section-level descriptions and per-field notes, with +consolidation for related conditional constraints (`require_if` / `forbid_if` grouped by +trigger). + +### Example loader + +Loads example data from theme `pyproject.toml` files, validates against Pydantic models, +and flattens to dot-notation rows for display in feature pages. Also provides a starting +point for generated test data. + +`validate_example` returns a Pydantic model instance. 
`flatten_model_instance` walks the +instance recursively using `isinstance(value, BaseModel)` to distinguish model fields +(recurse with dot notation) from dict fields (keep as leaf values). This eliminates the +need for external schema information -- the model instance itself encodes the type +structure. `augment_missing_fields` appends `(name, None)` entries for union cross-arm +fields absent from the concrete variant instance. + +## Extension Points + +**Adding a new output target** (Arrow schemas next, PySpark expressions after): Add a +column to `TypeMapping` in `extraction/type_registry.py` for type-name resolution. Write +a new renderer module that consumes specs and the type registry. The extraction layer and +output layout are target-independent. + +**Adding a new type kind**: Add a variant to `TypeKind` in `extraction/type_analyzer.py`. +Handle it in the terminal classification of `analyze_type()`. Add an extraction function +and spec dataclass if needed. Update renderers to handle the new kind. + +**Adding a new constraint type**: The iterative unwrapper collects it automatically (any +`Annotated` metadata becomes a `ConstraintSource`). Add a case to +`describe_field_constraint()` for the prose representation. diff --git a/packages/overture-schema-codegen/docs/walkthrough.md b/packages/overture-schema-codegen/docs/walkthrough.md new file mode 100644 index 000000000..f4d1562c2 --- /dev/null +++ b/packages/overture-schema-codegen/docs/walkthrough.md @@ -0,0 +1,757 @@ +# Walkthrough: overture-schema-codegen + +Pydantic's serialization machinery destroys the vocabulary that documentation needs. The +codegen recovers it. + +Consider the transportation theme's `Segment` type -- a discriminated union of +`RoadSegment`, `RailSegment`, and `WaterSegment`. All three share fields inherited from +`TransportationSegment`. Each adds variant-specific fields. 
The discriminator field +`subtype` carries a `Literal` value (`"road"`, `"rail"`, `"water"`) that selects the +arm. Call `model_json_schema()` and the union collapses into an `anyOf` array with +duplicated field definitions, the discriminator mapping disappears, and the common-base +relationship between variants is unrecoverable. + +The same loss happens at the field level. `FeatureVersion = NewType("FeatureVersion", +int32)` where `int32 = NewType("int32", Annotated[int, Field(ge=0, le=2147483647)])` +becomes `{"type": "integer", "minimum": 0, "maximum": 2147483647}`. Three things +vanished: the name "FeatureVersion," the name "int32," and the fact that `ge=0` came +from the `int32` layer rather than `FeatureVersion`. Custom constraint classes like +`GeometryTypeConstraint` lose their identity -- the class name, its docstring, and its +relationship to a specific NewType dissolve into anonymous JSON Schema keywords. + +Documentation needs all of this. The codegen exists to preserve it. + +Navigating Python's type annotation machinery -- NewType chains, nested `Annotated` +wrappers, union filtering, generic resolution -- is complex. The codegen does it once. +`analyze_type()` unwraps annotations into `TypeInfo`, a flat target-independent +representation. Extractors build specs from `TypeInfo`. Renderers consume specs without +re-entering the type system. New output targets add renderers, not extraction logic. + +The solution decomposes into four layers. Discovery finds models. Extraction unwraps +them into flat specifications. Output Layout decides what to generate and where it goes. +Rendering formats the output. Imports flow strictly downward -- no layer references the +one above it. + +Sixteen sections follow, ordered by dependency: each module appears before anything that +imports it. The final section inverts this and traces the full pipeline top-down. 
+Segment threads through as the primary example, since its path through the system -- +union classification, common base discovery, variant field partitioning, discriminator +extraction, tagged rendering -- exercises more of the pipeline than any model feature +does. + +--- + +## 1. Discovery + +The pipeline starts in `overture-schema-core`, not in the codegen package itself. +`discover_models()` calls `importlib.metadata.entry_points(group="overture.models")` and +loads every registered model. Each entry point name encodes identity as a +colon-delimited triple (`overture:buildings:building`); each value encodes the Python +location (`overture.schema.buildings:Building`). The function parses both formats -- +three-part names carry a theme component, two-part names set theme to `None` -- and +returns `dict[ModelKey, type[BaseModel]]`. + +`ModelKey` is a frozen dataclass with four fields: `namespace`, `theme`, `type`, and +`entry_point`. The `entry_point` field preserves the raw `module:Class` string that +downstream modules split to determine output directory structure. + +The return dict includes both concrete `BaseModel` subclasses and type aliases. +`Building` is a concrete class -- `isinstance(Building, type)` returns true. `Segment` +is not. It is an `Annotated` alias wrapping `Union[RoadSegment, RailSegment, +WaterSegment]` with a discriminator field. `isinstance` and `issubclass` cannot inspect +it. The entry point `overture:transportation:segment` maps to +`overture.schema.transportation:Segment`, which loads the alias itself. + +The codegen classifies these at the CLI boundary: `is_model_class` identifies concrete +`BaseModel` subclasses, `is_union_alias` calls `analyze_type` to identify discriminated +unions. From that point forward both model features and union features satisfy the +`FeatureSpec` protocol and flow through the same pipeline. + +## 2. Leaf utilities + +Two modules with no internal dependencies. Both serve multiple layers. 
+ +### extraction/case_conversion.py + +Converts PascalCase to snake_case with two compiled regexes. `_ACRONYM_BOUNDARY` inserts +an underscore between an uppercase run and a capitalized word start: `HTMLParser` +becomes `HTML_Parser` becomes `html_parser`. `_CAMEL_BOUNDARY` inserts between +lowercase-or-digit and uppercase: `buildingPart` becomes `building_part`. +`to_snake_case` applies them in sequence and lowercases. + +`slug_filename` composes the conversion with a file extension. Every output file path in +the system passes through this function. + +```python +>>> slug_filename("HexColor") +'hex_color.md' +``` + +### extraction/docstring.py + +Distinguishes author-written docstrings from auto-generated ones. Both `Enum` and +`NewType` produce default docstrings that vary across Python versions. Rather than +hardcoding version-specific strings, the module creates temporary instances at import +time, captures their `__doc__` attributes, then deletes the instances: + +```python +class _DocstringProbeEnum(Enum): + pass + +_ENUM_DEFAULT_DOCSTRING = _DocstringProbeEnum.__doc__ +del _DocstringProbeEnum +``` + +`is_custom_docstring` compares a given docstring against these captured defaults and an +optional inherited docstring. The enum extractor uses this both at class level and +per-member, since `DocumentedEnum` members carry individual `__doc__` attributes. + +`clean_docstring` delegates to `inspect.cleandoc` and returns `None` for empty results. +`first_docstring_line` takes the first line only -- used by renderers that show +summaries. + +## 3. Type analysis + +This is the module the entire package exists to house. `analyze_type` takes a raw type +annotation and returns `TypeInfo` -- a flat dataclass that fully describes the unwrapped +type without any reference to Python's typing machinery. + +### The loop + +The function runs a single `while True` loop that peels layers in fixed order. 
Each
+iteration handles one wrapper:
+
+**NewType** records names at two levels. The first NewType encountered becomes
+`outermost_newtype_name` (the user-facing identity, e.g. "FeatureVersion") and snapshots
+the current `list_depth` into `newtype_outer_list_depth` -- capturing how many list
+layers appeared before the NewType boundary. Subsequent NewTypes update
+`last_newtype_name` (the innermost, used for constraint provenance and as the terminal
+`base_type`). The loop unwraps via `__supertype__` and continues.
+
+**Annotated** collects every metadata object as a `ConstraintSource`, tagging each with
+whichever NewType was most recently entered. This is how constraint provenance survives:
+when `int32`'s `Annotated` layer contributes `Field(ge=0)`, the constraint records
+`source="int32"`. If a `FieldInfo` carries a description, the function captures it --
+first description wins, so the outermost NewType's documentation takes precedence.
+
+**Union** filters out `NoneType` (marks optional), `Sentinel` instances (Pydantic's
+marker for undeclared defaults), and `Literal` sentinel arms (like
+`Literal[""]` used alongside `HttpUrl`). If multiple concrete `BaseModel` subclasses
+remain after filtering, the function classifies the type as `UNION` and returns
+immediately with the member tuple. Non-BaseModel multi-type unions raise
+`UnsupportedUnionError`. A single remaining arm continues the loop.
+
+The `Literal` filtering has a guard: when a union contains *only* Literal arms (like
+`Optional[Literal["x"]]`), the function keeps them rather than filtering everything out.
+
+**list/dict** increments `list_depth` for each `list[...]` layer (so `list[list[str]]`
+records depth 2), sets dict flags, and continues into element types. Dict is the one
+case where `analyze_type` recurses -- it calls itself for key and value types, storing
+the results as nested `TypeInfo` objects. 
**Terminal** classification in `_classify_terminal` handles what remains after all
+wrappers are peeled: `Any` becomes a PRIMITIVE, `Literal` returns with the literal value
+(single-value only -- multi-value Literals get `literal_value=None`), `Enum` subclasses
+become ENUM, `BaseModel` subclasses become MODEL, everything else becomes PRIMITIVE.
+
+### Concrete walkthroughs
+
+**Segment (union path).** `analyze_type` receives the `Annotated` alias. Iteration 1
+sees `Annotated` -- collects the `FieldInfo` with discriminator metadata as a
+constraint, unwraps to `Union[RoadSegment, RailSegment, WaterSegment]`. Iteration 2 sees
+the union. No `None` arm, no sentinels. Three concrete `BaseModel` subclasses remain --
+the function classifies the type as `UNION` and returns immediately: `kind=UNION`,
+`union_members=(RoadSegment, RailSegment, WaterSegment)`, `base_type="RoadSegment"` (the
+first member). Two iterations, done. The union members are raw type objects, not
+recursively analyzed -- callers that need field details call `extract_model` on each
+member separately.
+
+**FeatureVersion (NewType chain path).** `FeatureVersion = NewType("FeatureVersion",
+int32)` where `int32 = NewType("int32", Annotated[int, Field(ge=0, le=2147483647)])`.
+
+Iteration 1 sees `FeatureVersion`. It's a NewType -- record
+`outermost_newtype_name="FeatureVersion"`, snapshot `newtype_outer_list_depth=0` (no list
+layers yet), unwrap to `int32`, continue. Iteration 2 sees
+`int32`. Also a NewType -- update `last_newtype_name="int32"`, unwrap to `Annotated[int,
+Field(ge=0, ...)]`, continue. Iteration 3 sees `Annotated`. Collect
+`ConstraintSource(source="int32", constraint=...)`, unwrap to `int`. The
+loop breaks on `int` (not a NewType, not Annotated, not a union, not a container).
+`_classify_terminal` returns a `TypeInfo` with `base_type="int32"`,
+`newtype_name="FeatureVersion"`, `kind=PRIMITIVE`, and a constraint tuple recording the
+provenance chain. 
+ +The two paths demonstrate the function's range. Segment exits early on the union branch +with member types for downstream extraction. FeatureVersion runs the full loop through +NewType and Annotated layers, accumulating constraint provenance that survives to +rendering. + +### _UnwrapState + +The accumulator dataclass carries state across iterations: optional/dict flags, +`list_depth` (incremented per `list[...]` layer), `newtype_outer_list_depth` (snapshotted +from `list_depth` when the first NewType is entered), the constraint list, both NewType +name slots, and the captured description. Its `build_type_info` method assembles the +final `TypeInfo` from accumulated state, freezing the constraint list into a tuple. + +### walk_type_info + +A shared visitor that recurses into dict key/value `TypeInfo` children. Both type +collection and reverse reference computation use it rather than duplicating the descent +pattern. Union members are raw `type` objects (not `TypeInfo` instances), so callers +handle them directly. + +## 4. Data structures + +`extraction/specs.py` defines the vocabulary shared between extraction and rendering. Every spec is +a dataclass with no methods beyond field access and, in `UnionSpec`'s case, one cached +property. + +**FieldSpec** represents one model field: alias-resolved name, `TypeInfo`, description, +required flag. Two fields populated later by tree expansion: `model` (a reference to the +nested `ModelSpec` for MODEL-kind fields) and `starts_cycle` (true when following this +field's model would create a cycle in the ancestor chain). + +**ModelSpec** represents one Pydantic model: class name, cleaned docstring, fields in +documentation order, source class reference, the entry point string that located it, and +model-level constraints from decorators like `@require_any_of`. + +**UnionSpec** represents a discriminated union type alias. 
Segment's `UnionSpec` carries +`members=[RoadSegment, RailSegment, WaterSegment]`, `discriminator_field="subtype"`, and +`common_base=TransportationSegment`. Its `annotated_fields` list pairs each `FieldSpec` +with `variant_sources` -- a tuple of class names indicating which union members +contribute that field, or `None` for fields from `TransportationSegment` shared across +all members. The `fields` cached property unwraps this for code that doesn't need +provenance. `UnionSpec` uses `eq=False` because it contains mutable lists and a +`cached_property` -- dataclass-generated `__eq__` would be unreliable. + +**FeatureSpec** is a `Protocol` satisfied by both `ModelSpec` and `UnionSpec`. This is +the pipeline's unifying abstraction. Tree expansion, type collection, rendering +dispatch, and example loading all operate on `FeatureSpec` without knowing which +concrete type they hold. + +**EnumSpec** and **EnumMemberSpec** serve enums. **NewTypeSpec** serves NewTypes. +**NumericSpec** serves numeric primitives with an `Interval` for bounds and optional +`float_bits`. + +**SupplementarySpec** is the union type alias `EnumSpec | NewTypeSpec | ModelSpec` -- +the set of non-feature types that need their own output pages. `NumericSpec` and +geometry types are excluded because they render on aggregate pages rather than +individual ones. + +### Classification functions + +Three functions at the bottom of `extraction/specs.py` classify discovery results. `is_model_class` +is a `TypeGuard` that checks `isinstance(obj, type) and issubclass(obj, BaseModel)`. +`is_union_alias` calls `analyze_type` and checks for `UNION` kind -- the only place +outside the type analyzer that touches Python type annotations. `filter_model_classes` +applies the model guard across the discovery dict's values. + +## 5. Type registry + +Maps type names to per-target display strings. 
`PRIMITIVE_TYPES` contains 15 entries: +four signed integer widths, three unsigned, two floats, `str`/`bool`, two Python builtin +aliases (`int` maps to `int64`, `float` maps to `float64`), and two geometry types +(`Geometry`, `BBox`). Each maps to a `TypeMapping` with a `markdown` field. + +`is_semantic_newtype` answers a question: does this NewType deserve its own +documentation page? The function returns true when the outermost name differs from the +base type (`FeatureVersion` wrapping `int32`) or when the base type has no registry +entry (`HexColor` wrapping `str` via constraints). It returns false for registered +primitives (`int32` wrapping `int`) -- those are the type system's building blocks, not +user-facing concepts. + +`resolve_type_name` looks up the registry by `base_type`, tries `source_type.__name__` +when the first lookup fails, and falls back to `base_type` as a last resort. Semantic +NewTypes wrapping unregistered classes (like `Sources` wrapping `SourceItem`) use the +underlying class name rather than the NewType alias -- `source_type.__name__` takes +precedence. + +## 6. Model extraction + +`extract_model` converts a Pydantic `BaseModel` subclass into a `ModelSpec`. + +### Field ordering + +Documentation order differs from Python declaration order. `_class_order` produces the +MRO-aware sequence: for single inheritance, reversed MRO puts base class fields first +and derived fields last. For multiple inheritance, the primary chain (first base) comes +first, then the class's own fields, then mixin fields. This matches how a reader +encounters the model -- shared structure before specialization. + +`_field_order` walks the class hierarchy produced by `_class_order` and collects +`__annotations__` keys, deduplicating as it goes. 
+ +### Field extraction + +For each field, the extractor resolves the alias chain (`validation_alias` > `alias` > +Python name via `resolve_field_alias`), calls `analyze_type` on `field_info.annotation`, +and builds a `FieldSpec`. The extractor uses `field_info.annotation` rather than +`get_type_hints()` because the latter returns unresolved TypeVars for generic base +classes. + +One subtlety: Pydantic strips the `Annotated` wrapper from some fields and moves the +metadata to `field_info.metadata`. When this happens, `analyze_type` sees a bare type +and misses the constraints. `_merge_field_metadata` patches them back in, tagging them +with `source=None` since they came from the field's own annotation rather than a NewType +chain. + +Model-level constraints come from `ModelConstraint.get_model_constraints(model_class)`, +which inspects decorators like `@require_any_of` and `@require_if`. + +### Tree expansion + +`expand_model_tree` is the recursive step that populates `FieldSpec.model` references. +It maintains a shared cache keyed by Python class and an ancestor set for cycle +detection. + +The cache insert happens *before* recursion. Without this ordering, a back-edge +encounter would find no cached entry and infinite-loop instead of marking +`starts_cycle=True`. The sequence: extract the sub-model, insert it into the cache, then +recurse into its fields. Shared references (the same sub-model used in multiple fields) +reuse the cached `ModelSpec` without marking cycles. + +Union-kind fields skip inline expansion -- they appear as a single row in the output, +linking to their members, rather than expanding inline. + +## 7. Other extractors + +### Enum extraction + +`extract_enum` iterates members, checking `is_custom_docstring` for both class-level and +per-member descriptions. `DocumentedEnum` members carry `__doc__` attributes that the +extractor preserves. 
The class-level docstring is passed as `inherited_doc` to the +per-member check, so members that inherit the class docstring verbatim get +`description=None`. + +### NewType extraction + +`extract_newtype` calls `analyze_type` on the NewType callable and extracts the custom +docstring. When the NewType has no explicit docstring, it falls back to +`TypeInfo.description` -- the first `Field.description` found in the `Annotated` +metadata chain. + +### Union extraction + +The most involved extractor. Walk through `Segment` concretely. + +`extract_union("Segment", annotation)` calls `analyze_type` on the +`Annotated[Union[RoadSegment, RailSegment, WaterSegment], ...]` alias. The analyzer +returns `kind=UNION` with the three member types. + +Next, `_find_common_base` intersects each member's filtered MRO (BaseModel subclasses +only, excluding `BaseModel` itself). All three share `TransportationSegment` in their +MRO. The function picks the most-derived class in the intersection -- the one whose +worst-case MRO distance is smallest. `TransportationSegment` wins: it is the direct +parent of all three members. + +The extractor calls `extract_model(TransportationSegment)` to get the shared field set. +Fields like `id`, `geometry`, `version`, `sources`, and `subtype` appear in the common +base. These become shared `AnnotatedField` entries with `variant_sources=None`. + +Then it extracts each member: `RoadSegment`, `RailSegment`, `WaterSegment`. Fields not +in the shared set are variant-specific, deduplicated by `(name, type_identity)` where +`type_identity` captures `base_type`, `kind`, `is_optional`, and `list_depth`. If +`RoadSegment` and `WaterSegment` both define a `width` field with the same type +identity, the `AnnotatedField` accumulates both class names: +`variant_sources=("RoadSegment", "WaterSegment")`. Fields unique to one member get a +single-element tuple. 
`extract_discriminator` inspects the `Annotated` metadata for a `FieldInfo` with a
+discriminator attribute. For Segment, it finds `subtype` and builds the mapping:
+`{"road": RoadSegment, "rail": RailSegment, "water": WaterSegment}` by checking each
+member for single-value `Literal` fields on the discriminator.
+
+### Primitive extraction
+
+`partition_numeric_and_geometry_types` reads a module's `__all__` exports. NewType
+exports are numeric primitives; non-constraint class exports are geometry types.
+
+`extract_numerics` builds `NumericSpec` objects. For each primitive name it resolves
+the object from the module, calls `extract_newtype` for the type analysis, then extracts
+numeric bounds from constraints. `extract_numeric_bounds` scans constraint objects for
+`ge`/`gt`/`le`/`lt` attributes and packs them into an `Interval`.
+
+## 8. Constraint prose
+
+Two modules convert constraint objects into human-readable text.
+
+### Field constraints
+
+`extraction/field_constraints.py` pattern-matches constraint types. `Interval` renders
+as `lower ≤ x ≤ upper` using Unicode comparison operators. Single-bound constraints
+(`Ge`, `Gt`, `Le`, `Lt`) render as `≥ value` or `< value`. Length constraints
+(`MinLen`, `MaxLen`) render as plain prose (e.g. "Minimum length: 1"). `GeometryTypeConstraint` lists
+allowed geometry types by name, converting snake_case values to PascalCase. `Reference`
+describes the relationship and target model, using an optional `link_fn` to produce
+markdown links.
+
+Opaque constraints -- classes that inherit `object.__repr__` without customization --
+render as their class name plus docstring. When a regex pattern attribute exists, the
+prose includes it.
+
+`constraint_display_text` is the top-level entry point. It checks whether the constraint
+is opaque and has a docstring, and if so, produces a composite description combining the
+docstring, class name, and pattern. Otherwise it delegates to
+`describe_field_constraint`. 
+ +### Model constraints + +`extraction/model_constraints.py` handles model-level constraints from decorators. +`analyze_model_constraints` returns two things in one pass: a list of section-level +descriptions and a dict mapping field names to the constraint descriptions that +reference them. + +The module consolidates related conditionals. Three `require_if` constraints with the +same target fields but different trigger values merge into "when X is one of: a, b, c" +instead of three separate bullets. `_consolidation_key` groups constraints by `(type, +field_names, condition_field_name)`. Groups with one member render normally; groups with +multiple members produce consolidated prose. + +`NoExtraFieldsConstraint` is silently skipped -- it is a structural validation rule, not +something a documentation reader acts on. + +## 9. Module layout + +Translates Python module paths into output directory paths. `compute_schema_root` finds +the longest common dotted prefix across all entry point module paths. Given paths like +`overture.schema.buildings`, `overture.schema.places`, and +`overture.schema.transportation`, the root is `overture.schema`. For a single unique +path, it drops the last component. + +`compute_output_dir` mirrors the remaining package structure after stripping the root. +Packages (directories with `__path__` per PEP 302) keep all components. File modules +drop their last component, since the `.py` filename adds no useful structure. +`is_package_module` checks `sys.modules` for `__path__` to make this distinction. + +The entry point string `overture.schema.buildings:Building` encodes both module and +class. `entry_point_module` extracts the module path, `entry_point_class` extracts the +class name. `output_dir_for_entry_point` composes these to produce the output directory +for a feature. + +## 10. 
Supplementary type collection + +`collect_all_supplementary_types` walks the expanded field trees of all feature specs to +discover every referenced type that needs its own output page: enums, semantic NewTypes, +and sub-models. + +The walk maintains a visited set for models and a feature name set for skip detection. +Types that are themselves top-level features get skipped. For UNION-kind fields, the +function extracts and walks each member's fields. For semantic NewTypes, it walks the +`__supertype__` chain to collect intermediate NewTypes -- `Id` wraps +`NoWhitespaceString` wraps `str`, and both `Id` and `NoWhitespaceString` get their own +pages. The `walk_type_info` visitor handles dict key/value recursion. + +MODEL-kind fields follow `field_spec.model` references that were populated by +`expand_model_tree`. The function raises `RuntimeError` if it encounters a MODEL-kind +field with `model=None` -- a guard against calling collection before tree expansion. + +A single field matches multiple conditions independently. A semantic NewType wrapping a +MODEL-kind type triggers both NewType extraction and model collection. The checks use +independent `if` statements, not `elif`. + +## 11. Path assignment + +`build_placement_registry` builds the complete mapping from type names to output file +paths. Three tiers: + +Aggregate pages come first. All numeric primitives point to +`system/primitive/primitives.md`. All geometry types point to +`system/primitive/geometry.md`. These are hardcoded paths since the types share a single +reference page. + +Feature specs get individual pages. Output directories derive from +`output_dir_for_entry_point`. Filenames use `slug_filename`. + +Supplementary specs get module-derived paths from `source_type.__module__`. When a +supplementary type's output directory falls under a feature directory, +`_nest_under_types` inserts a `types/` segment. 
Without this insertion, an enum defined +in `overture.schema.buildings` would land alongside the Building feature page. With it, +the enum lands in `buildings/types/` -- preventing supplementary type pages from +cluttering feature directories. + +`_nest_under_types` sorts feature directories by path length (descending) before +checking containment, so the most specific match wins. + +## 12. Links and reverse references + +### Link computation + +`LinkContext` carries the current page's output path and the full type-to-path registry. +When a renderer formats a type reference, it calls `resolve_link` to compute a relative +path from the current page to the target. Types without registry entries return `None`, +telling renderers to show inline code instead of a broken link. `resolve_link_or_slug` +provides a fallback when a link is required regardless. + +`relative_link` computes `../` navigation between any two paths in the output tree. It +finds the common prefix of directory components, counts the levels up from the source +directory, and descends into the target. Both paths must be normalized -- the function +rejects `..` components to prevent path traversal surprises. + +### Reverse references + +`compute_reverse_references` walks all feature fields and supplementary specs to build +`dict[str, list[UsedByEntry]]`. Each entry maps a type name to the list of types that +reference it. Entries sort models before NewTypes, alphabetical within each group. + +The function tracks references with sets for deduplication, then sorts into lists at the +end. It skips self-references and references to types not in the supplementary spec dict +(features don't need "used by" sections since they are the entry points). + +NewType specs register additional references from their constraint sources. 
If `Id` +inherits a constraint from `NoWhitespaceString`, the reverse reference captures that +`Id` uses `NoWhitespaceString` -- even though the relationship is through constraint +provenance rather than direct field reference. + +## 13. Markdown type formatting + +`markdown/type_format.py` converts `TypeInfo` into display strings for markdown output. + +`format_type` handles the full range of field types. Single-value Literals render as +`"value"` in backticks. Semantic NewTypes and enums/models get markdown links via +`_resolve_type_link`, which checks the `LinkContext` registry and falls back to plain +code spans. For types with a linked identity (semantic NewTypes, enums, models), list +rendering depends on where the list layers sit relative to the NewType boundary. +`newtype_outer_list_depth > 0` means the list wraps the NewType (`list[PhoneNumber]`) and +renders as `list`. `is_list` with `newtype_name` set means the NewType +wraps a list internally (`Sources` wrapping `list[SourceItem]`) and renders with a +`(list)` qualifier. Non-NewType identities (enums, models) use `list` syntax. Linked +inner types use broken-backtick syntax (`` `list<` `` ... `` `>` ``) built as a single +wrapper to avoid adjacent backticks that CommonMark would interpret as multi-backtick +code span delimiters. Dict types render as `` `map` ``. Qualifiers (optional, list, +map) append in parentheses. + +Union members format independently -- each gets its own link resolution, joined with +pipe separators escaped for table-cell safety. + +`format_underlying_type` handles NewType page headers. It links enums and models that +have their own pages but skips the outermost NewType name to avoid self-referencing. The +function uses `source_type.__name__` rather than `base_type` for link resolution, since +`base_type` may carry the outermost NewType name when only one NewType wraps a class. + +## 14. Markdown rendering + +`markdown/renderer.py` is the template driver. 
+ +### Templates + +Six Jinja2 templates in `markdown/templates/`. `feature.md.jinja2` renders a field table +with Name, Type, and Description columns, an optional Constraints section, an optional +Examples section, and a "Used By" partial. `enum.md.jinja2` renders a bullet list of +values. `newtype.md.jinja2` shows underlying type and constraints with provenance links. +`primitives.md.jinja2` and `geometry.md.jinja2` render aggregate reference pages. +`_used_by.md.jinja2` is an included partial. + +The Jinja2 environment registers `linkify_urls` as a filter, which wraps bare URLs in +markdown link syntax. The filter uses a two-pass approach: extract code spans first (to +avoid modifying URLs inside backticks), linkify the remaining text, then restore code +spans. + +### Field expansion + +`render_feature` dispatches on spec type. `ModelSpec` gets `_expand_model_fields`, which +walks the pre-populated `FieldSpec.model` tree and produces dot-notation rows. +`sources[0].dataset` appears as a single row in the flat field table, with `[]` +appended per nesting level to list-of-model fields (so a doubly-nested list gets +`[][]`). Expansion stops at fields marked with +`starts_cycle`. + +`UnionSpec` gets `_expand_union_fields`, which adds italic variant tags to +variant-specific fields. For Segment, shared fields from `TransportationSegment` (like +`id`, `geometry`, `sources`) render as plain rows. Variant-specific fields get tagged: +`_short_variant_name` strips the union name suffix, so `RoadSegment` becomes `Road`, +`WaterSegment` becomes `Water`. A field present in two of three members renders as `` +`width` *(Road, Water)* ``. Shared fields render without tags. + +### Constraint annotation + +Field-level constraints from the field's own annotation (not inherited from NewType +chains) annotate the field's description cell as italic text. 
The distinction matters: +constraints with `source=None` came from the field itself, while constraints with a +named source live on the NewType's own page. + +Model-level constraints annotate top-level field rows (those without dot-notation +prefixes) using the `field_notes` dict from `analyze_model_constraints`. + +### Example formatting + +Example values render in backticks for monospace consistency. Booleans use +`true`/`false` (not Python's `True`/`False`). `None` renders as `null`. Long values +truncate at 100 characters. Lists and dicts use compact bracket/brace notation. + +### Aggregate pages + +`render_primitives_from_specs` sorts primitives by bit-width key (prefix then numeric +width), groups into signed integers, unsigned integers, and floats, and formats ranges. +Integer ranges show both bounds as a compact "lower to upper" form; `int64`-scale bounds +use `2^63` notation for readability. `render_geometry_from_values` produces a +comma-separated backtick list. + +## 15. Example loader + +Loads example data from theme `pyproject.toml` files and validates it against the +schema. + +`resolve_pyproject_path` walks up from a model's module file to find `pyproject.toml`. +`load_examples_from_toml` reads the `[examples.ModelName]` TOML section. + +Validation requires two preprocessing steps that handle flat-schema conventions. + +Literal fields (like `theme="buildings"`) are omitted from examples since they carry +constant values. `_inject_literal_fields` adds them back before validation by scanning +`model_fields` for single-value `Literal` annotations via `single_literal_value`. + +Discriminated union examples from flat Parquet schemas include null fields from +non-selected variant arms. `_strip_null_unknown_fields` removes null-valued fields not +in the common base's field set, so the selected arm's validator accepts the data without +choking on fields that belong to sibling variants. + +`validate_example` returns a Pydantic model instance. 
`flatten_model_instance` walks the +instance recursively using `isinstance(value, BaseModel)` to distinguish model fields +(recurse with dot notation) from dict fields (keep as leaf values). Lists of models +use bracket notation (`sources[0].dataset`), nested lists use double-index notation +(`hierarchies[0][1].name`). The model instance itself encodes the type structure, +eliminating the need for external schema information. + +For discriminated unions, the concrete variant instance lacks fields from other arms. +`augment_missing_fields` compares base field names against the union's merged field list +and appends `(name, None)` for absent fields, matching the flat Parquet schema where all +variant columns exist. + +`order_example_rows` sorts by field position in the documentation's field order using a +stable sort, so sub-fields maintain their original relative order. + +`load_examples` orchestrates the full flow: find the pyproject.toml, load the TOML +section, validate each example, flatten via `flatten_model_instance`, augment missing +fields, and order. Invalid examples log a warning and skip rather than failing the +pipeline. + +## 16. Orchestration and CLI + +### The pipeline + +`generate_markdown_pages` in `markdown/pipeline.py` is the "main" function. It takes +feature specs and a schema root, returns rendered pages without touching the filesystem. +Eight steps: + +1. **Expand model trees** with a shared cache across all features, so sub-models + referenced by multiple features extract once. + +2. **Partition primitive and geometry names** from the system primitive module's + `__all__` exports. + +3. **Collect supplementary types** by walking expanded feature trees. + +4. **Build the placement registry** mapping every type to its output file path. + +5. **Compute reverse references** across all features and supplements. + +6. **Render each feature** with its `LinkContext`, loaded examples, and used-by entries. + +7. 
**Render each supplementary type** -- dispatching to `render_enum`, `render_newtype`, + or `render_feature` (for sub-models) based on spec type. + +8. **Render aggregate pages** for primitives and geometry. + +The return value is `list[RenderedPage]` -- frozen dataclasses carrying content, output +path, and a boolean `is_feature` flag. The caller decides what to do with them. + +### The CLI + +`cli.py` is a thin Click wrapper. The `generate` command discovers models, computes +schema root from *all* entry points (before any theme filtering), classifies each entry +as model or union via `is_model_class` and `is_union_alias`, extracts specs, calls the +pipeline, and writes output. + +Schema root computation uses all entry points deliberately. Theme filtering narrows +which features appear in the output, but the directory structure must remain stable +regardless of which themes are selected. Computing the root from filtered paths would +shift output directories when themes change. + +Feature pages get Docusaurus frontmatter (`sidebar_position: 1`) prepended. The CLI +generates `_category_.json` files for sidebar navigation, assigning positions +alphabetically with feature directories first. + +The `list` command prints sorted model names -- a diagnostic tool for verifying which +models the entry point system discovers. + +--- + +## Top-down trace: Segment through the pipeline + +A reader who reached this point has seen every module in isolation. This section follows +`Segment` from discovery to rendered markdown, showing how the pieces compose. + +**Discovery.** The CLI calls `discover_models()`. The entry point +`overture:transportation:segment` loads `overture.schema.transportation:Segment` -- the +`Annotated[Union[...]]` alias. `Segment` lands in the return dict keyed by +`ModelKey(namespace="overture", theme="transportation", type="segment", +entry_point="overture.schema.transportation:Segment")`. + +**Classification.** The CLI tests each entry. 
`is_model_class(Segment)` returns false -- +`Segment` is not a class. `is_union_alias(Segment)` calls `analyze_type`, which peels +the `Annotated` wrapper and finds three `BaseModel` subclasses in the union. The +analyzer returns `kind=UNION`. The CLI routes Segment to `extract_union`. + +**Extraction.** `extract_union("Segment", annotation)` calls `analyze_type` again (cheap +-- the same two-iteration path), gets the three member types, and finds +`TransportationSegment` as the common base via `_find_common_base`. It extracts the +common base's fields as shared, then extracts each member's fields and partitions the +non-shared ones into `AnnotatedField` entries with variant provenance. +`extract_discriminator` finds `subtype` and builds `{"road": RoadSegment, "rail": +RailSegment, "water": WaterSegment}`. The result is a `UnionSpec` satisfying +`FeatureSpec`. + +Meanwhile, concrete models like `Building` go through `extract_model`, which calls +`analyze_type` on each field annotation. A field typed `FeatureVersion` unwraps through +two NewType layers and an `Annotated` layer, producing a `TypeInfo` with +`base_type="int32"`, `newtype_name="FeatureVersion"`, and constraint provenance linking +`ge=0` back to the `int32` NewType. Both extraction paths produce specs satisfying +`FeatureSpec`. + +**Pipeline entry.** The feature specs enter `generate_markdown_pages`. +`expand_model_tree` walks MODEL-kind fields on Segment's `UnionSpec` and populates +`FieldSpec.model` references. The shared cache ensures sub-models referenced by multiple +features (like `Sources`) extract once. Union-kind fields skip inline expansion. + +**Layout.** `partition_numeric_and_geometry_types` reads the system module's exports. +`collect_all_supplementary_types` walks Segment's expanded fields and discovers +referenced enums (like `Subtype`), semantic NewTypes (like `Id`, `Sources`), and +sub-models. 
The walk follows `FieldSpec.model` references down the tree, and for +UNION-kind fields, extracts and walks each member's fields separately. + +`build_placement_registry` assigns Segment's output path from its entry point: +`entry_point_module` extracts `overture.schema.transportation`, `compute_output_dir` +strips the schema root and mirrors the remaining structure. Supplementary types get +module-derived paths with `types/` inserted under feature directories. + +**Reverse references.** `compute_reverse_references` walks Segment's fields and records +that Segment references `Subtype`, `Id`, `Sources`, and other types. These references +populate "Used By" sections: the `Subtype` enum page shows that Segment uses it. + +**Rendering.** The pipeline builds a `LinkContext` from Segment's output path and the +full registry. `render_feature` dispatches to `_expand_union_fields` because the spec is +a `UnionSpec`. Shared fields from `TransportationSegment` render as plain rows. +Variant-specific fields get italic tags: `` `road_class` *(Road)* ``. The renderer +formats each field's type via `format_type`, which resolves links through the +`LinkContext` -- `Subtype` gets a relative link to its enum page, `Id` links to its +NewType page. Constraints with `source=None` annotate field rows; constraints with named +sources appear on the source NewType's page instead. + +The example loader finds `pyproject.toml` in the transportation theme package, reads +`[examples.Segment]`, validates each example against the union alias (injecting literal +fields, stripping null fields from non-selected arms), flattens the model instance to +dot-notation via `flatten_model_instance`, augments missing cross-arm fields, and orders +by field position. + +The Jinja2 template assembles the field table, optional constraints section, examples, +and "Used By" partial into markdown. + +**Output.** The pipeline returns a `RenderedPage` with Segment's content, its output +path, and `is_feature=True`. 
The CLI prepends Docusaurus frontmatter and writes the +file. `_category_.json` files get generated for sidebar navigation. + +**The layering principle.** At every stage, the modules that do the work never reach +back up the dependency chain. Renderers consume specs and registries but never import +extractors. Extractors consume `analyze_type` but never import renderers. The type +analyzer imports nothing from the codegen package except `clean_docstring`. Any module +can be understood, tested, and modified by reading only the modules below it. diff --git a/packages/overture-schema-codegen/pyproject.toml b/packages/overture-schema-codegen/pyproject.toml new file mode 100644 index 000000000..de42c5fb9 --- /dev/null +++ b/packages/overture-schema-codegen/pyproject.toml @@ -0,0 +1,29 @@ +[build-system] +build-backend = "hatchling.build" +requires = ["hatchling"] + +[project] +dependencies = [ + "click>=8.0", + "jinja2>=3.0", + "overture-schema-core", + "overture-schema-system", + "tomli>=2.0; python_version < '3.11'", +] +description = "Code generator that produces documentation and code from Pydantic models" +dynamic = ["version"] +license = "MIT" +name = "overture-schema-codegen" + +[project.scripts] +overture-codegen = "overture.schema.codegen.cli:main" + +[tool.uv.sources] +overture-schema-core = { workspace = true } +overture-schema-system = { workspace = true } + +[tool.hatch.version] +path = "src/overture/schema/codegen/__about__.py" + +[tool.hatch.build.targets.wheel] +packages = ["src/overture"] diff --git a/packages/overture-schema-codegen/src/overture/__init__.py b/packages/overture-schema-codegen/src/overture/__init__.py new file mode 100644 index 000000000..8db66d3d0 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/packages/overture-schema-codegen/src/overture/schema/__init__.py 
b/packages/overture-schema-codegen/src/overture/schema/__init__.py new file mode 100644 index 000000000..8db66d3d0 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/__about__.py b/packages/overture-schema-codegen/src/overture/schema/codegen/__about__.py new file mode 100644 index 000000000..3dc1f76bc --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/__about__.py @@ -0,0 +1 @@ +__version__ = "0.1.0" diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/__init__.py b/packages/overture-schema-codegen/src/overture/schema/codegen/__init__.py new file mode 100644 index 000000000..2de7d7120 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/__init__.py @@ -0,0 +1 @@ +"""Code generator for Overture Schema Pydantic models.""" diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/cli.py b/packages/overture-schema-codegen/src/overture/schema/codegen/cli.py new file mode 100644 index 000000000..0a24c7348 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/cli.py @@ -0,0 +1,195 @@ +"""CLI entrypoint for schema code generation.""" + +import json +import logging +from pathlib import Path, PurePosixPath + +import click + +from overture.schema.core.discovery import discover_models + +from .extraction.model_extraction import extract_model +from .extraction.specs import ( + FeatureSpec, + is_model_class, + is_union_alias, +) +from .extraction.union_extraction import extract_union +from .layout.module_layout import ( + OUTPUT_ROOT, + compute_schema_root, + entry_point_class, + entry_point_module, +) +from .markdown.pipeline import generate_markdown_pages + +log = logging.getLogger(__name__) + +__all__ = ["cli"] + +_OUTPUT_FORMATS = ("markdown",) + +_FEATURE_FRONTMATTER = 
"---\nsidebar_position: 1\n---\n\n" + + +def _write_output( + content: str, + output_dir: Path | None, + output_path: PurePosixPath, +) -> None: + """Write content to a file under output_dir, or stdout.""" + if output_dir: + file_path = output_dir / output_path + file_path.parent.mkdir(parents=True, exist_ok=True) + file_path.write_text(content) + else: + click.echo(content) + click.echo() # separate entries with a blank line in stdout mode + + +@click.group() +def cli() -> None: + """Overture Schema code generator. + + Generate documentation and code from Pydantic schema models. + """ + + +@cli.command("list") +def list_models() -> None: + """List all discovered models.""" + models = discover_models() + names = sorted( + model.__name__ if isinstance(model, type) else str(model) + for model in models.values() + ) + for name in names: + click.echo(name) + + +@cli.command() +@click.option( + "--format", + "output_format", + required=True, + type=click.Choice(_OUTPUT_FORMATS), + help="Output format", +) +@click.option( + "--theme", + multiple=True, + help="Filter to specific theme(s); repeatable (e.g., --theme buildings --theme places)", +) +@click.option( + "--output-dir", + type=click.Path(path_type=Path), + default=None, + help="Write output to directory (default: stdout)", +) +def generate( + output_format: str, + theme: tuple[str, ...], + output_dir: Path | None, +) -> None: + """Generate code/docs from discovered models.""" + all_models = discover_models() + + # Schema root from ALL entry points (before theme filter). 
+ module_paths = [entry_point_module(k.entry_point) for k in all_models] + schema_root = compute_schema_root(module_paths) + + models = ( + {k: v for k, v in all_models.items() if k.theme in theme} + if theme + else all_models + ) + + if output_dir: + output_dir.mkdir(parents=True, exist_ok=True) + + feature_specs: list[FeatureSpec] = [] + for key, entry in models.items(): + if is_model_class(entry): + feature_specs.append(extract_model(entry, entry_point=key.entry_point)) + elif is_union_alias(entry): + feature_specs.append( + extract_union( + entry_point_class(key.entry_point), + entry, + entry_point=key.entry_point, + ) + ) + + _generate_markdown(feature_specs, schema_root, output_dir) + + +def _generate_markdown( + feature_specs: list[FeatureSpec], + schema_root: str, + output_dir: Path | None, +) -> None: + """Generate markdown with directory layout and placement-aware links.""" + pages = generate_markdown_pages(feature_specs, schema_root) + + for page in pages: + content = ( + f"{_FEATURE_FRONTMATTER}{page.content}" if page.is_feature else page.content + ) + _write_output(content, output_dir, page.path) + + if output_dir: + feature_paths = {page.path for page in pages if page.is_feature} + all_paths = {page.path for page in pages} + _write_category_files(output_dir, all_paths, feature_paths) + + +def _ancestor_dirs(paths: set[PurePosixPath]) -> set[PurePosixPath]: + """Collect all ancestor directories for a set of file paths.""" + dirs: set[PurePosixPath] = set() + for path in paths: + parent = path.parent + while parent != OUTPUT_ROOT: + dirs.add(parent) + parent = parent.parent + return dirs + + +def _top_level_positions( + dirs: set[PurePosixPath], + feature_paths: set[PurePosixPath], +) -> dict[PurePosixPath, int]: + """Assign sidebar positions: feature dirs first, then non-feature, both alphabetical.""" + feature_dir_names = {p.parts[0] for p in feature_paths} + top_level = sorted(d for d in dirs if d.parent == OUTPUT_ROOT) + feature_dirs = [d for d in 
top_level if d.name in feature_dir_names] + non_feature_dirs = [d for d in top_level if d.name not in feature_dir_names] + return {d: i for i, d in enumerate(feature_dirs + non_feature_dirs, start=1)} + + +def _write_category_files( + output_dir: Path, + all_paths: set[PurePosixPath], + feature_paths: set[PurePosixPath], +) -> None: + """Write _category_.json files for Docusaurus sidebar navigation.""" + dirs = _ancestor_dirs(all_paths) + positions = _top_level_positions(dirs, feature_paths) + + for dir_path in sorted(dirs): + label = dir_path.name.replace("_", " ").title() + category: dict[str, object] = {"label": label} + if dir_path in positions: + category["position"] = positions[dir_path] + + file_path = output_dir / dir_path / "_category_.json" + file_path.parent.mkdir(parents=True, exist_ok=True) + file_path.write_text(json.dumps(category, indent=2) + "\n") + + +def main() -> None: + """Run the CLI entry point.""" + cli() + + +if __name__ == "__main__": + main() diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/__init__.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/case_conversion.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/case_conversion.py new file mode 100644 index 000000000..9d06341fb --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/case_conversion.py @@ -0,0 +1,41 @@ +"""PascalCase to snake_case conversion for code generation.""" + +import re + +__all__ = ["slug_filename", "to_snake_case"] + +# Insert _ between an acronym run and a capitalized word start (HTML|Parser) +_ACRONYM_BOUNDARY = re.compile(r"([A-Z]+)([A-Z][a-z])") +# Insert _ between a lowercase/digit and an uppercase letter (building|Part) +_CAMEL_BOUNDARY = re.compile(r"([a-z0-9])([A-Z])") + + +def 
to_snake_case(name: str) -> str: + """Convert PascalCase to snake_case. + + Handles acronym runs correctly: "HTMLParser" becomes "html_parser", + not "h_t_m_l_parser". + + >>> to_snake_case("HTMLParser") + 'html_parser' + >>> to_snake_case("BuildingPart") + 'building_part' + >>> to_snake_case("simple") + 'simple' + """ + name = _ACRONYM_BOUNDARY.sub(r"\1_\2", name) + name = _CAMEL_BOUNDARY.sub(r"\1_\2", name) + return name.lower() + + +def slug_filename(name: str, ext: str = ".md") -> str: + """Convert a PascalCase type name to a snake_case filename. + + >>> slug_filename("HexColor") + 'hex_color.md' + >>> slug_filename("BuildingPart") + 'building_part.md' + >>> slug_filename("BuildingPart", ext=".json") + 'building_part.json' + """ + return f"{to_snake_case(name)}{ext}" diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/docstring.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/docstring.py new file mode 100644 index 000000000..7dc2e112f --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/docstring.py @@ -0,0 +1,46 @@ +"""Docstring extraction and cleaning utilities.""" + +import inspect +from enum import Enum +from typing import NewType + +__all__ = ["clean_docstring", "first_docstring_line", "is_custom_docstring"] + + +# Probe auto-generated docstrings so we can distinguish them from explicit ones. +# Both Enum and NewType generate default docstrings that vary by Python version; +# capturing at import time adapts automatically if the format changes. 
+class _DocstringProbeEnum(Enum): + pass + + +_ENUM_DEFAULT_DOCSTRING = _DocstringProbeEnum.__doc__ +del _DocstringProbeEnum +_NewtypeProbe = NewType("_NewtypeProbe", int) +_NEWTYPE_DEFAULT_DOCSTRING = _NewtypeProbe.__doc__ +del _NewtypeProbe + + +def clean_docstring(doc: str | None) -> str | None: + """Return cleaned docstring, or None if absent or whitespace-only.""" + if not doc: + return None + cleaned = inspect.cleandoc(doc) + return cleaned or None + + +def first_docstring_line(doc: str | None) -> str | None: + """Return the first line of a docstring, or None if absent.""" + cleaned = clean_docstring(doc) + if not cleaned: + return None + return cleaned.split("\n")[0] + + +def is_custom_docstring(doc: str | None, inherited_doc: str | None = None) -> bool: + """Check if a docstring was explicitly written, not auto-generated or inherited.""" + return bool(doc) and doc not in ( + _ENUM_DEFAULT_DOCSTRING, + _NEWTYPE_DEFAULT_DOCSTRING, + inherited_doc, + ) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/enum_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/enum_extraction.py new file mode 100644 index 000000000..545979d66 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/enum_extraction.py @@ -0,0 +1,40 @@ +"""Enum extraction.""" + +from enum import Enum + +from .docstring import clean_docstring, is_custom_docstring +from .specs import EnumMemberSpec, EnumSpec + +__all__ = ["extract_enum"] + + +def extract_enum(enum_class: type[Enum]) -> EnumSpec: + """Extract enum specification from an Enum class. + + Handles both simple str Enums and DocumentedEnums where members + have per-value descriptions via the __doc__ attribute. 
+ """ + class_doc = enum_class.__doc__ + description = clean_docstring(class_doc) if is_custom_docstring(class_doc) else None + + members: list[EnumMemberSpec] = [] + for member in enum_class: + member_doc = getattr(member, "__doc__", None) + member_description = ( + member_doc if is_custom_docstring(member_doc, class_doc) else None + ) + + members.append( + EnumMemberSpec( + name=member.name, + value=str(member.value), + description=member_description, + ) + ) + + return EnumSpec( + name=enum_class.__name__, + description=description, + members=members, + source_type=enum_class, + ) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/examples.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/examples.py new file mode 100644 index 000000000..e5f949831 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/examples.py @@ -0,0 +1,367 @@ +"""Load, validate, and flatten example data for schema documentation.""" + +import logging +import sys +from dataclasses import dataclass +from pathlib import Path +from typing import Any + +from pydantic import BaseModel, TypeAdapter, ValidationError +from pydantic.fields import FieldInfo + +from .model_extraction import resolve_field_alias +from .type_analyzer import single_literal_value + +log = logging.getLogger(__name__) + +__all__ = [ + "ExampleRecord", + "augment_missing_fields", + "flatten_model_instance", + "load_examples", + "validate_example", +] + +# tomllib is stdlib from 3.11+; tomli is the backport for 3.10. 
+try: + import tomllib # type: ignore[import-not-found] +except ModuleNotFoundError: + import tomli as tomllib # type: ignore[import-not-found] + + +@dataclass +class ExampleRecord: + """A flattened example with field-value pairs in documentation order.""" + + rows: list[tuple[str, Any]] + + +def _inject_literal_fields( + model_fields_dict: dict[str, FieldInfo], data: dict[str, Any] +) -> dict[str, Any]: + """Inject single-value Literal field defaults missing from *data*. + + Inspects *model_fields_dict* for fields with single-value `Literal` + annotations. For each field missing from *data*, injects the literal + value using the field's `validation_alias` (if set), falling back + to `alias`, then to the field name. + + Returns a new dict; the original is not mutated. + """ + result = data.copy() + + for field_name, field_info in model_fields_dict.items(): + key = resolve_field_alias(field_name, field_info) + if key in result: + continue + + literal_value = single_literal_value(field_info.annotation) + if literal_value is not None: + result[key] = literal_value + + return result + + +def _known_field_keys(model_fields_dict: dict[str, FieldInfo]) -> frozenset[str]: + """Alias-resolved field keys from a model_fields dict.""" + return frozenset( + resolve_field_alias(name, info) for name, info in model_fields_dict.items() + ) + + +def _strip_null_unknown_fields( + data: dict[str, Any], known_keys: frozenset[str] +) -> dict[str, Any]: + """Drop null-valued fields not in *known_keys*. + + For discriminated unions, *known_keys* contains only common base + fields. Variant-specific null fields from other arms (present in + flat parquet schemas) are stripped so the selected arm's validator + doesn't reject them as unknown extras. + + Non-null fields are always kept so the arm's own validator can + accept or reject them normally. 
+    """
+    return {k: v for k, v in data.items() if v is not None or k in known_keys}
+
+
+def validate_example(
+    validation_type: object,
+    raw: dict[str, Any],
+    *,
+    model_fields: dict[str, FieldInfo] | None = None,
+) -> BaseModel:
+    """Validate example data against a model or union type.
+
+    Returns the validated model instance. Preprocesses *raw* data by:
+    1. Injecting missing Literal fields for validation (if model_fields provided)
+    2. Stripping null-valued fields not in *model_fields* (handles
+       flat-schema examples from discriminated unions)
+    """
+    if model_fields is None:
+        if isinstance(validation_type, type) and issubclass(validation_type, BaseModel):
+            model_fields = validation_type.model_fields
+        else:
+            model_fields = {}
+
+    known_keys = _known_field_keys(model_fields)
+    preprocessed = _inject_literal_fields(model_fields, raw)
+    preprocessed = _strip_null_unknown_fields(preprocessed, known_keys)
+    result: object = TypeAdapter(validation_type).validate_python(preprocessed)
+    if not isinstance(result, BaseModel):
+        raise TypeError(f"Expected BaseModel instance, got {type(result).__name__}")
+    return result
+
+
+def extract_base_field(key: str) -> str:
+    """Extract the top-level field name from a flattened key.
+
+    >>> extract_base_field("sources[0].dataset")
+    'sources'
+    >>> extract_base_field("names.primary")
+    'names'
+    >>> extract_base_field("id")
+    'id'
+    """
+    # Stop at the first separator -- "." or "[" -- whichever comes first,
+    # so dotted keys with a later bracket (e.g. "names.rules[0].value")
+    # still resolve to the top-level field ("names"), not "names.rules".
+    for i, ch in enumerate(key):
+        if ch in ".[":
+            return key[:i]
+    return key
+
+
+def order_example_rows(
+    flat_rows: list[tuple[str, Any]],
+    field_names: list[str],
+) -> list[tuple[str, Any]]:
+    """Order flattened rows by field position in documentation.
+
+    Sorts by position of base field name in *field_names*.
+    Fields with the same base maintain their original order (stable sort).
+    Unknown fields sort to end.
+ """ + position = {name: i for i, name in enumerate(field_names)} + sentinel = len(field_names) + + def sort_key(row: tuple[str, Any]) -> int: + return position.get(extract_base_field(row[0]), sentinel) + + return sorted(flat_rows, key=sort_key) + + +def _structured_fields(value: object) -> list[tuple[str, Any]] | None: + """Extract named fields from `__slots__`-based types like BBox. + + Returns a list of `(name, value)` pairs for types that expose + public properties backed by private slots (`_name` -> `name`). + Returns `None` for types without this pattern. + """ + cls = type(value) + slots = getattr(cls, "__slots__", ()) + if not slots: + return None + fields: list[tuple[str, Any]] = [] + for slot in slots: + attr = slot.lstrip("_") + if attr != slot and isinstance(getattr(cls, attr, None), property): + fields.append((attr, getattr(value, attr))) + return fields if len(fields) >= 2 else None + + +def _needs_recursion(items: list[Any]) -> bool: + """Check whether list items contain models or nested lists.""" + return bool(items) and isinstance(items[0], (BaseModel, list)) + + +def _flatten_list_items(key: str, items: list[Any]) -> list[tuple[str, Any]]: + """Flatten list items, recursing into BaseModel and nested list items. + + Returns the list as a single leaf value when no items need recursion. + Pydantic model fields produce homogeneous lists, so the first item's + type determines the flattening strategy. + """ + if not _needs_recursion(items): + return [(key, items)] + rows: list[tuple[str, Any]] = [] + for i, item in enumerate(items): + if isinstance(item, BaseModel): + rows.extend(flatten_model_instance(item, f"{key}[{i}].")) + elif isinstance(item, list): + rows.extend(_flatten_list_items(f"{key}[{i}]", item)) + else: + rows.append((f"{key}[{i}]", item)) + return rows + + +def flatten_model_instance( + instance: BaseModel, + prefix: str = "", +) -> list[tuple[str, Any]]: + """Flatten a Pydantic model instance to dot-notation key-value pairs. 
+ + Walks model fields recursively. BaseModel values recurse with dot + notation, lists of BaseModel recurse with bracket notation, and + everything else (dicts, primitives, None) is a leaf value. + + Parameters + ---------- + instance + The Pydantic model instance to flatten. + prefix + Dot-notation prefix accumulated from parent fields. + + Returns + ------- + list[tuple[str, Any]] + Flattened key-value pairs in field declaration order. + """ + rows: list[tuple[str, Any]] = [] + for field_name, field_info in type(instance).model_fields.items(): + key = resolve_field_alias(field_name, field_info) + value = getattr(instance, field_name) + full_key = f"{prefix}{key}" if prefix else key + + if isinstance(value, BaseModel): + rows.extend(flatten_model_instance(value, f"{full_key}.")) + elif isinstance(value, list): + rows.extend(_flatten_list_items(full_key, value)) + elif (sub_fields := _structured_fields(value)) is not None: + for name, v in sub_fields: + rows.append((f"{full_key}.{name}", v)) + else: + rows.append((full_key, value)) + return rows + + +def augment_missing_fields( + rows: list[tuple[str, Any]], + field_names: list[str], +) -> list[tuple[str, Any]]: + """Add (name, None) entries for fields absent from *rows*. + + Compares base field names (via `extract_base_field`) against + *field_names*. Fields in *field_names* not represented in *rows* + are appended as `(name, None)`. Handles dot-notation and bracket- + notation keys correctly. + + Parameters + ---------- + rows + Flattened key-value pairs from a concrete model instance. + field_names + Merged field name list from the union spec. + + Returns + ------- + list[tuple[str, Any]] + Original rows with (name, None) entries appended for absent fields. 
+    """
+    present = {extract_base_field(key) for key, _ in rows}
+    augmented = list(rows)
+    for name in field_names:
+        if name not in present:
+            augmented.append((name, None))
+    return augmented
+
+
+def load_examples_from_toml(
+    pyproject_path: Path,
+    model_name: str,
+) -> list[dict[str, Any]]:
+    """Load the `[examples.<model_name>]` table from a pyproject.toml file."""
+    with pyproject_path.open("rb") as f:
+        data = tomllib.load(f)
+
+    examples: dict[str, list[dict[str, Any]]] = data.get("examples", {})
+    return examples.get(model_name, [])
+
+
+def resolve_pyproject_path(model_class: type) -> Path | None:
+    """Find pyproject.toml by walking up from the model's module location."""
+    module_name = getattr(model_class, "__module__", None)
+    if not module_name:
+        return None
+
+    module = sys.modules.get(module_name)
+    if not module:
+        return None
+
+    module_file = getattr(module, "__file__", None)
+    if not module_file:
+        return None
+
+    # Walk up from module directory
+    current = Path(module_file).parent
+    while current != current.parent:  # Stop at filesystem root
+        pyproject = current / "pyproject.toml"
+        if pyproject.exists():
+            return pyproject
+        current = current.parent
+
+    return None
+
+
+def load_examples(
+    validation_type: object,
+    model_name: str,
+    field_names: list[str],
+    *,
+    pyproject_source: type | None = None,
+    model_fields: dict[str, FieldInfo] | None = None,
+) -> list[ExampleRecord]:
+    """Load examples for a model, flattened and ordered by *field_names*.
+
+    Validates each example against the validation type. Invalid examples
+    are skipped with a warning logged. Returns an empty list on any failure
+    (missing file, missing section, parse error).
+
+    Parameters
+    ----------
+    validation_type : type[BaseModel] | object
+        Model class or union alias to validate against.
+    model_name : str
+        Name of the model to load examples for.
+    field_names : list[str]
+        List of field names for ordering output.
+ pyproject_source : type or None + Type to use for finding pyproject.toml. If None, + uses validation_type if it's a class. + model_fields : dict[str, FieldInfo] or None + Field info dict for Literal injection. If None, infers + from validation_type if it's a BaseModel class. + """ + source_type = pyproject_source if pyproject_source is not None else validation_type + if not isinstance(source_type, type): + return [] + + pyproject_path = resolve_pyproject_path(source_type) + if not pyproject_path: + return [] + + try: + raw_examples = load_examples_from_toml(pyproject_path, model_name) + except (OSError, tomllib.TOMLDecodeError): + log.debug("Failed to load examples for %s", model_name, exc_info=True) + return [] + + if not raw_examples: + return [] + + records = [] + for raw in raw_examples: + try: + instance = validate_example(validation_type, raw, model_fields=model_fields) + except ValidationError as e: + log.warning( + "Skipping invalid example for %s in %s: %s", + model_name, + pyproject_path, + e, + ) + continue + flat_rows = flatten_model_instance(instance) + flat_rows = augment_missing_fields(flat_rows, field_names) + ordered_rows = order_example_rows(flat_rows, field_names) + records.append(ExampleRecord(rows=ordered_rows)) + + return records diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/field_constraints.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/field_constraints.py new file mode 100644 index 000000000..5cdc3dcd2 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/field_constraints.py @@ -0,0 +1,153 @@ +"""Convert field-level constraints to display text. + +Handles constraints from Annotated metadata and NewType wrappers: +Ge, Gt, Interval, Le, Lt, MaxLen, MinLen, GeometryTypeConstraint, +Reference, and custom constraint classes. 
+""" + +from __future__ import annotations + +from collections.abc import Callable + +from annotated_types import Ge, Gt, Interval, Le, Lt, MaxLen, MinLen + +from overture.schema.system.primitive import GeometryTypeConstraint +from overture.schema.system.ref import Reference + +from .docstring import first_docstring_line +from .specs import TypeIdentity +from .type_analyzer import ConstraintSource + +__all__ = [ + "constraint_display_text", + "constraint_pattern", + "describe_field_constraint", +] + +# Bound attribute names paired with display operators. Each entry maps an +# annotated_types constraint attribute (Ge, Gt, Le, Lt, Interval) to its +# mathematical symbol for prose rendering. +# +# numeric_extraction.py has its own _BOUND_ATTRS for numeric extraction. The +# duplication is deliberate: these modules use the same attribute names for +# unrelated purposes (display formatting vs. numeric bound extraction), and +# coupling them for four string literals adds a dependency without value. +_BOUND_OPS: tuple[tuple[str, str], ...] 
= ( + ("ge", "≥"), + ("gt", ">"), + ("le", "≤"), + ("lt", "<"), +) + + +def _first_bound(obj: object) -> str | None: + """Return backticked notation for the first set bound, or None.""" + for attr, op in _BOUND_OPS: + val = getattr(obj, attr, None) + if val is not None: + return f"`{op} {val}`" + return None + + +def _describe_interval(iv: Interval) -> str: + """Format an Interval as readable bound notation.""" + lower_val = iv.ge if iv.ge is not None else iv.gt + lower_op = "≤" if iv.ge is not None else "<" + upper_val = iv.le if iv.le is not None else iv.lt + upper_op = "≤" if iv.le is not None else "<" + + if lower_val is not None and upper_val is not None: + return f"`{lower_val} {lower_op} x {upper_op} {upper_val}`" + + return _first_bound(iv) or "" + + +def _is_opaque_constraint(constraint: object) -> bool: + """Check whether the constraint has no custom __repr__ (renders as just its class name).""" + return type(constraint).__repr__ is object.__repr__ + + +def _geometry_type_label(value: str) -> str: + """Convert a GeometryType value to PascalCase display name. + + >>> _geometry_type_label("line_string") + 'LineString' + """ + return "".join(part.title() for part in value.split("_")) + + +def describe_field_constraint( + constraint: object, + link_fn: Callable[[TypeIdentity], str] | None = None, +) -> str: + """Return a display string for a field-level constraint object. + + *link_fn* resolves a TypeIdentity to a markdown link string (e.g. + `` [`Name`](path) ``). When None, names render as inline code. 
+ """ + if isinstance(constraint, GeometryTypeConstraint): + labels = ", ".join( + _geometry_type_label(gt.value) for gt in constraint.allowed_types + ) + return f"Allowed geometry types: {labels}" + if isinstance(constraint, Reference): + rel_value: str = constraint.relationship.value # type: ignore[assignment] + rel_label = rel_value.replace("_", " ") + target = constraint.relatee + target_id = TypeIdentity.of(target) + target_str = link_fn(target_id) if link_fn else f"`{target.__name__}`" + return f"References {target_str} ({rel_label})" + if isinstance(constraint, Interval): + desc = _describe_interval(constraint) + if desc: + return desc + elif isinstance(constraint, (Ge, Gt, Le, Lt)): + result = _first_bound(constraint) + if result is not None: + return result + if isinstance(constraint, MinLen): + return f"Minimum length: {constraint.min_length}" + if isinstance(constraint, MaxLen): + return f"Maximum length: {constraint.max_length}" + + if _is_opaque_constraint(constraint): + return f"`{type(constraint).__name__}`" + return f"`{constraint}`" + + +def _constraint_class_description(constraint: object) -> str | None: + """Extract the first docstring line from a custom constraint class. + + Returns None for builtins and classes without docstrings. + """ + constraint_type = type(constraint) + if constraint_type.__module__ == "builtins": + return None + line = first_docstring_line(constraint_type.__doc__) + return line or None + + +def constraint_pattern(constraint: object) -> str | None: + """Extract the regex pattern string from a constraint, if present. + + Traverses two levels: constraint.pattern is a compiled re.Pattern + object, and re.Pattern.pattern is the raw string. 
+ """ + compiled = getattr(constraint, "pattern", None) + return getattr(compiled, "pattern", None) + + +def constraint_display_text( + cs: ConstraintSource, + link_fn: Callable[[TypeIdentity], str] | None = None, +) -> str: + """Build display text for a constraint, combining description/pattern when available.""" + description = _constraint_class_description(cs.constraint) + if _is_opaque_constraint(cs.constraint) and description: + cls_name = type(cs.constraint).__name__ + pattern = constraint_pattern(cs.constraint) + if pattern: + return f"{description} (`{cls_name}`, pattern: `{pattern}`)" + return f"{description} (`{cls_name}`)" + + return describe_field_constraint(cs.constraint, link_fn=link_fn) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/model_constraints.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/model_constraints.py new file mode 100644 index 000000000..8290ee3ea --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/model_constraints.py @@ -0,0 +1,226 @@ +"""Convert model-level constraints to human-readable prose. + +Handles RequireAnyOf, RadioGroup, ForbidIf, RequireIf, and other +ModelConstraint types. Produces descriptions and per-field notes for +documentation rendering. 
+""" + +from __future__ import annotations + +from dataclasses import dataclass + +from overture.schema.system.model_constraint import ( + FieldEqCondition, + ForbidIfConstraint, + MinFieldsSetConstraint, + ModelConstraint, + NoExtraFieldsConstraint, + Not, + RadioGroupConstraint, + RequireAnyOfConstraint, + RequireIfConstraint, +) + +__all__ = ["analyze_model_constraints"] + +_ConditionalConstraint = RequireIfConstraint | ForbidIfConstraint + + +@dataclass(frozen=True) +class _ConstraintEntry: + """A constraint description paired with the field names it affects.""" + + description: str + field_names: frozenset[str] + + +def _backtick_join(names: tuple[str, ...]) -> str: + """Format field names as backtick-quoted, comma-separated list.""" + return ", ".join(f"`{n}`" for n in names) + + +def _conditional_verb(constraint: _ConditionalConstraint) -> str: + """Return 'required' or 'forbidden' based on constraint type.""" + return "required" if isinstance(constraint, RequireIfConstraint) else "forbidden" + + +def _plural_verb(names: tuple[str, ...]) -> str: + """Return 'is' or 'are' based on field count.""" + return "are" if len(names) > 1 else "is" + + +def _unwrap_field_eq(condition: object) -> tuple[FieldEqCondition, bool] | None: + """Extract the FieldEqCondition from a condition, with negation flag. + + Returns (field_eq, is_negated) or None for unrecognized conditions. 
+ """ + if isinstance(condition, Not) and isinstance(condition.inner, FieldEqCondition): + return condition.inner, True + if isinstance(condition, FieldEqCondition): + return condition, False + return None + + +def _describe_condition(condition: object) -> str: + """Render a Condition as human-readable text.""" + unwrapped = _unwrap_field_eq(condition) + if unwrapped is not None: + field_eq, negated = unwrapped + op = "≠" if negated else "=" + return f"`{field_eq.field_name}` {op} `{field_eq.value}`" + return str(condition) + + +def _describe_conditional(constraint: _ConditionalConstraint) -> str: + """Describe a require_if or forbid_if constraint.""" + fields = _backtick_join(constraint.field_names) + verb = _conditional_verb(constraint) + cond = _describe_condition(constraint.condition) + return f"{fields} {_plural_verb(constraint.field_names)} {verb} when {cond}" + + +def _consolidation_key( + constraint: _ConditionalConstraint, +) -> tuple[type, tuple[str, ...], str] | None: + """Return a grouping key if the constraint is consolidatable, else None. + + Consolidatable: same type, same field_names, plain FieldEqCondition + (not negated) on the same condition field. + """ + cond = constraint.condition + if not isinstance(cond, FieldEqCondition): + return None + return (type(constraint), constraint.field_names, cond.field_name) + + +def _as_field_eq(constraint: _ConditionalConstraint) -> FieldEqCondition: + """Narrow a conditional constraint's condition to FieldEqCondition. + + Only called on constraints that passed _consolidation_key, which + rejects non-FieldEqCondition conditions. 
+ """ + cond = constraint.condition + if not isinstance(cond, FieldEqCondition): + raise TypeError(f"Expected FieldEqCondition, got {type(cond).__name__}") + return cond + + +def _describe_consolidated( + constraints: list[_ConditionalConstraint], +) -> str: + """Describe a group of consolidated conditional constraints.""" + first = constraints[0] + fields = _backtick_join(first.field_names) + verb = _conditional_verb(first) + cond_field = _as_field_eq(first).field_name + values = ", ".join(f"`{_as_field_eq(c).value}`" for c in constraints) + return ( + f"{fields} {_plural_verb(first.field_names)} {verb} " + f"when `{cond_field}` is one of: {values}" + ) + + +def _condition_field_names(condition: object) -> frozenset[str]: + """Extract field names referenced by a condition.""" + unwrapped = _unwrap_field_eq(condition) + if unwrapped is not None: + return frozenset({unwrapped[0].field_name}) + return frozenset() + + +def _affected_field_names(constraint: ModelConstraint) -> frozenset[str]: + """Return all field names referenced by a constraint. + + Includes both constrained field_names and condition trigger fields. + Returns empty set for constraints that don't reference specific fields + (NoExtraFieldsConstraint, MinFieldsSetConstraint). 
+ """ + if isinstance(constraint, (NoExtraFieldsConstraint, MinFieldsSetConstraint)): + return frozenset() + if isinstance(constraint, (RequireIfConstraint, ForbidIfConstraint)): + return frozenset(constraint.field_names) | _condition_field_names( + constraint.condition + ) + if isinstance(constraint, (RequireAnyOfConstraint, RadioGroupConstraint)): + return frozenset(constraint.field_names) + return frozenset() + + +def _describe_one(constraint: ModelConstraint) -> str | None: + """Describe a single constraint, or None to skip it.""" + if isinstance(constraint, NoExtraFieldsConstraint): + return None + if isinstance(constraint, RequireAnyOfConstraint): + return f"At least one of {_backtick_join(constraint.field_names)} must be set" + if isinstance(constraint, RadioGroupConstraint): + return f"Exactly one of {_backtick_join(constraint.field_names)} must be `true`" + if isinstance(constraint, MinFieldsSetConstraint): + return f"At least {constraint.count} fields must be set" + if isinstance(constraint, (RequireIfConstraint, ForbidIfConstraint)): + return _describe_conditional(constraint) + return f"`{constraint.name}`" + + +def _analyze_constraints( + constraints: tuple[ModelConstraint, ...], +) -> list[_ConstraintEntry]: + """Analyze constraints into descriptions paired with affected fields. + + Handles consolidation and filtering, preserving original declaration order. 
+ """ + groups: dict[ + tuple[type, tuple[str, ...], str], list[tuple[int, _ConditionalConstraint]] + ] = {} + standalone: list[tuple[int, ModelConstraint]] = [] + + for i, c in enumerate(constraints): + if isinstance(c, (RequireIfConstraint, ForbidIfConstraint)): + key = _consolidation_key(c) + if key is not None: + groups.setdefault(key, []).append((i, c)) + continue + standalone.append((i, c)) + + entries: list[tuple[int, _ConstraintEntry]] = [] + + for group_items in groups.values(): + first_idx = group_items[0][0] + group_constraints = [c for _, c in group_items] + all_fields: frozenset[str] = frozenset().union( + *(_affected_field_names(c) for c in group_constraints) + ) + if len(group_constraints) == 1: + desc = _describe_one(group_constraints[0]) + else: + desc = _describe_consolidated(group_constraints) + if desc is not None: + entries.append((first_idx, _ConstraintEntry(desc, all_fields))) + + for idx, c in standalone: + desc = _describe_one(c) + if desc is not None: + entries.append((idx, _ConstraintEntry(desc, _affected_field_names(c)))) + + entries.sort(key=lambda e: e[0]) + return [entry for _, entry in entries] + + +def analyze_model_constraints( + constraints: tuple[ModelConstraint, ...], +) -> tuple[list[str], dict[str, list[str]]]: + """Analyze constraints into descriptions and per-field notes in one pass. + + Returns (descriptions, field_notes) where descriptions is the list of + human-readable constraint strings and field_notes maps field names to + constraint descriptions that reference them. 
+ """ + entries = _analyze_constraints(constraints) + + descriptions = [entry.description for entry in entries] + + field_notes: dict[str, list[str]] = {} + for entry in entries: + for name in entry.field_names: + field_notes.setdefault(name, []).append(entry.description) + + return descriptions, field_notes diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/model_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/model_extraction.py new file mode 100644 index 000000000..76807e123 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/model_extraction.py @@ -0,0 +1,204 @@ +"""Model extraction and tree expansion.""" + +from __future__ import annotations + +import dataclasses + +from pydantic import BaseModel +from pydantic.fields import FieldInfo +from pydantic_core import PydanticUndefined + +from overture.schema.system.model_constraint import ModelConstraint + +from .docstring import clean_docstring +from .specs import FeatureSpec, FieldSpec, ModelSpec, is_model_class +from .type_analyzer import ConstraintSource, TypeInfo, TypeKind, analyze_type + +__all__ = [ + "expand_model_tree", + "extract_model", + "resolve_field_alias", +] + + +def resolve_field_alias(field_name: str, field_info: FieldInfo) -> str: + """Return the data-dict key for a Pydantic field. + + Prefers `validation_alias`, falls back to `alias`, then the + Python field name. Only string aliases are supported; AliasPath + and AliasChoices are ignored. + """ + validation_alias = field_info.validation_alias + if isinstance(validation_alias, str): + return validation_alias + alias = field_info.alias + if isinstance(alias, str): + return alias + return field_name + + +def _merge_field_metadata(type_info: TypeInfo, field_info: FieldInfo) -> TypeInfo: + """Merge constraints from field_info.metadata into TypeInfo. 
+ + Pydantic strips the Annotated wrapper from some fields (non-optional, + non-union) and moves the metadata to field_info.metadata. When this + happens, analyze_type sees a bare type and misses the constraints. + The two sets never overlap: field_info.metadata is empty when the + Annotated wrapper survives in the annotation. + """ + if not field_info.metadata: + return type_info + extra = tuple(ConstraintSource(None, None, m) for m in field_info.metadata) + return dataclasses.replace(type_info, constraints=type_info.constraints + extra) + + +def _is_field_required(field_info: FieldInfo, type_info: TypeInfo) -> bool: + """Determine whether a field is required (no default and not Optional).""" + has_default = ( + field_info.default is not PydanticUndefined + or field_info.default_factory is not None + ) + return not has_default and not type_info.is_optional + + +def _basemodel_bases(cls: type) -> list[type[BaseModel]]: + """Return direct BaseModel bases, excluding BaseModel itself.""" + return [b for b in cls.__bases__ if is_model_class(b) and b is not BaseModel] + + +def _class_order(model_class: type[BaseModel]) -> list[type]: + """Return MRO classes in documentation order, recursively. + + For single-inheritance: reversed MRO (base first, derived last). + For multiple-inheritance: primary chain → self → mixins, where + primary chain and each mixin are themselves recursively ordered. + """ + bases = _basemodel_bases(model_class) + + if len(bases) <= 1: + return [ + cls + for cls in reversed(model_class.__mro__) + if issubclass(cls, BaseModel) and cls is not BaseModel + ] + + primary = _class_order(bases[0]) + mixins = [cls for base in bases[1:] for cls in _class_order(base)] + return primary + [model_class] + mixins + + +def _field_order(model_class: type[BaseModel]) -> list[str]: + """Return model_fields keys in documentation order. + + Walks the class hierarchy recursively. 
At each level of multiple + inheritance, the first base is the "primary chain" and the rest + are "mixins." Primary chain and own fields come first, then mixin + fields in declaration order. Single-inheritance levels use + Pydantic's default reversed-MRO order. + """ + valid_names = set(model_class.model_fields.keys()) + result: list[str] = [] + seen: set[str] = set() + for cls in _class_order(model_class): + for name in getattr(cls, "__annotations__", {}): + if name not in seen and name in valid_names: + result.append(name) + seen.add(name) + return result + + +def extract_model( + model_class: type[BaseModel], + *, + entry_point: str | None = None, +) -> ModelSpec: + """Extract model specification from a Pydantic model class.""" + field_info_map = model_class.model_fields + ordered_keys = _field_order(model_class) + + fields: list[FieldSpec] = [] + for field_name in ordered_keys: + field_info = field_info_map[field_name] + output_name = resolve_field_alias(field_name, field_info) + + # Use field_info.annotation (resolved TypeVars) not get_type_hints + annotation = field_info.annotation + if annotation is None: + continue + + type_info = _merge_field_metadata(analyze_type(annotation), field_info) + + fields.append( + FieldSpec( + name=output_name, + type_info=type_info, + description=field_info.description or type_info.description, + is_required=_is_field_required(field_info, type_info), + ) + ) + + return ModelSpec( + name=model_class.__name__, + description=clean_docstring(model_class.__doc__), + fields=fields, + source_type=model_class, + entry_point=entry_point, + constraints=ModelConstraint.get_model_constraints(model_class), + ) + + +def expand_model_tree( + spec: FeatureSpec, + cache: dict[type, ModelSpec] | None = None, +) -> FeatureSpec: + """Populate model references on MODEL-kind fields, recursively. + + Walks *spec*'s fields and sets `field.model` for fields whose type + is a Pydantic model. 
Uses *cache* to reuse already-extracted ModelSpecs + and detect shared references. Marks fields whose model creates a cycle + in the ancestor chain with `starts_cycle=True`. + + Mutates *spec* in place and returns it. + """ + if cache is None: + cache = {} + if isinstance(spec, ModelSpec) and spec.source_type is not None: + cache[spec.source_type] = spec + ancestors = frozenset({spec.source_type}) if spec.source_type else frozenset() + _expand_fields(spec.fields, cache, ancestors) + return spec + + +def _expand_fields( + fields: list[FieldSpec], + cache: dict[type, ModelSpec], + ancestors: frozenset[type], +) -> None: + """Recursive helper for expand_model_tree. + + Cache insertion happens before recursion — cycle detection depends + on the ancestor's ModelSpec being in the cache when the back-edge + is encountered. + """ + for field_spec in fields: + ti = field_spec.type_info + source = ti.source_type + if ti.kind == TypeKind.UNION: + # Union fields have no single model to recurse into. + # The field row appears in the output; skip inline expansion. 
+ continue + if ti.kind != TypeKind.MODEL or source is None: + continue + + if source in ancestors: + # Cycle: reuse existing spec, mark the edge + field_spec.model = cache.get(source) + field_spec.starts_cycle = True + elif source in cache: + # Shared reference: reuse, not a cycle + field_spec.model = cache[source] + else: + sub_spec = extract_model(source) + cache[source] = sub_spec # insert BEFORE recursing + field_spec.model = sub_spec + _expand_fields(sub_spec.fields, cache, ancestors | {source}) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/newtype_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/newtype_extraction.py new file mode 100644 index 000000000..ff11c770a --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/newtype_extraction.py @@ -0,0 +1,26 @@ +"""NewType extraction.""" + +from .docstring import clean_docstring, is_custom_docstring +from .specs import NewTypeSpec +from .type_analyzer import analyze_type + +__all__ = ["extract_newtype"] + + +def extract_newtype(newtype_callable: object) -> NewTypeSpec: + """Extract NewType specification from a NewType callable.""" + type_info = analyze_type(newtype_callable) + doc = getattr(newtype_callable, "__doc__", None) + name = type_info.newtype_name or getattr(newtype_callable, "__name__", None) + if name is None: + msg = f"Cannot determine name for NewType: {newtype_callable!r}" + raise ValueError(msg) + description = ( + clean_docstring(doc) if is_custom_docstring(doc) else type_info.description + ) + return NewTypeSpec( + name=name, + description=description, + type_info=type_info, + source_type=newtype_callable, + ) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/numeric_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/numeric_extraction.py new file mode 100644 index 000000000..ae899a4e6 --- /dev/null +++ 
b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/numeric_extraction.py @@ -0,0 +1,72 @@ +"""Numeric type extraction.""" + +from annotated_types import Interval + +from .docstring import first_docstring_line +from .newtype_extraction import extract_newtype +from .specs import NumericSpec, TypeIdentity +from .type_analyzer import TypeInfo + +__all__ = [ + "extract_numeric_bounds", + "extract_numerics", +] + + +# Bound attribute names on annotated_types constraint objects (Ge, Gt, Le, +# Lt, Interval) used for numeric bound extraction. +# +# field_constraints.py has its own _BOUND_OPS for display formatting. +# The duplication is deliberate: these modules use the same attribute names +# for unrelated purposes (numeric extraction vs. prose rendering), and +# coupling them for four string literals adds a dependency without value. +_BOUND_ATTRS = ("ge", "gt", "le", "lt") + + +def extract_numeric_bounds(type_info: TypeInfo) -> Interval: + """Extract numeric bounds from a TypeInfo's constraints. + + Checks for ge, gt, le, and lt attributes on constraint objects. + Stops at the first constraint defining each bound. 
+ """ + found: dict[str, int | float] = {} + for cs in type_info.constraints: + c = cs.constraint + for attr in _BOUND_ATTRS: + if attr not in found: + val = getattr(c, attr, None) + if val is not None: + found[attr] = val + return Interval(**found) + + +def extract_numerics( + numeric_ids: list[TypeIdentity], +) -> list[NumericSpec]: + """Extract specifications for numeric types.""" + specs: list[NumericSpec] = [] + for tid in numeric_ids: + newtype_spec = extract_newtype(tid.obj) + bounds = extract_numeric_bounds(newtype_spec.type_info) + description = first_docstring_line(getattr(tid.obj, "__doc__", None)) + float_bits = _extract_float_bits(tid.name) + specs.append( + NumericSpec( + name=tid.name, + description=description, + bounds=bounds, + float_bits=float_bits, + ) + ) + return specs + + +_FLOAT_BITS: dict[str, int] = { + "float32": 32, + "float64": 64, +} + + +def _extract_float_bits(name: str) -> int | None: + """Extract bit width from a float type name like 'float32'.""" + return _FLOAT_BITS.get(name) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/pydantic_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/pydantic_extraction.py new file mode 100644 index 000000000..120f4760d --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/pydantic_extraction.py @@ -0,0 +1,33 @@ +"""Pydantic built-in type extraction.""" + +import re + +from .docstring import first_docstring_line +from .specs import PydanticTypeSpec + +__all__ = ["extract_pydantic_type"] + +# Matches bare admonition labels like "Info:" or "Note:" with no following text. 
+_ADMONITION_LABEL = re.compile(r"^\w+:\s*$") + + +def _usable_description(doc: str | None) -> str | None: + """Return the first docstring line, or None if it's an admonition label.""" + line = first_docstring_line(doc) + if line is None or _ADMONITION_LABEL.match(line): + return None + return line + + +def extract_pydantic_type(cls: type) -> PydanticTypeSpec: + """Extract a PydanticTypeSpec from a Pydantic built-in type class.""" + module = getattr(cls, "__module__", "") + if not module.startswith("pydantic"): + msg = f"Expected a pydantic type, got {cls!r} from {module!r}" + raise ValueError(msg) + return PydanticTypeSpec( + name=cls.__name__, + description=_usable_description(cls.__doc__), + source_type=cls, + source_module=cls.__module__.removeprefix("pydantic."), + ) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/specs.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/specs.py new file mode 100644 index 000000000..acba1577d --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/specs.py @@ -0,0 +1,263 @@ +"""Data types for extracted specifications.""" + +from __future__ import annotations + +import functools +from dataclasses import dataclass, field +from typing import Any, Protocol, TypeGuard, runtime_checkable + +from annotated_types import Interval +from pydantic import BaseModel + +from overture.schema.system.model_constraint import ModelConstraint + +from .type_analyzer import TypeInfo, TypeKind, UnsupportedUnionError, analyze_type + +__all__ = [ + "AnnotatedField", + "EnumMemberSpec", + "EnumSpec", + "FeatureSpec", + "FieldSpec", + "ModelSpec", + "NewTypeSpec", + "NumericSpec", + "PydanticTypeSpec", + "SupplementarySpec", + "TypeIdentity", + "filter_model_classes", + "is_model_class", + "is_pydantic_sourced", + "is_pydantic_type", + "is_union_alias", +] + + +@dataclass(frozen=True, eq=False) +class TypeIdentity: + """Unique identity for a type in the 
codegen system. + + Pairs a unique Python object (class, NewType callable, or union + annotation) with its display name. Equality and hashing delegate + to `obj` identity so registry lookups work regardless of how + the display name was derived. + """ + + obj: object + name: str + + @classmethod + def of(cls, obj: object) -> TypeIdentity: + """Derive a TypeIdentity from a named object (class, NewType, etc.).""" + name = getattr(obj, "__name__", None) + if name is None: + raise TypeError(f"Cannot derive TypeIdentity from {obj!r}: no __name__") + return cls(obj, name) + + def __eq__(self, other: object) -> bool: + return isinstance(other, TypeIdentity) and self.obj is other.obj + + def __hash__(self) -> int: + return id(self.obj) + + @property + def module(self) -> str: + """Source module of the underlying object, or empty string.""" + return getattr(self.obj, "__module__", "") + + +class _SourceTypeIdentityMixin: + """Mixin providing `identity` from `source_type` and `name`. + + Shared by EnumSpec, ModelSpec, NewTypeSpec, and PydanticTypeSpec -- + each has a `source_type` (the Python class/callable) and a `name`. + UnionSpec uses `source_annotation` instead, so it defines its + own `identity`. 
+ """ + + source_type: object | None + name: str + + @property + def identity(self) -> TypeIdentity: + if self.source_type is None: + raise ValueError(f"Cannot derive identity for {self.name}: no source_type") + return TypeIdentity(self.source_type, self.name) + + +@dataclass +class EnumMemberSpec: + """Specification for an enum member.""" + + name: str + value: str + description: str | None + + +@dataclass +class EnumSpec(_SourceTypeIdentityMixin): + """Specification for an Enum class.""" + + name: str + description: str | None + members: list[EnumMemberSpec] = field(default_factory=list) + source_type: type | None = None + + +@dataclass +class FieldSpec: + """Specification for a model field.""" + + name: str + type_info: TypeInfo + description: str | None + is_required: bool + model: ModelSpec | None = None + starts_cycle: bool = False + + +@runtime_checkable +class FeatureSpec(Protocol): + """Shared interface for feature-level specs (ModelSpec, UnionSpec).""" + + name: str + description: str | None + source_type: type[BaseModel] | None + entry_point: str | None + constraints: tuple[ModelConstraint, ...] + + @property + def fields(self) -> list[FieldSpec]: ... + + @property + def identity(self) -> TypeIdentity: ... + + +@dataclass +class ModelSpec(_SourceTypeIdentityMixin): + """Specification for a Pydantic model.""" + + name: str + description: str | None + fields: list[FieldSpec] = field(default_factory=list) + source_type: type[BaseModel] | None = None + entry_point: str | None = None + constraints: tuple[ModelConstraint, ...] = () + + +@dataclass +class AnnotatedField: + """A FieldSpec paired with union variant provenance.""" + + field_spec: FieldSpec + variant_sources: tuple[str, ...] | None + + +# eq=False: contains mutable lists and a cached_property, so +# dataclass-generated __eq__ would be unreliable. 
+@dataclass(eq=False) +class UnionSpec: + """Specification for a discriminated union type alias.""" + + name: str + description: str | None + annotated_fields: list[AnnotatedField] + members: list[type[BaseModel]] + discriminator_field: str | None + discriminator_mapping: dict[str, type[BaseModel]] | None + source_annotation: object + common_base: type[BaseModel] + source_type: type[BaseModel] | None = field(default=None, init=False) + entry_point: str | None = None + constraints: tuple[ModelConstraint, ...] = () + + @functools.cached_property + def fields(self) -> list[FieldSpec]: + """Plain field list for tree expansion and supplementary collection.""" + return [af.field_spec for af in self.annotated_fields] + + @property + def identity(self) -> TypeIdentity: + return TypeIdentity(self.source_annotation, self.name) + + +@dataclass +class NewTypeSpec(_SourceTypeIdentityMixin): + """Specification for a NewType.""" + + name: str + description: str | None + type_info: TypeInfo + source_type: object | None = None + + +@dataclass +class NumericSpec: + """Extracted specification for a numeric type.""" + + name: str + description: str | None + bounds: Interval = field(default_factory=Interval) + float_bits: int | None = None + + +@dataclass +class PydanticTypeSpec(_SourceTypeIdentityMixin): + """Specification for a Pydantic built-in type (HttpUrl, EmailStr, etc.).""" + + name: str + description: str | None + source_type: type + source_module: str + + @property + def docs_url(self) -> str: + """Pydantic documentation URL for this type.""" + return ( + f"https://docs.pydantic.dev/latest/api/{self.source_module}" + f"/#pydantic.{self.source_module}.{self.name}" + ) + + +SupplementarySpec = EnumSpec | NewTypeSpec | ModelSpec | PydanticTypeSpec +"""Non-feature types referenced by feature models. + +Excludes NumericSpec and geometry types, which are extracted +separately via dedicated functions. 
+""" + + +def is_pydantic_sourced(source_type: type | None) -> bool: + """Check whether *source_type* originates from the `pydantic` package.""" + return getattr(source_type, "__module__", "").startswith("pydantic") + + +def is_pydantic_type(ti: TypeInfo) -> bool: + """Check whether a TypeInfo represents a Pydantic built-in type.""" + return ( + ti.kind == TypeKind.PRIMITIVE + and ti.source_type is not None + and is_pydantic_sourced(ti.source_type) + ) + + +def is_model_class(obj: object) -> TypeGuard[type[BaseModel]]: + """Check whether *obj* is a concrete BaseModel subclass (not a type alias).""" + return isinstance(obj, type) and issubclass(obj, BaseModel) + + +def is_union_alias(obj: object) -> bool: + """Check whether *obj* is a discriminated union type alias of BaseModel subclasses.""" + try: + ti = analyze_type(obj) + except (TypeError, UnsupportedUnionError): + return False + return ti.kind == TypeKind.UNION + + +def filter_model_classes(models: dict[Any, Any]) -> list[type[BaseModel]]: + """Filter discovered models to concrete BaseModel subclasses. + + Excludes type aliases (like discriminated unions) and non-class entries. 
+ """ + return [v for v in models.values() if is_model_class(v)] diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/type_analyzer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/type_analyzer.py new file mode 100644 index 000000000..a0cd5314f --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/type_analyzer.py @@ -0,0 +1,344 @@ +"""Iterative type unwrapping for Pydantic model annotations.""" + +from __future__ import annotations + +import types +from collections.abc import Callable +from dataclasses import dataclass, field +from enum import Enum, auto +from typing import Annotated, Any, Literal, Union, get_args, get_origin + +from pydantic import BaseModel +from pydantic.fields import FieldInfo +from typing_extensions import Sentinel + +from .docstring import clean_docstring + +__all__ = [ + "ConstraintSource", + "TypeKind", + "TypeInfo", + "UnsupportedUnionError", + "analyze_type", + "is_newtype", + "single_literal_value", + "walk_type_info", +] + + +class UnsupportedUnionError(TypeError): + """Raised when analyze_type encounters a multi-type union it cannot represent.""" + + +class TypeKind(Enum): + """Classification of type kinds.""" + + PRIMITIVE = auto() + LITERAL = auto() + ENUM = auto() + MODEL = auto() + UNION = auto() + + +@dataclass(slots=True) +class ConstraintSource: + """A constraint paired with the NewType that contributed it.""" + + source_ref: object | None + source_name: str | None + constraint: object + + +@dataclass(slots=True) +class TypeInfo: + """Information about a type annotation.""" + + base_type: str + kind: TypeKind + is_optional: bool = False + list_depth: int = 0 + newtype_outer_list_depth: int = 0 + is_dict: bool = False + dict_key_type: TypeInfo | None = None + dict_value_type: TypeInfo | None = None + constraints: tuple[ConstraintSource, ...] = () + literal_values: tuple[object, ...] 
| None = None + source_type: type | None = None + newtype_name: str | None = None + newtype_ref: object | None = None + union_members: tuple[type[BaseModel], ...] | None = None + description: str | None = None + + @property + def is_list(self) -> bool: + """Whether this type has any list wrapping.""" + return self.list_depth > 0 + + +def walk_type_info(ti: TypeInfo, visitor: Callable[[TypeInfo], None]) -> None: + """Call *visitor* on *ti*, then recurse into dict key/value types. + + Captures the shared recursive descent pattern used by type collection + and reverse reference computation. Union members are `type` objects + (not `TypeInfo`), so callers handle them directly. + """ + visitor(ti) + if ti.dict_key_type is not None: + walk_type_info(ti.dict_key_type, visitor) + if ti.dict_value_type is not None: + walk_type_info(ti.dict_value_type, visitor) + + +def is_newtype(annotation: object) -> bool: + """Check if annotation is a typing.NewType. + + NewType creates a callable with a __supertype__ attribute pointing + to the wrapped type. No public API exists for this check. + """ + return callable(annotation) and hasattr(annotation, "__supertype__") + + +def _is_union(origin: object) -> bool: + """Check if an origin represents a union type (X | Y or Union[X, Y]).""" + return origin in (types.UnionType, Union) + + +@dataclass(slots=True) +class _UnwrapState: + """Accumulated state from iterative type unwrapping. + + Tracks NewType names and refs during unwrapping: + - `outermost_newtype_name` / `outermost_newtype_ref`: the first + NewType encountered, exposed as `TypeInfo.newtype_name` / `newtype_ref`. + - `last_newtype_name`: the most recently entered NewType name, used + as the resolved `base_type` for the terminal type. + - `last_newtype_ref`: the most recently entered NewType callable, + used as constraint provenance (which NewType contributed each constraint). 
+ - `newtype_outer_list_depth`: list layers accumulated before entering + the outermost NewType boundary. + """ + + is_optional: bool = False + list_depth: int = 0 + newtype_outer_list_depth: int = 0 + is_dict: bool = False + dict_key_type: TypeInfo | None = None + dict_value_type: TypeInfo | None = None + constraints: list[ConstraintSource] = field(default_factory=list) + outermost_newtype_name: str | None = None + outermost_newtype_ref: object | None = None + last_newtype_name: str | None = None + last_newtype_ref: object | None = None + description: str | None = None + + def add_constraint(self, constraint: object) -> None: + self.constraints.append( + ConstraintSource(self.last_newtype_ref, self.last_newtype_name, constraint) + ) + + def build_type_info( + self, + *, + base_type: str, + kind: TypeKind, + literal_values: tuple[object, ...] | None = None, + source_type: type | None = None, + union_members: tuple[type[BaseModel], ...] | None = None, + ) -> TypeInfo: + return TypeInfo( + base_type=base_type, + kind=kind, + is_optional=self.is_optional, + list_depth=self.list_depth, + newtype_outer_list_depth=self.newtype_outer_list_depth, + is_dict=self.is_dict, + dict_key_type=self.dict_key_type, + dict_value_type=self.dict_value_type, + constraints=tuple(self.constraints), + literal_values=literal_values, + source_type=source_type, + newtype_name=self.outermost_newtype_name, + newtype_ref=self.outermost_newtype_ref, + union_members=union_members, + description=self.description, + ) + + +def analyze_type(annotation: object) -> TypeInfo: + """Analyze a type annotation and return TypeInfo. + + Iteratively unwraps type wrappers (Annotated, Optional, list, NewType) until + reaching a terminal type. 
+ """ + state = _UnwrapState() + + while True: + origin = get_origin(annotation) + + # Handle NewType (e.g., int32 = NewType("int32", Annotated[int, ...])) + if is_newtype(annotation): + name = annotation.__name__ # type: ignore[attr-defined] + state.last_newtype_name = name + state.last_newtype_ref = annotation + if state.outermost_newtype_name is None: + state.newtype_outer_list_depth = state.list_depth + state.outermost_newtype_name = name + state.outermost_newtype_ref = annotation + annotation = annotation.__supertype__ # type: ignore[attr-defined] + continue + + # Handle Annotated types (Annotated[X, metadata...]) + if origin is Annotated: + args = get_args(annotation) + annotation = args[0] + for c in args[1:]: + if isinstance(c, FieldInfo): + if c.description is not None and state.description is None: + state.description = clean_docstring(c.description) + for m in c.metadata: + state.add_constraint(m) + else: + state.add_constraint(c) + continue + + # Handle union types (X | None or Optional[X]) + if _is_union(origin): + args = get_args(annotation) + # Filter out None, Sentinel types (Pydantic's ), and + # Literal alternatives (e.g., HttpUrl | Literal[""] where the + # Literal is a special-value sentinel, not the primary type). + if any(a is types.NoneType for a in args): + state.is_optional = True + + non_none_args = [ + a + for a in args + if a is not types.NoneType and not isinstance(a, Sentinel) + ] + + # Only filter out Literal arms when a concrete (non-Literal) type + # exists. Without this guard, Optional[Literal["x"]] would lose + # all args because the Literal *is* the primary type. 
+ concrete_args = [a for a in non_none_args if get_origin(a) is not Literal] + real_args = concrete_args if concrete_args else non_none_args + + if len(real_args) > 1: + # Check if all real args are BaseModel subclasses + # (unwrap Annotated wrappers to get the actual class) + members: list[type[BaseModel]] = [] + for arg in real_args: + inner = arg + if get_origin(inner) is Annotated: + inner = get_args(inner)[0] + if isinstance(inner, type) and issubclass(inner, BaseModel): + members.append(inner) + else: + raise UnsupportedUnionError( + f"Multi-type unions not supported: {annotation}" + ) + return state.build_type_info( + base_type=members[0].__name__, + kind=TypeKind.UNION, + union_members=tuple(members), + ) + + if not real_args: + raise UnsupportedUnionError( + f"Union with no concrete types: {annotation}" + ) + + annotation = real_args[0] + continue + + # Handle list types (list[X]) + if origin is list: + args = get_args(annotation) + if not args: + raise TypeError("Bare list without type argument is not supported") + state.list_depth += 1 + annotation = args[0] + continue + + # Handle dict types (dict[K, V]) + if origin is dict: + args = get_args(annotation) + if not args: + raise TypeError("Bare dict without type arguments is not supported") + state.is_dict = True + state.dict_key_type = analyze_type(args[0]) + state.dict_value_type = analyze_type(args[1]) + base_type = state.last_newtype_name or "dict" + return state.build_type_info( + base_type=base_type, + kind=TypeKind.PRIMITIVE, + source_type=dict, + ) + + break + + return _classify_terminal(annotation, state) + + +def _classify_terminal(annotation: object, state: _UnwrapState) -> TypeInfo: + """Classify a fully-unwrapped terminal type into a TypeInfo.""" + # typing.Any -- treat as an opaque primitive + if annotation is Any: + return state.build_type_info( + base_type="Any", + kind=TypeKind.PRIMITIVE, + ) + + # Literal types (e.g., Literal["value"] or Literal["a", "b"]) + if get_origin(annotation) is 
Literal: + args = get_args(annotation) + return state.build_type_info( + base_type="Literal", + kind=TypeKind.LITERAL, + literal_values=tuple(args), + ) + + if not isinstance(annotation, type): + raise TypeError(f"Unsupported annotation type: {type(annotation)}") + + if issubclass(annotation, list): + raise TypeError("Bare list without type argument is not supported") + + if issubclass(annotation, dict): + raise TypeError("Bare dict without type arguments is not supported") + + # Determine kind from type hierarchy + if issubclass(annotation, Enum): + kind = TypeKind.ENUM + elif issubclass(annotation, BaseModel): + kind = TypeKind.MODEL + else: + kind = TypeKind.PRIMITIVE + + base_type = state.last_newtype_name or annotation.__name__ + + return state.build_type_info( + base_type=base_type, + kind=kind, + source_type=annotation, + ) + + +def single_literal_value(annotation: object) -> object | None: + """Extract a single literal value from a type annotation, or None. + + Delegates to analyze_type for all unwrapping, then checks + whether the result is a single-value Literal. Multi-value + Literals return None — callers needing all values should use + `analyze_type` and read `literal_values` directly. 
+ """ + try: + ti = analyze_type(annotation) + except (TypeError, UnsupportedUnionError): + return None + if ( + ti.kind == TypeKind.LITERAL + and ti.literal_values + and len(ti.literal_values) == 1 + ): + return ti.literal_values[0] + return None diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/type_registry.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/type_registry.py new file mode 100644 index 000000000..505657866 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/type_registry.py @@ -0,0 +1,113 @@ +"""Type registry mapping Python types to target representations.""" + +from dataclasses import dataclass + +from .type_analyzer import TypeInfo + +__all__ = [ + "TypeMapping", + "PRIMITIVE_TYPES", + "get_type_mapping", + "is_semantic_newtype", + "resolve_type_name", +] + + +@dataclass(frozen=True) +class TypeMapping: + """Maps a type to its representation in different targets.""" + + markdown: str + + def for_target(self, target: str) -> str: + """Get the type representation for a named target.""" + if target != "markdown": + raise ValueError(f"Unknown target {target!r}, expected 'markdown'") + return self.markdown + + +PRIMITIVE_TYPES: dict[str, TypeMapping] = { + # Signed integers + "int8": TypeMapping(markdown="int8"), + "int16": TypeMapping(markdown="int16"), + "int32": TypeMapping(markdown="int32"), + "int64": TypeMapping(markdown="int64"), + # Unsigned integers + "uint8": TypeMapping(markdown="uint8"), + "uint16": TypeMapping(markdown="uint16"), + "uint32": TypeMapping(markdown="uint32"), + # Floating point + "float32": TypeMapping(markdown="float32"), + "float64": TypeMapping(markdown="float64"), + # Basic types + "str": TypeMapping(markdown="string"), + "bool": TypeMapping(markdown="boolean"), + # Python builtins (aliases to their portable equivalents) + "int": TypeMapping(markdown="int64"), + "float": TypeMapping(markdown="float64"), + # Geometry 
types + "Geometry": TypeMapping(markdown="geometry"), + "BBox": TypeMapping(markdown="bbox"), +} + + +def is_semantic_newtype(type_info: TypeInfo) -> bool: + """Whether a type represents a semantic NewType that should be displayed by name. + + Returns True for unregistered NewTypes (HexColor, Sources) and NewTypes + that wrap a different base type (FeatureVersion wrapping int32, Id wrapping + NoWhitespaceString). Returns False for registered primitives (int32, Geometry). + """ + if type_info.newtype_name is None: + return False + if type_info.newtype_name != type_info.base_type: + return True + return get_type_mapping(type_info.base_type) is None + + +def get_type_mapping(type_name: str) -> TypeMapping | None: + """Look up a type mapping by name. + + Parameters + ---------- + type_name : str + The type name to look up (e.g., "int32", "str", "Geometry"). + Also accepts Python builtin names ("int" -> int64, "float" -> float64). + + Returns + ------- + TypeMapping or None + The TypeMapping for the type, or None if not found. + """ + return PRIMITIVE_TYPES.get(type_name) + + +def resolve_type_name(type_info: TypeInfo, target: str) -> str: + """Resolve a TypeInfo to the base type string for a given target. + + Looks up the type in the registry first (trying source_type if base_type + has no mapping). Falls back to the base_type name as-is. + + Parameters + ---------- + type_info : TypeInfo + The analyzed type information. + target : str + The output target ("markdown"). + + Returns + ------- + str + The resolved base type name string for the target. + """ + mapping = get_type_mapping(type_info.base_type) + if mapping is None and type_info.source_type is not None: + mapping = get_type_mapping(type_info.source_type.__name__) + if mapping is not None: + return mapping.for_target(target) + + # Semantic NewType wrapping an unregistered type (e.g., Sources wrapping + # SourceItem): use the underlying class name rather than the NewType alias. 
+ if type_info.newtype_name and type_info.source_type is not None: + return type_info.source_type.__name__ + return type_info.base_type diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/union_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/union_extraction.py new file mode 100644 index 000000000..c555fdba0 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/union_extraction.py @@ -0,0 +1,144 @@ +"""Union extraction and discriminator handling.""" + +from __future__ import annotations + +from typing import Annotated, get_args, get_origin + +from pydantic import BaseModel +from pydantic.fields import FieldInfo + +from overture.schema.system.feature import resolve_discriminator_field_name + +from .model_extraction import extract_model, resolve_field_alias +from .specs import AnnotatedField, UnionSpec, is_model_class +from .type_analyzer import TypeInfo, TypeKind, analyze_type, single_literal_value + +__all__ = ["extract_discriminator", "extract_union"] + + +def _find_common_base(members: list[type[BaseModel]]) -> type[BaseModel]: + """Find the most-derived common BaseModel ancestor of all members.""" + if not members: + raise ValueError("Cannot find common base of empty members list") + filtered_mros = [ + [c for c in cls.__mro__ if is_model_class(c) and c is not BaseModel] + for cls in members + ] + common = set(filtered_mros[0]) + for mro in filtered_mros[1:]: + common &= set(mro) + if not common: + raise ValueError( + f"No common BaseModel ancestor for {[m.__name__ for m in members]}" + ) + + def max_mro_index(cls: type) -> int: + return max(mro.index(cls) for mro in filtered_mros) + + return min(common, key=max_mro_index) + + +def _find_field_by_alias(model: type[BaseModel], alias: str) -> FieldInfo | None: + """Find a field in model_fields by alias-resolved name.""" + direct = model.model_fields.get(alias) + if direct is not None: + return direct + 
for py_name, fi in model.model_fields.items(): + if resolve_field_alias(py_name, fi) == alias: + return fi + return None + + +def extract_discriminator( + annotation: object, + members: list[type[BaseModel]], +) -> tuple[str | None, dict[str, type[BaseModel]] | None]: + """Extract discriminator field name and value-to-type mapping.""" + if get_origin(annotation) is not Annotated: + return None, None + + disc_field_name: str | None = None + for metadata in get_args(annotation)[1:]: + if isinstance(metadata, FieldInfo): + disc_field_name = resolve_discriminator_field_name(metadata.discriminator) + if disc_field_name is not None: + break + + if disc_field_name is None: + return None, None + + mapping: dict[str, type[BaseModel]] = {} + for member in members: + field_info = _find_field_by_alias(member, disc_field_name) + if field_info and field_info.annotation is not None: + lit_val = single_literal_value(field_info.annotation) + if lit_val is not None: + mapping[str(lit_val)] = member + + return disc_field_name, mapping or None + + +_TypeShape = tuple[str, TypeKind, bool, int] +_FieldKey = tuple[str, _TypeShape] + + +def _type_shape(ti: TypeInfo) -> _TypeShape: + """Structural shape for dedup -- excludes source_type which varies across members.""" + return (ti.base_type, ti.kind, ti.is_optional, ti.list_depth) + + +def extract_union( + name: str, + annotation: object, + *, + entry_point: str | None = None, +) -> UnionSpec: + """Extract a UnionSpec from a discriminated union type alias.""" + ti = analyze_type(annotation) + if ti.kind != TypeKind.UNION or ti.union_members is None: + raise TypeError(f"{name} is not a union type alias") + + members = list(ti.union_members) + common_base = _find_common_base(members) + + base_spec = extract_model(common_base) + shared_field_names = {f.name for f in base_spec.fields} + + member_specs = [(m, extract_model(m)) for m in members] + + annotated_fields: list[AnnotatedField] = [] + + # Shared fields first (from common base) + for fs 
in base_spec.fields: + annotated_fields.append(AnnotatedField(field_spec=fs, variant_sources=None)) + + # Variant-specific fields: collect by (name, type identity) for dedup + seen: dict[_FieldKey, AnnotatedField] = {} + + for member_cls, member_spec in member_specs: + for fs in member_spec.fields: + if fs.name in shared_field_names: + continue + key = (fs.name, _type_shape(fs.type_info)) + existing = seen.get(key) + prior_sources = existing.variant_sources or () if existing else () + seen[key] = AnnotatedField( + field_spec=fs, + variant_sources=(*prior_sources, member_cls.__name__), + ) + + annotated_fields.extend(seen.values()) + + disc_field, disc_mapping = extract_discriminator(annotation, members) + + return UnionSpec( + name=name, + description=ti.description, + annotated_fields=annotated_fields, + members=members, + discriminator_field=disc_field, + discriminator_mapping=disc_mapping, + source_annotation=annotation, + common_base=common_base, + entry_point=entry_point, + ) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/layout/__init__.py b/packages/overture-schema-codegen/src/overture/schema/codegen/layout/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/layout/module_layout.py b/packages/overture-schema-codegen/src/overture/schema/codegen/layout/module_layout.py new file mode 100644 index 000000000..bb6b92379 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/layout/module_layout.py @@ -0,0 +1,150 @@ +"""Output directory layout from Python module paths. + +Translates dotted module paths into output directory paths by mirroring +the source package structure. 
+""" + +from __future__ import annotations + +import sys +from collections.abc import Iterable, Mapping +from pathlib import PurePosixPath + +__all__ = [ + "OUTPUT_ROOT", + "compute_output_dir", + "compute_schema_root", + "entry_point_class", + "entry_point_module", + "is_package_module", + "module_relpath", + "output_dir_for_entry_point", +] + +OUTPUT_ROOT = PurePosixPath(".") + + +def _split_entry_point(entry_point_path: str) -> tuple[str, str]: + """Split `"module.path:ClassName"` into its two parts. + + >>> _split_entry_point("overture.schema.buildings:Building") + ('overture.schema.buildings', 'Building') + """ + if ":" not in entry_point_path: + msg = f"Expected 'module:Class' format, got {entry_point_path!r}" + raise ValueError(msg) + module, cls = entry_point_path.split(":", 1) + return module, cls + + +def entry_point_module(entry_point_path: str) -> str: + """Extract module path from entry-point-style path. + + >>> entry_point_module("overture.schema.buildings:Building") + 'overture.schema.buildings' + """ + return _split_entry_point(entry_point_path)[0] + + +def entry_point_class(entry_point_path: str) -> str: + """Extract class name from entry-point-style path. + + >>> entry_point_class("overture.schema.buildings:Building") + 'Building' + """ + return _split_entry_point(entry_point_path)[1] + + +def compute_schema_root(module_paths: Iterable[str]) -> str: + """Find the longest common dotted prefix of module paths. + + Deduplicates inputs first. For a single unique path, drops the + last component (the module itself). 
+ """ + paths = sorted(set(module_paths)) + if not paths: + msg = "No module paths provided" + raise ValueError(msg) + + segments = [p.split(".") for p in paths] + if len(segments) == 1: + return ".".join(segments[0][:-1]) + + common: list[str] = [] + for parts in zip(*segments, strict=False): + if len(set(parts)) == 1: + common.append(parts[0]) + else: + break + return ".".join(common) + + +def module_relpath(module: str, root: str) -> str: + """Strip the schema root prefix from a dotted module path.""" + if not root: + return module + if module == root: + return "" + prefix = root + "." + if not module.startswith(prefix): + msg = f"Module {module!r} does not start with root {root!r}" + raise ValueError(msg) + return module[len(prefix) :] + + +def is_package_module( + module: str, + module_registry: Mapping[str, object] | None = None, +) -> bool: + """Check whether a module is a package (directory) or a file module. + + Packages have `__path__`; file modules do not (PEP 302). + """ + registry: Mapping[str, object] = ( + module_registry if module_registry is not None else sys.modules + ) + mod = registry.get(module) + if mod is None: + msg = f"Module {module!r} not found in registry" + raise ValueError(msg) + return hasattr(mod, "__path__") + + +def output_dir_for_entry_point( + entry_point_path: str | None, + schema_root: str, + module_registry: Mapping[str, object] | None = None, +) -> PurePosixPath: + """Compute output directory from an entry-point-style path. + + Raises ValueError if *entry_point_path* is None. + """ + if entry_point_path is None: + msg = "entry_point_path must not be None" + raise ValueError(msg) + module = entry_point_module(entry_point_path) + return compute_output_dir(module, schema_root, module_registry) + + +def compute_output_dir( + module: str, + schema_root: str, + module_registry: Mapping[str, object] | None = None, +) -> PurePosixPath: + """Compute output directory for a module, mirroring package structure. 
+ + File modules drop their last component (the .py filename). + Packages keep all components. Returns `PurePosixPath(".")` for + the root directory. + """ + relpath = module_relpath(module, schema_root) + if not relpath: + return OUTPUT_ROOT + + parts = relpath.split(".") + if not is_package_module(module, module_registry): + parts = parts[:-1] + + if not parts: + return OUTPUT_ROOT + return PurePosixPath(*parts) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/layout/type_collection.py b/packages/overture-schema-codegen/src/overture/schema/codegen/layout/type_collection.py new file mode 100644 index 000000000..b9072da64 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/layout/type_collection.py @@ -0,0 +1,158 @@ +"""Supplementary type discovery by walking expanded feature trees. + +Walks FieldSpec.model references for sub-models (already extracted), +and extracts enums and NewTypes on first encounter. +""" + +from collections.abc import Sequence +from typing import Annotated, get_args, get_origin + +from ..extraction.enum_extraction import extract_enum +from ..extraction.model_extraction import expand_model_tree, extract_model +from ..extraction.newtype_extraction import extract_newtype +from ..extraction.pydantic_extraction import extract_pydantic_type +from ..extraction.specs import ( + FeatureSpec, + FieldSpec, + ModelSpec, + SupplementarySpec, + TypeIdentity, + is_pydantic_type, +) +from ..extraction.type_analyzer import ( + TypeInfo, + TypeKind, + analyze_type, + is_newtype, + walk_type_info, +) +from ..extraction.type_registry import is_semantic_newtype + +__all__ = ["collect_all_supplementary_types"] + + +def collect_all_supplementary_types( + feature_specs: Sequence[FeatureSpec], +) -> dict[TypeIdentity, SupplementarySpec]: + """Collect supplementary types by walking expanded feature trees. + + Requires that expand_model_tree has been called on all feature specs + first. 
Walks FieldSpec.model references for sub-models (already + extracted), and extracts enums and NewTypes on first encounter. + + Returns a dict mapping TypeIdentity to extracted specs. Two types + with the same class name from different modules are keyed separately. + """ + feature_objs: set[object] = {spec.identity.obj for spec in feature_specs} + all_specs: dict[TypeIdentity, SupplementarySpec] = {} + visited_models: set[object] = set() + + def _register_newtype(newtype_ref: object, name: str) -> bool: + """Register a NewType if not already present. Returns True if registered.""" + nt_id = TypeIdentity(newtype_ref, name) + if nt_id in all_specs: + return False + all_specs[nt_id] = extract_newtype(newtype_ref) + return True + + def _collect_from_model(model_spec: ModelSpec) -> None: + if ( + model_spec.source_type in visited_models + or model_spec.source_type in feature_objs + ): + return + visited_models.add(model_spec.source_type) + all_specs[model_spec.identity] = model_spec + _collect_from_fields(model_spec.fields) + + def _collect_inner_newtypes(newtype_ref: object) -> None: + """Walk a NewType's __supertype__ chain for intermediate semantic NewTypes.""" + annotation = getattr(newtype_ref, "__supertype__", None) + while annotation is not None: + if get_origin(annotation) is Annotated: + annotation = get_args(annotation)[0] + continue + if is_newtype(annotation): + inner_ti = analyze_type(annotation) + if ( + inner_ti.newtype_ref is not None + and inner_ti.newtype_name is not None + and is_semantic_newtype(inner_ti) + ): + _register_newtype(inner_ti.newtype_ref, inner_ti.newtype_name) + annotation = getattr(annotation, "__supertype__", None) + continue + break + + def _collect_from_type_info(ti: TypeInfo) -> None: + """Collect supplementary types from a single TypeInfo. + + Uses walk_type_info for dict key/value recursion. Handles all + TypeKind variants without early returns so newtype extraction + and dict recursion apply regardless of kind. 
+ """ + + def _visit(node: TypeInfo) -> None: + # UNION, ENUM, and pydantic (PRIMITIVE) are mutually exclusive + # by TypeKind. NewType extraction is orthogonal -- a node can be + # a NewType-wrapped ENUM, for instance. + if node.kind == TypeKind.UNION and node.union_members: + # Walk each member's fields for supplementary types. + # Members that are also top-level feature specs are skipped + # by the feature_objs guard in _collect_from_model. + for member_cls in node.union_members: + member_spec = extract_model(member_cls) + expand_model_tree(member_spec) + _collect_from_model(member_spec) + elif node.kind == TypeKind.ENUM and node.source_type is not None: + enum_id = TypeIdentity.of(node.source_type) + if enum_id not in all_specs: + all_specs[enum_id] = extract_enum(node.source_type) + elif is_pydantic_type(node): + if node.source_type is None: + raise TypeError( + "is_pydantic_type returned True but source_type is None" + ) + pid = TypeIdentity.of(node.source_type) + if pid not in all_specs: + all_specs[pid] = extract_pydantic_type(node.source_type) + + # Semantic NewTypes always get extracted, including intermediate + # NewTypes in the wrapping chain (e.g., Id wraps NoWhitespaceString + # wraps str -- both Id and NoWhitespaceString get pages). + if ( + node.newtype_ref is not None + and node.newtype_name is not None + and is_semantic_newtype(node) + ): + newly_registered = _register_newtype( + node.newtype_ref, node.newtype_name + ) + if newly_registered: + _collect_inner_newtypes(node.newtype_ref) + + walk_type_info(ti, _visit) + + def _collect_from_fields(fields: list[FieldSpec]) -> None: + # A single field can match multiple conditions (e.g., Sources is both + # a semantic NewType and wraps a MODEL-kind type), so checks are + # independent `if` statements, not `elif`. 
+ for field_spec in fields: + ti = field_spec.type_info + _collect_from_type_info(ti) + + # MODEL-kind fields (whether direct or via NewType wrapper) get expanded + if ti.kind == TypeKind.MODEL and ti.source_type is not None: + if field_spec.model is None: + msg = ( + f"MODEL-kind field {field_spec.name!r} has source_type " + f"but model=None — call expand_model_tree first" + ) + raise RuntimeError(msg) + if not field_spec.starts_cycle: + _collect_from_model(field_spec.model) + + for spec in feature_specs: + _collect_from_fields(spec.fields) + + return all_specs diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/__init__.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/link_computation.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/link_computation.py new file mode 100644 index 000000000..bf09950c4 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/link_computation.py @@ -0,0 +1,69 @@ +"""Relative link computation between rendered output files.""" + +from dataclasses import dataclass +from pathlib import PurePosixPath + +from ..extraction.case_conversion import slug_filename +from ..extraction.specs import TypeIdentity + +__all__ = ["LinkContext", "relative_link"] + + +@dataclass +class LinkContext: + """Placement context for resolving cross-directory markdown links.""" + + page_path: PurePosixPath + registry: dict[TypeIdentity, PurePosixPath] + + def resolve_link(self, identity: TypeIdentity) -> str | None: + """Resolve *identity* to a relative link if it exists in the registry.""" + if identity in self.registry: + return relative_link(self.page_path, self.registry[identity]) + return None + + def resolve_link_or_slug(self, identity: TypeIdentity) -> str: + """Resolve *identity* to a relative link, 
falling back to a slug filename. + + Always returns a usable link string. Use when the caller needs a + link regardless of whether the type has a registered page. + """ + return self.resolve_link(identity) or slug_filename(identity.name) + + +def _is_normalized(path: PurePosixPath) -> bool: + """Check whether the path contains no '..' or '.' components (except root '.').""" + return ".." not in path.parts and path.parts.count(".") <= 1 + + +def relative_link(source: PurePosixPath, target: PurePosixPath) -> str: + """Compute a relative path from source file to target file. + + Both paths must be normalized (no `..` components) and relative + to the same output root. + """ + if not _is_normalized(source): + msg = f"Source path not normalized: {source}" + raise ValueError(msg) + if not _is_normalized(target): + msg = f"Target path not normalized: {target}" + raise ValueError(msg) + source_dir = source.parent + # Count how many levels up from source_dir to common ancestor, + # then descend to target. PurePosixPath doesn't have os.path.relpath, + # so compute manually. + source_parts = source_dir.parts + target_parts = target.parts + + # Find common prefix length + common = 0 + for s, t in zip(source_parts, target_parts, strict=False): + if s != t: + break + common += 1 + + ups = len(source_parts) - common + downs = target_parts[common:] + + parts = [".."] * ups + list(downs) + return "/".join(parts) if parts else "." diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/path_assignment.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/path_assignment.py new file mode 100644 index 000000000..f0d224ee4 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/path_assignment.py @@ -0,0 +1,115 @@ +"""Map types to markdown output file paths. + +Uses module-mirrored output directories: output paths derive from +the source Python module path relative to schema_root. 
+""" + +from collections.abc import Sequence +from pathlib import PurePosixPath + +from ..extraction.case_conversion import slug_filename +from ..extraction.specs import ( + FeatureSpec, + PydanticTypeSpec, + SupplementarySpec, + TypeIdentity, +) +from ..layout.module_layout import compute_output_dir, output_dir_for_entry_point + +__all__ = [ + "GEOMETRY_PAGE", + "PRIMITIVES_PAGE", + "build_placement_registry", + "resolve_output_path", +] + +# Aggregate page paths. +PRIMITIVES_PAGE = PurePosixPath("system/primitive/primitives.md") +GEOMETRY_PAGE = PurePosixPath("system/primitive/geometry.md") + + +def build_placement_registry( + feature_specs: Sequence[FeatureSpec], + all_specs: dict[TypeIdentity, SupplementarySpec], + numeric_names: list[TypeIdentity], + geometry_names: list[TypeIdentity], + schema_root: str, +) -> dict[TypeIdentity, PurePosixPath]: + """Build a mapping from TypeIdentity to output file paths. + + Uses module-mirrored output directories: output paths derive from + the source Python module path relative to schema_root. 
+ """ + registry: dict[TypeIdentity, PurePosixPath] = _aggregate_page_entries( + numeric_names, geometry_names + ) + + feature_dirs: set[PurePosixPath] = set() + for spec in feature_specs: + spec_dir = output_dir_for_entry_point(spec.entry_point, schema_root) + registry[spec.identity] = _md_path(spec_dir, spec.name) + feature_dirs.add(spec_dir) + + for tid, supp_spec in all_specs.items(): + if tid in registry: + continue + if isinstance(supp_spec, PydanticTypeSpec): + registry[tid] = ( + PurePosixPath("pydantic") + / supp_spec.source_module + / slug_filename(tid.name) + ) + continue + source_module = getattr(supp_spec.source_type, "__module__", None) + if source_module is None: + continue + output_dir = compute_output_dir(source_module, schema_root) + output_dir = _nest_under_types(output_dir, feature_dirs) + registry[tid] = _md_path(output_dir, tid.name) + + return registry + + +def resolve_output_path( + identity: TypeIdentity, + registry: dict[TypeIdentity, PurePosixPath] | None, +) -> PurePosixPath: + """Look up a type's output path from the registry, with flat-file fallback.""" + if registry is not None and identity in registry: + return registry[identity] + return PurePosixPath(slug_filename(identity.name)) + + +def _aggregate_page_entries( + numeric_names: list[TypeIdentity], + geometry_names: list[TypeIdentity], +) -> dict[TypeIdentity, PurePosixPath]: + """Pre-populate registry entries for types documented on aggregate pages.""" + entries: dict[TypeIdentity, PurePosixPath] = dict.fromkeys( + numeric_names, PRIMITIVES_PAGE + ) + entries.update(dict.fromkeys(geometry_names, GEOMETRY_PAGE)) + return entries + + +def _nest_under_types( + output_dir: PurePosixPath, feature_dirs: set[PurePosixPath] +) -> PurePosixPath: + """Insert `types/` after the feature directory portion. + + If *output_dir* equals or is a subdirectory of a feature directory, + returns a path with `types/` inserted after the feature directory. + Otherwise returns *output_dir* unchanged. 
+ """ + for fd in sorted(feature_dirs, key=lambda p: len(p.parts), reverse=True): + try: + relative = output_dir.relative_to(fd) + except ValueError: + continue + return fd / "types" / relative + return output_dir + + +def _md_path(directory: PurePosixPath, name: str) -> PurePosixPath: + """Build a .md file path from a directory and a PascalCase type name.""" + return directory / slug_filename(name) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/pipeline.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/pipeline.py new file mode 100644 index 000000000..f7c676c06 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/pipeline.py @@ -0,0 +1,187 @@ +"""Markdown generation pipeline: render pages without I/O. + +Orchestrates tree expansion, type collection, placement, reverse +references, and rendering into a list of RenderedPage objects. The +caller decides what to do with them (write to disk, add frontmatter, +stream to stdout, etc.). 
+""" + +from collections.abc import Sequence +from dataclasses import dataclass +from pathlib import PurePosixPath + +import overture.schema.system.primitive as _system_primitive +from overture.schema.system.primitive import GeometryType + +from ..extraction.examples import ExampleRecord, load_examples +from ..extraction.model_extraction import expand_model_tree +from ..extraction.numeric_extraction import extract_numerics +from ..extraction.specs import ( + EnumSpec, + FeatureSpec, + ModelSpec, + NewTypeSpec, + PydanticTypeSpec, + SupplementarySpec, + TypeIdentity, + UnionSpec, +) +from ..extraction.type_analyzer import is_newtype +from ..layout.type_collection import collect_all_supplementary_types +from .link_computation import LinkContext +from .path_assignment import ( + GEOMETRY_PAGE, + PRIMITIVES_PAGE, + build_placement_registry, + resolve_output_path, +) +from .renderer import ( + render_enum, + render_feature, + render_geometry_from_values, + render_newtype, + render_primitives_from_specs, + render_pydantic_type, +) +from .reverse_references import UsedByEntry, compute_reverse_references + +__all__ = [ + "RenderedPage", + "generate_markdown_pages", + "partition_numeric_and_geometry_types", +] + + +@dataclass(frozen=True, slots=True) +class RenderedPage: + """A rendered page with its content and output path.""" + + content: str + path: PurePosixPath + is_feature: bool = False + + +def _load_model_examples( + spec: FeatureSpec, +) -> list[ExampleRecord] | None: + """Load examples for a feature spec, returning None when absent.""" + if isinstance(spec, UnionSpec): + pyproject_source = spec.members[0] if spec.members else None + validation_type = spec.source_annotation + model_fields = spec.common_base.model_fields + else: + pyproject_source = spec.source_type + validation_type = spec.source_type + model_fields = spec.source_type.model_fields if spec.source_type else {} + if not pyproject_source: + return None + field_names = [f.name for f in spec.fields] + 
examples = load_examples( + validation_type, + spec.name, + field_names, + pyproject_source=pyproject_source, + model_fields=model_fields, + ) + return examples or None + + +def _render_supplement( + tid: TypeIdentity, + spec: SupplementarySpec, + registry: dict[TypeIdentity, PurePosixPath], + reverse_refs: dict[TypeIdentity, list[UsedByEntry]], +) -> RenderedPage: + """Render a single supplementary type page.""" + output_path = resolve_output_path(tid, registry) + ctx = LinkContext(output_path, registry) + used_by = reverse_refs.get(tid) + + if isinstance(spec, EnumSpec): + content = render_enum(spec, link_ctx=ctx, used_by=used_by) + elif isinstance(spec, NewTypeSpec): + content = render_newtype(spec, ctx, used_by=used_by) + elif isinstance(spec, ModelSpec): + content = render_feature(spec, ctx, used_by=used_by) + elif isinstance(spec, PydanticTypeSpec): + content = render_pydantic_type(spec, link_ctx=ctx, used_by=used_by) + else: + raise TypeError(f"Unhandled SupplementarySpec variant: {type(spec).__name__}") + + return RenderedPage(content=content, path=output_path) + + +def partition_numeric_and_geometry_types( + types_module: object, +) -> tuple[list[TypeIdentity], list[TypeIdentity]]: + """Discover numeric and geometry types from a module's exports. + + NewType exports are numeric types. + Non-constraint class/enum exports are geometry types. + """ + module_all: list[str] = getattr(types_module, "__all__", []) + numerics: list[TypeIdentity] = [] + geometries: list[TypeIdentity] = [] + + for name in module_all: + obj = getattr(types_module, name) + if is_newtype(obj): + numerics.append(TypeIdentity(obj, name)) + elif isinstance(obj, type) and not name.endswith("Constraint"): + geometries.append(TypeIdentity(obj, name)) + + return numerics, geometries + + +def generate_markdown_pages( + feature_specs: Sequence[FeatureSpec], + schema_root: str, +) -> list[RenderedPage]: + """Generate all markdown pages from feature specs. 
+ + Returns rendered pages without writing to disk. The caller handles + I/O, frontmatter injection, and any output-format-specific concerns + (like Docusaurus category files). + """ + cache: dict[type, ModelSpec] = {} + for spec in feature_specs: + expand_model_tree(spec, cache) + + numeric_names, geometry_names = partition_numeric_and_geometry_types( + _system_primitive + ) + all_specs = collect_all_supplementary_types(feature_specs) + registry = build_placement_registry( + feature_specs, all_specs, numeric_names, geometry_names, schema_root + ) + + reverse_refs = compute_reverse_references(feature_specs, all_specs) + + pages: list[RenderedPage] = [] + + for spec in feature_specs: + output_path = registry[spec.identity] + ctx = LinkContext(output_path, registry) + examples = _load_model_examples(spec) + used_by = reverse_refs.get(spec.identity) + content = render_feature(spec, link_ctx=ctx, examples=examples, used_by=used_by) + pages.append(RenderedPage(content=content, path=output_path, is_feature=True)) + + for tid, supp_spec in all_specs.items(): + pages.append(_render_supplement(tid, supp_spec, registry, reverse_refs)) + + pages.append( + RenderedPage( + content=render_primitives_from_specs(extract_numerics(numeric_names)), + path=PRIMITIVES_PAGE, + ) + ) + + pages.append( + RenderedPage( + content=render_geometry_from_values([m.value for m in GeometryType]), + path=GEOMETRY_PAGE, + ) + ) + + return pages diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/renderer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/renderer.py new file mode 100644 index 000000000..0e829d1f4 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/renderer.py @@ -0,0 +1,630 @@ +"""Markdown renderer for Pydantic model documentation.""" + +import datetime +import functools +import json +import re +from collections.abc import Callable +from dataclasses import dataclass +from pathlib import 
Path
+from typing import TypedDict, cast
+
+from annotated_types import Interval
+from jinja2 import Environment, FileSystemLoader
+from typing_extensions import NotRequired
+
+from ..extraction.examples import ExampleRecord
+from ..extraction.field_constraints import constraint_display_text
+from ..extraction.model_constraints import analyze_model_constraints
+from ..extraction.specs import (
+    AnnotatedField,
+    EnumSpec,
+    FeatureSpec,
+    FieldSpec,
+    ModelSpec,
+    NewTypeSpec,
+    NumericSpec,
+    PydanticTypeSpec,
+    TypeIdentity,
+    UnionSpec,
+)
+from ..extraction.type_analyzer import (
+    ConstraintSource,
+)
+from .link_computation import LinkContext
+from .reverse_references import UsedByEntry
+from .type_format import (
+    format_type,
+    format_underlying_type,
+    resolve_type_link,
+)
+
+__all__ = [
+    "render_enum",
+    "render_feature",
+    "render_geometry_from_values",
+    "render_newtype",
+    "render_primitives_from_specs",
+    "render_pydantic_type",
+]
+
+
+_LinkFn = Callable[[TypeIdentity], str]
+
+_TEMPLATES_DIR = Path(__file__).parent / "templates"
+
+_BARE_URL_RE = re.compile(
+    r"(?<!\]\()(https?://[^\s<>)]+|www\.[^\s<>)]+)"  # NOTE(review): lookbehind reconstructed from corrupted source; verify
+)
+_TRAILING_PUNCT_RE = re.compile(r"[.,;:!?]+$")
+# (.+?) deliberately does not match newlines -- CommonMark code spans are inline.
+_CODE_SPAN_RE = re.compile(r"(`+)(.+?)\1")
+
+
+def _linkify_bare_urls(text: str) -> str:
+    """Wrap bare URLs in Markdown link syntax.
+
+    Turns `www.example.com` into `[www.example.com](https://www.example.com)`
+    and `https://example.com` into `[https://example.com](https://example.com)`.
+    URLs already inside `[text](url)` or backtick code spans are left
+    untouched. Trailing sentence punctuation (`.`, `,`, etc.) is excluded
+    from the link.
+
+    Two-pass approach: extract code spans first, linkify the remaining
+    text, then restore code spans.
+ """ + # Extract code spans, replacing with placeholders + spans: list[str] = [] + + def _stash_span(m: re.Match[str]) -> str: + spans.append(m.group(0)) + return f"\x00CODESPAN{len(spans) - 1}\x00" + + text = _CODE_SPAN_RE.sub(_stash_span, text) + + # Linkify bare URLs in non-code text + def _to_link(m: re.Match[str]) -> str: + raw = m.group(0) + url = _TRAILING_PUNCT_RE.sub("", raw) + trailing = raw[len(url) :] + href = url if url.startswith("http") else f"https://{url}" + return f"[{url}]({href}){trailing}" + + text = _BARE_URL_RE.sub(_to_link, text) + + # Restore code spans + for i, span in enumerate(spans): + text = text.replace(f"\x00CODESPAN{i}\x00", span) + + return text + + +@functools.lru_cache(maxsize=1) +def _get_jinja_env() -> Environment: + """Return the Jinja2 environment, creating it on first use.""" + env = Environment( + loader=FileSystemLoader(_TEMPLATES_DIR), + trim_blocks=True, + lstrip_blocks=True, + ) + env.filters["linkify_urls"] = _linkify_bare_urls + return env + + +_EXAMPLE_TRUNCATION_LIMIT = 100 + + +class _FieldRow(TypedDict): + """Template context for a single field table row. + + `pre_formatted` indicates the `name` already contains backticks + and variant tags, so the template should render it verbatim. + """ + + name: str + type_str: str + description: str | None + pre_formatted: NotRequired[bool] + + +_PARAGRAPH_BREAK_RE = re.compile(r"\n(?:[ \t]*\n)+") + + +def _unwrap_paragraphs(text: str) -> str: + r"""Unwrap hard-wrapped lines within paragraphs, preserving paragraph breaks. + + Splits on blank lines (paragraph boundaries), replaces single newlines + within each paragraph with spaces, then rejoins with `\n\n`. + Matches markdown's treatment of newlines within paragraphs. + """ + paragraphs = _PARAGRAPH_BREAK_RE.split(text) + return "\n\n".join(p.replace("\n", " ") for p in paragraphs) + + +def _sanitize_for_table_cell(text: str) -> str: + """Sanitize text for embedding in a markdown table cell. 
+
+    Unwraps within-paragraph newlines to spaces, then converts paragraph
+    breaks to `<br /><br />`. Escapes pipe characters for table safety.
+    Uses `<br />` (not `<br>`) for MDX/Docusaurus compatibility.
+    """
+    text = text.strip()
+    text = _unwrap_paragraphs(text)
+    text = text.replace("\n\n", "<br /><br />")
+    return text.replace("|", "\\|")
+
+
+def _truncate(text: str) -> str:
+    """Truncate text to `_EXAMPLE_TRUNCATION_LIMIT` chars, adding ellipsis."""
+    if len(text) > _EXAMPLE_TRUNCATION_LIMIT:
+        return text[: _EXAMPLE_TRUNCATION_LIMIT - 3] + "..."
+    return text
+
+
+def _format_example_value(value: object) -> str:
+    """Format an example value for display in a markdown Column | Value table.
+
+    All non-empty values render in backticks for consistent monospace
+    formatting. Long representations are truncated before wrapping.
+    """
+    if value is None:
+        return "`null`"
+
+    if isinstance(value, bool):
+        return "`true`" if value else "`false`"
+
+    if isinstance(value, datetime.date):
+        return f"`{value.isoformat()}`"
+
+    if isinstance(value, str):
+        if value == "":
+            return ""
+        return f"`{_truncate(value)}`"
+
+    if isinstance(value, list):
+        items = ", ".join(json.dumps(item, default=str) for item in value)
+        return f"`{_truncate(f'[{items}]')}`"
+
+    if isinstance(value, dict):
+        pairs = ", ".join(
+            f"{json.dumps(k, default=str)}: {json.dumps(v, default=str)}"
+            for k, v in value.items()
+        )
+        return f"`{_truncate(f'{{{pairs}}}')}`"
+
+    return f"`{_truncate(str(value))}`"
+
+
+def _field_template_context(
+    field: FieldSpec,
+    ctx: LinkContext | None = None,
+) -> _FieldRow:
+    """Build template context dict for a field."""
+    description = (
+        _sanitize_for_table_cell(field.description) if field.description else None
+    )
+    return _FieldRow(
+        name=field.name,
+        type_str=format_type(field, ctx),
+        description=description,
+    )
+
+
+def _annotate_constraint_notes(
+    row: _FieldRow,
+    notes: list[str],
+) -> None:
+    """Append italic constraint descriptions to a field's description cell."""
+    formatted = "<br />".join(f"*{note}*" for note in notes)
+    if row["description"]:
+        row["description"] = f"{row['description']}<br /><br />{formatted}"
+    else:
+        row["description"] = formatted
+
+
+def _link_fn_from_ctx(ctx: LinkContext | None) -> _LinkFn:
+    r"""Build a TypeIdentity-to-markdown-link resolver from a LinkContext.
+
+    Returns a function that resolves a TypeIdentity to ``[`Name`](href)``
+    when the identity has a page in the registry, or plain ``\`Name\``` otherwise.
+    """
+    return functools.partial(resolve_type_link, ctx=ctx)
+
+
+def _annotate_field_constraints(
+    row: _FieldRow, field: FieldSpec, ctx: LinkContext | None
+) -> None:
+    """Annotate a field row with constraints from the field's own annotation.
+
+    Shows constraints where source is None — those applied directly to
+    the field, not inherited from NewType chains. NewType-inherited
+    constraints appear on the NewType's own page instead.
+    """
+    link_fn = _link_fn_from_ctx(ctx)
+    notes = [
+        constraint_display_text(cs, link_fn=link_fn)
+        for cs in field.type_info.constraints
+        if cs.source_ref is None
+    ]
+    if notes:
+        _annotate_constraint_notes(row, notes)
+
+
+def _expandable_list_suffix(field_spec: FieldSpec) -> str:
+    """Return `"[]"` per nesting level for list-of-model fields expanded inline."""
+    if (
+        field_spec.type_info.is_list
+        and field_spec.model
+        and not field_spec.starts_cycle
+    ):
+        return "[]" * field_spec.type_info.list_depth
+    return ""
+
+
+def _expand_sub_model(
+    field_spec: FieldSpec,
+    name: str,
+    ctx: LinkContext | None,
+    result: list[_FieldRow],
+) -> None:
+    """Expand sub-model fields inline, appending child rows to *result*."""
+    sub = field_spec.model if not field_spec.starts_cycle else None
+    if sub is not None:
+        child_prefix = f"{name}{_expandable_list_suffix(field_spec)}."
+        result.extend(_expand_model_fields(sub.fields, ctx, prefix=child_prefix))
+
+
+def _annotate_top_level_constraints(
+    rows: list[_FieldRow],
+    constraint_notes: dict[str, list[str]] | None,
+) -> None:
+    """Annotate top-level field rows with model-constraint notes.
+ + Top-level rows are those without dot-notation prefixes. + """ + if not constraint_notes: + return + for row in rows: + name = row["name"] + if "." in name: + continue + field_name = name.split("[")[0] + if field_name in constraint_notes: + _annotate_constraint_notes(row, constraint_notes[field_name]) + + +def _expand_model_fields( + fields: list[FieldSpec], + ctx: LinkContext | None, + prefix: str = "", +) -> list[_FieldRow]: + """Flatten nested model fields into dot-notation rows for display. + + Walks the pre-populated FieldSpec.model tree. Stops recursion at + fields marked with starts_cycle. + """ + result: list[_FieldRow] = [] + for field_spec in fields: + row = _field_template_context(field_spec, ctx) + name = f"{prefix}{field_spec.name}" if prefix else field_spec.name + row["name"] = f"{name}{_expandable_list_suffix(field_spec)}" + if not prefix: + _annotate_field_constraints(row, field_spec, ctx) + result.append(row) + + _expand_sub_model(field_spec, name, ctx, result) + return result + + +def _short_variant_name(class_name: str, union_name: str) -> str: + """Strip common suffix to produce short variant name. 
+ + Examples + -------- + >>> _short_variant_name("RoadSegment", "Segment") + 'Road' + >>> _short_variant_name("WaterSegment", "Segment") + 'Water' + >>> _short_variant_name("Building", "Building") + 'Building' + """ + if class_name.endswith(union_name): + short = class_name[: -len(union_name)] + if short: + return short + return class_name + + +def _variant_tag(annotated: AnnotatedField, union_name: str) -> str | None: + """Return an italic variant tag like `*(Road, Water)*`, or None for shared fields.""" + if annotated.variant_sources is None: + return None + short_names = [ + _short_variant_name(v, union_name) for v in annotated.variant_sources + ] + return f" *({', '.join(short_names)})*" + + +def _expand_union_fields( + spec: UnionSpec, + ctx: LinkContext | None, + constraint_notes: dict[str, list[str]] | None = None, +) -> list[_FieldRow]: + """Expand UnionSpec fields with inline variant tags. + + Shared fields (variant_sources=None) render normally. Variant-specific + fields get *(ShortName)* tag after the field name. + """ + result: list[_FieldRow] = [] + for annotated in spec.annotated_fields: + field_spec = annotated.field_spec + row = _field_template_context(field_spec, ctx) + name = field_spec.name + suffix = _expandable_list_suffix(field_spec) + + _annotate_field_constraints(row, field_spec, ctx) + if constraint_notes and field_spec.name in constraint_notes: + _annotate_constraint_notes(row, constraint_notes[field_spec.name]) + + tag = _variant_tag(annotated, spec.name) + if tag is not None: + row["name"] = f"`{name}{suffix}`{tag}" + row["pre_formatted"] = True + else: + row["name"] = f"{name}{suffix}" + + result.append(row) + _expand_sub_model(field_spec, name, ctx, result) + return result + + +def render_feature( + spec: FeatureSpec, + link_ctx: LinkContext | None = None, + examples: list[ExampleRecord] | None = None, + used_by: list[UsedByEntry] | None = None, +) -> str: + """Render a FeatureSpec (ModelSpec or UnionSpec) as Markdown documentation. 
+ + For ModelSpec, requires expand_model_tree to have been called first. + For UnionSpec, adds inline variant tags to variant-specific fields. + """ + template = _get_jinja_env().get_template("feature.md.jinja2") + + constraint_descriptions, field_notes = analyze_model_constraints(spec.constraints) + + if isinstance(spec, UnionSpec): + fields = _expand_union_fields(spec, link_ctx, constraint_notes=field_notes) + elif isinstance(spec, ModelSpec): + fields = _expand_model_fields(spec.fields, link_ctx) + _annotate_top_level_constraints(fields, field_notes) + else: + raise TypeError(f"Unsupported spec type: {type(spec).__name__}") + + formatted_examples: list[list[dict[str, str]]] | None = None + if examples: + formatted_examples = [ + [ + {"column": key, "value": _format_example_value(val)} + for key, val in record.rows + ] + for record in examples + ] + + return template.render( + model=spec, + fields=fields, + constraints=constraint_descriptions, + examples=formatted_examples, + used_by=_build_used_by_context(used_by, link_ctx), + ) + + +def render_enum( + enum_spec: EnumSpec, + link_ctx: LinkContext | None = None, + used_by: list[UsedByEntry] | None = None, +) -> str: + """Render an EnumSpec as Markdown documentation.""" + template = _get_jinja_env().get_template("enum.md.jinja2") + return template.render( + enum=enum_spec, used_by=_build_used_by_context(used_by, link_ctx) + ) + + +@dataclass +class _NewTypeConstraintRow: + """Rendered constraint for template.""" + + display: str + source: str | None = None + source_link: str | None = None + + +def _format_constraint( + cs: ConstraintSource, + newtype_ref: object, + ctx: LinkContext | None = None, +) -> _NewTypeConstraintRow: + """Format a ConstraintSource for display in a NewType page.""" + display = constraint_display_text(cs) + + if cs.source_ref is None or cs.source_ref is newtype_ref: + return _NewTypeConstraintRow(display=display) + + # source_ref and source_name are always set together + if cs.source_name is 
None: + return _NewTypeConstraintRow(display=display) + source_identity = TypeIdentity(cs.source_ref, cs.source_name) + source_link = ctx.resolve_link(source_identity) if ctx else None + return _NewTypeConstraintRow( + display=display, source=cs.source_name, source_link=source_link + ) + + +class _UsedByContext(TypedDict): + """Template context for a used-by entry.""" + + name: str + link: str | None + + +def _build_used_by_context( + used_by: list[UsedByEntry] | None, + link_ctx: LinkContext | None, +) -> list[_UsedByContext] | None: + """Build template context for used-by entries.""" + if not used_by: + return None + return [ + { + "name": entry.identity.name, + "link": link_ctx.resolve_link(entry.identity) if link_ctx else None, + } + for entry in used_by + ] + + +def render_newtype( + newtype_spec: NewTypeSpec, + link_ctx: LinkContext | None = None, + used_by: list[UsedByEntry] | None = None, +) -> str: + """Render a NewTypeSpec as Markdown documentation.""" + template = _get_jinja_env().get_template("newtype.md.jinja2") + ti = newtype_spec.type_info + underlying = format_underlying_type(ti, link_ctx) + constraints = [ + _format_constraint(cs, newtype_spec.source_type, link_ctx) + for cs in ti.constraints + ] + + return template.render( + newtype=newtype_spec, + underlying_type=underlying, + constraints=constraints, + used_by=_build_used_by_context(used_by, link_ctx), + ) + + +def render_pydantic_type( + spec: PydanticTypeSpec, + link_ctx: LinkContext | None = None, + used_by: list[UsedByEntry] | None = None, +) -> str: + """Render a PydanticTypeSpec as Markdown documentation.""" + template = _get_jinja_env().get_template("pydantic_type.md.jinja2") + return template.render( + pydantic_type=spec, + used_by=_build_used_by_context(used_by, link_ctx), + ) + + +# Matches the ge/le bounds of the int64 NewType in overture.schema.system.primitive. 
+_INT64_MIN = -(2**63) +_INT64_MAX = 2**63 - 1 + +_NumericBound = int | float | None + +# IEEE 754 precision by bit width — formatting knowledge, not schema data. +_FLOAT_PRECISION: dict[int, str] = {32: "~7 decimal digits", 64: "~15 decimal digits"} + + +def _format_bound(value: int | float) -> str: + """Format a numeric bound for display. + + Uses `2^63` notation for int64-scale values to avoid unreadable + numbers; otherwise formats with thousands separators for ints. + """ + if value == _INT64_MIN: + return "-2^63" + if value == _INT64_MAX: + return "2^63-1" + if isinstance(value, float): + return str(value) + return f"{value:,}" + + +def _format_interval(bounds: Interval) -> str: + """Format an Interval as a range string, or empty if unconstrained. + + Two inclusive bounds render as `lower to upper`. All other + combinations use explicit comparison operators so the + inclusivity/exclusivity is unambiguous. + """ + # Interval fields are typed as Supports* protocols; narrow to numeric + # since we only encounter int/float constraints from the schema. 
+ ge = cast(_NumericBound, bounds.ge) + gt = cast(_NumericBound, bounds.gt) + le = cast(_NumericBound, bounds.le) + lt = cast(_NumericBound, bounds.lt) + + # Both bounds inclusive: compact "lower to upper" form + if ge is not None and le is not None: + return f"{_format_bound(ge)} to {_format_bound(le)}" + + # Any other two-bound combination: use explicit operators + parts: list[str] = [] + if ge is not None: + parts.append(f">= {_format_bound(ge)}") + elif gt is not None: + parts.append(f"> {_format_bound(gt)}") + + if le is not None: + parts.append(f"<= {_format_bound(le)}") + elif lt is not None: + parts.append(f"< {_format_bound(lt)}") + + return ", ".join(parts) + + +def _bit_width_key(name: str) -> tuple[str, int]: + """Sort key: prefix then numeric bit width.""" + prefix = name.rstrip("0123456789") + digits = name[len(prefix) :] + return (prefix, int(digits) if digits else 0) + + +def render_primitives_from_specs(specs: list[NumericSpec]) -> str: + """Render the primitives.md page from pre-extracted NumericSpecs.""" + template = _get_jinja_env().get_template("primitives.md.jinja2") + + signed_ints: list[dict[str, str | None]] = [] + unsigned_ints: list[dict[str, str | None]] = [] + floats: list[dict[str, str | None]] = [] + + for spec in sorted(specs, key=lambda s: _bit_width_key(s.name)): + if spec.name.startswith(("int", "uint")): + target = signed_ints if spec.name.startswith("int") else unsigned_ints + target.append( + { + "name": spec.name, + "range": _format_interval(spec.bounds), + "description": _sanitize_for_table_cell(spec.description or ""), + } + ) + elif spec.name.startswith("float"): + precision = ( + _FLOAT_PRECISION.get(spec.float_bits, "") if spec.float_bits else "" + ) + floats.append( + { + "name": spec.name, + "precision": precision, + "description": _sanitize_for_table_cell(spec.description or ""), + } + ) + + return template.render( + signed_ints=signed_ints, + unsigned_ints=unsigned_ints, + floats=floats, + ) + + +def 
render_geometry_from_values(geometry_type_values: list[str]) -> str: + """Render the geometry.md page from pre-extracted geometry type values.""" + template = _get_jinja_env().get_template("geometry.md.jinja2") + geometry_types = ", ".join(f"`{v}`" for v in geometry_type_values) + return template.render(geometry_types=geometry_types) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/reverse_references.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/reverse_references.py new file mode 100644 index 000000000..2ad471fc1 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/reverse_references.py @@ -0,0 +1,173 @@ +"""Compute reverse references from types to their referrers.""" + +from __future__ import annotations + +from collections.abc import Mapping, Sequence +from dataclasses import dataclass +from enum import Enum + +from ..extraction.specs import ( + FeatureSpec, + FieldSpec, + ModelSpec, + NewTypeSpec, + SupplementarySpec, + TypeIdentity, + UnionSpec, + is_pydantic_type, +) +from ..extraction.type_analyzer import TypeInfo, TypeKind, walk_type_info + +__all__ = [ + "UsedByEntry", + "UsedByKind", + "compute_reverse_references", +] + + +class UsedByKind(Enum): + """Kind of referrer in a 'used by' entry.""" + + MODEL = 0 + NEWTYPE = 1 + + +@dataclass(frozen=True, slots=True) +class UsedByEntry: + """A single 'used by' entry pointing to a referrer.""" + + identity: TypeIdentity + kind: UsedByKind + + +def compute_reverse_references( + feature_specs: Sequence[FeatureSpec], + all_specs: Mapping[TypeIdentity, SupplementarySpec], +) -> dict[TypeIdentity, list[UsedByEntry]]: + """Compute reverse references from types to their referrers. + + Returns a dict mapping TypeIdentity to lists of UsedByEntry, sorted with + models before NewTypes, alphabetical within each group. 
+ + Parameters + ---------- + feature_specs : Sequence[FeatureSpec] + Feature-level specs (ModelSpec or UnionSpec). + all_specs : Mapping[TypeIdentity, SupplementarySpec] + Supplementary types (enums, newtypes, sub-models). + + Returns + ------- + dict[TypeIdentity, list[UsedByEntry]] + Dict mapping TypeIdentity to sorted lists of UsedByEntry. + """ + # Track references with sets to deduplicate + references: dict[TypeIdentity, set[UsedByEntry]] = {} + + def add_reference( + target: TypeIdentity, referrer: TypeIdentity, kind: UsedByKind + ) -> None: + """Add a reference from referrer to target, with deduplication.""" + if target == referrer or target not in all_specs: + return + references.setdefault(target, set()).add(UsedByEntry(referrer, kind)) + + def collect_from_type_info( + ti: TypeInfo, referrer: TypeIdentity, referrer_kind: UsedByKind + ) -> None: + """Collect references from a TypeInfo.""" + + def _visit(node: TypeInfo) -> None: + if node.newtype_ref is not None and node.newtype_name is not None: + add_reference( + TypeIdentity(node.newtype_ref, node.newtype_name), + referrer, + referrer_kind, + ) + + # ENUM, MODEL, pydantic (PRIMITIVE), and UNION are mutually + # exclusive by TypeKind. 
+ if ( + node.kind in (TypeKind.ENUM, TypeKind.MODEL) + and node.source_type is not None + ): + add_reference( + TypeIdentity.of(node.source_type), + referrer, + referrer_kind, + ) + elif is_pydantic_type(node): + add_reference( + TypeIdentity.of(node.source_type), referrer, referrer_kind + ) + elif node.union_members is not None: + for member_cls in node.union_members: + add_reference( + TypeIdentity.of(member_cls), + referrer, + referrer_kind, + ) + + walk_type_info(ti, _visit) + + def collect_from_fields( + fields: list[FieldSpec], referrer: TypeIdentity, referrer_kind: UsedByKind + ) -> None: + """Collect references from model fields.""" + for field_spec in fields: + collect_from_type_info(field_spec.type_info, referrer, referrer_kind) + + def collect_from_model_spec(spec: ModelSpec, referrer: TypeIdentity) -> None: + """Collect references from a ModelSpec.""" + collect_from_fields(spec.fields, referrer, UsedByKind.MODEL) + + def collect_from_union_spec(spec: UnionSpec) -> None: + """Collect references from a UnionSpec.""" + referrer = spec.identity + # Union features reference their members + for member_cls in spec.members: + add_reference( + TypeIdentity.of(member_cls), + referrer, + UsedByKind.MODEL, + ) + # Also walk fields for other supplementary types + collect_from_fields(spec.fields, referrer, UsedByKind.MODEL) + + def collect_from_newtype_spec(spec: NewTypeSpec, referrer: TypeIdentity) -> None: + """Collect references from a NewTypeSpec.""" + collect_from_type_info(spec.type_info, referrer, UsedByKind.NEWTYPE) + + # Collect inherited NewTypes from constraint sources + for cs in spec.type_info.constraints: + if cs.source_ref is not None and cs.source_name is not None: + ref_id = TypeIdentity(cs.source_ref, cs.source_name) + add_reference(ref_id, referrer, UsedByKind.NEWTYPE) + + # Collect from features + for spec in feature_specs: + if isinstance(spec, ModelSpec): + collect_from_model_spec(spec, spec.identity) + elif isinstance(spec, UnionSpec): + 
collect_from_union_spec(spec) + + # Collect from supplementary specs (NewTypes and sub-models reference + # other types; enums do not, so they need no processing here) + for tid, supp_spec in all_specs.items(): + if isinstance(supp_spec, NewTypeSpec): + collect_from_newtype_spec(supp_spec, tid) + elif isinstance(supp_spec, ModelSpec): + collect_from_model_spec(supp_spec, tid) + + # Sort into deterministic lists. (kind, name) handles the common case; + # module breaks ties when two referrers share the same display name + # (e.g. identically-named types from different themes/modules). + result: dict[TypeIdentity, list[UsedByEntry]] = {} + for target, ref_set in references.items(): + entries = sorted( + ref_set, + key=lambda e: (e.kind.value, e.identity.name, e.identity.module), + ) + result[target] = entries + + return result diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/templates/_used_by.md.jinja2 b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/templates/_used_by.md.jinja2 new file mode 100644 index 000000000..fcbd9e82b --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/templates/_used_by.md.jinja2 @@ -0,0 +1,10 @@ +{% if used_by %} + +## Used By + +{% for entry in used_by -%} +{% if entry.link %}- [`{{ entry.name }}`]({{ entry.link }}) +{% else %}- `{{ entry.name }}` +{% endif %} +{% endfor %} +{% endif %} diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/templates/enum.md.jinja2 b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/templates/enum.md.jinja2 new file mode 100644 index 000000000..b5b71c254 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/templates/enum.md.jinja2 @@ -0,0 +1,13 @@ +# {{ enum.name }} +{% if enum.description %} + +{{ enum.description | linkify_urls }} +{% endif %} + +## Values + +{% for member in enum.members -%} +- `{{ member.value }}`{% if 
member.description %} - {{ member.description }}{% endif %} + +{% endfor %} +{% include '_used_by.md.jinja2' %} diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/templates/feature.md.jinja2 b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/templates/feature.md.jinja2 new file mode 100644 index 000000000..78a183c5e --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/templates/feature.md.jinja2 @@ -0,0 +1,45 @@ +# {{ model.name }} +{% if model.description %} + +{{ model.description | linkify_urls }} +{% endif %} + +## Fields + +| Name | Type | Description | +| -----: | :----: | ------------- | +{% for field in fields -%} +| {% if field.pre_formatted %}{{ field.name }}{% else %}`{{ field.name }}`{% endif %} | {{ field.type_str }} | {% if field.description %}{{ field.description }} {% endif %}| +{% endfor %} +{% if constraints %} + +## Constraints + +{% for c in constraints %} +- {{ c }} +{% endfor %} +{% endif %} +{% if examples %} + +## Examples +{% if examples|length == 1 %} + +| Column | Value | +| -------: | ------- | +{% for row in examples[0] -%} +| `{{ row.column }}` | {{ row.value }} | +{% endfor %} +{% else %} +{% for example in examples %} + +### Example {{ loop.index }} + +| Column | Value | +| -------: | ------- | +{% for row in example -%} +| `{{ row.column }}` | {{ row.value }} | +{% endfor %} +{% endfor %} +{% endif %} +{% endif %} +{% include '_used_by.md.jinja2' %} diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/templates/geometry.md.jinja2 b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/templates/geometry.md.jinja2 new file mode 100644 index 000000000..cd6b200de --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/templates/geometry.md.jinja2 @@ -0,0 +1,11 @@ +# Geometry Types + +Spatial types for representing geographic features. 
+ +## Types + +| Type | Description | +| -----: | ------------- | +| `Geometry` | GeoJSON geometry value (Point, LineString, Polygon, etc.) | +| `BBox` | Bounding box as 4 or 6 coordinate values: [west, south, east, north] or [west, south, min-altitude, east, north, max-altitude] | +| `GeometryType` | Enumeration of geometry types: {{ geometry_types }} | diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/templates/newtype.md.jinja2 b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/templates/newtype.md.jinja2 new file mode 100644 index 000000000..3d2c58f3a --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/templates/newtype.md.jinja2 @@ -0,0 +1,17 @@ +# {{ newtype.name }} +{% if newtype.description %} + +{{ newtype.description | linkify_urls }} +{% endif %} + +Underlying type: {{ underlying_type }} +{% if constraints %} + +## Constraints + +{% for c in constraints -%} +- {{ c.display }}{% if c.source_link %} (from [`{{ c.source }}`]({{ c.source_link }})){% endif %} + +{% endfor %} +{% endif %} +{% include '_used_by.md.jinja2' %} diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/templates/primitives.md.jinja2 b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/templates/primitives.md.jinja2 new file mode 100644 index 000000000..fd87a1ec0 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/templates/primitives.md.jinja2 @@ -0,0 +1,27 @@ +# Primitive Types + +Numeric types used for schema field definitions. 
+ +## Integer Types + +| Type | Range | Description | +| -----: | :-----: | ------------- | +{% for t in signed_ints -%} +| `{{ t.name }}` | {{ t.range }} | {{ t.description }} | +{% endfor %} + +## Unsigned Integer Types + +| Type | Range | Description | +| -----: | :-----: | ------------- | +{% for t in unsigned_ints -%} +| `{{ t.name }}` | {{ t.range }} | {{ t.description }} | +{% endfor %} + +## Floating Point Types + +| Type | Precision | Description | +| -----: | :---------: | ------------- | +{% for t in floats -%} +| `{{ t.name }}` | {{ t.precision }} | {{ t.description }} | +{% endfor %} diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/templates/pydantic_type.md.jinja2 b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/templates/pydantic_type.md.jinja2 new file mode 100644 index 000000000..3185acf56 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/templates/pydantic_type.md.jinja2 @@ -0,0 +1,8 @@ +# {{ pydantic_type.name }} +{% if pydantic_type.description %} + +{{ pydantic_type.description | linkify_urls }} +{% endif %} + +See: [Pydantic docs]({{ pydantic_type.docs_url }}) +{% include '_used_by.md.jinja2' %} diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/type_format.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/type_format.py new file mode 100644 index 000000000..b6bd7a6ec --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/type_format.py @@ -0,0 +1,240 @@ +"""Format TypeInfo as markdown type strings with cross-page links.""" + +from __future__ import annotations + +from pydantic import BaseModel + +from ..extraction.specs import FieldSpec, TypeIdentity +from ..extraction.type_analyzer import TypeInfo, TypeKind +from ..extraction.type_registry import is_semantic_newtype, resolve_type_name +from .link_computation import LinkContext + +__all__ = [ + 
"format_dict_type", + "format_type", + "format_underlying_type", + "resolve_type_link", +] + + +def _code_link(name: str, href: str) -> str: + """Format a markdown link with inline-code text: [`name`](href).""" + return f"[`{name}`]({href})" + + +def resolve_type_link(identity: TypeIdentity, ctx: LinkContext | None = None) -> str: + """Resolve a TypeIdentity to a linked code span or plain code span. + + When *ctx* is provided, links only to types in the registry (types + without pages render as inline code). Without context, renders as + inline code -- producing a link requires a placement registry to + compute correct relative paths. + """ + if ctx: + href = ctx.resolve_link(identity) + if href: + return _code_link(identity.name, href) + return f"`{identity.name}`" + + +def _wrap_list_n(inner: str, depth: int) -> str: + """Wrap an inner type string in `list<...>` markdown syntax *depth* times. + + Builds a single broken-backtick wrapper rather than nesting iteratively. + Iterative nesting creates adjacent backticks that CommonMark + interprets as multi-backtick code span delimiters. + """ + return f"`{'list<' * depth}`{inner}`{'>' * depth}`" + + +def _plain_list_type(base: str, depth: int) -> str: + """Format a plain (unlinked) list type string for *depth* nesting levels.""" + return f"`{'list<' * depth}{base}{'>' * depth}`" + + +def _linked_type_identity(ti: TypeInfo) -> TypeIdentity | None: + """Return the TypeIdentity to use for a markdown link, or None for non-linked types.""" + if ( + is_semantic_newtype(ti) + and ti.newtype_ref is not None + and ti.newtype_name is not None + ): + return TypeIdentity(ti.newtype_ref, ti.newtype_name) + if ti.kind in (TypeKind.ENUM, TypeKind.MODEL) and ti.source_type is not None: + return TypeIdentity(ti.source_type, ti.base_type) + return None + + +def _try_primitive_link( + ti: TypeInfo, display_name: str, ctx: LinkContext | None +) -> str | None: + """Try to link a PRIMITIVE type to its page via registry lookup. 
+ + Registered primitives (int32, Geometry) and Pydantic types (HttpUrl) + can have pages in the registry. Uses the type registry display name + (e.g. `geometry` not `Geometry`) for the link text. + """ + if ti.kind != TypeKind.PRIMITIVE or not ctx: + return None + candidate = ti.newtype_ref or ti.source_type + if candidate is None: + return None + href = ctx.resolve_link(TypeIdentity(candidate, display_name)) + if href: + return _code_link(display_name, href) + return None + + +def _markdown_type_name(ti: TypeInfo) -> str: + """Return the markdown display name for a type. + + Uses the semantic NewType name when present (e.g. `LanguageTag`), + otherwise falls back to the resolved markdown type (e.g. `string`). + """ + name = ti.newtype_name if is_semantic_newtype(ti) else None + return name or resolve_type_name(ti, "markdown") + + +def format_dict_type(ti: TypeInfo) -> str: + """Format a dict TypeInfo as bare `map` using resolved markdown names.""" + if ti.dict_key_type is None or ti.dict_value_type is None: + msg = f"format_dict_type requires dict key/value types, got {ti}" + raise ValueError(msg) + key = _markdown_type_name(ti.dict_key_type) + value = _markdown_type_name(ti.dict_value_type) + return f"map<{key}, {value}>" + + +def _format_union_members( + members: tuple[type[BaseModel], ...], + ctx: LinkContext | None, + separator: str = r" \| ", +) -> str: + r"""Format union members as individually linked/backticked names. + + Each member is resolved independently so members with pages get linked + while others render as plain code spans. *separator* is inserted between + members (default is `\|` for table-cell safety). 
+ """ + return separator.join(resolve_type_link(TypeIdentity.of(m), ctx) for m in members) + + +def format_type( + field: FieldSpec, + ctx: LinkContext | None = None, +) -> str: + """Format a field's type for markdown display, with links and qualifiers.""" + ti = field.type_info + qualifiers: list[str] = [] + + if ti.kind == TypeKind.LITERAL and ti.literal_values: + if len(ti.literal_values) == 1: + return f'`"{ti.literal_values[0]}"`' + return r" \| ".join(f'`"{v}"`' for v in ti.literal_values) + + identity = _linked_type_identity(ti) + + if ti.kind == TypeKind.UNION and ti.union_members: + display = _format_union_members(ti.union_members, ctx) + if ti.is_list: + qualifiers.append("list") + elif ti.is_dict: + if identity: + display = resolve_type_link(identity, ctx) + qualifiers.append("map") + else: + display = f"`{format_dict_type(ti)}`" + elif identity: + display = resolve_type_link(identity, ctx) + # List layers outside a NewType wrap with list<> syntax (e.g., list[PhoneNumber] + # renders as list). List layers inside a NewType use a (list) + # qualifier instead (e.g., Sources wrapping list[SourceItem] renders as + # Sources (list)), since the list-ness is an implementation detail of the type. + if ti.newtype_outer_list_depth > 0: + display = _wrap_list_n(display, ti.newtype_outer_list_depth) + elif ti.is_list and ti.newtype_name is not None: # list is inside the NewType + qualifiers.append("list") + elif ti.is_list: + display = _wrap_list_n(display, ti.list_depth) + else: + # Fallback: types without a linked identity. Registered primitives (int32, + # Geometry) and Pydantic types (HttpUrl) may still link to aggregate pages + # via the placement registry. Unregistered primitives render as plain code. 
+ base = resolve_type_name(ti, "markdown") + link = _try_primitive_link(ti, base, ctx) + if link and ti.is_list: + display = _wrap_list_n(link, ti.list_depth) + elif link: + display = link + elif ti.is_list: + display = _plain_list_type(base, ti.list_depth) + else: + display = f"`{base}`" + + if not field.is_required: + qualifiers.append("optional") + + if qualifiers: + return f"{display} ({', '.join(qualifiers)})" + return display + + +def _linked_or_backticked(ti: TypeInfo, ctx: LinkContext | None) -> tuple[str, bool]: + """Return (formatted_string, has_link) for a TypeInfo component. + + Used by format_underlying_type to decide whether container types + need broken-backtick formatting (interleaving backtick runs with + linked text). + + When `has_link` is True, `formatted_string` is a markdown link + ready for broken-backtick container syntax. When False, it is a raw + name that the caller embeds inside backticks. + """ + identity = _linked_type_identity(ti) + if identity and ctx: + href = ctx.resolve_link(identity) + if href: + return _code_link(identity.name, href), True + return _markdown_type_name(ti), False + + +def format_underlying_type(ti: TypeInfo, ctx: LinkContext | None = None) -> str: + """Format a NewType's underlying type for the page header, with links. + + Links enums and models that have their own pages. Does not link the + outermost NewType (which would self-reference). Dict key/value types + use full link resolution since they reference other types. 
+ """ + if ti.kind == TypeKind.UNION and ti.union_members: + return _format_union_members(ti.union_members, ctx, separator=" | ") + + if ti.is_dict and ti.dict_key_type and ti.dict_value_type: + key_str, key_linked = _linked_or_backticked(ti.dict_key_type, ctx) + val_str, val_linked = _linked_or_backticked(ti.dict_value_type, ctx) + if key_linked or val_linked: + if not key_linked: + key_str = f"`{key_str}`" + if not val_linked: + val_str = f"`{val_str}`" + return f"`map<`{key_str}`,`{val_str}`>`" + return f"`map<{key_str}, {val_str}>`" + + # Only link enums and models -- skip is_semantic_newtype to avoid + # self-linking (this TypeInfo belongs to the NewType being rendered). + identity = ( + TypeIdentity.of(ti.source_type) + if ti.kind in (TypeKind.ENUM, TypeKind.MODEL) and ti.source_type + else None + ) + if identity and ctx: + href = ctx.resolve_link(identity) + if href: + linked = _code_link(identity.name, href) + if ti.is_list: + return _wrap_list_n(linked, ti.list_depth) + return linked + + base = identity.name if identity else resolve_type_name(ti, "markdown") + if ti.is_list: + return _plain_list_type(base, ti.list_depth) + return f"`{base}`" diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/py.typed b/packages/overture-schema-codegen/src/overture/schema/codegen/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/packages/overture-schema-codegen/tests/codegen_test_support.py b/packages/overture-schema-codegen/tests/codegen_test_support.py new file mode 100644 index 000000000..64facf5a9 --- /dev/null +++ b/packages/overture-schema-codegen/tests/codegen_test_support.py @@ -0,0 +1,365 @@ +"""Shared test support for overture-schema-codegen tests. + +Provides reusable model fixtures and helpers. Pytest fixtures are in conftest.py. 
+""" + +from __future__ import annotations + +from collections.abc import Mapping +from difflib import unified_diff +from enum import Enum +from pathlib import Path +from typing import Annotated, Generic, Literal, NewType, TypeVar + +import pytest +from overture.schema.codegen.extraction.model_extraction import extract_model +from overture.schema.codegen.extraction.pydantic_extraction import extract_pydantic_type +from overture.schema.codegen.extraction.specs import ( + AnnotatedField, + EnumMemberSpec, + EnumSpec, + FieldSpec, + ModelSpec, + TypeIdentity, + UnionSpec, + is_model_class, +) +from overture.schema.codegen.extraction.type_analyzer import TypeInfo, TypeKind +from overture.schema.core.discovery import discover_models +from overture.schema.system.doc import DocumentedEnum +from overture.schema.system.field_constraint import UniqueItemsConstraint +from overture.schema.system.model_constraint import require_any_of +from overture.schema.system.primitive import ( + Geometry, + GeometryType, + GeometryTypeConstraint, + float64, + int32, +) +from overture.schema.system.ref import Id, Identified, Reference, Relationship +from overture.schema.system.string import HexColor, LanguageTag, StrippedString +from pydantic import BaseModel, EmailStr, Field, HttpUrl + +STR_TYPE = TypeInfo(base_type="str", kind=TypeKind.PRIMITIVE) + +ThemeT = TypeVar("ThemeT") +TypeT = TypeVar("TypeT") + + +class SimpleModel(BaseModel): + """A simple model.""" + + name: str + + +class FeatureBase(BaseModel, Generic[ThemeT, TypeT]): + """Base class mimicking OvertureFeature pattern for tests.""" + + theme: ThemeT + type: TypeT + + +# Separate TypeVars from ThemeT/TypeT: IdentifiedFeature models a +# non-Overture user building on Identified with their own nomenclature. 
+CategoryT = TypeVar("CategoryT") +KindT = TypeVar("KindT") + + +class IdentifiedFeature(Identified, Generic[CategoryT, KindT]): + """Feature with identity and typed category/kind.""" + + category: CategoryT + kind: KindT + + +class InstrumentFamily(str, DocumentedEnum): + """Classification by sound production method.""" + + STRING = "string", "Sound from vibrating strings" + WIND = "wind", "Sound from vibrating air column" + PERCUSSION = "percussion" + + +class SimpleKind(str, Enum): + SMALL = "small" + LARGE = "large" + + +class Instrument( + IdentifiedFeature[Literal["music"], Literal["instrument"]], +): + """A musical instrument. + + Instruments produce sound through vibration. They are classified + by how sound is produced. + """ + + name: str = Field(description="Common name") + tuning: float64 | None = Field( + None, + description=("Concert pitch in Hz.\n\nStandard tuning is 440 Hz."), + ) + num_strings: int32 | None = Field(None) + family: InstrumentFamily | None = None + color: HexColor | None = Field(None, description="Body color") + tags: Annotated[list[str], UniqueItemsConstraint()] | None = None + + +@require_any_of("name", "description") +class Venue( + IdentifiedFeature[Literal["music"], Literal["venue"]], +): + """A concert venue. + + A location where musical performances take place. 
+ """ + + name: str | None = Field(None, description="Venue name") + description: str | None = None + geometry: Annotated[ + Geometry, + GeometryTypeConstraint(GeometryType.POINT, GeometryType.POLYGON), + ] + capacity: Annotated[int, Field(ge=1)] | None = None + resident_ensemble: ( + Annotated[Id, Reference(Relationship.BELONGS_TO, Instrument)] | None + ) = None + + +class SourceItem(BaseModel): + """A source data reference.""" + + dataset: str = Field(description="Source dataset name") + + +Sources = NewType( + "Sources", + Annotated[ + list[SourceItem], + Field(min_length=1, description="Source data references"), + UniqueItemsConstraint(), + ], +) + + +class FeatureWithSources( + FeatureBase[Literal["test"], Literal["sourced"]], +): + """A feature with a Sources field.""" + + name: str = Field(description="Feature name") + sources: Sources | None = None + + +class Address(BaseModel): + """A mailing address.""" + + street: str = Field(description="Street name") + city: str = Field(description="City name") + zip_code: str | None = Field(None, description="Postal code") + + +class FeatureWithAddress( + FeatureBase[Literal["test"], Literal["addressed"]], +): + """A feature with an address field.""" + + title: str = Field(description="Feature title") + address: Address + + +class TreeNode(BaseModel): + """A recursive tree node.""" + + label: str = Field(description="Node label") + parent: TreeNode | None = None + + +class Widget(BaseModel): + active: bool + label: str = Field(description="Display label") + + +CommonNames = NewType("CommonNames", dict[LanguageTag, StrippedString]) + + +class FeatureWithDict( + FeatureBase[Literal["test"], Literal["dictfeat"]], +): + """A feature with dict fields.""" + + name: str = Field(description="Feature name") + names: CommonNames | None = Field(None, description="Localized names") + alt_names: dict[LanguageTag, StrippedString] | None = Field( + None, description="Alternate localized names" + ) + tags: dict[str, str] | None = 
Field(None, description="Arbitrary tags") + metadata: dict[str, int] = Field(description="Numeric metadata") + + +class FeatureWithUrl(FeatureBase[Literal["test"], Literal["linked"]]): + """A feature with Pydantic URL and email fields.""" + + website: HttpUrl | None = None + emails: list[EmailStr] | None = None + + +HTTP_URL_SPEC = extract_pydantic_type(HttpUrl) +EMAIL_STR_SPEC = extract_pydantic_type(EmailStr) + + +class SegmentBase(BaseModel): + """Common base for test segments.""" + + geometry: str + subtype: str + + +class RoadSegment(SegmentBase): + subtype: Literal["road"] + class_: Annotated[str, Field(alias="class")] + speed_limit: int | None = None + + +class RailSegment(SegmentBase): + subtype: Literal["rail"] + class_: Annotated[int, Field(alias="class")] + rail_gauge: float | None = None + + +class WaterSegment(SegmentBase): + subtype: Literal["water"] + + +TestSegment = Annotated[ + RoadSegment | RailSegment | WaterSegment, + Field(description="Test segment union"), +] + + +class ContactInfo(BaseModel): + """Contact information for a venue.""" + + email: str = Field(description="Email address") + phone: str | None = Field(None, description="Phone number") + + +class VenueWithContact(SegmentBase): + """A segment variant with a nested sub-model field.""" + + subtype: Literal["venue"] + contact: ContactInfo + + +TestSegmentWithSubModel = Annotated[ + RoadSegment | VenueWithContact, + Field(description="Test segment union with sub-model member"), +] + + +def make_union_spec( + name: str = "TestUnion", + *, + description: str | None = None, + annotated_fields: list[AnnotatedField] | None = None, + members: list[type[BaseModel]] | None = None, + source_annotation: object = None, + common_base: type[BaseModel] | None = None, + entry_point: str | None = None, +) -> UnionSpec: + """Build a UnionSpec with sensible defaults for tests.""" + return UnionSpec( + name=name, + description=description, + annotated_fields=annotated_fields or [], + members=members or [], 
+ discriminator_field=None, + discriminator_mapping=None, + source_annotation=source_annotation, + common_base=common_base or BaseModel, + entry_point=entry_point, + ) + + +def find_model_class(name: str, models: dict[object, object]) -> type[BaseModel]: + """Find a discovered model class by name.""" + matches = [v for v in models.values() if getattr(v, "__name__", None) == name] + assert matches, f"{name} model not found" + match = matches[0] + assert isinstance(match, type) + assert issubclass(match, BaseModel) + return match + + +def find_field(spec: ModelSpec, name: str) -> FieldSpec: + """Find a field by name in a ModelSpec, raising if missing.""" + return next(f for f in spec.fields if f.name == name) + + +def find_member(spec: EnumSpec, name: str) -> EnumMemberSpec: + """Find a member by name in an EnumSpec, raising if missing.""" + return next(m for m in spec.members if m.name == name) + + +T = TypeVar("T") + + +def lookup_by_name(mapping: dict[TypeIdentity, T], name: str) -> T: + """Look up a value in a TypeIdentity-keyed dict by name, raising KeyError if absent.""" + for tid, value in mapping.items(): + if tid.name == name: + return value + raise KeyError(name) + + +def has_name(mapping: Mapping[TypeIdentity, object], name: str) -> bool: + """Check whether a TypeIdentity-keyed mapping contains a key with the given name.""" + return any(tid.name == name for tid in mapping) + + +def assert_literal_field( + spec: ModelSpec, field_name: str, expected_value: object +) -> None: + """Assert a field is a single-value Literal with the expected value.""" + field = find_field(spec, field_name) + assert field.type_info.kind == TypeKind.LITERAL + assert field.type_info.literal_values == (expected_value,) + + +def flat_specs_from_discovery( + theme: str | None = None, +) -> list[ModelSpec]: + """Build a flat list of ModelSpecs from discovery, with entry_point set.""" + models = discover_models() + if theme: + models = {k: v for k, v in models.items() if k.theme == 
theme} + result = [] + for key, cls in models.items(): + if not is_model_class(cls): + continue + result.append(extract_model(cls, entry_point=key.entry_point)) + return result + + +def assert_golden(actual: str, golden_path: Path, *, update: bool) -> None: + """Compare rendered output against a golden file. + + When update is True, writes actual content to the golden file + instead of comparing. + """ + if update: + golden_path.parent.mkdir(parents=True, exist_ok=True) + golden_path.write_text(actual) + return + expected = golden_path.read_text() + if actual != expected: + diff = "\n".join( + unified_diff( + expected.splitlines(), + actual.splitlines(), + fromfile=str(golden_path), + tofile="actual", + lineterm="", + ) + ) + pytest.fail(f"Golden file mismatch:\n{diff}") diff --git a/packages/overture-schema-codegen/tests/conftest.py b/packages/overture-schema-codegen/tests/conftest.py new file mode 100644 index 000000000..8dce88bf5 --- /dev/null +++ b/packages/overture-schema-codegen/tests/conftest.py @@ -0,0 +1,82 @@ +"""Shared pytest fixtures for overture-schema-codegen tests.""" + +import overture.schema.system.primitive as _system_primitive +import pytest +from click.testing import CliRunner +from codegen_test_support import find_model_class +from overture.schema.codegen.extraction.model_extraction import extract_model +from overture.schema.codegen.extraction.numeric_extraction import extract_numerics +from overture.schema.codegen.extraction.specs import ModelSpec +from overture.schema.codegen.markdown.pipeline import ( + partition_numeric_and_geometry_types, +) +from overture.schema.codegen.markdown.renderer import ( + render_geometry_from_values, + render_primitives_from_specs, +) +from overture.schema.core.discovery import discover_models +from overture.schema.system.primitive import GeometryType +from pydantic import BaseModel + + +def pytest_addoption(parser: pytest.Parser) -> None: + parser.addoption( + "--update-golden", + action="store_true", + 
default=False, + help="Regenerate golden files instead of comparing against them", + ) + + +@pytest.fixture +def update_golden(request: pytest.FixtureRequest) -> bool: + return bool(request.config.getoption("--update-golden")) + + +@pytest.fixture +def cli_runner() -> CliRunner: + """Provide a Click CLI test runner.""" + return CliRunner() + + +@pytest.fixture +def all_discovered_models() -> dict: + """Discover and return all registered Overture models.""" + return discover_models() + + +@pytest.fixture +def building_class(all_discovered_models: dict) -> type[BaseModel]: + """Get the Building model class.""" + return find_model_class("Building", all_discovered_models) + + +@pytest.fixture +def building_spec(building_class: type[BaseModel]) -> ModelSpec: + """Extract the Building model spec.""" + return extract_model(building_class) + + +@pytest.fixture +def place_class(all_discovered_models: dict) -> type[BaseModel]: + """Get the Place model class.""" + return find_model_class("Place", all_discovered_models) + + +@pytest.fixture +def division_class(all_discovered_models: dict) -> type[BaseModel]: + """Get the Division model class.""" + return find_model_class("Division", all_discovered_models) + + +@pytest.fixture(scope="module") +def primitives_markdown() -> str: + """Render the primitives.md page from the system primitive module.""" + numeric_names, _ = partition_numeric_and_geometry_types(_system_primitive) + return render_primitives_from_specs(extract_numerics(numeric_names)) + + +@pytest.fixture(scope="module") +def geometry_markdown() -> str: + """Render the geometry.md page from system GeometryType values.""" + return render_geometry_from_values([m.value for m in GeometryType]) diff --git a/packages/overture-schema-codegen/tests/golden/markdown/common_names.md b/packages/overture-schema-codegen/tests/golden/markdown/common_names.md new file mode 100644 index 000000000..c73d708c9 --- /dev/null +++ 
b/packages/overture-schema-codegen/tests/golden/markdown/common_names.md @@ -0,0 +1,7 @@ +# CommonNames + +Underlying type: `map` + +## Used By + +- `FeatureWithDict` diff --git a/packages/overture-schema-codegen/tests/golden/markdown/feature_with_address.md b/packages/overture-schema-codegen/tests/golden/markdown/feature_with_address.md new file mode 100644 index 000000000..fdbfdc7a8 --- /dev/null +++ b/packages/overture-schema-codegen/tests/golden/markdown/feature_with_address.md @@ -0,0 +1,15 @@ +# FeatureWithAddress + +A feature with an address field. + +## Fields + +| Name | Type | Description | +| -----: | :----: | ------------- | +| `theme` | `"test"` | | +| `type` | `"addressed"` | | +| `title` | `string` | Feature title | +| `address` | `Address` | | +| `address.street` | `string` | Street name | +| `address.city` | `string` | City name | +| `address.zip_code` | `string` (optional) | Postal code | diff --git a/packages/overture-schema-codegen/tests/golden/markdown/feature_with_dict.md b/packages/overture-schema-codegen/tests/golden/markdown/feature_with_dict.md new file mode 100644 index 000000000..499787d06 --- /dev/null +++ b/packages/overture-schema-codegen/tests/golden/markdown/feature_with_dict.md @@ -0,0 +1,15 @@ +# FeatureWithDict + +A feature with dict fields. 
+ +## Fields + +| Name | Type | Description | +| -----: | :----: | ------------- | +| `theme` | `"test"` | | +| `type` | `"dictfeat"` | | +| `name` | `string` | Feature name | +| `names` | `CommonNames` (map, optional) | Localized names | +| `alt_names` | `map` (optional) | Alternate localized names | +| `tags` | `map` (optional) | Arbitrary tags | +| `metadata` | `map` | Numeric metadata | diff --git a/packages/overture-schema-codegen/tests/golden/markdown/feature_with_sources.md b/packages/overture-schema-codegen/tests/golden/markdown/feature_with_sources.md new file mode 100644 index 000000000..c3e4bc39b --- /dev/null +++ b/packages/overture-schema-codegen/tests/golden/markdown/feature_with_sources.md @@ -0,0 +1,13 @@ +# FeatureWithSources + +A feature with a Sources field. + +## Fields + +| Name | Type | Description | +| -----: | :----: | ------------- | +| `theme` | `"test"` | | +| `type` | `"sourced"` | | +| `name` | `string` | Feature name | +| `sources[]` | `Sources` (list, optional) | Source data references | +| `sources[].dataset` | `string` | Source dataset name | diff --git a/packages/overture-schema-codegen/tests/golden/markdown/hex_color.md b/packages/overture-schema-codegen/tests/golden/markdown/hex_color.md new file mode 100644 index 000000000..847a1b9a5 --- /dev/null +++ b/packages/overture-schema-codegen/tests/golden/markdown/hex_color.md @@ -0,0 +1,19 @@ +# HexColor + +A color represented as an #RRGGBB or #RGB hexadecimal string. + +For example: + +- `"#ff0000"` or `#f00` for pure red 🟥 +- `"#ffa500"` for bright orange 🟧 +- `"#000000"` or `"#000"` for black ⬛ + +Underlying type: `string` + +## Constraints + +- Allows only hexadecimal color codes (e.g., #FF0000 or #FFF). 
(`HexColorConstraint`, pattern: `^#[0-9A-Fa-f]{3}([0-9A-Fa-f]{3})?$`) + +## Used By + +- `Instrument` diff --git a/packages/overture-schema-codegen/tests/golden/markdown/id.md b/packages/overture-schema-codegen/tests/golden/markdown/id.md new file mode 100644 index 000000000..b2bfa2995 --- /dev/null +++ b/packages/overture-schema-codegen/tests/golden/markdown/id.md @@ -0,0 +1,15 @@ +# Id + +A unique identifier. + +Underlying type: `string` + +## Constraints + +- Minimum length: 1 +- Allows only strings that contain no whitespace characters. (`NoWhitespaceConstraint`, pattern: `^\S+$`) + +## Used By + +- `Instrument` +- `Venue` diff --git a/packages/overture-schema-codegen/tests/golden/markdown/instrument.md b/packages/overture-schema-codegen/tests/golden/markdown/instrument.md new file mode 100644 index 000000000..727f1b559 --- /dev/null +++ b/packages/overture-schema-codegen/tests/golden/markdown/instrument.md @@ -0,0 +1,20 @@ +# Instrument + +A musical instrument. + +Instruments produce sound through vibration. They are classified +by how sound is produced. + +## Fields + +| Name | Type | Description | +| -----: | :----: | ------------- | +| `id` | `Id` | Unique identifier | +| `category` | `"music"` | | +| `kind` | `"instrument"` | | +| `name` | `string` | Common name | +| `tuning` | `float64` (optional) | Concert pitch in Hz.

Standard tuning is 440 Hz. | +| `num_strings` | `int32` (optional) | | +| `family` | `InstrumentFamily` (optional) | | +| `color` | `HexColor` (optional) | Body color | +| `tags` | `list` (optional) | *All items must be unique. (`UniqueItemsConstraint`)* | diff --git a/packages/overture-schema-codegen/tests/golden/markdown/instrument_family.md b/packages/overture-schema-codegen/tests/golden/markdown/instrument_family.md new file mode 100644 index 000000000..d8489cc4f --- /dev/null +++ b/packages/overture-schema-codegen/tests/golden/markdown/instrument_family.md @@ -0,0 +1,13 @@ +# InstrumentFamily + +Classification by sound production method. + +## Values + +- `string` - Sound from vibrating strings +- `wind` - Sound from vibrating air column +- `percussion` + +## Used By + +- `Instrument` diff --git a/packages/overture-schema-codegen/tests/golden/markdown/simple_kind.md b/packages/overture-schema-codegen/tests/golden/markdown/simple_kind.md new file mode 100644 index 000000000..f0aca0300 --- /dev/null +++ b/packages/overture-schema-codegen/tests/golden/markdown/simple_kind.md @@ -0,0 +1,6 @@ +# SimpleKind + +## Values + +- `small` +- `large` diff --git a/packages/overture-schema-codegen/tests/golden/markdown/sources.md b/packages/overture-schema-codegen/tests/golden/markdown/sources.md new file mode 100644 index 000000000..ec0343cb6 --- /dev/null +++ b/packages/overture-schema-codegen/tests/golden/markdown/sources.md @@ -0,0 +1,14 @@ +# Sources + +Source data references + +Underlying type: `list` + +## Constraints + +- Minimum length: 1 +- All items must be unique. (`UniqueItemsConstraint`) + +## Used By + +- `FeatureWithSources` diff --git a/packages/overture-schema-codegen/tests/golden/markdown/venue.md b/packages/overture-schema-codegen/tests/golden/markdown/venue.md new file mode 100644 index 000000000..edb0578ef --- /dev/null +++ b/packages/overture-schema-codegen/tests/golden/markdown/venue.md @@ -0,0 +1,22 @@ +# Venue + +A concert venue. 
+ +A location where musical performances take place. + +## Fields + +| Name | Type | Description | +| -----: | :----: | ------------- | +| `id` | `Id` | Unique identifier | +| `category` | `"music"` | | +| `kind` | `"venue"` | | +| `name` | `string` (optional) | Venue name

*At least one of `name`, `description` must be set* | +| `description` | `string` (optional) | *At least one of `name`, `description` must be set* | +| `geometry` | `geometry` | *Allowed geometry types: Point, Polygon* | +| `capacity` | `int64` (optional) | *`≥ 1`* | +| `resident_ensemble` | `Id` (optional) | A unique identifier

*References `Instrument` (belongs to)* | + +## Constraints + +- At least one of `name`, `description` must be set diff --git a/packages/overture-schema-codegen/tests/golden/markdown/widget.md b/packages/overture-schema-codegen/tests/golden/markdown/widget.md new file mode 100644 index 000000000..c056d27a3 --- /dev/null +++ b/packages/overture-schema-codegen/tests/golden/markdown/widget.md @@ -0,0 +1,8 @@ +# Widget + +## Fields + +| Name | Type | Description | +| -----: | :----: | ------------- | +| `active` | `boolean` | | +| `label` | `string` | Display label | diff --git a/packages/overture-schema-codegen/tests/test_cli.py b/packages/overture-schema-codegen/tests/test_cli.py new file mode 100644 index 000000000..eecd45627 --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_cli.py @@ -0,0 +1,434 @@ +"""Tests for CLI entrypoint.""" + +import json +import re +from pathlib import Path + +import pytest +from click.testing import CliRunner +from overture.schema.codegen.cli import cli +from overture.schema.codegen.extraction.specs import ModelSpec + + +class TestCliList: + """Tests for the list command.""" + + def test_list_command_exists(self, cli_runner: CliRunner) -> None: + """list command should be available.""" + result = cli_runner.invoke(cli, ["list"]) + assert result.exit_code == 0 + + def test_list_shows_discovered_models(self, cli_runner: CliRunner) -> None: + """list command should show discovered models.""" + result = cli_runner.invoke(cli, ["list"]) + + assert "Building" in result.output + assert "Place" in result.output + + +class TestCliGenerate: + """Tests for the generate command.""" + + def test_generate_command_exists(self, cli_runner: CliRunner) -> None: + """generate command should be available.""" + result = cli_runner.invoke(cli, ["generate", "--help"]) + + assert result.exit_code == 0 + assert "Generate" in result.output or "generate" in result.output + + def test_generate_requires_format(self, cli_runner: CliRunner) -> None: + 
"""generate command should require --format.""" + result = cli_runner.invoke(cli, ["generate"]) + assert result.exit_code != 0 + + def test_generate_markdown_to_stdout(self, cli_runner: CliRunner) -> None: + """generate --format markdown should output markdown to stdout.""" + result = cli_runner.invoke(cli, ["generate", "--format", "markdown"]) + + assert result.exit_code == 0 + assert "# Building" in result.output or "# " in result.output + + def test_generate_with_theme_filter(self, cli_runner: CliRunner) -> None: + """generate --theme should filter to specific theme.""" + result = cli_runner.invoke( + cli, ["generate", "--format", "markdown", "--theme", "buildings"] + ) + + assert result.exit_code == 0 + assert "Building" in result.output + assert "Place" not in result.output + + def test_generate_markdown_feature_at_theme_level( + self, cli_runner: CliRunner, tmp_path: Path + ) -> None: + """Markdown features go directly in theme directory.""" + result = cli_runner.invoke( + cli, + [ + "generate", + "--format", + "markdown", + "--theme", + "buildings", + "--output-dir", + str(tmp_path), + ], + ) + assert result.exit_code == 0 + + # Feature models at theme level + assert (tmp_path / "buildings" / "building.md").exists() + assert (tmp_path / "buildings" / "building_part.md").exists() + + # NOT in subdirectories + assert not (tmp_path / "buildings" / "building" / "building.md").exists() + + def test_feature_pages_have_sidebar_position( + self, cli_runner: CliRunner, tmp_path: Path + ) -> None: + """Feature pages include sidebar_position frontmatter.""" + result = cli_runner.invoke( + cli, + [ + "generate", + "--format", + "markdown", + "--theme", + "buildings", + "--output-dir", + str(tmp_path), + ], + ) + assert result.exit_code == 0 + + content = (tmp_path / "buildings" / "building.md").read_text() + assert content.startswith("---\nsidebar_position: 1\n---\n") + + def test_generate_markdown_shared_types_mirror_modules( + self, cli_runner: CliRunner, tmp_path: 
Path + ) -> None: + """Core/system types land in module-mirrored directories.""" + result = cli_runner.invoke( + cli, + ["generate", "--format", "markdown", "--output-dir", str(tmp_path)], + ) + assert result.exit_code == 0 + + core_dir = tmp_path / "core" + assert core_dir.exists(), "core/ directory should exist" + subdirs = [d.name for d in core_dir.iterdir() if d.is_dir()] + assert len(subdirs) > 0, "core/ should have subdirectories" + + def test_generate_multiple_themes_to_output_dir( + self, cli_runner: CliRunner, tmp_path: Path + ) -> None: + """generate all themes should create subdirectories for each theme.""" + result = cli_runner.invoke( + cli, + [ + "generate", + "--format", + "markdown", + "--output-dir", + str(tmp_path), + ], + ) + + assert result.exit_code == 0 + + subdirs = [d.name for d in tmp_path.iterdir() if d.is_dir()] + assert "buildings" in subdirs + assert "places" in subdirs + + def test_generate_no_duplicate_files( + self, cli_runner: CliRunner, tmp_path: Path + ) -> None: + """No type should produce duplicate output files.""" + result = cli_runner.invoke( + cli, + [ + "generate", + "--format", + "markdown", + "--output-dir", + str(tmp_path), + ], + ) + + assert result.exit_code == 0 + + all_files = list(tmp_path.rglob("*.md")) + all_paths = [str(f.relative_to(tmp_path)) for f in all_files] + assert len(all_paths) == len(set(all_paths)), ( + f"Duplicate files: {[p for p in all_paths if all_paths.count(p) > 1]}" + ) + + +class TestCliGenerateLinkIntegrity: + """Verify all markdown links resolve to existing files.""" + + def test_all_links_resolve(self, cli_runner: CliRunner, tmp_path: Path) -> None: + """Every markdown link target should exist as a file.""" + result = cli_runner.invoke( + cli, + [ + "generate", + "--format", + "markdown", + "--output-dir", + str(tmp_path), + ], + ) + + assert result.exit_code == 0 + + link_re = re.compile(r"\[.*?\]\(([^)]+\.md(?:#[^)]*)?)\)") + broken: list[str] = [] + + for md_file in 
tmp_path.rglob("*.md"): + content = md_file.read_text() + for match in link_re.finditer(content): + href = match.group(1).split("#")[0] + # Resolve relative path from the file's directory + target = (md_file.parent / href).resolve() + if not target.exists(): + rel = md_file.relative_to(tmp_path) + broken.append(f"{rel}: {href}") + + assert not broken, "Broken links:\n" + "\n".join(broken) + + +class TestCliGenerateCategoryFiles: + """Tests for _category_.json generation.""" + + def test_generates_category_files( + self, cli_runner: CliRunner, tmp_path: Path + ) -> None: + """Should generate _category_.json files in output directories.""" + result = cli_runner.invoke( + cli, + [ + "generate", + "--format", + "markdown", + "--theme", + "buildings", + "--output-dir", + str(tmp_path), + ], + ) + + assert result.exit_code == 0 + + # Theme directory should have a category file + cat_file = tmp_path / "buildings" / "_category_.json" + assert cat_file.exists() + data = json.loads(cat_file.read_text()) + assert data["label"] == "Buildings" + + def test_core_directory_has_category_file( + self, cli_runner: CliRunner, tmp_path: Path + ) -> None: + """core/ directory should have _category_.json.""" + result = cli_runner.invoke( + cli, + [ + "generate", + "--format", + "markdown", + "--output-dir", + str(tmp_path), + ], + ) + + assert result.exit_code == 0 + + cat_file = tmp_path / "core" / "_category_.json" + assert cat_file.exists() + data = json.loads(cat_file.read_text()) + assert data["label"] == "Core" + + def test_feature_dirs_positioned_before_non_feature_dirs( + self, cli_runner: CliRunner, tmp_path: Path + ) -> None: + """Feature directories should have lower position than non-feature directories.""" + result = cli_runner.invoke( + cli, + [ + "generate", + "--format", + "markdown", + "--output-dir", + str(tmp_path), + ], + ) + assert result.exit_code == 0 + + def pos(dir_name: str) -> int: + data = json.loads((tmp_path / dir_name / "_category_.json").read_text()) + 
result: int = data["position"] + return result + + # Feature directories (contain feature pages) should sort before + # non-feature directories (core, system -- shared types only) + feature_positions = [pos("buildings"), pos("places"), pos("transportation")] + non_feature_positions = [pos("core"), pos("system")] + + assert max(feature_positions) < min(non_feature_positions) + + def test_subdirectories_have_no_position( + self, cli_runner: CliRunner, tmp_path: Path + ) -> None: + """Only top-level directories get position values.""" + result = cli_runner.invoke( + cli, + [ + "generate", + "--format", + "markdown", + "--output-dir", + str(tmp_path), + ], + ) + assert result.exit_code == 0 + + data = json.loads( + (tmp_path / "core" / "scoping" / "_category_.json").read_text() + ) + assert "position" not in data + + +class TestCliGenerateEnums: + """Tests for enum generation in the generate command.""" + + def test_generate_markdown_includes_enum_files( + self, cli_runner: CliRunner, tmp_path: Path + ) -> None: + """generate --format markdown should create enum documentation files.""" + result = cli_runner.invoke( + cli, + [ + "generate", + "--format", + "markdown", + "--theme", + "buildings", + "--output-dir", + str(tmp_path), + ], + ) + + assert result.exit_code == 0 + + # Enum files exist somewhere under the buildings directory + all_md = list((tmp_path / "buildings").rglob("*.md")) + all_names = [f.stem for f in all_md] + + assert "building" in all_names + + # Should have enum files beyond the feature models + non_feature = [n for n in all_names if n not in ("building", "building_part")] + assert len(non_feature) > 0, "Should generate enum documentation files" + + +class TestCliEntryPoint: + """generate populates entry_point from discovery keys.""" + + def test_generate_sets_entry_point_on_specs( + self, cli_runner: CliRunner, monkeypatch: pytest.MonkeyPatch + ) -> None: + captured: list[ModelSpec] = [] + + def spy(feature_specs: list, schema_root: str, 
output_dir: object) -> None: + captured.extend(feature_specs) + + monkeypatch.setattr("overture.schema.codegen.cli._generate_markdown", spy) + result = cli_runner.invoke( + cli, ["generate", "--format", "markdown", "--theme", "buildings"] + ) + + assert result.exit_code == 0 + assert len(captured) > 0 + for spec in captured: + assert spec.entry_point is not None, f"{spec.name} missing entry_point" + assert ":" in spec.entry_point, ( + f"entry_point should be entry-point style: {spec.entry_point!r}" + ) + + +class TestCliHelp: + """Tests for CLI help.""" + + def test_main_help(self, cli_runner: CliRunner) -> None: + """--help should show usage information.""" + result = cli_runner.invoke(cli, ["--help"]) + + assert result.exit_code == 0 + assert "generate" in result.output + assert "list" in result.output + + +class TestGenerateWithSegment: + """Integration test: Segment union produces markdown output.""" + + def test_segment_appears_in_markdown_output( + self, cli_runner: CliRunner, tmp_path: Path + ) -> None: + """Generate markdown and verify Segment page exists.""" + result = cli_runner.invoke( + cli, + [ + "generate", + "--format", + "markdown", + "--theme", + "transportation", + "--output-dir", + str(tmp_path), + ], + ) + assert result.exit_code == 0 + + # Segment page should exist + segment_files = list(tmp_path.rglob("segment.md")) + assert len(segment_files) >= 1, f"No segment.md found in {tmp_path}" + + content = segment_files[0].read_text() + assert "# Segment" in content + assert "subtype" in content + + +class TestReverseReferences: + """Integration test: Reverse references appear in generated markdown.""" + + def test_used_by_sections_appear_in_markdown( + self, cli_runner: CliRunner, tmp_path: Path + ) -> None: + """Generate markdown and verify Used By sections appear.""" + result = cli_runner.invoke( + cli, + [ + "generate", + "--format", + "markdown", + "--theme", + "buildings", + "--output-dir", + str(tmp_path), + ], + ) + assert result.exit_code == 
0 + + # Find a supplementary type that should have Used By section + # For example, if Building references some enum or NewType + all_md = list(tmp_path.rglob("*.md")) + + # At least one supplementary type should have a Used By section + has_used_by = False + for md_file in all_md: + content = md_file.read_text() + if "## Used By" in content: + has_used_by = True + break + + assert has_used_by, "No 'Used By' sections found in any generated markdown" diff --git a/packages/overture-schema-codegen/tests/test_constraint_description.py b/packages/overture-schema-codegen/tests/test_constraint_description.py new file mode 100644 index 000000000..9961ef2b2 --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_constraint_description.py @@ -0,0 +1,458 @@ +"""Tests for constraint description (model-level and field-level).""" + +from annotated_types import Ge, Gt, Interval, Le, Lt, MaxLen, MinLen +from overture.schema.codegen.extraction.field_constraints import ( + constraint_display_text, + describe_field_constraint, +) +from overture.schema.codegen.extraction.model_constraints import ( + analyze_model_constraints, +) +from overture.schema.codegen.extraction.specs import TypeIdentity +from overture.schema.codegen.extraction.type_analyzer import ConstraintSource +from overture.schema.system.model_constraint import ( + FieldEqCondition, + ForbidIfConstraint, + MinFieldsSetConstraint, + ModelConstraint, + NoExtraFieldsConstraint, + Not, + RadioGroupConstraint, + RequireAnyOfConstraint, + RequireIfConstraint, +) +from overture.schema.system.primitive import GeometryType, GeometryTypeConstraint +from overture.schema.system.ref import Reference, Relationship +from overture.schema.system.ref.id import Identified + + +def describe_model_constraints( + constraints: tuple[ModelConstraint, ...], +) -> list[str]: + descriptions, _ = analyze_model_constraints(constraints) + return descriptions + + +def field_constraint_notes( + constraints: tuple[ModelConstraint, ...], +) -> 
dict[str, list[str]]: + _, field_notes = analyze_model_constraints(constraints) + return field_notes + + +class TestDescribeSingleConstraint: + """Each constraint type produces readable prose.""" + + def test_require_any_of(self) -> None: + constraint = RequireAnyOfConstraint._create_internal( + "@require_any_of", "name", "description" + ) + result = describe_model_constraints((constraint,)) + + assert result == ["At least one of `name`, `description` must be set"] + + def test_radio_group(self) -> None: + constraint = RadioGroupConstraint._create_internal( + "@radio_group", "is_land", "is_territorial" + ) + result = describe_model_constraints((constraint,)) + + assert result == ["Exactly one of `is_land`, `is_territorial` must be `true`"] + + def test_min_fields_set(self) -> None: + constraint = MinFieldsSetConstraint._create_internal("@min_fields_set", 3) + result = describe_model_constraints((constraint,)) + + assert result == ["At least 3 fields must be set"] + + def test_require_if_field_eq(self) -> None: + constraint = RequireIfConstraint._create_internal( + "@require_if", ["admin_level"], FieldEqCondition("subtype", "country") + ) + result = describe_model_constraints((constraint,)) + + assert result == ["`admin_level` is required when `subtype` = `country`"] + + def test_require_if_negated_condition(self) -> None: + """Not(FieldEqCondition) uses not-equal sign.""" + constraint = RequireIfConstraint._create_internal( + "@require_if", + ["parent_division_id"], + Not(FieldEqCondition("subtype", "country")), + ) + result = describe_model_constraints((constraint,)) + + assert result == ["`parent_division_id` is required when `subtype` ≠ `country`"] + + def test_forbid_if_field_eq(self) -> None: + constraint = ForbidIfConstraint._create_internal( + "@forbid_if", + ["parent_division_id"], + FieldEqCondition("subtype", "country"), + ) + result = describe_model_constraints((constraint,)) + + assert result == [ + "`parent_division_id` is forbidden when `subtype` = 
`country`" + ] + + def test_multi_field_uses_plural_verb(self) -> None: + """Multiple field names produce 'are required', not 'is required'.""" + constraint = RequireIfConstraint._create_internal( + "@require_if", + ["foo", "bar"], + FieldEqCondition("flag", "on"), + ) + result = describe_model_constraints((constraint,)) + + assert result == ["`foo`, `bar` are required when `flag` = `on`"] + + +class TestDescribeFiltering: + """Filtering and fallback behavior.""" + + def test_no_extra_fields_filtered_out(self) -> None: + """@no_extra_fields produces no output.""" + constraint = NoExtraFieldsConstraint._create_internal("@no_extra_fields") + result = describe_model_constraints((constraint,)) + + assert result == [] + + def test_unknown_constraint_uses_name_fallback(self) -> None: + """Unrecognized constraint type falls back to constraint.name.""" + + class FutureConstraint(ModelConstraint): + pass + + constraint = FutureConstraint("@future_thing") + result = describe_model_constraints((constraint,)) + + assert result == ["`@future_thing`"] + + +class TestConsolidation: + """Consolidation of same-field conditional constraints.""" + + def test_consolidate_require_if_same_field(self) -> None: + """Multiple @require_if with same fields, different FieldEqCondition values, merge.""" + constraints = tuple( + RequireIfConstraint._create_internal( + "@require_if", + ["admin_level"], + FieldEqCondition("subtype", val), + ) + for val in ("country", "dependency", "macroregion") + ) + result = describe_model_constraints(constraints) + + assert result == [ + "`admin_level` is required when `subtype` is one of: " + "`country`, `dependency`, `macroregion`" + ] + + def test_no_consolidation_for_different_fields(self) -> None: + """@require_if with different field_names are not consolidated.""" + c1 = RequireIfConstraint._create_internal( + "@require_if", ["foo"], FieldEqCondition("flag", "a") + ) + c2 = RequireIfConstraint._create_internal( + "@require_if", ["bar"], 
FieldEqCondition("flag", "b") + ) + result = describe_model_constraints((c1, c2)) + + assert len(result) == 2 + + def test_no_consolidation_for_negated_conditions(self) -> None: + """Negated conditions are not consolidated.""" + c1 = RequireIfConstraint._create_internal( + "@require_if", ["foo"], Not(FieldEqCondition("flag", "a")) + ) + c2 = RequireIfConstraint._create_internal( + "@require_if", ["foo"], Not(FieldEqCondition("flag", "b")) + ) + result = describe_model_constraints((c1, c2)) + + assert len(result) == 2 + + def test_consolidate_forbid_if_same_field(self) -> None: + """Multiple @forbid_if with same fields also consolidate.""" + constraints = tuple( + ForbidIfConstraint._create_internal( + "@forbid_if", + ["secret"], + FieldEqCondition("role", val), + ) + for val in ("guest", "anonymous") + ) + result = describe_model_constraints(constraints) + + assert result == [ + "`secret` is forbidden when `role` is one of: `guest`, `anonymous`" + ] + + +class TestMixedConstraints: + """End-to-end with mixed constraint types.""" + + def test_division_like_model(self) -> None: + """Mixed constraints render in declaration order with consolidation.""" + constraints = ( + RequireAnyOfConstraint._create_internal("@require_any_of", "foo", "bar"), + ForbidIfConstraint._create_internal( + "@forbid_if", + ["parent_id"], + FieldEqCondition("subtype", "country"), + ), + RequireIfConstraint._create_internal( + "@require_if", + ["parent_id"], + Not(FieldEqCondition("subtype", "country")), + ), + RequireIfConstraint._create_internal( + "@require_if", + ["level"], + FieldEqCondition("subtype", "country"), + ), + RequireIfConstraint._create_internal( + "@require_if", + ["level"], + FieldEqCondition("subtype", "region"), + ), + RadioGroupConstraint._create_internal("@radio_group", "is_land", "is_sea"), + ) + result = describe_model_constraints(constraints) + + assert result == [ + "At least one of `foo`, `bar` must be set", + "`parent_id` is forbidden when `subtype` = `country`", + 
"`parent_id` is required when `subtype` ≠ `country`", + "`level` is required when `subtype` is one of: `country`, `region`", + "Exactly one of `is_land`, `is_sea` must be `true`", + ] + + +class TestFieldConstraintNotes: + """field_constraint_notes maps field names to their constraint descriptions.""" + + def test_require_any_of_maps_all_fields(self) -> None: + """RequireAnyOfConstraint maps each field name to the description.""" + constraint = RequireAnyOfConstraint._create_internal( + "@require_any_of", "name", "description" + ) + result = field_constraint_notes((constraint,)) + + expected = "At least one of `name`, `description` must be set" + assert result == {"name": [expected], "description": [expected]} + + def test_require_if_includes_condition_field(self) -> None: + """RequireIfConstraint includes both constrained and condition fields.""" + constraint = RequireIfConstraint._create_internal( + "@require_if", ["admin_level"], FieldEqCondition("subtype", "country") + ) + result = field_constraint_notes((constraint,)) + + expected = "`admin_level` is required when `subtype` = `country`" + assert result["admin_level"] == [expected] + assert result["subtype"] == [expected] + + def test_forbid_if_with_negated_condition_includes_condition_field(self) -> None: + """ForbidIfConstraint with Not(FieldEqCondition) includes condition field.""" + constraint = ForbidIfConstraint._create_internal( + "@forbid_if", + ["parent_id"], + Not(FieldEqCondition("subtype", "country")), + ) + result = field_constraint_notes((constraint,)) + + expected = "`parent_id` is forbidden when `subtype` ≠ `country`" + assert result["parent_id"] == [expected] + assert result["subtype"] == [expected] + + def test_consolidated_constraints_map_all_fields(self) -> None: + """Consolidated constraints map to all participating fields.""" + constraints = tuple( + RequireIfConstraint._create_internal( + "@require_if", + ["admin_level"], + FieldEqCondition("subtype", val), + ) + for val in ("country", 
"dependency") + ) + result = field_constraint_notes(constraints) + + expected = ( + "`admin_level` is required when `subtype` is one of: " + "`country`, `dependency`" + ) + assert result["admin_level"] == [expected] + assert result["subtype"] == [expected] + + def test_no_extra_fields_produces_no_annotations(self) -> None: + """NoExtraFieldsConstraint produces no field annotations.""" + constraint = NoExtraFieldsConstraint._create_internal("@no_extra_fields") + result = field_constraint_notes((constraint,)) + + assert result == {} + + def test_min_fields_set_produces_no_annotations(self) -> None: + """MinFieldsSetConstraint produces no field annotations.""" + constraint = MinFieldsSetConstraint._create_internal("@min_fields_set", 3) + result = field_constraint_notes((constraint,)) + + assert result == {} + + def test_radio_group_maps_all_fields(self) -> None: + """RadioGroupConstraint maps each field name to the description.""" + constraint = RadioGroupConstraint._create_internal( + "@radio_group", "is_land", "is_sea" + ) + result = field_constraint_notes((constraint,)) + + expected = "Exactly one of `is_land`, `is_sea` must be `true`" + assert result == {"is_land": [expected], "is_sea": [expected]} + + def test_multiple_constraints_on_one_field(self) -> None: + """Field appearing in multiple constraints gets all descriptions.""" + c1 = RequireAnyOfConstraint._create_internal( + "@require_any_of", "name", "description" + ) + c2 = RequireIfConstraint._create_internal( + "@require_if", ["name"], FieldEqCondition("subtype", "venue") + ) + result = field_constraint_notes((c1, c2)) + + assert len(result["name"]) == 2 + + +class TestDescribeFieldConstraint: + """Tests for describe_field_constraint readable output.""" + + def test_ge(self) -> None: + assert describe_field_constraint(Ge(ge=0)) == "`≥ 0`" + + def test_le(self) -> None: + assert describe_field_constraint(Le(le=100)) == "`≤ 100`" + + def test_gt(self) -> None: + assert describe_field_constraint(Gt(gt=0)) == 
"`> 0`" + + def test_lt(self) -> None: + assert describe_field_constraint(Lt(lt=100)) == "`< 100`" + + def test_min_len(self) -> None: + assert describe_field_constraint(MinLen(min_length=1)) == "Minimum length: 1" + + def test_max_len(self) -> None: + assert describe_field_constraint(MaxLen(max_length=10)) == "Maximum length: 10" + + def test_interval_closed(self) -> None: + assert describe_field_constraint(Interval(ge=0, le=100)) == "`0 ≤ x ≤ 100`" + + def test_interval_open(self) -> None: + assert describe_field_constraint(Interval(gt=0, lt=100)) == "`0 < x < 100`" + + def test_interval_half_open(self) -> None: + assert describe_field_constraint(Interval(ge=0, lt=100)) == "`0 ≤ x < 100`" + + def test_interval_lower_only(self) -> None: + assert describe_field_constraint(Interval(ge=0)) == "`≥ 0`" + + def test_interval_upper_only(self) -> None: + assert describe_field_constraint(Interval(le=100)) == "`≤ 100`" + + def test_geometry_type_single(self) -> None: + constraint = GeometryTypeConstraint(GeometryType.POINT) + assert describe_field_constraint(constraint) == "Allowed geometry types: Point" + + def test_geometry_type_multiple(self) -> None: + constraint = GeometryTypeConstraint(GeometryType.POINT, GeometryType.POLYGON) + assert ( + describe_field_constraint(constraint) + == "Allowed geometry types: Point, Polygon" + ) + + def test_geometry_type_all_types(self) -> None: + constraint = GeometryTypeConstraint( + GeometryType.POINT, + GeometryType.LINE_STRING, + GeometryType.POLYGON, + ) + assert ( + describe_field_constraint(constraint) + == "Allowed geometry types: LineString, Point, Polygon" + ) + + def test_reference_belongs_to(self) -> None: + class Target(Identified): + pass + + constraint = Reference(Relationship.BELONGS_TO, Target) + assert ( + describe_field_constraint(constraint) == "References `Target` (belongs to)" + ) + + def test_reference_connects_to(self) -> None: + class Other(Identified): + pass + + constraint = 
Reference(Relationship.CONNECTS_TO, Other) + assert ( + describe_field_constraint(constraint) == "References `Other` (connects to)" + ) + + def test_reference_link_fn_receives_type_identity(self) -> None: + """link_fn callback receives TypeIdentity wrapping the relatee class.""" + + class Target(Identified): + pass + + received: list[TypeIdentity] = [] + + def link_fn(tid: TypeIdentity) -> str: + received.append(tid) + return f"[`{tid.name}`](link)" + + constraint = Reference(Relationship.BELONGS_TO, Target) + result = describe_field_constraint(constraint, link_fn=link_fn) + + assert len(received) == 1 + assert received[0].obj is Target + assert received[0].name == "Target" + assert result == "References [`Target`](link) (belongs to)" + + def test_reference_link_fn_used_in_output(self) -> None: + """link_fn return value appears verbatim in the description.""" + + class Target(Identified): + pass + + constraint = Reference(Relationship.CONNECTS_TO, Target) + result = describe_field_constraint( + constraint, link_fn=lambda tid: f"[`{tid.name}`](path/to/target)" + ) + assert result == "References [`Target`](path/to/target) (connects to)" + + +class TestConstraintDisplayText: + """constraint_display_text forwards link_fn to describe_field_constraint.""" + + def test_link_fn_forwarded_to_reference_constraint(self) -> None: + """link_fn is forwarded when constraint is a Reference.""" + + class Target(Identified): + pass + + constraint = Reference(Relationship.BELONGS_TO, Target) + cs = ConstraintSource(source_ref=None, source_name=None, constraint=constraint) + + received: list[TypeIdentity] = [] + + def link_fn(tid: TypeIdentity) -> str: + received.append(tid) + return f"[`{tid.name}`](link)" + + result = constraint_display_text(cs, link_fn=link_fn) + + assert len(received) == 1 + assert received[0].obj is Target + assert result == "References [`Target`](link) (belongs to)" diff --git a/packages/overture-schema-codegen/tests/test_enum_extraction.py 
b/packages/overture-schema-codegen/tests/test_enum_extraction.py new file mode 100644 index 000000000..2e5367e3b --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_enum_extraction.py @@ -0,0 +1,149 @@ +"""Tests for enum extraction.""" + +from enum import Enum + +from codegen_test_support import find_member +from overture.schema.codegen.extraction.enum_extraction import extract_enum +from overture.schema.codegen.extraction.specs import EnumMemberSpec, EnumSpec +from overture.schema.system.doc import DocumentedEnum + + +class TestEnumMemberSpec: + """Tests for EnumMemberSpec dataclass.""" + + def test_stores_name_value_description(self) -> None: + """EnumMemberSpec should store name, value, and description.""" + member = EnumMemberSpec( + name="GABLED", value="gabled", description="A gabled roof" + ) + + assert member.name == "GABLED" + assert member.value == "gabled" + assert member.description == "A gabled roof" + + def test_description_can_be_none(self) -> None: + """EnumMemberSpec description should be optional.""" + member = EnumMemberSpec(name="FLAT", value="flat", description=None) + + assert member.description is None + + +class TestEnumSpec: + """Tests for EnumSpec dataclass.""" + + def test_stores_name_description_members(self) -> None: + """EnumSpec should store name, description, and members list.""" + members = [ + EnumMemberSpec(name="A", value="a", description=None), + EnumMemberSpec(name="B", value="b", description="The letter B"), + ] + + spec = EnumSpec( + name="Letters", description="A collection of letters", members=members + ) + + assert spec.name == "Letters" + assert spec.description == "A collection of letters" + assert len(spec.members) == 2 + + +class TestExtractEnumSimple: + """Tests for extract_enum with simple str Enum classes.""" + + def test_extracts_simple_str_enum(self) -> None: + """Should extract name, description, and members from simple str Enum.""" + + class RoofShape(str, Enum): + """The shape of the roof.""" + + 
FLAT = "flat" + GABLED = "gabled" + DOMED = "dome" + + result = extract_enum(RoofShape) + + assert result.name == "RoofShape" + assert result.description == "The shape of the roof." + assert len(result.members) == 3 + + # Check member extraction + flat = find_member(result, "FLAT") + assert flat.value == "flat" + assert flat.description is None + + gabled = find_member(result, "GABLED") + assert gabled.value == "gabled" + + def test_enum_without_docstring(self) -> None: + """Should handle enum without docstring.""" + + class SimpleEnum(str, Enum): + A = "a" + B = "b" + + result = extract_enum(SimpleEnum) + + assert result.name == "SimpleEnum" + assert result.description is None + + +class TestExtractEnumDocumented: + """Tests for extract_enum with DocumentedEnum classes.""" + + def test_extracts_documented_enum_with_member_descriptions(self) -> None: + """Should extract per-member descriptions from DocumentedEnum.""" + + class Side(str, DocumentedEnum): + """The side on which something appears.""" + + LEFT = ("left", "On the left side") + RIGHT = ("right", "On the right side") + + result = extract_enum(Side) + + assert result.name == "Side" + assert result.description == "The side on which something appears." 
+ assert len(result.members) == 2 + + left = find_member(result, "LEFT") + assert left.value == "left" + assert left.description == "On the left side" + + right = find_member(result, "RIGHT") + assert right.value == "right" + assert right.description == "On the right side" + + def test_documented_enum_with_mixed_documentation(self) -> None: + """DocumentedEnum can have some members documented and others not.""" + + class ConnectionState(str, DocumentedEnum): + """Connection states.""" + + CONNECTED = "connected" + DISCONNECTED = "disconnected" + QUIESCING = ("quiescing", "Gracefully shutting down") + + result = extract_enum(ConnectionState) + + connected = find_member(result, "CONNECTED") + assert connected.value == "connected" + assert connected.description is None + + quiescing = find_member(result, "QUIESCING") + assert quiescing.value == "quiescing" + assert quiescing.description == "Gracefully shutting down" + + +class TestEnumSpecSourceType: + """Tests for source_type on EnumSpec.""" + + def test_enum_spec_source_type_defaults_to_none(self) -> None: + spec = EnumSpec(name="Test", description=None) + assert spec.source_type is None + + def test_extract_enum_sets_source_type(self) -> None: + class Color(str, Enum): + RED = "red" + + spec = extract_enum(Color) + assert spec.source_type is Color diff --git a/packages/overture-schema-codegen/tests/test_example_loader.py b/packages/overture-schema-codegen/tests/test_example_loader.py new file mode 100644 index 000000000..1f94bc06d --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_example_loader.py @@ -0,0 +1,932 @@ +"""Tests for examples module.""" + +import logging +import sys +import types +from collections.abc import Iterator +from pathlib import Path +from textwrap import dedent +from typing import Annotated, Literal + +import pytest +from overture.schema.codegen.extraction.examples import ( + ExampleRecord, + _inject_literal_fields, + augment_missing_fields, + flatten_model_instance, + 
load_examples, + load_examples_from_toml, + order_example_rows, + resolve_pyproject_path, + validate_example, +) +from pydantic import BaseModel, ConfigDict, Field, Tag, ValidationError + + +class TestOrderExampleRows: + """Tests for order_example_rows function.""" + + def test_order_by_field_names(self) -> None: + """Order rows by position in field_names.""" + flat_rows = [("version", 1), ("id", "123"), ("name", "test")] + field_names = ["id", "name", "version"] + result = order_example_rows(flat_rows, field_names) + assert result == [("id", "123"), ("name", "test"), ("version", 1)] + + def test_extract_base_field_from_dot_notation(self) -> None: + """Extract base field from dotted keys.""" + flat_rows = [ + ("names.primary", "foo"), + ("id", "123"), + ("names.common.en", "bar"), + ] + field_names = ["id", "names"] + result = order_example_rows(flat_rows, field_names) + assert result == [ + ("id", "123"), + ("names.primary", "foo"), + ("names.common.en", "bar"), + ] + + def test_extract_base_field_from_array_notation(self) -> None: + """Extract base field from array notation.""" + flat_rows = [ + ("sources[0].dataset", "OSM"), + ("id", "123"), + ("sources[0].record_id", "w123"), + ("sources[1].dataset", "MSFT"), + ] + field_names = ["id", "sources"] + result = order_example_rows(flat_rows, field_names) + assert result == [ + ("id", "123"), + ("sources[0].dataset", "OSM"), + ("sources[0].record_id", "w123"), + ("sources[1].dataset", "MSFT"), + ] + + def test_order_with_mixed_notation(self) -> None: + """Order rows with mixed simple, dotted, and array notation.""" + flat_rows = [ + ("version", 1), + ("sources[0].dataset", "OSM"), + ("id", "123"), + ("names.primary", "foo"), + ] + field_names = ["id", "names", "sources", "version"] + result = order_example_rows(flat_rows, field_names) + assert result == [ + ("id", "123"), + ("names.primary", "foo"), + ("sources[0].dataset", "OSM"), + ("version", 1), + ] + + def test_unknown_fields_sort_to_end(self) -> None: + 
"""Unknown fields sort to end, maintaining relative order.""" + flat_rows = [ + ("unknown2", "b"), + ("id", "123"), + ("unknown1", "a"), + ("version", 1), + ] + field_names = ["id", "version"] + result = order_example_rows(flat_rows, field_names) + assert result == [ + ("id", "123"), + ("version", 1), + ("unknown2", "b"), + ("unknown1", "a"), + ] + + +class TestLoadExamplesFromToml: + """Tests for load_examples_from_toml function.""" + + def test_load_example_list(self, tmp_path: Path) -> None: + """Load examples for a model from TOML.""" + toml_path = tmp_path / "pyproject.toml" + toml_path.write_text( + dedent(""" + [project] + name = "test-package" + + [[examples.Building]] + id = "123" + version = 1 + + [[examples.Building]] + id = "456" + version = 2 + """) + ) + + result = load_examples_from_toml(toml_path, "Building") + assert len(result) == 2 + assert result[0] == {"id": "123", "version": 1} + assert result[1] == {"id": "456", "version": 2} + + def test_model_not_found_returns_empty(self, tmp_path: Path) -> None: + """Return empty list when model has no examples.""" + toml_path = tmp_path / "pyproject.toml" + toml_path.write_text( + dedent(""" + [project] + name = "test-package" + + [[examples.Building]] + id = "123" + """) + ) + + result = load_examples_from_toml(toml_path, "Road") + assert result == [] + + def test_no_examples_section_returns_empty(self, tmp_path: Path) -> None: + """Return empty list when no examples section exists.""" + toml_path = tmp_path / "pyproject.toml" + toml_path.write_text( + dedent(""" + [project] + name = "test-package" + """) + ) + + result = load_examples_from_toml(toml_path, "Building") + assert result == [] + + +class MockProject: + """A temporary project directory with registered mock modules.""" + + def __init__(self, root: Path, pyproject: Path, mod_name: str) -> None: + self.root = root + self.pyproject = pyproject + self.mod_name = mod_name + self._registered_modules: list[str] = [mod_name] + + def 
write_pyproject(self, content: str) -> None: + self.pyproject.write_text(content) + + def add_submodule(self, *subdirs: str) -> str: + """Register a deeper module under this project's src directory. + + Returns the module name for use in __module__ attributes. + """ + pkg_dir = self.root / "src" / Path(*subdirs) + pkg_dir.mkdir(parents=True, exist_ok=True) + module_file = pkg_dir / "module.py" + module_file.write_text("# module") + + sub_mod_name = f"{self.mod_name}_{'_'.join(subdirs)}" + mod = types.ModuleType(sub_mod_name) + mod.__file__ = str(module_file) + sys.modules[sub_mod_name] = mod + self._registered_modules.append(sub_mod_name) + return sub_mod_name + + def cleanup(self) -> None: + for name in self._registered_modules: + sys.modules.pop(name, None) + + +@pytest.fixture +def mock_project(tmp_path: Path) -> Iterator[MockProject]: + """Create a project directory with a mock module registered in sys.modules. + + Yields a MockProject with root, pyproject path, and mod_name. + Writes a minimal pyproject.toml by default; tests can overwrite via + `project.write_pyproject()`. 
+ """ + root = tmp_path / "project" + root.mkdir() + pyproject = root / "pyproject.toml" + pyproject.write_text("[project]\nname = 'test'") + + src_dir = root / "src" + src_dir.mkdir() + module_file = src_dir / "module.py" + module_file.write_text("# module") + + mod_name = f"_test_mock_{id(tmp_path)}" + mod = types.ModuleType(mod_name) + mod.__file__ = str(module_file) + sys.modules[mod_name] = mod + + project = MockProject(root=root, pyproject=pyproject, mod_name=mod_name) + yield project + project.cleanup() + + +class TestResolvePyprojectPath: + """Tests for resolve_pyproject_path function.""" + + def test_finds_pyproject_in_parent_dirs(self, mock_project: MockProject) -> None: + """Walk up from module location to find pyproject.toml.""" + deeper_mod = mock_project.add_submodule("pkg") + + class MockModel: + __module__ = deeper_mod + + result = resolve_pyproject_path(MockModel) + assert result == mock_project.pyproject + + def test_returns_none_when_not_found(self, tmp_path: Path) -> None: + """Return None when pyproject.toml doesn't exist.""" + module_dir = tmp_path / "src" + module_dir.mkdir() + module_file = module_dir / "module.py" + module_file.write_text("# module") + + mod_name = f"_test_resolve_nf_{id(tmp_path)}" + mod = types.ModuleType(mod_name) + mod.__file__ = str(module_file) + sys.modules[mod_name] = mod + try: + + class MockModel: + __module__ = mod_name + + result = resolve_pyproject_path(MockModel) + assert result is None + finally: + sys.modules.pop(mod_name, None) + + def test_returns_none_when_no_module(self) -> None: + """Return None when model's module is not in sys.modules.""" + + class MockModel: + __module__ = "_nonexistent_module_for_test" + + result = resolve_pyproject_path(MockModel) + assert result is None + + +class TestLoadExamples: + """Tests for load_examples entry point.""" + + def test_end_to_end(self, mock_project: MockProject) -> None: + """Load, flatten, and order examples end-to-end.""" + mock_project.write_pyproject( + 
dedent(""" + [project] + name = "test" + + [[examples.Building]] + version = 1 + id = "123" + + [examples.Building.names] + primary = "Tower" + + [examples.Building.bbox] + xmin = 1.0 + xmax = 2.0 + + [[examples.Building.sources]] + dataset = "OSM" + record_id = "w456" + """) + ) + + class Names(BaseModel): + primary: str + secondary: str | None = None + + class Bbox(BaseModel): + xmin: float + xmax: float + ymin: float | None = None + ymax: float | None = None + + class Source(BaseModel): + dataset: str + record_id: str + + class MockModel(BaseModel): + __module__ = mock_project.mod_name + id: str + version: int + bbox: Bbox | None = None + names: Names | None = None + sources: list[Source] = [] + + field_names = ["id", "bbox", "names", "sources", "version"] + result = load_examples(MockModel, "Building", field_names) + + assert len(result) == 1 + record = result[0] + assert isinstance(record, ExampleRecord) + + assert record.rows == [ + ("id", "123"), + ("bbox.xmin", 1.0), + ("bbox.xmax", 2.0), + ("bbox.ymin", None), + ("bbox.ymax", None), + ("names.primary", "Tower"), + ("names.secondary", None), + ("sources[0].dataset", "OSM"), + ("sources[0].record_id", "w456"), + ("version", 1), + ] + + def test_returns_empty_on_missing_pyproject(self) -> None: + """Return empty list when model's module not in sys.modules.""" + + class MockModel(BaseModel): + __module__ = "_nonexistent_module_for_load_test" + + result = load_examples(MockModel, "Building", ["id"]) + assert result == [] + + def test_returns_empty_on_missing_model(self, mock_project: MockProject) -> None: + """Return empty list when model has no examples.""" + + class MockModel(BaseModel): + __module__ = mock_project.mod_name + + result = load_examples(MockModel, "Building", ["id"]) + assert result == [] + + def test_invalid_examples_skipped_with_warning( + self, mock_project: MockProject, caplog: pytest.LogCaptureFixture + ) -> None: + """Invalid examples are skipped and warning logged.""" + 
mock_project.write_pyproject( + dedent(""" + [project] + name = "test" + + [[examples.MockModel]] + name = "valid" + count = 1 + + [[examples.MockModel]] + name = "invalid" + count = "not_an_int" + + [[examples.MockModel]] + name = "also_valid" + count = 2 + """) + ) + + class MockModel(BaseModel): + __module__ = mock_project.mod_name + name: str + count: int + + caplog.set_level(logging.WARNING) + + result = load_examples(MockModel, "MockModel", ["name", "count"]) + + assert len(result) == 2 + assert result[0].rows == [("name", "valid"), ("count", 1)] + assert result[1].rows == [("name", "also_valid"), ("count", 2)] + + assert any( + "MockModel" in record.message + and "validation" in record.message.lower() + and str(mock_project.pyproject) in record.message + for record in caplog.records + ) + + def test_dict_field_kept_as_leaf(self, mock_project: MockProject) -> None: + """Dict fields are kept as leaf values without dict_paths.""" + mock_project.write_pyproject( + dedent(""" + [project] + name = "test" + + [[examples.MockModel]] + name = "Tower" + + [examples.MockModel.tags] + color = "red" + size = "large" + """) + ) + + class MockModel(BaseModel): + __module__ = mock_project.mod_name + name: str + tags: dict[str, str] + + result = load_examples(MockModel, "MockModel", ["name", "tags"]) + + assert len(result) == 1 + assert result[0].rows == [ + ("name", "Tower"), + ("tags", {"color": "red", "size": "large"}), + ] + + +class TestInjectLiteralFields: + """Tests for _inject_literal_fields function.""" + + def test_injects_single_value_literal(self) -> None: + """Inject field with single-value Literal annotation.""" + + class MockModel(BaseModel): + theme: Literal["buildings"] + name: str + + data = {"name": "Tower"} + result = _inject_literal_fields(MockModel.model_fields, data) + assert result == {"name": "Tower", "theme": "buildings"} + + def test_skips_non_literal_field(self) -> None: + """Do not inject fields without Literal annotations.""" + + class 
MockModel(BaseModel): + name: str + count: int + + data = {"name": "Tower"} + result = _inject_literal_fields(MockModel.model_fields, data) + assert result == {"name": "Tower"} + + def test_skips_already_present_field(self) -> None: + """Do not overwrite fields already in data.""" + + class MockModel(BaseModel): + theme: Literal["buildings"] + name: str + + data = {"theme": "custom", "name": "Tower"} + result = _inject_literal_fields(MockModel.model_fields, data) + assert result == {"theme": "custom", "name": "Tower"} + + def test_respects_validation_alias(self) -> None: + """Use validation_alias when injecting.""" + + class MockModel(BaseModel): + class_: Literal["building"] = Field(validation_alias="class") + name: str + + data = {"name": "Tower"} + result = _inject_literal_fields(MockModel.model_fields, data) + assert result == {"name": "Tower", "class": "building"} + + def test_no_mutation(self) -> None: + """Original data dict is not modified.""" + + class MockModel(BaseModel): + theme: Literal["buildings"] + name: str + + data = {"name": "Tower"} + original_data = data.copy() + _inject_literal_fields(MockModel.model_fields, data) + assert data == original_data + + def test_multiple_literal_fields(self) -> None: + """Inject multiple Literal fields.""" + + class MockModel(BaseModel): + theme: Literal["buildings"] + type: Literal["building"] + name: str + + data = {"name": "Tower"} + result = _inject_literal_fields(MockModel.model_fields, data) + assert result == {"name": "Tower", "theme": "buildings", "type": "building"} + + def test_skips_multi_value_literal(self) -> None: + """Do not inject Literal with multiple values.""" + + class MockModel(BaseModel): + status: Literal["active", "inactive"] + name: str + + data = {"name": "Tower"} + result = _inject_literal_fields(MockModel.model_fields, data) + assert result == {"name": "Tower"} + + def test_respects_alias_fallback(self) -> None: + """Fall back to alias if validation_alias not set.""" + + class 
MockModel(BaseModel): + class_: Literal["building"] = Field(alias="class") + name: str + + data = {"name": "Tower"} + result = _inject_literal_fields(MockModel.model_fields, data) + assert result == {"name": "Tower", "class": "building"} + + def test_unwraps_optional_literal(self) -> None: + """Inject Optional[Literal["x"]] fields (union-wrapped by Pydantic).""" + + class MockModel(BaseModel): + theme: Literal["buildings"] | None = None + name: str + + data = {"name": "Tower"} + result = _inject_literal_fields(MockModel.model_fields, data) + assert result == {"name": "Tower", "theme": "buildings"} + + +class TestValidateExample: + """Tests for validate_example function.""" + + def test_valid_data_returns_instance(self) -> None: + """Valid data returns a model instance.""" + + class MockModel(BaseModel): + name: str + count: int + + raw = {"name": "test", "count": 42} + result = validate_example(MockModel, raw) + assert isinstance(result, MockModel) + assert result.name == "test" + assert result.count == 42 + + def test_invalid_data_raises_validation_error(self) -> None: + """Invalid data raises ValidationError.""" + + class MockModel(BaseModel): + count: int + + raw = {"count": "not_an_int"} + with pytest.raises(ValidationError): + validate_example(MockModel, raw) + + def test_literals_injected_before_validation(self) -> None: + """Missing Literal fields are injected before validation.""" + + class MockModel(BaseModel): + theme: Literal["buildings"] + name: str + + raw = {"name": "Tower"} + result = validate_example(MockModel, raw) + assert isinstance(result, MockModel) + assert result.theme == "buildings" + assert result.name == "Tower" + + +class _Dog(BaseModel): + kind: Literal["dog"] + bark: str + + +class _Cat(BaseModel): + kind: Literal["cat"] + purr: bool + + +_PetUnion = Annotated[ + Annotated[_Dog, Tag("dog")] | Annotated[_Cat, Tag("cat")], + Field(discriminator="kind"), +] + + +class TestValidateExampleWithUnion: + """Tests for validate_example with 
discriminated unions via TypeAdapter.""" + + def test_validates_union_via_type_adapter(self) -> None: + """TypeAdapter validates against a discriminated union.""" + raw = {"kind": "dog", "bark": "woof"} + result = validate_example(_PetUnion, raw, model_fields=_Dog.model_fields) + assert isinstance(result, _Dog) + assert result.kind == "dog" + assert result.bark == "woof" + + def test_invalid_union_example_raises(self) -> None: + """Invalid data against union raises ValidationError.""" + raw = {"kind": "dog", "bark": 42} # bark should be str + with pytest.raises(ValidationError): + validate_example(_PetUnion, raw, model_fields=_Dog.model_fields) + + def test_null_cross_arm_fields_stripped_for_validation(self) -> None: + """Null fields from other union arms are stripped before validation. + + Parquet files have columns for all union arms. A road segment row + includes rail_flags=null because the column exists. Preprocessing + strips these so extra='forbid' models accept the data. + """ + + class _Base(BaseModel): + model_config = ConfigDict(extra="forbid") + kind: str + name: str + + class Dog(_Base): + kind: Literal["dog"] + bark: str | None = None + + class Cat(_Base): + kind: Literal["cat"] + purr: bool | None = None + + PetUnion = Annotated[ + Annotated[Dog, Tag("dog")] | Annotated[Cat, Tag("cat")], + Field(discriminator="kind"), + ] + + # Flat schema: Dog example includes Cat's "purr" field as null + raw = {"kind": "dog", "name": "Rex", "bark": "woof", "purr": None} + result = validate_example(PetUnion, raw, model_fields=_Base.model_fields) + assert isinstance(result, Dog) + assert result.name == "Rex" + assert result.bark == "woof" + + +class TestIntegration: + """Integration tests with real schema models.""" + + def test_real_building_examples_validate(self) -> None: + """Validate real Building examples from the schema package.""" + pytest.importorskip("overture.schema.buildings.building") + + from overture.schema.buildings.building import Building # noqa: 
PLC0415 + + pyproject_path = resolve_pyproject_path(Building) + assert pyproject_path is not None, "Could not find pyproject.toml for Building" + + raw_examples = load_examples_from_toml(pyproject_path, "Building") + assert len(raw_examples) > 0, "No Building examples found in pyproject.toml" + + for idx, raw_example in enumerate(raw_examples): + validated = validate_example(Building, raw_example) + assert isinstance(validated, BaseModel), ( + f"Example {idx}: Expected BaseModel" + ) + + def test_real_segment_examples_validate(self) -> None: + """Validate real Segment examples (discriminated union with cross-arm fields).""" + pytest.importorskip("overture.schema.transportation") + + from overture.schema.transportation import Segment # noqa: PLC0415 + from overture.schema.transportation.segment.models import ( # noqa: PLC0415 + RoadSegment, + TransportationSegment, + ) + + pyproject_path = resolve_pyproject_path(RoadSegment) + assert pyproject_path is not None + + raw_examples = load_examples_from_toml(pyproject_path, "Segment") + assert len(raw_examples) > 0, "No Segment examples found" + + for idx, raw_example in enumerate(raw_examples): + validated = validate_example( + Segment, + raw_example, + model_fields=TransportationSegment.model_fields, + ) + assert isinstance(validated, BaseModel), ( + f"Example {idx}: Expected BaseModel" + ) + + +class TestAugmentMissingFields: + """Tests for augment_missing_fields function.""" + + def test_no_missing_fields(self) -> None: + """All fields present, nothing augmented.""" + rows = [("id", "123"), ("name", "test")] + result = augment_missing_fields(rows, ["id", "name"]) + assert result == [("id", "123"), ("name", "test")] + + def test_missing_top_level_field(self) -> None: + """Missing field added as (name, None).""" + rows = [("id", "123")] + result = augment_missing_fields(rows, ["id", "name", "level"]) + assert result == [("id", "123"), ("name", None), ("level", None)] + + def test_dotted_field_counts_as_present(self) -> 
None: + """A dotted key like 'names.primary' counts 'names' as present.""" + rows = [("id", "123"), ("names.primary", "foo")] + result = augment_missing_fields(rows, ["id", "names"]) + assert result == [("id", "123"), ("names.primary", "foo")] + + def test_indexed_field_counts_as_present(self) -> None: + """A bracketed key like 'sources[0].dataset' counts 'sources' as present.""" + rows = [("id", "123"), ("sources[0].dataset", "OSM")] + result = augment_missing_fields(rows, ["id", "sources"]) + assert result == [("id", "123"), ("sources[0].dataset", "OSM")] + + def test_union_cross_arm_fields_added(self) -> None: + """Fields from other union arms are added as None.""" + rows = [ + ("kind", "dog"), + ("name", "Rex"), + ("bark", "woof"), + ] + field_names = ["kind", "name", "bark", "purr"] + result = augment_missing_fields(rows, field_names) + assert result == [ + ("kind", "dog"), + ("name", "Rex"), + ("bark", "woof"), + ("purr", None), + ] + + +class TestFlattenModelInstance: + """Tests for flatten_model_instance function.""" + + def test_simple_fields(self) -> None: + """Flatten simple model fields.""" + + class Simple(BaseModel): + id: str + version: int + + instance = Simple(id="123", version=1) + result = flatten_model_instance(instance) + assert result == [("id", "123"), ("version", 1)] + + def test_nested_model(self) -> None: + """Nested BaseModel fields use dot notation.""" + + class Inner(BaseModel): + primary: str + secondary: str | None = None + + class Outer(BaseModel): + name: str + names: Inner + + instance = Outer(name="test", names=Inner(primary="foo")) + result = flatten_model_instance(instance) + assert result == [ + ("name", "test"), + ("names.primary", "foo"), + ("names.secondary", None), + ] + + def test_list_of_models(self) -> None: + """List of BaseModel uses bracket notation.""" + + class Source(BaseModel): + dataset: str + record_id: str + + class Feature(BaseModel): + id: str + sources: list[Source] + + instance = Feature( + id="123", + 
sources=[ + Source(dataset="OSM", record_id="w123"), + Source(dataset="MSFT", record_id="w456"), + ], + ) + result = flatten_model_instance(instance) + assert result == [ + ("id", "123"), + ("sources[0].dataset", "OSM"), + ("sources[0].record_id", "w123"), + ("sources[1].dataset", "MSFT"), + ("sources[1].record_id", "w456"), + ] + + def test_dict_field_kept_as_leaf(self) -> None: + """Dict-typed fields are leaf values, not recursed.""" + + class Tagged(BaseModel): + name: str + tags: dict[str, str] + + instance = Tagged(name="test", tags={"color": "red", "size": "large"}) + result = flatten_model_instance(instance) + assert result == [ + ("name", "test"), + ("tags", {"color": "red", "size": "large"}), + ] + + def test_none_defaulted_fields_appear(self) -> None: + """Fields with None defaults still appear in output.""" + + class WithDefaults(BaseModel): + name: str + level: int | None = None + height: float | None = None + + instance = WithDefaults(name="test") + result = flatten_model_instance(instance) + assert result == [ + ("name", "test"), + ("level", None), + ("height", None), + ] + + def test_plain_list_kept_as_leaf(self) -> None: + """Plain list of primitives is a single leaf value.""" + + class WithList(BaseModel): + phones: list[str] + + instance = WithList(phones=["+1234", "+5678"]) + result = flatten_model_instance(instance) + assert result == [("phones", ["+1234", "+5678"])] + + def test_empty_list_kept_as_leaf(self) -> None: + """Empty list is a leaf value.""" + + class WithList(BaseModel): + tags: list[str] = [] + + instance = WithList() + result = flatten_model_instance(instance) + assert result == [("tags", [])] + + def test_nested_list_of_lists_of_models(self) -> None: + """list[list[Model]] uses double-index notation.""" + + class Node(BaseModel): + division_id: str + name: str + + class Feature(BaseModel): + hierarchies: list[list[Node]] + + instance = Feature( + hierarchies=[ + [ + Node(division_id="aaa", name="Country"), + 
Node(division_id="bbb", name="Region"), + ], + ] + ) + result = flatten_model_instance(instance) + assert result == [ + ("hierarchies[0][0].division_id", "aaa"), + ("hierarchies[0][0].name", "Country"), + ("hierarchies[0][1].division_id", "bbb"), + ("hierarchies[0][1].name", "Region"), + ] + + def test_none_model_field_is_leaf(self) -> None: + """A model-typed field with None value is a leaf, not recursed.""" + + class Inner(BaseModel): + value: str + + class Outer(BaseModel): + name: str + inner: Inner | None = None + + instance = Outer(name="test") + result = flatten_model_instance(instance) + assert result == [("name", "test"), ("inner", None)] + + def test_field_alias(self) -> None: + """Field with validation_alias uses the alias as key.""" + + class Aliased(BaseModel): + class_: Literal["building"] = Field(validation_alias="class") + name: str + + instance = Aliased.model_validate({"class": "building", "name": "Tower"}) + result = flatten_model_instance(instance) + assert result == [("class", "building"), ("name", "Tower")] + + def test_slots_based_field_flattened(self) -> None: + """Non-BaseModel types with __slots__ and properties are flattened.""" + from overture.schema.system.primitive import BBox # noqa: PLC0415 + + class WithBBox(BaseModel): + id: str + bbox: BBox | None = None + + instance = WithBBox(id="123", bbox=BBox(xmin=1.0, ymin=2.0, xmax=3.0, ymax=4.0)) + result = flatten_model_instance(instance) + assert result == [ + ("id", "123"), + ("bbox.xmin", 1.0), + ("bbox.ymin", 2.0), + ("bbox.xmax", 3.0), + ("bbox.ymax", 4.0), + ] + + def test_none_slots_based_field_is_leaf(self) -> None: + """A slots-based field with None value is a leaf.""" + from overture.schema.system.primitive import BBox # noqa: PLC0415 + + class WithBBox(BaseModel): + id: str + bbox: BBox | None = None + + instance = WithBBox(id="123") + result = flatten_model_instance(instance) + assert result == [("id", "123"), ("bbox", None)] + + def test_single_slot_wrapper_is_leaf(self) -> 
None: + """Single-slot types (wrappers like Geometry) are leaf values.""" + from overture.schema.system.primitive import Geometry # noqa: PLC0415 + from shapely.geometry import Point # noqa: PLC0415 + + class WithGeom(BaseModel): + id: str + geometry: Geometry + + geom = Geometry(Point(1, 2)) + instance = WithGeom(id="123", geometry=geom) + result = flatten_model_instance(instance) + assert result == [("id", "123"), ("geometry", geom)] diff --git a/packages/overture-schema-codegen/tests/test_golden_markdown.py b/packages/overture-schema-codegen/tests/test_golden_markdown.py new file mode 100644 index 000000000..42320ee69 --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_golden_markdown.py @@ -0,0 +1,130 @@ +"""Golden-file snapshot tests for Markdown renderer output.""" + +from enum import Enum +from pathlib import Path + +import pytest +from codegen_test_support import ( + CommonNames, + FeatureWithAddress, + FeatureWithDict, + FeatureWithSources, + HexColor, + Id, + Instrument, + InstrumentFamily, + SimpleKind, + Sources, + Venue, + Widget, + assert_golden, +) +from overture.schema.codegen.extraction.enum_extraction import extract_enum +from overture.schema.codegen.extraction.model_extraction import ( + expand_model_tree, + extract_model, +) +from overture.schema.codegen.extraction.newtype_extraction import extract_newtype +from overture.schema.codegen.extraction.specs import TypeIdentity +from overture.schema.codegen.layout.type_collection import ( + collect_all_supplementary_types, +) +from overture.schema.codegen.markdown.renderer import ( + render_enum, + render_feature, + render_newtype, +) +from overture.schema.codegen.markdown.reverse_references import ( + UsedByEntry, + compute_reverse_references, +) +from pydantic import BaseModel + +GOLDEN_DIR = Path(__file__).parent / "golden" / "markdown" + +FEATURE_CASES = [ + (Instrument, "instrument.md"), + (Venue, "venue.md"), + (Widget, "widget.md"), + (FeatureWithSources, "feature_with_sources.md"), 
+ (FeatureWithAddress, "feature_with_address.md"), + (FeatureWithDict, "feature_with_dict.md"), +] + +ENUM_CASES = [ + (InstrumentFamily, "instrument_family.md"), + (SimpleKind, "simple_kind.md"), +] + +NEWTYPE_CASES = [ + (HexColor, "hex_color.md"), + (Id, "id.md"), + (Sources, "sources.md"), + (CommonNames, "common_names.md"), +] + + +@pytest.fixture(scope="module") +def reverse_refs() -> dict[TypeIdentity, list[UsedByEntry]]: + """Compute reverse references for all test models.""" + feature_specs = [] + for model_class, _ in FEATURE_CASES: + assert isinstance(model_class, type) and issubclass(model_class, BaseModel) + spec = extract_model(model_class) + expand_model_tree(spec) + feature_specs.append(spec) + + all_specs = collect_all_supplementary_types(feature_specs) + return compute_reverse_references(feature_specs, all_specs) + + +@pytest.mark.parametrize( + ("model_class", "golden_filename"), + FEATURE_CASES, + ids=[name for _, name in FEATURE_CASES], +) +def test_feature_golden( + model_class: type[BaseModel], + golden_filename: str, + update_golden: bool, + reverse_refs: dict[TypeIdentity, list[UsedByEntry]], +) -> None: + spec = extract_model(model_class) + expand_model_tree(spec) + used_by = reverse_refs.get(spec.identity) + actual = render_feature(spec, used_by=used_by) + assert_golden(actual, GOLDEN_DIR / golden_filename, update=update_golden) + + +@pytest.mark.parametrize( + ("enum_class", "golden_filename"), + ENUM_CASES, + ids=[name for _, name in ENUM_CASES], +) +def test_enum_golden( + enum_class: type[Enum], + golden_filename: str, + update_golden: bool, + reverse_refs: dict[TypeIdentity, list[UsedByEntry]], +) -> None: + spec = extract_enum(enum_class) + used_by = reverse_refs.get(spec.identity) + actual = render_enum(spec, used_by=used_by) + assert_golden(actual, GOLDEN_DIR / golden_filename, update=update_golden) + + +@pytest.mark.parametrize( + ("newtype_callable", "golden_filename"), + NEWTYPE_CASES, + ids=[name for _, name in NEWTYPE_CASES], 
+) +def test_newtype_golden( + newtype_callable: object, + golden_filename: str, + update_golden: bool, + reverse_refs: dict[TypeIdentity, list[UsedByEntry]], +) -> None: + spec = extract_newtype(newtype_callable) + used_by = reverse_refs.get(spec.identity) + actual = render_newtype(spec, used_by=used_by) + assert_golden(actual, GOLDEN_DIR / golden_filename, update=update_golden) diff --git a/packages/overture-schema-codegen/tests/test_integration_real_models.py b/packages/overture-schema-codegen/tests/test_integration_real_models.py new file mode 100644 index 000000000..b4dd9419f --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_integration_real_models.py @@ -0,0 +1,279 @@ +"""Integration tests against real Overture models. + +These tests validate the extraction layer against actual models from +the installed Overture schema packages. +""" + +import pytest +from codegen_test_support import assert_literal_field +from overture.schema.codegen.extraction.model_extraction import extract_model +from overture.schema.codegen.extraction.specs import ( + FeatureSpec, + ModelSpec, + UnionSpec, + filter_model_classes, + is_model_class, + is_union_alias, +) +from overture.schema.codegen.extraction.type_analyzer import TypeKind +from overture.schema.codegen.extraction.union_extraction import extract_union +from overture.schema.codegen.layout.module_layout import entry_point_class +from overture.schema.codegen.markdown.pipeline import generate_markdown_pages +from overture.schema.codegen.markdown.renderer import render_feature +from overture.schema.core.discovery import discover_models +from overture.schema.transportation import Segment +from overture.schema.transportation.segment.models import RoadSegment +from pydantic import BaseModel + + +class TestDiscoverModels: + """Tests for model discovery.""" + + def test_discover_models_returns_dict(self) -> None: + """discover_models() should return a dictionary.""" + models = discover_models() + assert 
isinstance(models, dict) + + def test_discover_models_finds_building( + self, building_class: type[BaseModel] + ) -> None: + """Should discover the Building model.""" + assert issubclass(building_class, BaseModel) + + def test_discover_models_finds_place(self, place_class: type[BaseModel]) -> None: + """Should discover the Place model.""" + assert issubclass(place_class, BaseModel) + + def test_discover_models_returns_multiple_themes(self) -> None: + """Should discover models from multiple themes.""" + models = discover_models() + assert len(models) >= 3, f"Expected at least 3 models, got {len(models)}" + + +class TestExtractBuildingModel: + """Tests for extracting the Building model.""" + + def test_extract_building_has_name(self, building_spec: ModelSpec) -> None: + """Building model spec should have correct name.""" + assert building_spec.name == "Building" + + def test_extract_building_has_theme_type(self, building_spec: ModelSpec) -> None: + """Building should have theme='buildings', type='building' as Literal fields.""" + assert_literal_field(building_spec, "theme", "buildings") + assert_literal_field(building_spec, "type", "building") + + def test_extract_building_has_fields(self, building_spec: ModelSpec) -> None: + """Building should have multiple fields.""" + assert len(building_spec.fields) > 0, "Building should have at least one field" + field_names = {f.name for f in building_spec.fields} + assert "id" in field_names + + def test_building_field_types_are_valid(self, building_spec: ModelSpec) -> None: + """All Building fields should have valid TypeInfo.""" + for field in building_spec.fields: + assert field.type_info is not None + assert field.type_info.kind in TypeKind + + +class TestExtractPlaceModel: + """Tests for extracting the Place model.""" + + def test_extract_place_has_theme_type(self, place_class: type[BaseModel]) -> None: + """Place should have theme='places', type='place' as Literal fields.""" + spec = extract_model(place_class) + 
assert_literal_field(spec, "theme", "places") + assert_literal_field(spec, "type", "place") + + def test_place_has_fields(self, place_class: type[BaseModel]) -> None: + """Place model should have fields.""" + spec = extract_model(place_class) + assert len(spec.fields) > 0 + + +class TestExtractDivisionModel: + """Tests for extracting Division model.""" + + def test_extract_division_theme_type(self, division_class: type[BaseModel]) -> None: + """Division should have theme='divisions', type='division' as Literal fields.""" + spec = extract_model(division_class) + assert_literal_field(spec, "theme", "divisions") + assert_literal_field(spec, "type", "division") + + +class TestFieldTypeAnalysis: + """Tests that analyze_type handles real model field types correctly.""" + + def test_no_analyze_type_crashes(self, all_discovered_models: dict) -> None: + """extract_model should not crash on any discovered model.""" + for model_class in filter_model_classes(all_discovered_models): + spec = extract_model(model_class) + assert spec.name == model_class.__name__ + + def test_all_field_types_resolved(self, all_discovered_models: dict) -> None: + """All fields should have resolved TypeInfo.""" + for model_class in filter_model_classes(all_discovered_models): + spec = extract_model(model_class) + for field in spec.fields: + assert field.type_info.base_type, ( + f"No base_type for {spec.name}.{field.name}" + ) + assert field.type_info.kind in TypeKind, ( + f"Invalid kind for {spec.name}.{field.name}" + ) + + +class TestMarkdownRenderingRealModels: + """Tests for markdown rendering with real models.""" + + def test_render_building_content(self, building_class: type[BaseModel]) -> None: + """Building renders with title, field table, and expected fields.""" + markdown = render_feature(extract_model(building_class)) + + assert "# Building" in markdown + assert "| Name |" in markdown + assert "| Type |" in markdown + assert "id" in markdown + assert "geometry" in markdown + + def 
test_render_all_models_without_crash(self, all_discovered_models: dict) -> None: + """render_feature should not crash on any discovered model.""" + for model_class in filter_model_classes(all_discovered_models): + markdown = render_feature(extract_model(model_class)) + assert isinstance(markdown, str) + assert len(markdown) > 0 + + +class TestDiscriminatedUnions: + """Tests for discriminated union types like Segment. + + Segment is registered as a discriminated union (type alias), not a class. + The extraction layer handles the individual union members (RoadSegment, + RailSegment, WaterSegment) but not the union itself. + """ + + def test_segment_is_not_a_class(self) -> None: + """Segment discovery returns a type alias, not a class.""" + models = discover_models() + segment_entries = [ + (k, v) for k, v in models.items() if "segment" in str(k).lower() + ] + + assert len(segment_entries) == 1 + _key, segment = segment_entries[0] + + assert not isinstance(segment, type) + + def test_individual_segment_types_extractable(self) -> None: + """Individual segment member types have expected theme/type literals.""" + spec = extract_union("Segment", Segment) + for member_cls in spec.members: + member_spec = extract_model(member_cls) + assert_literal_field(member_spec, "theme", "transportation") + assert_literal_field(member_spec, "type", "segment") + + def test_road_segment_has_road_specific_fields(self) -> None: + """RoadSegment should have road-specific fields.""" + spec = extract_model(RoadSegment) + field_names = {f.name for f in spec.fields} + + assert "subtype" in field_names + + +class TestSegmentUnionExtraction: + """Tests for extracting the real Segment discriminated union.""" + + @pytest.fixture + def segment_spec(self) -> UnionSpec: + """Extract Segment union spec.""" + return extract_union("Segment", Segment) + + def test_segment_extract_union_succeeds(self, segment_spec: UnionSpec) -> None: + """extract_union works on the real Segment type alias.""" + assert 
segment_spec.name == "Segment" + assert len(segment_spec.members) == 3 + + def test_segment_has_shared_fields(self, segment_spec: UnionSpec) -> None: + """Segment UnionSpec has shared fields from TransportationSegment.""" + shared = [ + af for af in segment_spec.annotated_fields if af.variant_sources is None + ] + shared_names = {af.field_spec.name for af in shared} + # All segments share these base fields + assert "geometry" in shared_names + assert "subtype" in shared_names + assert "id" in shared_names + + def test_segment_has_variant_fields(self, segment_spec: UnionSpec) -> None: + """Segment UnionSpec has variant-specific fields.""" + variant = [ + af for af in segment_spec.annotated_fields if af.variant_sources is not None + ] + variant_names = {af.field_spec.name for af in variant} + # RoadSegment has these specific fields + assert "road_flags" in variant_names + assert "road_surface" in variant_names + assert len(variant_names) > 0 + + def test_segment_discriminator_extracted_from_callable( + self, segment_spec: UnionSpec + ) -> None: + """Segment callable discriminator is resolved via _field_name.""" + assert segment_spec.discriminator_field == "subtype" + assert segment_spec.discriminator_mapping is not None + assert len(segment_spec.discriminator_mapping) == 3 + # Keys are str(enum_member), e.g. 
"Subtype.ROAD" + road_key = next(k for k in segment_spec.discriminator_mapping if "ROAD" in k) + assert segment_spec.discriminator_mapping[road_key] is RoadSegment + + def test_segment_common_base_is_base_model(self, segment_spec: UnionSpec) -> None: + """Segment common_base is the shared base class.""" + assert segment_spec.common_base is not None + assert issubclass(segment_spec.common_base, BaseModel) + # Verify common base has expected fields + assert "geometry" in segment_spec.common_base.model_fields + assert "id" in segment_spec.common_base.model_fields + + +class TestPydanticTypePages: + """End-to-end: pipeline produces pages for referenced Pydantic built-in types.""" + + _SCHEMA_ROOT = "overture.schema" + + @pytest.fixture(scope="class") + def pages(self) -> list: + """Generate all pages from real discovered models.""" + models = discover_models() + feature_specs: list[FeatureSpec] = [] + for key, entry in models.items(): + if is_model_class(entry): + feature_specs.append(extract_model(entry, entry_point=key.entry_point)) + elif is_union_alias(entry): + feature_specs.append( + extract_union( + entry_point_class(key.entry_point), + entry, + entry_point=key.entry_point, + ) + ) + return generate_markdown_pages(feature_specs, self._SCHEMA_ROOT) + + def test_http_url_page_exists(self, pages: list) -> None: + """Pipeline produces a page for HttpUrl under pydantic/networks/.""" + paths = {str(p.path) for p in pages} + assert any("pydantic/networks/http_url" in path for path in paths) + + def test_email_str_page_exists(self, pages: list) -> None: + """Pipeline produces a page for EmailStr under pydantic/networks/.""" + paths = {str(p.path) for p in pages} + assert any("pydantic/networks/email_str" in path for path in paths) + + def test_http_url_page_content(self, pages: list) -> None: + """HttpUrl page has expected heading and Pydantic docs link.""" + page = next(p for p in pages if "pydantic/networks/http_url" in str(p.path)) + assert "# HttpUrl" in 
page.content + assert "docs.pydantic.dev" in page.content + + def test_place_links_to_http_url(self, pages: list) -> None: + """Place feature page links to the HttpUrl type page.""" + place_page = next(p for p in pages if p.path.stem == "place" and p.is_feature) + assert "HttpUrl" in place_page.content diff --git a/packages/overture-schema-codegen/tests/test_markdown_renderer.py b/packages/overture-schema-codegen/tests/test_markdown_renderer.py new file mode 100644 index 000000000..5ebddcab9 --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_markdown_renderer.py @@ -0,0 +1,1436 @@ +"""Tests for Markdown renderer.""" + +from collections.abc import Callable +from enum import Enum +from pathlib import PurePosixPath +from typing import Annotated, Literal, NewType + +import pytest +from annotated_types import Ge, Interval +from codegen_test_support import ( + EMAIL_STR_SPEC, + HTTP_URL_SPEC, + STR_TYPE, + CommonNames, + FeatureBase, + FeatureWithAddress, + FeatureWithSources, + Instrument, + SimpleModel, + SourceItem, + Sources, + TreeNode, + Venue, + make_union_spec, +) +from overture.schema.codegen.extraction.examples import ExampleRecord +from overture.schema.codegen.extraction.model_extraction import ( + expand_model_tree, + extract_model, +) +from overture.schema.codegen.extraction.newtype_extraction import extract_newtype +from overture.schema.codegen.extraction.specs import ( + AnnotatedField, + EnumMemberSpec, + EnumSpec, + FieldSpec, + NumericSpec, + TypeIdentity, +) +from overture.schema.codegen.extraction.type_analyzer import ConstraintSource +from overture.schema.codegen.markdown.link_computation import LinkContext +from overture.schema.codegen.markdown.renderer import ( + _format_constraint, + _format_example_value, + _linkify_bare_urls, + _sanitize_for_table_cell, + render_enum, + render_feature, + render_newtype, + render_primitives_from_specs, + render_pydantic_type, +) +from overture.schema.codegen.markdown.reverse_references import 
UsedByEntry, UsedByKind +from overture.schema.system.field_constraint import ( + CountryCodeAlpha2Constraint, + JsonPointerConstraint, + UniqueItemsConstraint, +) +from overture.schema.system.model_constraint import no_extra_fields +from overture.schema.system.primitive import int32 +from overture.schema.system.ref import Id +from overture.schema.system.string import HexColor, NoWhitespaceString +from pydantic import BaseModel, Field + +_FLAT_MEMBER = EnumMemberSpec(name="FLAT", value="flat", description=None) + +_ROOF_SHAPE_SPEC = EnumSpec( + name="RoofShape", + description="The shape of the roof.", + members=[_FLAT_MEMBER], +) + + +class TestSanitizeForTableCell: + """Tests for _sanitize_for_table_cell.""" + + def test_single_line_unchanged(self) -> None: + """Single-line text passes through unchanged.""" + assert ( + _sanitize_for_table_cell("A simple description.") == "A simple description." + ) + + def test_single_newline_becomes_space(self) -> None: + """Single newline within a paragraph becomes a space.""" + assert _sanitize_for_table_cell("Line one.\nLine two.") == "Line one. Line two." + + def test_blank_line_becomes_double_br(self) -> None: + """Blank line (paragraph break) becomes
<br><br>
.""" + assert ( + _sanitize_for_table_cell("Para one.\n\nPara two.") + == "Para one.
<br><br>
Para two." + ) + + def test_blank_line_with_whitespace(self) -> None: + """Blank line containing only whitespace is treated as blank.""" + assert ( + _sanitize_for_table_cell("Para one.\n \nPara two.") + == "Para one.
<br><br>
Para two." + ) + + def test_multiple_blank_lines_collapsed(self) -> None: + """Multiple consecutive blank lines collapse to one
<br><br>
.""" + assert _sanitize_for_table_cell("A.\n\n\nB.") == "A.
<br><br>
B." + + def test_pipe_escaped(self) -> None: + """Pipe characters escaped to avoid breaking table columns.""" + assert _sanitize_for_table_cell("foo | bar") == "foo \\| bar" + + def test_pipe_and_newline_both_handled(self) -> None: + """Pipes and newlines handled together.""" + assert _sanitize_for_table_cell("a | b\nc | d") == "a \\| b c \\| d" + + def test_strips_leading_trailing_whitespace(self) -> None: + """Leading/trailing whitespace stripped.""" + assert _sanitize_for_table_cell(" hello ") == "hello" + + +class TestLinkifyBareUrls: + """Tests for _linkify_bare_urls.""" + + def test_www_url_gets_linked(self) -> None: + """www. URLs become Markdown links with https:// href.""" + assert ( + _linkify_bare_urls("see www.example.com for details") + == "see [www.example.com](https://www.example.com) for details" + ) + + def test_https_url_gets_linked(self) -> None: + """https:// URLs become self-referencing Markdown links.""" + assert ( + _linkify_bare_urls("see https://example.com/path") + == "see [https://example.com/path](https://example.com/path)" + ) + + def test_http_url_gets_linked(self) -> None: + """http:// URLs become self-referencing Markdown links.""" + assert ( + _linkify_bare_urls("see http://example.com") + == "see [http://example.com](http://example.com)" + ) + + def test_existing_markdown_link_unchanged(self) -> None: + """URLs already inside [text](url) are left alone.""" + text = "[example](https://example.com)" + assert _linkify_bare_urls(text) == text + + def test_text_without_urls_unchanged(self) -> None: + """Plain text passes through unchanged.""" + assert _linkify_bare_urls("no urls here") == "no urls here" + + def test_url_in_parentheses(self) -> None: + """URL inside sentence parentheses gets linked.""" + result = _linkify_bare_urls("from the OA (www.openaddresses.io) project") + assert "[www.openaddresses.io](https://www.openaddresses.io)" in result + + def test_trailing_period_excluded(self) -> None: + """Trailing sentence punctuation 
is not part of the URL.""" + assert ( + _linkify_bare_urls("found on https://www.wikidata.org/.") + == "found on [https://www.wikidata.org/](https://www.wikidata.org/)." + ) + + def test_trailing_comma_excluded(self) -> None: + """Trailing comma is not part of the URL.""" + assert ( + _linkify_bare_urls("see https://example.com, and more") + == "see [https://example.com](https://example.com), and more" + ) + + def test_url_in_backtick_code_span_unchanged(self) -> None: + """URLs inside backtick code spans are not linkified.""" + text = "use `https://example.com` as the base" + assert _linkify_bare_urls(text) == text + + def test_url_in_double_backtick_code_span_unchanged(self) -> None: + """URLs inside double-backtick code spans are not linkified.""" + text = "use ``https://example.com/path`` as the base" + assert _linkify_bare_urls(text) == text + + def test_mixed_code_span_and_bare_url(self) -> None: + """Code-span URLs preserved while bare URLs are linkified.""" + text = "see `https://a.com` and https://b.com" + result = _linkify_bare_urls(text) + assert "`https://a.com`" in result + assert "[https://b.com](https://b.com)" in result + + +class TestRenderFeatureBasic: + """Tests for render_feature with basic models.""" + + def test_renders_title_from_model_name(self) -> None: + """Should render model name as H1 title.""" + spec = extract_model(SimpleModel) + result = render_feature(spec) + + assert "# SimpleModel" in result + + def test_renders_description_from_docstring(self) -> None: + """Should render model docstring as description.""" + + class DescribedModel(BaseModel): + """This is the model description.""" + + value: int + + spec = extract_model(DescribedModel) + result = render_feature(spec) + + assert "This is the model description." 
in result + + def test_renders_fields_section(self) -> None: + """Should include Fields section header.""" + + class ModelWithField(BaseModel): + """Model with a field.""" + + name: str + + spec = extract_model(ModelWithField) + result = render_feature(spec) + + assert "## Fields" in result + + def test_renders_field_table_header(self) -> None: + """Should render field table with proper headers.""" + + class ModelWithField(BaseModel): + """Model with a field.""" + + name: str + + spec = extract_model(ModelWithField) + result = render_feature(spec) + + assert "| Name | Type | Description |" in result + assert "| -----: | :----: | ------------- |" in result + + +class TestRenderFeatureFieldTable: + """Tests for field table rendering.""" + + def test_renders_required_field(self) -> None: + """Should render required field without (optional) suffix.""" + + class ModelWithRequired(BaseModel): + """Model with required field.""" + + name: str = Field(description="The name") + + spec = extract_model(ModelWithRequired) + result = render_feature(spec) + + assert "| `name` |" in result + assert "| `string` |" in result + assert "The name" in result + + def test_renders_optional_field(self) -> None: + """Should render optional field with (optional) suffix.""" + + class ModelWithOptional(BaseModel): + """Model with optional field.""" + + nickname: str | None = Field(None, description="Optional nickname") + + spec = extract_model(ModelWithOptional) + result = render_feature(spec) + + assert "| `nickname` |" in result + assert "(optional)" in result + assert "Optional nickname" in result + + def test_renders_typed_fields(self) -> None: + """Should render field types correctly.""" + + class ModelWithTypes(BaseModel): + """Model with various types.""" + + count: int + price: float + active: bool + + spec = extract_model(ModelWithTypes) + result = render_feature(spec) + + # Check that fields are present (exact type format may vary) + assert "`count`" in result + assert "`price`" in 
result + assert "`active`" in result + + def test_multiline_description_sanitized_in_table(self) -> None: + """Multiline field description rendered with
<br> in table cell."""
+
+        class ModelWithMultilineDesc(BaseModel):
+            """Model."""
+
+            name: str = Field(description="First line.\n\nSecond paragraph.")
+
+        spec = extract_model(ModelWithMultilineDesc)
+        result = render_feature(spec)
+
+        assert "First line.
<br><br>
Second paragraph." in result + # The table should not be broken by a blank line + lines = result.splitlines() + table_start = next(i for i, line in enumerate(lines) if "| Name |" in line) + for i in range(table_start, len(lines)): + if lines[i].strip() == "": + break + assert lines[i].startswith("|"), f"Table broken at line {i}: {lines[i]}" + + +class TestRenderFeatureWithThemeType: + """Tests for rendering Feature-like models with theme/type.""" + + def test_renders_theme_and_type_fields(self) -> None: + """Should render theme and type as Literal fields.""" + + class Place(FeatureBase[Literal["places"], Literal["place"]]): + """A place feature.""" + + name: str + + spec = extract_model(Place) + result = render_feature(spec) + + # Theme and type should appear somewhere in output + assert "places" in result + assert "place" in result + + +class TestRenderFeatureLiteralField: + """Tests for rendering Literal-typed fields.""" + + def test_literal_field_renders_as_quoted_value(self) -> None: + """Literal field should render as quoted string in backticks.""" + + class TestFeature(FeatureBase[Literal["test_theme"], Literal["test_type"]]): + """Test feature.""" + + name: str + + spec = extract_model(TestFeature) + result = render_feature(spec) + + assert '| `"test_theme"` |' in result + assert '| `"test_type"` |' in result + + +class TestRenderFeatureNewTypeDisplay: + """Tests for NewType rendering in Markdown.""" + + def test_newtype_wrapping_list_renders_name_with_list_qualifier( + self, + ) -> None: + """NewType wrapping a list renders as name with (list, optional).""" + + class Item(BaseModel): + value: str + + TestSources = NewType( + "TestSources", Annotated[list[Item], UniqueItemsConstraint()] + ) + + class ModelWithSources(BaseModel): + """Model with sources.""" + + sources: TestSources | None = None + + spec = extract_model(ModelWithSources) + expand_model_tree(spec) + result = render_feature(spec) + + assert "`TestSources`" in result + assert "(list, optional)" 
in result + + def test_hex_color_renders_as_newtype_name(self) -> None: + """HexColor (unregistered NewType) renders as code-formatted name.""" + + class ModelWithColor(BaseModel): + """Model with color.""" + + color: HexColor | None = None + + spec = extract_model(ModelWithColor) + result = render_feature(spec) + + assert "`HexColor`" in result + assert "(optional)" in result + + def test_registered_primitive_renders_through_registry(self) -> None: + """Registered primitive (int32) renders via registry, not as NewType link.""" + + class ModelWithCount(BaseModel): + """Model with count.""" + + count: int32 + + spec = extract_model(ModelWithCount) + result = render_feature(spec) + + assert "| `int32` |" in result + # Should NOT be linked + assert "](int32.md)" not in result + + def test_plain_str_renders_as_string(self) -> None: + """Plain str field renders as 'string'.""" + + class ModelWithName(BaseModel): + """Model with name.""" + + name: str + + spec = extract_model(ModelWithName) + result = render_feature(spec) + + assert "| `string` |" in result + + def test_enum_renders_as_code_without_context(self) -> None: + """Enum fields render as inline code without LinkContext.""" + + class Status(str, Enum): + ACTIVE = "active" + + class ModelWithEnum(BaseModel): + """Model with enum.""" + + status: Status + + spec = extract_model(ModelWithEnum) + result = render_feature(spec) + + assert "| `Status` |" in result + + def test_model_field_renders_as_code_without_context(self) -> None: + """BaseModel field renders as inline code without LinkContext.""" + + class Inner(BaseModel): + value: str + + class Outer(BaseModel): + """Model with nested model.""" + + inner: Inner + + spec = extract_model(Outer) + expand_model_tree(spec) + result = render_feature(spec) + + assert "| `Inner` |" in result + + +class TestRenderFeatureInlineExpansion: + """Tests for inline expansion of nested model fields.""" + + def test_direct_model_fields_expanded_with_dot_prefix(self) -> None: + 
"""Direct model field expands sub-fields with dot notation.""" + spec = extract_model(FeatureWithAddress) + expand_model_tree(spec) + result = render_feature(spec) + + assert "| `address.street` |" in result + assert "| `address.city` |" in result + assert "| `address.zip_code` |" in result + + def test_list_of_model_fields_expanded_with_bracket_dot_prefix(self) -> None: + """List-of-model field expands sub-fields with []. notation.""" + spec = extract_model(FeatureWithSources) + expand_model_tree(spec) + result = render_feature(spec) + + assert "| `sources[]` |" in result + assert "| `sources[].dataset` |" in result + + def test_cycle_detection_prevents_infinite_recursion(self) -> None: + """Recursive model emits parent row but does not recurse.""" + spec = extract_model(TreeNode) + expand_model_tree(spec) + result = render_feature(spec) + + # The parent field row appears + assert "| `parent` |" in result + # But no recursion into parent.label + assert "parent.label" not in result + + def test_primitive_field_unchanged(self) -> None: + """Primitive fields produce a single row without expansion.""" + spec = extract_model(SimpleModel) + result = render_feature(spec) + + lines = [line for line in result.splitlines() if "| `name` |" in line] + assert len(lines) == 1 + + def test_parent_row_preserved_before_expansion(self) -> None: + """The parent field row still appears before expanded sub-fields.""" + spec = extract_model(FeatureWithAddress) + expand_model_tree(spec) + result = render_feature(spec) + + # Parent row for 'address' itself appears + assert "| `address` |" in result + # And it appears before the expanded fields + lines = result.splitlines() + address_line = next( + i for i, line in enumerate(lines) if "| `address` |" in line + ) + street_line = next( + i for i, line in enumerate(lines) if "| `address.street` |" in line + ) + assert address_line < street_line + + +class TestRenderFeatureConstraints: + """Tests for model-level constraint rendering in 
feature pages.""" + + def test_venue_has_constraints_section(self) -> None: + """Venue's @require_any_of renders as a Constraints section.""" + spec = extract_model(Venue) + result = render_feature(spec) + + assert "## Constraints" in result + assert "At least one of `name`, `description` must be set" in result + + def test_constraints_section_between_fields_and_examples(self) -> None: + """Constraints section appears after Fields, before Examples.""" + spec = extract_model(Venue) + examples = [ExampleRecord(rows=[("name", "test")])] + result = render_feature(spec, examples=examples) + + lines = result.splitlines() + fields_line = next(i for i, line in enumerate(lines) if "## Fields" in line) + constraints_line = next( + i for i, line in enumerate(lines) if "## Constraints" in line + ) + examples_line = next(i for i, line in enumerate(lines) if "## Examples" in line) + + assert fields_line < constraints_line < examples_line + + def test_no_constraints_section_without_constraints(self) -> None: + """Models without model-level constraints omit Constraints section.""" + + class Plain(BaseModel): + """Plain model.""" + + name: str + + spec = extract_model(Plain) + result = render_feature(spec) + + assert "## Constraints" not in result + + def test_no_constraints_section_with_only_no_extra_fields(self) -> None: + """Model with only @no_extra_fields omits Constraints section.""" + + @no_extra_fields + class Strict(BaseModel): + """Strict model.""" + + name: str + + spec = extract_model(Strict) + result = render_feature(spec) + + assert "## Constraints" not in result + + +class TestRenderFeatureConstraintNotes: + """Tests for inline constraint notes in field description cells.""" + + def test_venue_name_field_includes_constraint_note(self) -> None: + """Venue's name field description cell includes constraint note in italics.""" + spec = extract_model(Venue) + result = render_feature(spec) + + # Find the row for 'name' field + lines = result.splitlines() + name_line = 
next(line for line in lines if "| `name` |" in line)
+        assert "Venue name" in name_line
+        assert "*At least one of `name`, `description` must be set*" in name_line
+        assert "<br>
" in name_line + + def test_field_with_no_description_gets_constraint_note(self) -> None: + """Field with no existing description still gets the constraint note.""" + spec = extract_model(Venue) + result = render_feature(spec) + + # description field on Venue has no Field(description=...) + lines = result.splitlines() + desc_line = next(line for line in lines if "| `description` |" in line) + assert "*At least one of `name`, `description` must be set*" in desc_line + + +class TestRenderFeatureFieldConstraints: + """Tests for field-level constraint annotation from TypeInfo.""" + + def test_venue_geometry_shows_allowed_types(self) -> None: + """Venue's geometry field shows GeometryTypeConstraint as a note.""" + spec = extract_model(Venue) + expand_model_tree(spec) + result = render_feature(spec) + + lines = result.splitlines() + geo_line = next(line for line in lines if "| `geometry` |" in line) + assert "*Allowed geometry types: Point, Polygon*" in geo_line + + def test_venue_reference_links_when_context_available(self) -> None: + """Reference constraint links the target type when LinkContext has the page.""" + spec = extract_model(Venue) + expand_model_tree(spec) + ctx = LinkContext( + page_path=PurePosixPath("music/venue.md"), + registry={ + TypeIdentity(Instrument, "Instrument"): PurePosixPath( + "music/instrument.md" + ) + }, + ) + result = render_feature(spec, link_ctx=ctx) + + lines = result.splitlines() + ref_line = next(line for line in lines if "| `resident_ensemble` |" in line) + assert "[`Instrument`](instrument.md)" in ref_line + assert "belongs to" in ref_line + + def test_venue_reference_unlinked_without_context(self) -> None: + """Reference constraint renders as plain code when no LinkContext.""" + spec = extract_model(Venue) + expand_model_tree(spec) + result = render_feature(spec) + + lines = result.splitlines() + ref_line = next(line for line in lines if "| `resident_ensemble` |" in line) + assert "References `Instrument`" in ref_line + assert 
"belongs to" in ref_line + + +class TestRenderEnumBasic: + """Tests for render_enum with simple enums.""" + + def test_renders_title_from_enum_name(self) -> None: + """Should render enum name as H1 title.""" + result = render_enum(_ROOF_SHAPE_SPEC) + + assert "# RoofShape" in result + + def test_renders_description_from_docstring(self) -> None: + """Should render enum docstring as description.""" + result = render_enum(_ROOF_SHAPE_SPEC) + + assert "The shape of the roof." in result + + def test_renders_values_section(self) -> None: + """Should include Values section header.""" + result = render_enum(_ROOF_SHAPE_SPEC) + + assert "## Values" in result + + def test_renders_values_as_bullet_list(self) -> None: + """Should render each value as a bullet point.""" + spec = EnumSpec( + name="RoofShape", + description="The shape of the roof.", + members=[ + EnumMemberSpec(name="FLAT", value="flat", description=None), + EnumMemberSpec(name="GABLED", value="gabled", description=None), + EnumMemberSpec(name="DOME", value="dome", description=None), + ], + ) + + result = render_enum(spec) + + assert "- `flat`" in result + assert "- `gabled`" in result + assert "- `dome`" in result + + +class TestRenderEnumDocumented: + """Tests for render_enum with DocumentedEnum (per-value descriptions).""" + + def test_renders_member_descriptions(self) -> None: + """Should render per-value descriptions after the value.""" + spec = EnumSpec( + name="Side", + description="The side on which something appears.", + members=[ + EnumMemberSpec( + name="LEFT", value="left", description="On the left side" + ), + EnumMemberSpec( + name="RIGHT", value="right", description="On the right side" + ), + ], + ) + + result = render_enum(spec) + + assert "- `left` - On the left side" in result + assert "- `right` - On the right side" in result + + def test_renders_mixed_documented_undocumented(self) -> None: + """Should handle mix of documented and undocumented members.""" + spec = EnumSpec( + 
name="ConnectionState", + description="Connection states.", + members=[ + EnumMemberSpec(name="CONNECTED", value="connected", description=None), + EnumMemberSpec( + name="QUIESCING", + value="quiescing", + description="Gracefully shutting down", + ), + ], + ) + + result = render_enum(spec) + + # Undocumented: just the value + assert "- `connected`" in result + # Documented: value + description + assert "- `quiescing` - Gracefully shutting down" in result + + +class TestRenderEnumNoDescription: + """Tests for enums without class docstrings.""" + + def test_enum_without_description(self) -> None: + """Should render enum without description section when None.""" + spec = EnumSpec( + name="SimpleEnum", + description=None, + members=[ + EnumMemberSpec(name="A", value="a", description=None), + EnumMemberSpec(name="B", value="b", description=None), + ], + ) + + result = render_enum(spec) + + # Should still have title and values + assert "# SimpleEnum" in result + assert "## Values" in result + assert "- `a`" in result + assert "- `b`" in result + # Should not have empty lines where description would be + lines = result.strip().split("\n") + # Title should be followed by blank line then Values header + assert lines[0] == "# SimpleEnum" + + +class TestRenderNewType: + """Tests for render_newtype.""" + + def test_renders_title(self) -> None: + """Should render NewType name as H1 title.""" + spec = extract_newtype(HexColor) + result = render_newtype(spec) + + assert "# HexColor" in result + + def test_renders_underlying_type(self) -> None: + """Should show the resolved underlying type below the description.""" + spec = extract_newtype(HexColor) + result = render_newtype(spec) + + assert "# HexColor\n" in result + assert "Underlying type: `string`" in result + + def test_renders_constraints(self) -> None: + """Should render constraints section with description and pattern.""" + spec = extract_newtype(HexColor) + result = render_newtype(spec) + + assert "## Constraints" in 
result + assert "Allows only hexadecimal color codes" in result + assert "`HexColorConstraint`" in result + assert "pattern:" in result + + def test_renders_id_with_provenance_without_link(self) -> None: + """Id page shows constraints without provenance links when no context.""" + spec = extract_newtype(Id) + result = render_newtype(spec) + + assert "# Id" in result + assert "NoWhitespaceConstraint" in result + # No link without LinkContext + assert "no_whitespace_string.md" not in result + + def test_builtin_underlying_type_not_linked(self) -> None: + """Built-in underlying type (string) stays in plain backticks.""" + spec = extract_newtype(HexColor) + result = render_newtype(spec) + + assert "Underlying type: `string`" in result + + def test_list_model_underlying_type_without_context(self) -> None: + """List-of-model underlying type renders without link when no context.""" + spec = extract_newtype(Sources) + result = render_newtype(spec) + + assert "Underlying type: `list`" in result + + def test_dict_underlying_types_without_context(self) -> None: + """Dict key/value NewTypes render without links when no context.""" + spec = extract_newtype(CommonNames) + result = render_newtype(spec) + + assert "map" in result + + +class TestPlacementAwareLinks: + """Tests for rendering with LinkContext for cross-directory links.""" + + def test_feature_links_to_shared_type_via_registry(self) -> None: + """Feature in theme subdir links to shared type in types/ dir.""" + + class ModelWithColor(BaseModel): + """Model with color.""" + + color: HexColor | None = None + + spec = extract_model(ModelWithColor) + page_path = PurePosixPath("buildings/building/building.md") + ctx = LinkContext( + page_path, + { + TypeIdentity(HexColor, "HexColor"): PurePosixPath( + "types/strings/hex_color.md" + ) + }, + ) + + result = render_feature(spec, link_ctx=ctx) + + assert "[`HexColor`](../../types/strings/hex_color.md)" in result + + def test_feature_links_to_theme_level_type(self) -> None: + 
"""Feature in subdir links to type at theme level.""" + + class RoofShape(str, Enum): + FLAT = "flat" + + class ModelWithRoof(BaseModel): + """Model with roof.""" + + roof: RoofShape + + spec = extract_model(ModelWithRoof) + page_path = PurePosixPath("buildings/building/building.md") + ctx = LinkContext( + page_path, + { + TypeIdentity(RoofShape, "RoofShape"): PurePosixPath( + "buildings/roof_shape.md" + ) + }, + ) + + result = render_feature(spec, link_ctx=ctx) + + assert "[`RoofShape`](../roof_shape.md)" in result + + def test_feature_links_to_sibling_in_same_subdir(self) -> None: + """Feature links to type in its own subdirectory.""" + + class BuildingClass(str, Enum): + RESIDENTIAL = "residential" + + class ModelWithClass(BaseModel): + """Model.""" + + building_class: BuildingClass + + spec = extract_model(ModelWithClass) + page_path = PurePosixPath("buildings/building/building.md") + ctx = LinkContext( + page_path, + { + TypeIdentity(BuildingClass, "BuildingClass"): PurePosixPath( + "buildings/building/building_class.md" + ) + }, + ) + + result = render_feature(spec, link_ctx=ctx) + + assert "[`BuildingClass`](building_class.md)" in result + + def test_without_context_renders_as_code(self) -> None: + """Without LinkContext, types render as inline code (no link).""" + + class ModelWithColor(BaseModel): + """Model with color.""" + + color: HexColor | None = None + + spec = extract_model(ModelWithColor) + result = render_feature(spec) + + assert "`HexColor`" in result + assert "hex_color.md" not in result + + def test_newtype_underlying_type_linked_via_registry(self) -> None: + """NewType header links underlying model type through placement registry.""" + spec = extract_newtype(Sources) + page_path = PurePosixPath("types/references/sources.md") + ctx = LinkContext( + page_path, + { + TypeIdentity(SourceItem, "SourceItem"): PurePosixPath( + "types/references/source_item.md" + ) + }, + ) + + result = render_newtype(spec, link_ctx=ctx) + + assert 
"[`SourceItem`](source_item.md)" in result + + def test_newtype_underlying_type_not_linked_when_absent(self) -> None: + """Underlying type stays backtick-only when missing from registry.""" + spec = extract_newtype(Sources) + page_path = PurePosixPath("types/references/sources.md") + ctx = LinkContext(page_path, {}) + + result = render_newtype(spec, link_ctx=ctx) + + assert "`list`" in result + assert "[`SourceItem`]" not in result + + def test_newtype_provenance_link_uses_registry(self) -> None: + """NewType provenance links resolve through placement registry.""" + spec = extract_newtype(Id) + page_path = PurePosixPath("types/references/id.md") + registry = { + TypeIdentity(NoWhitespaceString, "NoWhitespaceString"): PurePosixPath( + "types/strings/no_whitespace_string.md" + ), + } + ctx = LinkContext(page_path, registry) + + result = render_newtype(spec, link_ctx=ctx) + + assert "../strings/no_whitespace_string.md" in result + + +class TestFormatExampleValue: + """Tests for _format_example_value.""" + + def test_none_renders_as_null(self) -> None: + """None renders as backtick-quoted null.""" + + assert _format_example_value(None) == "`null`" + + def test_string_null_renders_with_backticks(self) -> None: + """String 'null' renders as a backtick-wrapped string.""" + + assert _format_example_value("null") == "`null`" + + def test_bool_true_renders_lowercase(self) -> None: + """Boolean True renders as backtick-quoted lowercase true.""" + + assert _format_example_value(True) == "`true`" + + def test_bool_false_renders_lowercase(self) -> None: + """Boolean False renders as backtick-quoted lowercase false.""" + + assert _format_example_value(False) == "`false`" + + def test_empty_string_renders_empty(self) -> None: + """Empty string renders as empty string.""" + + assert _format_example_value("") == "" + + def test_short_string_has_backticks(self) -> None: + """Non-empty strings render with backticks.""" + + assert _format_example_value("OpenStreetMap") == 
"`OpenStreetMap`" + + def test_long_string_truncated(self) -> None: + """Strings longer than 100 chars are truncated with ellipsis.""" + + long = "x" * 150 + result = _format_example_value(long) + assert result == f"`{'x' * 97}...`" + assert len(result) == 100 + 2 # 100 content + 2 backticks + + def test_integer_has_backticks(self) -> None: + """Integers render with backticks.""" + + assert _format_example_value(42) == "`42`" + assert _format_example_value(0) == "`0`" + assert _format_example_value(-17) == "`-17`" + + def test_float_has_backticks(self) -> None: + """Floats render with backticks.""" + + assert _format_example_value(3.14) == "`3.14`" + assert _format_example_value(-2.5) == "`-2.5`" + + def test_list_renders_comma_separated(self) -> None: + """Lists render as backtick-wrapped comma-separated values.""" + + assert _format_example_value([1, 2, 3]) == "`[1, 2, 3]`" + assert _format_example_value(["a", "b"]) == '`["a", "b"]`' + assert _format_example_value([]) == "`[]`" + + def test_long_list_truncated(self) -> None: + """Lists longer than truncation limit are truncated with ellipsis.""" + long_list = list(range(200)) + result = _format_example_value(long_list) + assert result.startswith("`[0, 1, 2,") + assert result.endswith("...`") + inner = result[1:-1] # strip backticks + assert len(inner) <= 100 + + def test_long_dict_truncated(self) -> None: + """Dicts longer than truncation limit are truncated with ellipsis.""" + long_dict = {f"key_{i}": f"value_{i}" for i in range(50)} + result = _format_example_value(long_dict) + assert result.startswith('`{"key_0":') + assert result.endswith("...`") + inner = result[1:-1] + assert len(inner) <= 100 + + def test_pipe_character_not_escaped_in_backticks(self) -> None: + """Pipe characters need no escaping inside backticks.""" + + assert _format_example_value("foo|bar") == "`foo|bar`" + assert _format_example_value("a|b|c") == "`a|b|c`" + + +class TestRenderFeatureWithExamples: + """Tests for render_feature with 
examples support.""" + + def test_accepts_examples_parameter(self) -> None: + """render_feature accepts examples parameter.""" + spec = extract_model(SimpleModel) + examples = [ExampleRecord(rows=[("name", "test")])] + + # Should not raise + result = render_feature(spec, examples=examples) + assert "# SimpleModel" in result + + def test_renders_single_example_without_heading(self) -> None: + """Single example renders without 'Example 1' heading.""" + + class ModelWithCount(BaseModel): + """A simple model.""" + + name: str + count: int + + spec = extract_model(ModelWithCount) + examples = [ExampleRecord(rows=[("name", "test"), ("count", 42)])] + + result = render_feature(spec, examples=examples) + assert "## Examples" in result + assert "| Column | Value |" in result + assert "| `name` | `test` |" in result + assert "| `count` | `42` |" in result + # Should NOT have "Example 1" heading + assert "### Example 1" not in result + + def test_renders_multiple_examples_with_headings(self) -> None: + """Multiple examples render with 'Example N' headings.""" + spec = extract_model(SimpleModel) + examples = [ + ExampleRecord(rows=[("name", "first")]), + ExampleRecord(rows=[("name", "second")]), + ] + + result = render_feature(spec, examples=examples) + assert "## Examples" in result + assert "### Example 1" in result + assert "### Example 2" in result + assert "| `name` | `first` |" in result + assert "| `name` | `second` |" in result + + def test_formats_example_values(self) -> None: + """Example values are formatted using _format_example_value.""" + + class TestModel(BaseModel): + """Test model.""" + + text: str + count: int + active: bool + optional: str | None + + spec = extract_model(TestModel) + examples = [ + ExampleRecord( + rows=[ + ("text", "hello"), + ("count", 42), + ("active", True), + ("optional", None), + ] + ) + ] + + result = render_feature(spec, examples=examples) + # String with backticks + assert "| `text` | `hello` |" in result + # Number with backticks + 
assert "| `count` | `42` |" in result + # Boolean with backticks, lowercase + assert "| `active` | `true` |" in result + # None as null + assert "| `optional` | `null` |" in result + + def test_no_examples_omits_section(self) -> None: + """When examples is None, Examples section is not rendered.""" + spec = extract_model(SimpleModel) + result = render_feature(spec, examples=None) + + assert "## Examples" not in result + + def test_empty_examples_list_omits_section(self) -> None: + """When examples is empty list, Examples section is not rendered.""" + spec = extract_model(SimpleModel) + result = render_feature(spec, examples=[]) + + assert "## Examples" not in result + + +class TestRenderPrimitivesPage: + """Tests for the aggregate primitives page.""" + + def test_contains_title(self, primitives_markdown: str) -> None: + assert "# Primitive Types" in primitives_markdown + + def test_contains_signed_integers(self, primitives_markdown: str) -> None: + assert "| `int8` |" in primitives_markdown + assert "| `int16` |" in primitives_markdown + assert "| `int32` |" in primitives_markdown + assert "| `int64` |" in primitives_markdown + + def test_contains_unsigned_integers(self, primitives_markdown: str) -> None: + assert "| `uint8` |" in primitives_markdown + assert "| `uint16` |" in primitives_markdown + assert "| `uint32` |" in primitives_markdown + + def test_contains_floats(self, primitives_markdown: str) -> None: + assert "| `float32` |" in primitives_markdown + assert "| `float64` |" in primitives_markdown + + def test_ranges_match_schema_constraints(self, primitives_markdown: str) -> None: + """Range strings derive from ge/le constraints in the schema.""" + assert "-128 to 127" in primitives_markdown + assert "-32,768 to 32,767" in primitives_markdown + assert "-2,147,483,648 to 2,147,483,647" in primitives_markdown + assert "-2^63 to 2^63-1" in primitives_markdown + assert "0 to 255" in primitives_markdown + assert "0 to 65,535" in primitives_markdown + assert "0 
to 4,294,967,295" in primitives_markdown + + def test_descriptions_from_docstrings(self, primitives_markdown: str) -> None: + """Descriptions derive from first line of NewType docstrings.""" + assert "Portable 8-bit signed integer." in primitives_markdown + assert "Portable 16-bit unsigned integer." in primitives_markdown + assert "Portable IEEE 32-bit floating point number." in primitives_markdown + + def test_float_precision(self, primitives_markdown: str) -> None: + """Float entries show IEEE 754 precision.""" + assert "~7 decimal digits" in primitives_markdown + assert "~15 decimal digits" in primitives_markdown + + def test_pipe_in_description_escaped(self) -> None: + """Pipe characters in primitive descriptions are escaped.""" + specs = [ + NumericSpec( + name="int8", + description="Range: -128 | 127", + bounds=Interval(ge=-128, le=127), + ), + ] + result = render_primitives_from_specs(specs) + assert "Range: -128 \\| 127" in result + + +class TestRenderGeometryPage: + """Tests for the aggregate geometry page.""" + + def test_contains_title(self, geometry_markdown: str) -> None: + assert "# Geometry Types" in geometry_markdown + + def test_contains_geometry_types(self, geometry_markdown: str) -> None: + assert "Geometry" in geometry_markdown + assert "BBox" in geometry_markdown + assert "GeometryType" in geometry_markdown + + def test_lists_geometry_type_values(self, geometry_markdown: str) -> None: + assert "`point`" in geometry_markdown or "`POINT`" in geometry_markdown + + +class TestRenderUnionTemplate: + """Tests for UnionSpec template rendering with synthetic specs.""" + + def test_shared_fields_have_no_variant_tag(self) -> None: + """Shared fields render without variant annotation.""" + spec = make_union_spec( + description="A test union.", + annotated_fields=[ + AnnotatedField( + field_spec=FieldSpec( + name="id", + type_info=STR_TYPE, + description="ID", + is_required=True, + ), + variant_sources=None, + ), + ], + ) + result = render_feature(spec) + 
assert "| `id` |" in result + assert "*(" not in result # no variant tag + + def test_variant_fields_have_inline_tag(self) -> None: + """Variant-specific fields get *(Variant)* tag.""" + spec = make_union_spec( + name="Segment", + annotated_fields=[ + AnnotatedField( + field_spec=FieldSpec( + name="speed_limit", + type_info=STR_TYPE, + description=None, + is_required=False, + ), + variant_sources=("RoadSegment",), + ), + ], + ) + result = render_feature(spec) + assert "| `speed_limit` *(Road)* |" in result + + +class TestFormatConstraintDisplay: + """Tests for FieldConstraint display with on-demand description/pattern extraction.""" + + def test_description_and_pattern(self) -> None: + """Constraint with docstring and pattern renders both.""" + cs = ConstraintSource( + source_ref=None, source_name=None, constraint=CountryCodeAlpha2Constraint() + ) + result = _format_constraint(cs, None) + assert "Allows only ISO 3166-1 alpha-2 country codes." in result.display + assert "`CountryCodeAlpha2Constraint`" in result.display + assert "pattern: `^[A-Z]{2}$`" in result.display + + def test_description_without_pattern(self) -> None: + """Constraint with docstring but no pattern renders description only.""" + cs = ConstraintSource( + source_ref=None, source_name=None, constraint=JsonPointerConstraint() + ) + result = _format_constraint(cs, None) + assert "Allows only valid JSON Pointer values (RFC 6901)." 
in result.display + assert "`JsonPointerConstraint`" in result.display + assert "pattern" not in result.display + + def test_no_description_falls_through(self) -> None: + """Plain string metadata has no docstring and falls through.""" + cs = ConstraintSource( + source_ref=None, source_name=None, constraint="plain string metadata" + ) + result = _format_constraint(cs, None) + assert result.display == "`plain string metadata`" + + def test_annotated_types_uses_operator_notation_not_docstring(self) -> None: + """annotated-types constraints use operator notation, not their __doc__.""" + cs = ConstraintSource(source_ref=None, source_name=None, constraint=Ge(ge=0)) + result = _format_constraint(cs, None) + assert result.display == "`≥ 0`" + assert "Ge(ge=x)" not in result.display + + def test_constraint_class_not_linked(self) -> None: + """Constraint class name stays in backticks (no pages generated for constraints).""" + cs = ConstraintSource( + source_ref=None, source_name=None, constraint=CountryCodeAlpha2Constraint() + ) + result = _format_constraint(cs, None) + assert "`CountryCodeAlpha2Constraint`" in result.display + assert "[`CountryCodeAlpha2Constraint`](" not in result.display + + +def _feature_spec() -> object: + return extract_model(SimpleModel) + + +def _enum_spec() -> object: + return _ROOF_SHAPE_SPEC + + +def _newtype_spec() -> object: + return extract_newtype(HexColor) + + +_USED_BY_CASES = [ + pytest.param(_feature_spec, render_feature, id="feature"), + pytest.param(_enum_spec, render_enum, id="enum"), + pytest.param(_newtype_spec, render_newtype, id="newtype"), +] + + +class TestUsedByRendering: + """Tests for rendering 'Used By' section across all render functions.""" + + @pytest.mark.parametrize(("spec_factory", "render_fn"), _USED_BY_CASES) + def test_entries_render_without_links_when_no_context( + self, + spec_factory: Callable[[], object], + render_fn: Callable[..., str], + ) -> None: + """Without LinkContext, 'Used By' entries render as inline 
code.""" + _building = object() + _building_id = object() + used_by = [ + UsedByEntry( + identity=TypeIdentity(_building, "Building"), kind=UsedByKind.MODEL + ), + UsedByEntry( + identity=TypeIdentity(_building_id, "BuildingId"), + kind=UsedByKind.NEWTYPE, + ), + ] + + result = render_fn(spec_factory(), used_by=used_by) + + assert "## Used By" in result + assert "- `Building`" in result + assert "- `BuildingId`" in result + + @pytest.mark.parametrize( + ("spec_factory", "render_fn", "page_path", "expected_link"), + [ + pytest.param( + _feature_spec, + render_feature, + PurePosixPath("types/strings/hex_color.md"), + "../../buildings/building/building.md", + id="feature", + ), + pytest.param( + _enum_spec, + render_enum, + PurePosixPath("buildings/roof_shape.md"), + "building/building.md", + id="enum", + ), + pytest.param( + _newtype_spec, + render_newtype, + PurePosixPath("types/strings/hex_color.md"), + "../../buildings/building/building.md", + id="newtype", + ), + ], + ) + def test_link_context_uses_registry( + self, + spec_factory: Callable[[], object], + render_fn: Callable[..., str], + page_path: PurePosixPath, + expected_link: str, + ) -> None: + """Used-by entries resolve links through placement registry.""" + _building = object() + _building_identity = TypeIdentity(_building, "Building") + registry = { + _building_identity: PurePosixPath("buildings/building/building.md"), + } + ctx = LinkContext(page_path, registry) + used_by = [UsedByEntry(identity=_building_identity, kind=UsedByKind.MODEL)] + + result = render_fn(spec_factory(), link_ctx=ctx, used_by=used_by) + + assert "## Used By" in result + assert f"[`Building`]({expected_link})" in result + + @pytest.mark.parametrize(("spec_factory", "render_fn"), _USED_BY_CASES) + def test_no_used_by_omits_section( + self, + spec_factory: Callable[[], object], + render_fn: Callable[..., str], + ) -> None: + """When used_by is None, 'Used By' section is not rendered.""" + result = render_fn(spec_factory(), 
used_by=None) + + assert "## Used By" not in result + + @pytest.mark.parametrize(("spec_factory", "render_fn"), _USED_BY_CASES) + def test_empty_used_by_omits_section( + self, + spec_factory: Callable[[], object], + render_fn: Callable[..., str], + ) -> None: + """When used_by is empty list, 'Used By' section is not rendered.""" + result = render_fn(spec_factory(), used_by=[]) + + assert "## Used By" not in result + + +class TestRenderPydanticType: + """Tests for render_pydantic_type.""" + + def test_heading_is_pascal_case(self) -> None: + result = render_pydantic_type(HTTP_URL_SPEC) + assert result.startswith("# HttpUrl\n") + + def test_description_rendered(self) -> None: + result = render_pydantic_type(HTTP_URL_SPEC) + assert "A type that will accept any http or https URL." in result + + def test_no_description_omits_paragraph(self) -> None: + result = render_pydantic_type(EMAIL_STR_SPEC) + lines = result.strip().split("\n") + assert lines[0] == "# EmailStr" + + def test_pydantic_docs_link(self) -> None: + result = render_pydantic_type(HTTP_URL_SPEC) + assert ( + "https://docs.pydantic.dev/latest/api/networks/#pydantic.networks.HttpUrl" + in result + ) + + def test_used_by_section(self) -> None: + place_cls = type("Place", (), {}) + place_id = TypeIdentity(place_cls, "Place") + used_by = [UsedByEntry(place_id, UsedByKind.MODEL)] + ctx = LinkContext( + page_path=PurePosixPath("pydantic/networks/http_url.md"), + registry={place_id: PurePosixPath("places/place/place.md")}, + ) + result = render_pydantic_type(HTTP_URL_SPEC, link_ctx=ctx, used_by=used_by) + assert "## Used By" in result + assert "Place" in result diff --git a/packages/overture-schema-codegen/tests/test_markdown_type_format.py b/packages/overture-schema-codegen/tests/test_markdown_type_format.py new file mode 100644 index 000000000..e54426f5f --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_markdown_type_format.py @@ -0,0 +1,317 @@ +"""Tests for markdown type formatting.""" + +from enum 
import Enum +from pathlib import PurePosixPath +from typing import Literal, NewType + +from overture.schema.codegen.extraction.specs import FieldSpec, TypeIdentity +from overture.schema.codegen.extraction.type_analyzer import ( + TypeInfo, + TypeKind, + analyze_type, +) +from overture.schema.codegen.markdown.link_computation import LinkContext +from overture.schema.codegen.markdown.type_format import ( + format_dict_type, + format_type, + format_underlying_type, +) +from overture.schema.system.primitive import int32 +from pydantic import BaseModel, HttpUrl + + +class _ModelA(BaseModel): + x: int + + +class _ModelB(BaseModel): + y: str + + +class TestFormatType: + """Tests for format_type.""" + + def test_plain_str_renders_as_string(self) -> None: + ti = analyze_type(str) + assert format_type(_make_field(ti)) == "`string`" + + def test_optional_adds_qualifier(self) -> None: + ti = analyze_type(str | None) + assert format_type(_make_field(ti, is_required=False)) == "`string` (optional)" + + def test_literal_renders_as_quoted_value(self) -> None: + ti = analyze_type(Literal["places"]) + assert format_type(_make_field(ti)) == '`"places"`' + + def test_multi_value_literal_renders_comma_separated(self) -> None: + ti = analyze_type(Literal["a", "b", "c"]) + assert format_type(_make_field(ti)) == '`"a"` \\| `"b"` \\| `"c"`' + + def test_enum_without_context_renders_as_code(self) -> None: + class Color(str, Enum): + RED = "red" + + ti = analyze_type(Color) + assert format_type(_make_field(ti)) == "`Color`" + + def test_enum_with_link_context(self) -> None: + class Color(str, Enum): + RED = "red" + + ti = analyze_type(Color) + field = _make_field(ti) + ctx = LinkContext( + page_path=PurePosixPath("buildings/building/building.md"), + registry={ + TypeIdentity(Color, "Color"): PurePosixPath("types/enums/color.md") + }, + ) + assert format_type(field, ctx) == "[`Color`](../../types/enums/color.md)" + + def test_list_of_primitives(self) -> None: + ti = analyze_type(list[str]) + 
assert format_type(_make_field(ti)) == "`list`" + + def test_nested_list_of_primitives(self) -> None: + ti = analyze_type(list[list[str]]) + assert format_type(_make_field(ti)) == "`list>`" + + def test_registered_primitive_not_linked(self) -> None: + ti = analyze_type(int32) + result = format_type(_make_field(ti)) + assert result == "`int32`" + assert "](int32.md)" not in result + + +class TestFormatDictType: + """Tests for format_dict_type.""" + + def test_simple_dict_renders_as_map(self) -> None: + ti = analyze_type(dict[str, int]) + result = format_dict_type(ti) + assert result == "map" + + def test_dict_with_newtype_shows_semantic_name(self) -> None: + MyKey = NewType("MyKey", str) + ti = analyze_type(dict[MyKey, int]) + result = format_dict_type(ti) + assert result == "map" + + +def _make_field( + ti: TypeInfo, *, name: str = "x", is_required: bool = True +) -> FieldSpec: + """Build a FieldSpec for test convenience.""" + return FieldSpec(name=name, type_info=ti, description=None, is_required=is_required) + + +class TestFormatUnionType: + """Tests for UNION-kind TypeInfo in format_type.""" + + def test_union_renders_all_members(self) -> None: + ti = analyze_type(_ModelA | _ModelB) + result = format_type(_make_field(ti)) + assert "`_ModelA`" in result + assert "`_ModelB`" in result + # Pipe separator escaped for table cells + assert r"\|" in result + + def test_union_with_link_context_links_each_member(self) -> None: + ti = analyze_type(_ModelA | _ModelB) + ctx = LinkContext( + page_path=PurePosixPath("theme/feature/feature.md"), + registry={ + TypeIdentity(_ModelA, "_ModelA"): PurePosixPath( + "theme/feature/types/model_a.md" + ), + TypeIdentity(_ModelB, "_ModelB"): PurePosixPath( + "theme/feature/types/model_b.md" + ), + }, + ) + result = format_type(_make_field(ti), ctx) + assert "[`_ModelA`](types/model_a.md)" in result + assert "[`_ModelB`](types/model_b.md)" in result + + def test_optional_union_adds_qualifier(self) -> None: + ti = analyze_type(_ModelA | 
_ModelB | None) + result = format_type(_make_field(ti, is_required=False)) + assert "(optional)" in result + assert "`_ModelA`" in result + assert "`_ModelB`" in result + + def test_list_of_union_adds_qualifier(self) -> None: + ti = TypeInfo( + base_type="_ModelA", + kind=TypeKind.UNION, + list_depth=1, + union_members=(_ModelA, _ModelB), + ) + result = format_type(_make_field(ti)) + assert "(list)" in result + assert "`_ModelA`" in result + assert "`_ModelB`" in result + + def test_union_members_unlinked_without_context(self) -> None: + ti = analyze_type(_ModelA | _ModelB) + result = format_type(_make_field(ti)) + # No markdown links without context + assert "]()" not in result + assert "[`" not in result + + def test_union_partial_links(self) -> None: + """Members with pages get linked; members without don't.""" + ti = analyze_type(_ModelA | _ModelB) + ctx = LinkContext( + page_path=PurePosixPath("theme/feature/feature.md"), + registry={ + TypeIdentity(_ModelA, "_ModelA"): PurePosixPath( + "theme/feature/types/model_a.md" + ) + }, + ) + result = format_type(_make_field(ti), ctx) + assert "[`_ModelA`](types/model_a.md)" in result + assert "`_ModelB`" in result + # _ModelB should NOT be linked + assert "[`_ModelB`]" not in result + + +class TestPydanticTypeLinking: + """Tests for PRIMITIVE types with pages getting linked.""" + + def test_pydantic_type_linked_when_in_registry(self) -> None: + ti = analyze_type(HttpUrl) + field = _make_field(ti) + ctx = LinkContext( + page_path=PurePosixPath("places/place/place.md"), + registry={ + TypeIdentity(HttpUrl, "HttpUrl"): PurePosixPath( + "pydantic/networks/http_url.md" + ) + }, + ) + result = format_type(field, ctx) + assert "[`HttpUrl`]" in result + assert "pydantic/networks/http_url.md" in result + + def test_pydantic_type_unlinked_without_registry_entry(self) -> None: + ti = analyze_type(HttpUrl) + field = _make_field(ti) + ctx = LinkContext( + page_path=PurePosixPath("places/place/place.md"), + registry={}, + ) + 
result = format_type(field, ctx) + assert result == "`HttpUrl`" + assert "[" not in result + + def test_list_of_pydantic_type_linked(self) -> None: + ti = analyze_type(list[HttpUrl]) + field = _make_field(ti) + ctx = LinkContext( + page_path=PurePosixPath("places/place/place.md"), + registry={ + TypeIdentity(HttpUrl, "HttpUrl"): PurePosixPath( + "pydantic/networks/http_url.md" + ) + }, + ) + result = format_type(field, ctx) + assert "HttpUrl" in result + assert "pydantic/networks/http_url.md" in result + + def test_registered_primitive_links_to_aggregate_page(self) -> None: + """int32 links to the primitives aggregate page when in registry.""" + ti = analyze_type(int32) + field = _make_field(ti) + ctx = LinkContext( + page_path=PurePosixPath("places/place/place.md"), + registry={ + TypeIdentity(int32, "int32"): PurePosixPath( + "system/primitive/primitives.md" + ) + }, + ) + result = format_type(field, ctx) + assert "[`int32`]" in result + assert "system/primitive/primitives.md" in result + + +class TestListOfSemanticNewtype: + """Tests for list[SemanticNewType] rendering. + + When a scalar NewType appears inside list[], the type renders as + list rather than NewTypeName (list). The (list) qualifier + is reserved for NewTypes that internally wrap a list. 
+ """ + + def test_list_of_scalar_newtype_renders_list_syntax(self) -> None: + """list[ScalarNewType] renders as list, not Name (list).""" + ScalarNT = NewType("ScalarNT", str) + ti = analyze_type(list[ScalarNT]) + result = format_type(_make_field(ti)) + assert "list<" in result + assert "ScalarNT" in result + assert "(list)" not in result + + def test_newtype_wrapping_list_renders_qualifier(self) -> None: + """NewType wrapping list[X] renders as Name (list).""" + ListNT = NewType("ListNT", list[str]) + ti = analyze_type(ListNT) + result = format_type(_make_field(ti)) + assert "(list)" in result + assert "ListNT" in result + + def test_list_of_scalar_newtype_with_link(self) -> None: + """list[ScalarNewType] with link context renders linked list.""" + ScalarNT = NewType("ScalarNT", str) + ti = analyze_type(list[ScalarNT]) + field = _make_field(ti) + ctx = LinkContext( + page_path=PurePosixPath("places/place/place.md"), + registry={ + TypeIdentity(ScalarNT, "ScalarNT"): PurePosixPath("system/scalar_nt.md") + }, + ) + result = format_type(field, ctx) + assert "list<" in result + assert "ScalarNT" in result + assert "system/scalar_nt.md" in result + assert "(list)" not in result + + def test_nested_list_of_scalar_newtype_renders_nested_list_syntax(self) -> None: + """list[list[ScalarNewType]] renders as list>.""" + ScalarNT = NewType("ScalarNT", str) + ti = analyze_type(list[list[ScalarNT]]) + result = format_type(_make_field(ti)) + assert "list<" in result + assert "list<`" in result or "`list None: + ti = analyze_type(_ModelA | _ModelB) + result = format_underlying_type(ti) + assert result == "`_ModelA` | `_ModelB`" + + def test_union_with_link_context(self) -> None: + ti = analyze_type(_ModelA | _ModelB) + ctx = LinkContext( + page_path=PurePosixPath("types/my_union.md"), + registry={ + TypeIdentity(_ModelA, "_ModelA"): PurePosixPath( + "theme/feature/types/model_a.md" + ), + TypeIdentity(_ModelB, "_ModelB"): PurePosixPath( + "theme/feature/types/model_b.md" + ), + 
}, + ) + result = format_underlying_type(ti, ctx) + assert "[`_ModelA`](../theme/feature/types/model_a.md)" in result + assert "[`_ModelB`](../theme/feature/types/model_b.md)" in result diff --git a/packages/overture-schema-codegen/tests/test_model_extractor.py b/packages/overture-schema-codegen/tests/test_model_extractor.py new file mode 100644 index 000000000..f2b2bd257 --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_model_extractor.py @@ -0,0 +1,549 @@ +"""Tests for model extraction.""" + +from typing import Annotated, Literal + +from codegen_test_support import ( + FeatureBase, + FeatureWithAddress, + Instrument, + SourceItem, + TreeNode, + Venue, + assert_literal_field, + find_field, +) +from overture.schema.codegen.extraction.model_extraction import ( + expand_model_tree, + extract_model, +) +from overture.schema.codegen.extraction.specs import ModelSpec +from overture.schema.system.field_constraint import UniqueItemsConstraint +from overture.schema.system.model_constraint import ( + FieldEqCondition, + FieldGroupConstraint, + require_any_of, + require_if, +) +from overture.schema.system.primitive import ( + Geometry, + GeometryType, + GeometryTypeConstraint, +) +from overture.schema.system.string import HexColor +from pydantic import BaseModel, Field + + +class TestModelConstraints: + """Model-level constraint extraction.""" + + def test_unconstrained_model_has_empty_constraints(self) -> None: + """Models without decorators produce an empty constraints tuple.""" + + class Plain(BaseModel): + name: str + + spec = extract_model(Plain) + + assert spec.constraints == () + + def test_extracts_require_any_of(self) -> None: + """Should extract @require_any_of from a decorated model.""" + spec = extract_model(Venue) + + assert len(spec.constraints) == 1 + (constraint,) = spec.constraints + assert constraint.name == "@require_any_of" + assert isinstance(constraint, FieldGroupConstraint) + assert constraint.field_names == ("name", "description") + + 
def test_stacked_constraints_preserve_order(self) -> None: + """Multiple decorators extracted in stacking order (inner-first).""" + + @require_if(["bar"], FieldEqCondition("baz", "x")) + @require_any_of("foo", "bar") + class Stacked(BaseModel): + foo: str | None = None + bar: str | None = None + baz: str | None = None + + spec = extract_model(Stacked) + + assert len(spec.constraints) == 2 + assert spec.constraints[0].name == "@require_any_of" + assert spec.constraints[1].name == "@require_if" + + +class TestExtractModelSimple: + """Tests for extract_model with simple Pydantic models.""" + + def test_extract_simple_model(self) -> None: + """Should extract basic model information.""" + + class SimpleModel(BaseModel): + """A simple test model.""" + + name: str + + result = extract_model(SimpleModel) + + assert result.name == "SimpleModel" + assert result.description == "A simple test model." + assert len(result.fields) == 1 + assert result.fields[0].name == "name" + assert result.fields[0].type_info.base_type == "str" + assert result.fields[0].is_required is True + + def test_extract_model_does_not_set_entry_point(self) -> None: + class M(BaseModel): + x: int + + result = extract_model(M) + assert result.entry_point is None + + def test_extract_model_with_optional_field(self) -> None: + """Should handle optional fields correctly.""" + + class ModelWithOptional(BaseModel): + """Model with optional field.""" + + name: str + nickname: str | None = None + + result = extract_model(ModelWithOptional) + + assert len(result.fields) == 2 + + name_field = find_field(result, "name") + assert name_field.is_required is True + + nickname_field = find_field(result, "nickname") + assert nickname_field.is_required is False + assert nickname_field.type_info.is_optional is True + + def test_extract_model_with_field_description(self) -> None: + """Should extract field descriptions from Field().""" + + class ModelWithDescription(BaseModel): + """Model with field descriptions.""" + + name: 
str = Field(description="The name of the entity") + + result = extract_model(ModelWithDescription) + + assert result.fields[0].description == "The name of the entity" + + def test_extract_model_with_list_field(self) -> None: + """Should handle list fields correctly.""" + + class ModelWithList(BaseModel): + """Model with list field.""" + + tags: list[str] + + result = extract_model(ModelWithList) + + tags_field = result.fields[0] + assert tags_field.name == "tags" + assert tags_field.type_info.is_list is True + assert tags_field.type_info.base_type == "str" + + +class TestExtractModelWithThemeType: + """Tests for extracting theme/type from Feature-like models.""" + + def test_extract_theme_and_type_from_generic(self) -> None: + """Should extract theme and type as Literal fields.""" + + class Place(FeatureBase[Literal["places"], Literal["place"]]): + """A place feature.""" + + name: str + + result = extract_model(Place) + assert_literal_field(result, "theme", "places") + assert_literal_field(result, "type", "place") + + def test_extract_different_theme_type(self) -> None: + """Should handle different theme/type values as Literal fields.""" + + class Building(FeatureBase[Literal["buildings"], Literal["building"]]): + """A building feature.""" + + height: float | None = None + + result = extract_model(Building) + assert_literal_field(result, "theme", "buildings") + assert_literal_field(result, "type", "building") + + def test_non_feature_model_has_no_theme_type(self) -> None: + """Regular models without Generic base should have no theme/type fields.""" + + class RegularModel(BaseModel): + """A regular model.""" + + value: int + + result = extract_model(RegularModel) + + field_names = [f.name for f in result.fields] + assert "theme" not in field_names + assert "type" not in field_names + + +class TestExtractModelFieldAlias: + """Tests for field alias handling in extract_model.""" + + def test_field_with_alias_uses_alias_name(self) -> None: + """Fields with alias should 
use alias as the field name, not Python attr name.""" + + class ModelWithAlias(BaseModel): + """Model with aliased field.""" + + class_: str | None = Field(default=None, alias="class") + + result = extract_model(ModelWithAlias) + + # Should use alias 'class', not Python name 'class_' + class_field = result.fields[0] + assert class_field.name == "class" + + def test_field_without_alias_uses_python_name(self) -> None: + """Fields without alias should use Python attribute name.""" + + class ModelWithoutAlias(BaseModel): + """Model without alias.""" + + name: str + + result = extract_model(ModelWithoutAlias) + + assert result.fields[0].name == "name" + + +class TestExtractModelDocstring: + """Tests for docstring extraction and cleaning.""" + + def test_multiline_docstring_has_indentation_stripped(self) -> None: + """Multi-line docstrings should have leading whitespace stripped. + + Docstrings defined in classes have leading whitespace on continuation + lines. This should be stripped so they render as normal paragraphs + in Markdown, not as code blocks. + """ + + class ModelWithMultilineDoc(BaseModel): + """A model with multi-line docstring. + + This is a second paragraph that would have leading + whitespace in the raw __doc__ attribute. 
+ """ + + name: str + + result = extract_model(ModelWithMultilineDoc) + + # Description should NOT have leading whitespace on continuation lines + assert result.description is not None + assert "\n " not in result.description + # Should still have the content + assert "second paragraph" in result.description + + +class TestFieldOrderingWithMixins: + """Tests for field ordering when a model has multiple inheritance.""" + + def test_mixin_fields_come_after_primary_chain_and_own(self) -> None: + """Fields from mixin bases should appear after primary chain and own fields.""" + + class PrimaryBase(BaseModel): + base_field: str + + class MixinA(BaseModel): + a_field: str + + class MixinB(BaseModel): + b_field: str + + class Child(PrimaryBase, MixinA, MixinB): + """A child model with mixins.""" + + own_field: str + + result = extract_model(Child) + field_names = [f.name for f in result.fields] + + assert field_names == ["base_field", "own_field", "a_field", "b_field"] + + def test_single_inheritance_order_unchanged(self) -> None: + """Single-inheritance models should keep Pydantic's default order.""" + + class Parent(BaseModel): + parent_field: str + + class Child(Parent): + """A child model.""" + + child_field: str + + result = extract_model(Child) + field_names = [f.name for f in result.fields] + + assert field_names == ["parent_field", "child_field"] + + def test_mixin_fields_in_declaration_order(self) -> None: + """Mixin fields should appear in class declaration order, not reversed MRO.""" + + class Primary(BaseModel): + p: str + + class MixinFirst(BaseModel): + first: str + + class MixinSecond(BaseModel): + second: str + + class MixinThird(BaseModel): + third: str + + class Model(Primary, MixinFirst, MixinSecond, MixinThird): + """Model with three mixins.""" + + own: str + + result = extract_model(Model) + field_names = [f.name for f in result.fields] + + # Mixins in declaration order: First, Second, Third + assert field_names == ["p", "own", "first", "second", 
"third"] + + def test_deep_primary_chain_before_mixins(self) -> None: + """Fields from the entire primary chain should precede mixin fields.""" + + class GrandParent(BaseModel): + gp_field: str + + class Parent(GrandParent): + p_field: str + + class Mixin(BaseModel): + m_field: str + + class Child(Parent, Mixin): + """Child with deep primary chain.""" + + own_field: str + + result = extract_model(Child) + field_names = [f.name for f in result.fields] + + assert field_names == ["gp_field", "p_field", "own_field", "m_field"] + + def test_recursive_mixin_reordering(self) -> None: + """Mixins on primary-chain classes should also be reordered.""" + + class CoreBase(BaseModel): + core: str + + class ParentMixin(BaseModel): + pm: str + + class Parent(CoreBase, ParentMixin): + p: str + + class ChildMixin(BaseModel): + cm: str + + class Child(Parent, ChildMixin): + """Child where primary-chain parent has its own mixin.""" + + own: str + + result = extract_model(Child) + field_names = [f.name for f in result.fields] + + # CoreBase (Parent's primary) -> Parent own -> ParentMixin -> Child own -> ChildMixin + assert field_names == ["core", "p", "pm", "own", "cm"] + + +class TestExpandModelTree: + """Tests for expand_model_tree.""" + + def test_model_without_sub_models_unchanged(self) -> None: + """Fields without MODEL kind get model=None.""" + + class Simple(BaseModel): + name: str + count: int + + spec = extract_model(Simple) + expand_model_tree(spec) + + for f in spec.fields: + assert f.model is None + assert f.starts_cycle is False + + def test_nested_model_gets_expanded(self) -> None: + """MODEL-kind fields get their model populated.""" + spec = extract_model(FeatureWithAddress) + expand_model_tree(spec) + + addr_field = find_field(spec, "address") + assert addr_field.model is not None + assert addr_field.model.name == "Address" + assert addr_field.starts_cycle is False + + # Sub-model fields should exist + sub_names = [f.name for f in addr_field.model.fields] + assert 
"street" in sub_names + assert "city" in sub_names + + def test_cycle_detected_and_marked(self) -> None: + """Self-referential model gets starts_cycle=True.""" + spec = extract_model(TreeNode) + expand_model_tree(spec) + + parent_field = find_field(spec, "parent") + assert parent_field.model is not None + assert parent_field.model is spec # Same object -- cycle + assert parent_field.starts_cycle is True + + def test_shared_reference_not_marked_as_cycle(self) -> None: + """Two models referencing the same sub-model share it without cycle.""" + + class Shared(BaseModel): + value: str + + class ModelA(BaseModel): + ref: Shared + + class ModelB(BaseModel): + ref: Shared + + cache: dict[type, ModelSpec] = {} + spec_a = extract_model(ModelA) + expand_model_tree(spec_a, cache) + + spec_b = extract_model(ModelB) + expand_model_tree(spec_b, cache) + + ref_a = find_field(spec_a, "ref") + ref_b = find_field(spec_b, "ref") + + # Same ModelSpec object, neither is a cycle + assert ref_a.model is ref_b.model + assert ref_a.starts_cycle is False + assert ref_b.starts_cycle is False + + def test_list_of_model_gets_expanded(self) -> None: + """list[Model] fields also get their model populated.""" + + class HasList(BaseModel): + items: list[SourceItem] + + spec = extract_model(HasList) + expand_model_tree(spec) + + items_field = find_field(spec, "items") + assert items_field.model is not None + assert items_field.model.name == "SourceItem" + + +class TestFieldInfoMetadataConstraints: + """Constraints from field_info.metadata are merged into TypeInfo. + + Pydantic strips the Annotated wrapper from some fields and moves the + metadata to field_info.metadata. extract_model merges these back into + TypeInfo.constraints so they aren't silently dropped. 
+ """ + + def test_geometry_type_constraint_extracted(self) -> None: + """GeometryTypeConstraint on geometry field should appear in constraints.""" + spec = extract_model(Venue) + geometry_field = find_field(spec, "geometry") + + constraint_types = [ + type(cs.constraint) for cs in geometry_field.type_info.constraints + ] + assert GeometryTypeConstraint in constraint_types + + def test_geometry_type_constraint_has_null_source(self) -> None: + """Constraints from field_info.metadata have source_ref=None (not from a NewType).""" + spec = extract_model(Venue) + geometry_field = find_field(spec, "geometry") + + geo_constraints = [ + cs + for cs in geometry_field.type_info.constraints + if isinstance(cs.constraint, GeometryTypeConstraint) + ] + assert len(geo_constraints) == 1 + assert geo_constraints[0].source_ref is None + + def test_metadata_constraints_not_duplicated(self) -> None: + """Fields where Pydantic preserves Annotated don't get duplicate constraints. + + When field_info.metadata is empty (Pydantic kept the Annotated wrapper), + no extra constraints are added. 
+ """ + spec = extract_model(Instrument) + tags_field = find_field(spec, "tags") + + unique_constraints = [ + cs + for cs in tags_field.type_info.constraints + if isinstance(cs.constraint, UniqueItemsConstraint) + ] + assert len(unique_constraints) == 1 + + def test_standalone_annotated_field_extracts_metadata(self) -> None: + """Direct Annotated[Type, constraint] fields (non-optional, non-union) + get their constraints from field_info.metadata.""" + + class Model(BaseModel): + geo: Annotated[ + Geometry, + GeometryTypeConstraint(GeometryType.POINT), + ] + + spec = extract_model(Model) + geo_field = find_field(spec, "geo") + + constraint_types = [ + type(cs.constraint) for cs in geo_field.type_info.constraints + ] + assert GeometryTypeConstraint in constraint_types + + +class TestFieldDescriptionFallback: + """Tests for field description fallback from NewType Field metadata.""" + + def test_field_inherits_newtype_description(self) -> None: + """Field with no explicit description gets NewType's Field description.""" + + class TestModel(BaseModel): + color: HexColor + + spec = extract_model(TestModel) + field = find_field(spec, "color") + assert field.description is not None + assert "color" in field.description.lower() + + def test_explicit_description_not_overridden(self) -> None: + """Field with explicit description keeps its own, ignores NewType's.""" + + class TestModel(BaseModel): + color: HexColor = Field(description="Custom color description") + + spec = extract_model(TestModel) + field = find_field(spec, "color") + assert field.description == "Custom color description" + + def test_field_without_newtype_description_stays_none(self) -> None: + """Field typed as plain str (no NewType description) keeps None.""" + + class TestModel(BaseModel): + name: str + + spec = extract_model(TestModel) + field = find_field(spec, "name") + assert field.description is None diff --git a/packages/overture-schema-codegen/tests/test_module_layout.py 
b/packages/overture-schema-codegen/tests/test_module_layout.py new file mode 100644 index 000000000..5766e60e0 --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_module_layout.py @@ -0,0 +1,175 @@ +"""Tests for module_layout: output directory layout from module paths.""" + +from pathlib import PurePosixPath + +import pytest +from overture.schema.codegen.layout.module_layout import ( + compute_output_dir, + compute_schema_root, + entry_point_class, + entry_point_module, + is_package_module, + module_relpath, +) + + +class TestComputeSchemaRoot: + def test_multiple_paths_common_prefix(self) -> None: + paths = [ + "overture.schema.buildings", + "overture.schema.places", + "overture.schema.divisions", + ] + assert compute_schema_root(paths) == "overture.schema" + + def test_single_path_drops_last_component(self) -> None: + assert compute_schema_root(["overture.schema.buildings"]) == "overture.schema" + + def test_mixed_depth_paths(self) -> None: + paths = [ + "overture.schema.buildings", + "overture.schema.core.names.primary_name", + ] + assert compute_schema_root(paths) == "overture.schema" + + def test_divergent_namespaces(self) -> None: + paths = ["overture.schema.buildings", "acme.transit"] + assert compute_schema_root(paths) == "" + + def test_empty_raises(self) -> None: + with pytest.raises(ValueError): + compute_schema_root([]) + + def test_single_component_path(self) -> None: + assert compute_schema_root(["buildings"]) == "" + + def test_identical_paths_deduplicated(self) -> None: + paths = ["overture.schema.buildings", "overture.schema.buildings"] + assert compute_schema_root(paths) == "overture.schema" + + +class TestEntryPointModule: + def test_extracts_module(self) -> None: + assert entry_point_module("overture.schema.buildings:Building") == ( + "overture.schema.buildings" + ) + + def test_missing_colon_raises(self) -> None: + with pytest.raises(ValueError): + entry_point_module("no_colon") + + def test_multiple_colons_splits_on_first(self) -> 
None: + assert entry_point_module("mod:A:B") == "mod" + + +class TestEntryPointClass: + def test_extracts_class(self) -> None: + assert entry_point_class("overture.schema.buildings:Building") == "Building" + + def test_missing_colon_raises(self) -> None: + with pytest.raises(ValueError): + entry_point_class("no_colon") + + def test_colon_at_end_returns_empty(self) -> None: + assert entry_point_class("mod:") == "" + + def test_multiple_colons_splits_on_first(self) -> None: + assert entry_point_class("mod:A:B") == "A:B" + + +class TestModuleRelpath: + def test_strips_root_prefix(self) -> None: + assert ( + module_relpath("overture.schema.buildings", "overture.schema") + == "buildings" + ) + + def test_deep_path(self) -> None: + assert ( + module_relpath("overture.schema.core.names.primary_name", "overture.schema") + == "core.names.primary_name" + ) + + def test_module_equals_root(self) -> None: + assert module_relpath("overture.schema", "overture.schema") == "" + + def test_empty_root(self) -> None: + assert module_relpath("buildings", "") == "buildings" + + def test_nonmatching_raises(self) -> None: + with pytest.raises(ValueError): + module_relpath("acme.transit", "overture.schema") + + +def _make_registry(*entries: tuple[str, bool]) -> dict[str, object]: + """Build a synthetic module registry. + + Each entry is (module_path, is_package). Packages get __path__; + file modules do not. 
+ """ + registry: dict[str, object] = {} + for mod_path, is_pkg in entries: + if is_pkg: + registry[mod_path] = type("pkg", (), {"__path__": ["/fake"]})() + else: + registry[mod_path] = type("mod", (), {})() + return registry + + +class TestIsPackageModule: + def test_package_has_path(self) -> None: + registry = _make_registry(("my.package", True)) + assert is_package_module("my.package", registry) is True + + def test_file_module_no_path(self) -> None: + registry = _make_registry(("my.module", False)) + assert is_package_module("my.module", registry) is False + + def test_missing_module_raises(self) -> None: + with pytest.raises(ValueError): + is_package_module("nonexistent", {}) + + +class TestComputeOutputDir: + def test_package_keeps_all_parts(self) -> None: + reg = _make_registry(("overture.schema.buildings", True)) + result = compute_output_dir("overture.schema.buildings", "overture.schema", reg) + assert result == PurePosixPath("buildings") + + def test_file_module_drops_last(self) -> None: + reg = _make_registry(("overture.schema.core.names.primary_name", False)) + result = compute_output_dir( + "overture.schema.core.names.primary_name", "overture.schema", reg + ) + assert result == PurePosixPath("core/names") + + def test_deep_package(self) -> None: + reg = _make_registry(("overture.schema.core.names", True)) + result = compute_output_dir( + "overture.schema.core.names", "overture.schema", reg + ) + assert result == PurePosixPath("core/names") + + def test_file_module_in_theme(self) -> None: + reg = _make_registry(("overture.schema.buildings.enums", False)) + result = compute_output_dir( + "overture.schema.buildings.enums", "overture.schema", reg + ) + assert result == PurePosixPath("buildings") + + def test_file_module_deep(self) -> None: + reg = _make_registry(("overture.schema.divisions.division.models", False)) + result = compute_output_dir( + "overture.schema.divisions.division.models", "overture.schema", reg + ) + assert result == 
PurePosixPath("divisions/division") + + def test_root_module_returns_dot(self) -> None: + reg = _make_registry(("overture.schema", True)) + result = compute_output_dir("overture.schema", "overture.schema", reg) + assert result == PurePosixPath(".") + + def test_file_module_one_level_returns_dot(self) -> None: + reg = _make_registry(("overture.schema.types", False)) + result = compute_output_dir("overture.schema.types", "overture.schema", reg) + assert result == PurePosixPath(".") diff --git a/packages/overture-schema-codegen/tests/test_naming.py b/packages/overture-schema-codegen/tests/test_naming.py new file mode 100644 index 000000000..77e4d5773 --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_naming.py @@ -0,0 +1,23 @@ +"""Tests for PascalCase to snake_case conversion.""" + +import pytest +from overture.schema.codegen.extraction.case_conversion import to_snake_case + + +class TestToSnakeCase: + """Tests for snake_case conversion helper.""" + + @pytest.mark.parametrize( + ("input_name", "expected"), + [ + ("Building", "building"), + ("BuildingPart", "building_part"), + ("RoadSegment", "road_segment"), + ("Place", "place"), + ("simple", "simple"), # Already lowercase + ("HTTPServer", "http_server"), # Consecutive caps + ], + ) + def test_converts_pascal_to_snake(self, input_name: str, expected: str) -> None: + """PascalCase names should convert to snake_case.""" + assert to_snake_case(input_name) == expected diff --git a/packages/overture-schema-codegen/tests/test_newtype_extraction.py b/packages/overture-schema-codegen/tests/test_newtype_extraction.py new file mode 100644 index 000000000..6cd73c5c2 --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_newtype_extraction.py @@ -0,0 +1,74 @@ +"""Tests for NewType extraction.""" + +from typing import Annotated, NewType + +from codegen_test_support import STR_TYPE +from overture.schema.codegen.extraction.newtype_extraction import extract_newtype +from overture.schema.codegen.extraction.specs 
import NewTypeSpec +from overture.schema.system.field_constraint import UniqueItemsConstraint +from overture.schema.system.ref import Id +from overture.schema.system.string import HexColor +from pydantic import BaseModel, Field + + +class TestExtractNewType: + """Tests for extract_newtype function.""" + + def test_extract_hex_color(self) -> None: + """Should extract HexColor NewType specification.""" + spec = extract_newtype(HexColor) + + assert spec.name == "HexColor" + assert spec.type_info.newtype_name == "HexColor" + + def test_extract_id(self) -> None: + """Should extract Id NewType with nested chain.""" + spec = extract_newtype(Id) + + assert spec.name == "Id" + assert spec.type_info.newtype_name == "Id" + assert spec.type_info.base_type == "NoWhitespaceString" + + def test_extract_newtype_wrapping_list(self) -> None: + """Should extract a list-wrapping NewType.""" + + class Item(BaseModel): + value: str + + TestSources = NewType( + "TestSources", Annotated[list[Item], UniqueItemsConstraint()] + ) + spec = extract_newtype(TestSources) + + assert spec.name == "TestSources" + assert spec.type_info.is_list is True + assert spec.type_info.newtype_name == "TestSources" + + def test_extract_newtype_without_doc_uses_field_description(self) -> None: + """NewType with Field(description=...) but no __doc__ uses Field description.""" + TestType = NewType( + "TestType", + Annotated[str, Field(description="A test type description")], + ) + spec = extract_newtype(TestType) + assert spec.description == "A test type description" + + def test_extract_newtype_with_doc_ignores_field_description(self) -> None: + """NewType with custom __doc__ uses docstring, not Field description.""" + spec = extract_newtype(HexColor) + # HexColor has both __doc__ and Field(description=...). + # __doc__ should win because is_custom_docstring returns True. 
+ assert spec.description is not None + assert "example" in spec.description.lower() or "#" in spec.description + + +class TestNewTypeSpecSourceType: + """Tests for source_type on NewTypeSpec.""" + + def test_newtype_spec_source_type_defaults_to_none(self) -> None: + spec = NewTypeSpec(name="Test", description=None, type_info=STR_TYPE) + assert spec.source_type is None + + def test_extract_newtype_sets_source_type(self) -> None: + spec = extract_newtype(HexColor) + assert spec.source_type is HexColor diff --git a/packages/overture-schema-codegen/tests/test_numeric_extraction.py b/packages/overture-schema-codegen/tests/test_numeric_extraction.py new file mode 100644 index 000000000..ee604ba75 --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_numeric_extraction.py @@ -0,0 +1,111 @@ +"""Tests for numeric extraction and numeric bounds.""" + +from typing import Annotated, NewType + +import overture.schema.system.primitive as _system_primitive +from overture.schema.codegen.extraction.newtype_extraction import extract_newtype +from overture.schema.codegen.extraction.numeric_extraction import ( + extract_numeric_bounds, + extract_numerics, +) +from overture.schema.codegen.extraction.specs import TypeIdentity +from overture.schema.codegen.extraction.type_analyzer import analyze_type +from overture.schema.codegen.markdown.pipeline import ( + partition_numeric_and_geometry_types, +) +from overture.schema.system.primitive import float32, int32, int64, uint8 +from pydantic import Field + + +class TestPartitionNumericAndGeometryTypes: + """Tests for partition_numeric_and_geometry_types function.""" + + def test_returns_type_identities(self) -> None: + nums, geoms = partition_numeric_and_geometry_types(_system_primitive) + assert all(isinstance(p, TypeIdentity) for p in nums) + assert all(isinstance(g, TypeIdentity) for g in geoms) + + def test_identity_obj_is_actual_callable(self) -> None: + nums, _ = partition_numeric_and_geometry_types(_system_primitive) + 
int32_id = next(p for p in nums if p.name == "int32") + assert int32_id.obj is _system_primitive.int32 + + +class TestExtractNumerics: + """Tests for extract_numerics function.""" + + def test_accepts_type_identities(self) -> None: + nums, _ = partition_numeric_and_geometry_types(_system_primitive) + specs = extract_numerics(nums) + assert len(specs) > 0 + names = [s.name for s in specs] + assert "int32" in names + + def test_extracts_bounds(self) -> None: + nums, _ = partition_numeric_and_geometry_types(_system_primitive) + specs = extract_numerics(nums) + int32_spec = next(s for s in specs if s.name == "int32") + assert int32_spec.bounds.ge == -(2**31) + assert int32_spec.bounds.le == 2**31 - 1 + + +class TestExtractNumericBounds: + """Tests for extract_numeric_bounds function.""" + + def test_signed_integer_bounds(self) -> None: + """Should extract ge/le from a constrained integer NewType.""" + spec = extract_newtype(int32) + bounds = extract_numeric_bounds(spec.type_info) + + assert bounds.ge == -(2**31) + assert bounds.le == 2**31 - 1 + + def test_unsigned_integer_bounds(self) -> None: + """Should extract 0-based bounds from unsigned NewType.""" + spec = extract_newtype(uint8) + bounds = extract_numeric_bounds(spec.type_info) + + assert bounds.ge == 0 + assert bounds.le == 255 + + def test_int64_bounds(self) -> None: + """Should extract large bounds from int64.""" + spec = extract_newtype(int64) + bounds = extract_numeric_bounds(spec.type_info) + + assert bounds.ge == -(2**63) + assert bounds.le == 2**63 - 1 + + def test_unconstrained_type(self) -> None: + """Should return empty Interval for types without numeric constraints.""" + spec = extract_newtype(float32) + bounds = extract_numeric_bounds(spec.type_info) + + assert bounds.ge is None + assert bounds.gt is None + assert bounds.le is None + assert bounds.lt is None + + def test_exclusive_bounds(self) -> None: + """Should extract gt/lt from constraints using exclusive bounds.""" + ExclusiveBounded = 
NewType( + "ExclusiveBounded", Annotated[int, Field(gt=0, lt=100)] + ) + type_info = analyze_type(ExclusiveBounded) + bounds = extract_numeric_bounds(type_info) + + assert bounds.gt == 0 + assert bounds.lt == 100 + assert bounds.ge is None + assert bounds.le is None + + def test_mixed_bounds(self) -> None: + """Should extract a mix of inclusive and exclusive bounds.""" + MixedBounded = NewType("MixedBounded", Annotated[int, Field(ge=0, lt=256)]) + type_info = analyze_type(MixedBounded) + bounds = extract_numeric_bounds(type_info) + + assert bounds.ge == 0 + assert bounds.lt == 256 + assert bounds.gt is None + assert bounds.le is None diff --git a/packages/overture-schema-codegen/tests/test_pydantic_extraction.py b/packages/overture-schema-codegen/tests/test_pydantic_extraction.py new file mode 100644 index 000000000..1d8803d16 --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_pydantic_extraction.py @@ -0,0 +1,29 @@ +"""Tests for Pydantic type extraction.""" + +from overture.schema.codegen.extraction.pydantic_extraction import extract_pydantic_type +from overture.schema.codegen.extraction.specs import PydanticTypeSpec +from pydantic import EmailStr, HttpUrl + + +class TestExtractPydanticType: + def test_extracts_http_url(self) -> None: + spec = extract_pydantic_type(HttpUrl) + assert isinstance(spec, PydanticTypeSpec) + assert spec.name == "HttpUrl" + assert spec.source_type is HttpUrl + assert spec.source_module == "networks" + assert spec.description is not None + assert "http" in spec.description.lower() + + def test_extracts_email_str(self) -> None: + spec = extract_pydantic_type(EmailStr) + assert isinstance(spec, PydanticTypeSpec) + assert spec.name == "EmailStr" + assert spec.source_type is EmailStr + assert spec.source_module == "networks" + + def test_admonition_label_filtered_from_description(self) -> None: + spec = extract_pydantic_type(EmailStr) + # EmailStr.__doc__ starts with "Info:" (bare admonition label). 
+ # _usable_description filters this, returning None. + assert spec.description is None diff --git a/packages/overture-schema-codegen/tests/test_reverse_references.py b/packages/overture-schema-codegen/tests/test_reverse_references.py new file mode 100644 index 000000000..fb8e1e41a --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_reverse_references.py @@ -0,0 +1,227 @@ +"""Tests for reverse reference computation.""" + +from enum import Enum as PyEnum +from typing import NewType + +import pytest +from codegen_test_support import ( + FeatureWithAddress, + FeatureWithUrl, + Instrument, + RoadSegment, + TreeNode, + Venue, + has_name, + lookup_by_name, + make_union_spec, +) +from overture.schema.codegen.extraction.enum_extraction import extract_enum +from overture.schema.codegen.extraction.model_extraction import ( + expand_model_tree, + extract_model, +) +from overture.schema.codegen.extraction.newtype_extraction import extract_newtype +from overture.schema.codegen.extraction.specs import PydanticTypeSpec, TypeIdentity +from overture.schema.codegen.layout.type_collection import ( + collect_all_supplementary_types, +) +from overture.schema.codegen.markdown.reverse_references import ( + UsedByKind, + compute_reverse_references, +) +from overture.schema.system.ref import Id +from overture.schema.system.string import NoWhitespaceString +from pydantic import BaseModel + + +@pytest.mark.parametrize( + ("model_class", "model_name", "target_name"), + [ + (Instrument, "Instrument", "InstrumentFamily"), + (Instrument, "Instrument", "HexColor"), + (FeatureWithAddress, "FeatureWithAddress", "Address"), + ], + ids=["enum", "newtype", "sub-model"], +) +def test_model_referencing_type_produces_used_by_entry( + model_class: type, + model_name: str, + target_name: str, +) -> None: + """Model referencing a type produces a 'used by' entry on that type.""" + model_spec = extract_model(model_class, entry_point=model_name) + expand_model_tree(model_spec) + all_specs = 
collect_all_supplementary_types([model_spec]) + + assert has_name(all_specs, target_name) + + result = compute_reverse_references([model_spec], all_specs) + + entries = lookup_by_name(result, target_name) + assert len(entries) == 1 + assert entries[0].identity.name == model_name + assert entries[0].kind == UsedByKind.MODEL + + +def test_newtype_inheriting_from_newtype_produces_used_by_entry() -> None: + """NewType inheriting constraints from another NewType produces a 'used by' entry.""" + # Id wraps NoWhitespaceString, which is also a NewType + # When we extract Id, its constraints include ConstraintSource(source_ref=NoWhitespaceString, ...) + id_spec = extract_newtype(Id) + nws_spec = extract_newtype(NoWhitespaceString) + + all_specs = { + TypeIdentity(Id, "Id"): id_spec, + TypeIdentity(NoWhitespaceString, "NoWhitespaceString"): nws_spec, + } + + result = compute_reverse_references([], all_specs) + + # NoWhitespaceString should have a used_by entry from Id + entries = lookup_by_name(result, "NoWhitespaceString") + assert len(entries) == 1 + assert entries[0].identity.name == "Id" + assert entries[0].kind == UsedByKind.NEWTYPE + + +def test_union_members_have_used_by_entries() -> None: + """Union members have 'used by' entries pointing to the union feature.""" + # Create a union spec with RoadSegment as a member + union_spec = make_union_spec( + name="TestSegment", + description="Test segment union", + members=[RoadSegment], + entry_point="TestSegment", + ) + + # Extract the member + road_spec = extract_model(RoadSegment) + expand_model_tree(road_spec) + all_specs = {TypeIdentity(RoadSegment, "RoadSegment"): road_spec} + + result = compute_reverse_references([union_spec], all_specs) + + entries = lookup_by_name(result, "RoadSegment") + assert len(entries) == 1 + assert entries[0].identity.name == "TestSegment" + assert entries[0].kind == UsedByKind.MODEL + + +def test_self_references_filtered_out() -> None: + """Self-references are filtered out (handles recursive 
types).""" + tree_spec = extract_model(TreeNode, entry_point="TreeNode") + expand_model_tree(tree_spec) + + # Manually add TreeNode to all_specs to test self-reference filtering + all_specs = {TypeIdentity(TreeNode, "TreeNode"): tree_spec} + + result = compute_reverse_references([tree_spec], all_specs) + + # TreeNode should not appear in result since it only references itself + with pytest.raises(KeyError): + lookup_by_name(result, "TreeNode") + + +def test_deduplication_same_type_multiple_fields() -> None: + """Deduplication works when same type is referenced via multiple fields.""" + instrument_spec = extract_model(Instrument, entry_point="Instrument") + venue_spec = extract_model(Venue, entry_point="Venue") + expand_model_tree(instrument_spec) + expand_model_tree(venue_spec) + all_specs = collect_all_supplementary_types([instrument_spec, venue_spec]) + + assert has_name(all_specs, "Id") + + result = compute_reverse_references([instrument_spec, venue_spec], all_specs) + + entries = lookup_by_name(result, "Id") + # Both Instrument and Venue reference Id + assert len(entries) == 2 + names = {e.identity.name for e in entries} + assert names == {"Instrument", "Venue"} + # All should be MODELs + assert all(e.kind == UsedByKind.MODEL for e in entries) + + +def test_pydantic_type_has_used_by_from_feature() -> None: + """Pydantic type in all_specs gets used-by entries from features referencing it.""" + model_spec = extract_model(FeatureWithUrl, entry_point="FeatureWithUrl") + expand_model_tree(model_spec) + all_specs = collect_all_supplementary_types([model_spec]) + + assert has_name(all_specs, "HttpUrl") + assert isinstance(lookup_by_name(all_specs, "HttpUrl"), PydanticTypeSpec) + + result = compute_reverse_references([model_spec], all_specs) + + entries = lookup_by_name(result, "HttpUrl") + assert any(e.identity.name == "FeatureWithUrl" for e in entries) + + +def test_sort_tiebreaker_uses_module_for_same_name_referrers() -> None: + """Referrers with the same name sort 
deterministically by module.""" + + # Two model classes named "Feature" from different modules. + class SharedEnum(PyEnum): + A = "a" + + class FeatureAlpha(BaseModel): + value: SharedEnum + + class FeatureBeta(BaseModel): + value: SharedEnum + + FeatureAlpha.__name__ = "Feature" + FeatureAlpha.__module__ = "alpha.models" + FeatureBeta.__name__ = "Feature" + FeatureBeta.__module__ = "beta.models" + + spec_a = extract_model(FeatureAlpha, entry_point="Feature") + spec_b = extract_model(FeatureBeta, entry_point="Feature") + expand_model_tree(spec_a) + expand_model_tree(spec_b) + + enum_id = TypeIdentity(SharedEnum, "SharedEnum") + all_specs = {enum_id: extract_enum(SharedEnum)} + + result = compute_reverse_references([spec_a, spec_b], all_specs) + + entries = lookup_by_name(result, "SharedEnum") + assert len(entries) == 2 + # Both named "Feature" — module provides the tiebreaker + modules = [e.identity.module for e in entries] + assert modules == ["alpha.models", "beta.models"] + + +def test_sorting_models_before_newtypes() -> None: + """Sorting produces models before NewTypes, alphabetical within groups.""" + # Create a test where the same type (Id) is referenced by: + # - Two models (Instrument and Venue) - both MODEL referrers + # - A NewType wrapper around Id + # Create a synthetic NewType that wraps Id + CustomId = NewType("CustomId", Id) + + instrument_spec = extract_model(Instrument, entry_point="Instrument") + venue_spec = extract_model(Venue, entry_point="Venue") + expand_model_tree(instrument_spec) + expand_model_tree(venue_spec) + all_specs = collect_all_supplementary_types([instrument_spec, venue_spec]) + + # Add the CustomId NewType which references Id + custom_id_spec = extract_newtype(CustomId) + all_specs[TypeIdentity(CustomId, "CustomId")] = custom_id_spec + + result = compute_reverse_references([instrument_spec, venue_spec], all_specs) + + # Id should have entries from both Instrument and Venue (MODELs) and CustomId (NEWTYPE) + entries = 
lookup_by_name(result, "Id") + assert len(entries) == 3 + + # Check sorting: MODELs first, then NEWTYPE + # Within MODELs: alphabetical (Instrument, Venue) + assert entries[0].kind == UsedByKind.MODEL + assert entries[0].identity.name == "Instrument" + assert entries[1].kind == UsedByKind.MODEL + assert entries[1].identity.name == "Venue" + assert entries[2].kind == UsedByKind.NEWTYPE + assert entries[2].identity.name == "CustomId" diff --git a/packages/overture-schema-codegen/tests/test_specs.py b/packages/overture-schema-codegen/tests/test_specs.py new file mode 100644 index 000000000..0780e2fda --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_specs.py @@ -0,0 +1,305 @@ +"""Tests for spec data structures and predicates.""" + +from typing import Annotated + +import pytest +from codegen_test_support import ( + STR_TYPE, + InstrumentFamily, + SimpleModel, + make_union_spec, +) +from overture.schema.codegen.extraction.model_extraction import extract_model +from overture.schema.codegen.extraction.specs import ( + AnnotatedField, + EnumSpec, + FeatureSpec, + FieldSpec, + ModelSpec, + NewTypeSpec, + TypeIdentity, + is_union_alias, +) +from overture.schema.codegen.extraction.type_analyzer import TypeInfo, TypeKind +from pydantic import BaseModel, Field + + +class TestFeatureSpecProtocol: + """Tests for FeatureSpec protocol compliance.""" + + def test_model_spec_satisfies_feature_spec(self) -> None: + """ModelSpec satisfies the FeatureSpec protocol.""" + + class Simple(BaseModel): + name: str + + spec = extract_model(Simple) + # Protocol compliance check + assert isinstance(spec, FeatureSpec) + # Verify protocol attributes + assert spec.name == "Simple" + assert isinstance(spec.fields, list) + assert spec.source_type is Simple + + +class TestFieldSpec: + """Tests for FieldSpec dataclass.""" + + def test_fieldspec_stores_basic_attributes(self) -> None: + """FieldSpec should store name, type_info, description, is_required.""" + field_spec = FieldSpec( + 
name="test_field", + type_info=STR_TYPE, + description="A test field", + is_required=True, + ) + + assert field_spec.name == "test_field" + assert field_spec.type_info == STR_TYPE + assert field_spec.description == "A test field" + assert field_spec.is_required is True + + def test_fieldspec_optional_field(self) -> None: + """FieldSpec should handle optional fields.""" + optional_str = TypeInfo( + base_type="str", kind=TypeKind.PRIMITIVE, is_optional=True + ) + + field_spec = FieldSpec( + name="optional_field", + type_info=optional_str, + description=None, + is_required=False, + ) + + assert field_spec.is_required is False + assert field_spec.description is None + + +class TestModelSpec: + """Tests for ModelSpec dataclass.""" + + def test_modelspec_stores_basic_attributes(self) -> None: + """ModelSpec should store name, description, fields.""" + field = FieldSpec( + name="id", + type_info=STR_TYPE, + description="Unique identifier", + is_required=True, + ) + + model_spec = ModelSpec( + name="TestModel", + description="A test model", + fields=[field], + ) + + assert model_spec.name == "TestModel" + assert model_spec.description == "A test model" + assert len(model_spec.fields) == 1 + assert model_spec.fields[0].name == "id" + + def test_entry_point_defaults_to_none(self) -> None: + spec = ModelSpec(name="M", description=None) + assert spec.entry_point is None + + +class TestAnnotatedField: + """Tests for AnnotatedField wrapper.""" + + def test_stores_field_and_variant_sources(self) -> None: + """AnnotatedField pairs a FieldSpec with variant provenance.""" + fs = FieldSpec(name="x", type_info=STR_TYPE, description=None, is_required=True) + af = AnnotatedField(field_spec=fs, variant_sources=("RoadSegment",)) + assert af.field_spec is fs + assert af.variant_sources == ("RoadSegment",) + + def test_none_variant_sources_means_shared(self) -> None: + """variant_sources=None indicates a shared field.""" + fs = FieldSpec(name="x", type_info=STR_TYPE, description=None, 
is_required=True) + af = AnnotatedField(field_spec=fs, variant_sources=None) + assert af.variant_sources is None + + +class TestFieldSpecModelTree: + """Tests for FieldSpec model and starts_cycle fields.""" + + def test_model_defaults_to_none(self) -> None: + field_spec = FieldSpec( + name="test", type_info=STR_TYPE, description=None, is_required=True + ) + assert field_spec.model is None + + def test_starts_cycle_defaults_to_false(self) -> None: + field_spec = FieldSpec( + name="test", type_info=STR_TYPE, description=None, is_required=True + ) + assert field_spec.starts_cycle is False + + def test_model_can_hold_model_spec(self) -> None: + type_info = TypeInfo(base_type="Address", kind=TypeKind.MODEL) + sub = ModelSpec(name="Address", description=None) + field_spec = FieldSpec( + name="address", + type_info=type_info, + description=None, + is_required=True, + model=sub, + ) + assert field_spec.model is sub + + def test_starts_cycle_can_be_set(self) -> None: + type_info = TypeInfo(base_type="Node", kind=TypeKind.MODEL) + sub = ModelSpec(name="Node", description=None) + field_spec = FieldSpec( + name="parent", + type_info=type_info, + description=None, + is_required=False, + model=sub, + starts_cycle=True, + ) + assert field_spec.starts_cycle is True + assert field_spec.model is sub + + def test_starts_cycle_without_model_is_nonsensical(self) -> None: + """starts_cycle=True with model=None is expressible but invalid. + + expand_model_tree never produces this combination -- starts_cycle + is only set when model points to the cycle-causing ModelSpec. + Document the invariant so violations stand out. 
+ """ + type_info = TypeInfo(base_type="Node", kind=TypeKind.MODEL) + field_spec = FieldSpec( + name="parent", + type_info=type_info, + description=None, + is_required=False, + starts_cycle=True, + ) + # Expressible but meaningless: cycle to nowhere + assert field_spec.starts_cycle is True + assert field_spec.model is None + + +class TestIsUnionAlias: + """Tests for is_union_alias predicate.""" + + def test_annotated_union_of_models_returns_true(self) -> None: + """Annotated[Union of BaseModels] is a union alias.""" + + class A(BaseModel): + x: int + + class B(BaseModel): + y: str + + union_type = Annotated[A | B, Field(description="test")] + assert is_union_alias(union_type) is True + + def test_model_class_returns_false(self) -> None: + """A concrete BaseModel class is not a union alias.""" + + class A(BaseModel): + x: int + + assert is_union_alias(A) is False + + def test_plain_string_returns_false(self) -> None: + """A plain string is not a union alias.""" + assert is_union_alias("not a type") is False + + def test_non_model_union_returns_false(self) -> None: + """A union of non-model types is not a union alias.""" + assert is_union_alias(str | int) is False + + +class TestUnionSpec: + """Tests for UnionSpec data structure.""" + + def test_fields_property_returns_plain_field_specs(self) -> None: + """UnionSpec.fields property returns list[FieldSpec] from annotated_fields.""" + fs1 = FieldSpec( + name="a", type_info=STR_TYPE, description=None, is_required=True + ) + fs2 = FieldSpec( + name="b", type_info=STR_TYPE, description=None, is_required=False + ) + spec = make_union_spec( + annotated_fields=[ + AnnotatedField(field_spec=fs1, variant_sources=None), + AnnotatedField(field_spec=fs2, variant_sources=("X",)), + ], + ) + assert spec.fields == [fs1, fs2] + + +class TestTypeIdentity: + def test_frozen(self) -> None: + ti = TypeIdentity(obj=int, name="int") + with pytest.raises(AttributeError): + ti.obj = str # type: ignore[misc] + + def test_same_obj_equal(self) 
-> None: + a = TypeIdentity(obj=int, name="int") + b = TypeIdentity(obj=int, name="integer") + assert a == b + + def test_same_obj_same_hash(self) -> None: + a = TypeIdentity(obj=int, name="int") + b = TypeIdentity(obj=int, name="integer") + assert hash(a) == hash(b) + + def test_different_obj_not_equal(self) -> None: + a = TypeIdentity(obj=int, name="int") + b = TypeIdentity(obj=str, name="int") + assert a != b + + def test_works_as_dict_key(self) -> None: + ti = TypeIdentity(obj=int, name="int") + d = {ti: "value"} + assert d[TypeIdentity(obj=int, name="different")] == "value" + + def test_not_equal_to_non_identity(self) -> None: + ti = TypeIdentity(obj=int, name="int") + non_identity_type: object = int + non_identity_str: object = "int" + assert ti != non_identity_type + assert ti != non_identity_str + + +class TestSpecIdentity: + def test_model_spec_identity(self) -> None: + spec = ModelSpec(name="Foo", description=None, source_type=SimpleModel) + ident = spec.identity + assert isinstance(ident, TypeIdentity) + assert ident.obj is SimpleModel + assert ident.name == "Foo" + + def test_enum_spec_identity(self) -> None: + spec = EnumSpec(name="Color", description=None, source_type=InstrumentFamily) + ident = spec.identity + assert ident.obj is InstrumentFamily + assert ident.name == "Color" + + def test_newtype_spec_identity(self) -> None: + from overture.schema.system.primitive import int32 + + spec = NewTypeSpec( + name="int32", description=None, type_info=STR_TYPE, source_type=int32 + ) + ident = spec.identity + assert ident.obj is int32 + assert ident.name == "int32" + + def test_union_spec_identity(self) -> None: + sentinel = object() + spec = make_union_spec("TestUnion", source_annotation=sentinel) + ident = spec.identity + assert ident.obj is sentinel + assert ident.name == "TestUnion" + + def test_model_spec_satisfies_feature_protocol_with_identity(self) -> None: + spec = ModelSpec(name="Foo", description=None, source_type=SimpleModel) + feature: 
FeatureSpec = spec + assert feature.identity.obj is SimpleModel diff --git a/packages/overture-schema-codegen/tests/test_type_analyzer.py b/packages/overture-schema-codegen/tests/test_type_analyzer.py new file mode 100644 index 000000000..bbf8373fd --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_type_analyzer.py @@ -0,0 +1,676 @@ +"""Tests for type analysis.""" + +from enum import Enum +from typing import Annotated, Any, Literal, NewType, Optional + +import pytest +from annotated_types import Ge +from overture.schema.codegen.extraction.type_analyzer import ( + TypeInfo, + TypeKind, + UnsupportedUnionError, + analyze_type, + single_literal_value, +) +from overture.schema.system.primitive import float64, int32 +from overture.schema.system.ref import Id +from overture.schema.system.string import ( + HexColor, + NoWhitespaceConstraint, + NoWhitespaceString, + SnakeCaseString, +) +from pydantic import BaseModel, Field, Tag +from typing_extensions import Sentinel + + +@pytest.fixture() +def id_type_info() -> TypeInfo: + return analyze_type(Id) + + +@pytest.fixture() +def hex_color_type_info() -> TypeInfo: + return analyze_type(HexColor) + + +class TestAnalyzeTypePrimitives: + """Tests for primitive type analysis.""" + + @pytest.mark.parametrize("annotation", [str, int, float, bool]) + def test_builtin_returns_primitive_type_info(self, annotation: type) -> None: + """Builtin type annotations return PRIMITIVE TypeInfo with matching base_type.""" + result = analyze_type(annotation) + + assert result.base_type == annotation.__name__ + assert result.kind == TypeKind.PRIMITIVE + assert result.is_optional is False + assert result.is_list is False + + +class TestAnalyzeTypeSentinel: + """Tests for Sentinel type filtering in unions. + + Pydantic uses `typing_extensions.Sentinel` instances (like ``) + in union types for optional fields. The type analyzer filters these out + alongside `None` when processing unions. 
+ """ + + @pytest.fixture() + def missing_sentinel(self) -> object: + return Sentinel("MISSING") + + def test_sentinel_filtered_from_union(self, missing_sentinel: object) -> None: + """Sentinel is filtered out, leaving the concrete type.""" + result = analyze_type(str | missing_sentinel) # type: ignore[arg-type] + + assert result.base_type == "str" + assert result.kind == TypeKind.PRIMITIVE + assert result.is_optional is False + + def test_sentinel_with_none_sets_optional(self, missing_sentinel: object) -> None: + """Sentinel + None both filtered; None triggers is_optional.""" + result = analyze_type(str | missing_sentinel | None) # type: ignore[arg-type] + + assert result.base_type == "str" + assert result.kind == TypeKind.PRIMITIVE + assert result.is_optional is True + + +class TestAnalyzeTypeOptional: + """Tests for Optional type analysis.""" + + def test_pipe_none_sets_is_optional(self) -> None: + """str | None returns TypeInfo with is_optional=True.""" + result = analyze_type(str | None) + + assert result.base_type == "str" + assert result.kind == TypeKind.PRIMITIVE + assert result.is_optional is True + assert result.is_list is False + + def test_type_with_literal_and_none(self) -> None: + """str | Literal[""] | None filters Literal and marks optional.""" + result = analyze_type(str | Literal[""] | None) + + assert result.base_type == "str" + assert result.kind == TypeKind.PRIMITIVE + assert result.is_optional is True + + def test_typing_optional_sets_is_optional(self) -> None: + """Optional[str] from typing module returns TypeInfo with is_optional=True.""" + result = analyze_type(Optional[str]) # noqa: UP045 + + assert result.base_type == "str" + assert result.kind == TypeKind.PRIMITIVE + assert result.is_optional is True + assert result.is_list is False + + +class TestAnalyzeTypeUnionLiteralFiltering: + """Tests for filtering Literal arms out of unions.""" + + def test_type_with_literal_alternative(self) -> None: + """str | Literal[""] filters out the 
Literal and analyzes the concrete type.""" + result = analyze_type(str | Literal[""]) + + assert result.base_type == "str" + assert result.kind == TypeKind.PRIMITIVE + assert result.is_optional is False + + +class TestAnalyzeTypeList: + """Tests for list type analysis.""" + + def test_list_str_sets_is_list(self) -> None: + """list[str] returns TypeInfo with is_list=True.""" + result = analyze_type(list[str]) + + assert result.base_type == "str" + assert result.kind == TypeKind.PRIMITIVE + assert result.is_optional is False + assert result.is_list is True + + def test_nested_list_sets_depth_2(self) -> None: + """list[list[str]] records two levels of nesting.""" + result = analyze_type(list[list[str]]) + + assert result.list_depth == 2 + assert result.base_type == "str" + assert result.kind == TypeKind.PRIMITIVE + + +class TestAnalyzeTypeComposite: + """Tests for composite/nested type analysis.""" + + def test_list_optional_str(self) -> None: + """list[str | None] sets both is_list and is_optional.""" + result = analyze_type(list[str | None]) + + assert result.base_type == "str" + assert result.is_list is True + assert result.is_optional is True + + def test_optional_list_str(self) -> None: + """list[str] | None sets both is_list and is_optional.""" + result = analyze_type(list[str] | None) + + assert result.base_type == "str" + assert result.is_list is True + assert result.is_optional is True + + def test_annotated_optional_str(self) -> None: + """Annotated[str | None, ...] extracts constraints and sets is_optional.""" + result = analyze_type(Annotated[str | None, "description"]) + + assert result.base_type == "str" + assert result.is_optional is True + assert len(result.constraints) == 1 + assert result.constraints[0].source_ref is None + assert result.constraints[0].constraint == "description" + + def test_annotated_list_str(self) -> None: + """Annotated[list[str], ...] 
extracts constraints and sets is_list.""" + result = analyze_type(Annotated[list[str], Field(min_length=1)]) + + assert result.base_type == "str" + assert result.is_list is True + assert len(result.constraints) == 1 + assert result.constraints[0].source_ref is None + + +class TestAnalyzeTypeAnnotated: + """Tests for Annotated type analysis.""" + + def test_annotated_int_with_ge_extracts_constraint(self) -> None: + """Annotated[int, Field(ge=0)] unpacks FieldInfo to extract Ge constraint.""" + result = analyze_type(Annotated[int, Field(ge=0)]) + + assert result.base_type == "int" + assert result.kind == TypeKind.PRIMITIVE + assert len(result.constraints) == 1 + cs = result.constraints[0] + assert cs.source_ref is None + assert isinstance(cs.constraint, Ge) + assert cs.constraint.ge == 0 + + def test_annotated_without_constraints(self) -> None: + """Annotated[str, 'description'] extracts non-Field metadata.""" + result = analyze_type(Annotated[str, "just a description"]) + + assert result.base_type == "str" + assert len(result.constraints) == 1 + assert result.constraints[0].source_ref is None + assert result.constraints[0].constraint == "just a description" + + +class TestAnalyzeTypeLiteral: + """Tests for Literal type analysis.""" + + def test_literal_string_extracts_values(self) -> None: + """Literal["active"] stores the value in literal_values tuple.""" + result = analyze_type(Literal["active"]) + + assert result.kind == TypeKind.LITERAL + assert result.literal_values == ("active",) + + def test_literal_int_extracts_values(self) -> None: + """Literal[42] stores the value in literal_values tuple.""" + result = analyze_type(Literal[42]) + + assert result.kind == TypeKind.LITERAL + assert result.literal_values == (42,) + + def test_multi_value_literal_stores_all_args(self) -> None: + """Literal["a", "b"] stores all args in literal_values tuple.""" + result = analyze_type(Literal["a", "b"]) + + assert result.kind == TypeKind.LITERAL + assert result.literal_values == 
("a", "b") + + def test_optional_literal_extracts_values(self) -> None: + """Optional[Literal["x"]] unwraps to Literal with is_optional set.""" + result = analyze_type(Literal["x"] | None) + + assert result.kind == TypeKind.LITERAL + assert result.literal_values == ("x",) + assert result.is_optional is True + + +class TestAnalyzeTypeEnum: + """Tests for Enum type analysis.""" + + def test_enum_subclass_returns_kind_enum(self) -> None: + """Enum subclass returns TypeInfo with kind=ENUM.""" + + class Color(Enum): + RED = "red" + GREEN = "green" + + result = analyze_type(Color) + + assert result.base_type == "Color" + assert result.kind == TypeKind.ENUM + + +class TestAnalyzeTypeModel: + """Tests for BaseModel type analysis.""" + + def test_basemodel_subclass_returns_kind_model(self) -> None: + """BaseModel subclass returns TypeInfo with kind=MODEL.""" + + class Person(BaseModel): + name: str + + result = analyze_type(Person) + + assert result.base_type == "Person" + assert result.kind == TypeKind.MODEL + + +class TestAnalyzeTypeNewType: + """Tests for NewType primitive analysis.""" + + def test_int32_returns_newtype_name(self) -> None: + """int32 NewType returns TypeInfo with base_type='int32'.""" + result = analyze_type(int32) + + assert result.base_type == "int32" + assert result.kind == TypeKind.PRIMITIVE + + def test_float64_returns_newtype_name(self) -> None: + """float64 NewType returns TypeInfo with base_type='float64'.""" + result = analyze_type(float64) + + assert result.base_type == "float64" + assert result.kind == TypeKind.PRIMITIVE + + def test_optional_int32(self) -> None: + """int32 | None sets is_optional and preserves base_type.""" + result = analyze_type(int32 | None) + + assert result.base_type == "int32" + assert result.is_optional is True + + +class TestNewtypeName: + """Tests for outermost NewType name tracking.""" + + def test_single_layer_newtype(self) -> None: + """Single NewType like int32 sets newtype_name to its name.""" + result = 
analyze_type(int32) + + assert result.newtype_name == "int32" + assert result.base_type == "int32" + + def test_nested_newtype_preserves_outermost(self, id_type_info: TypeInfo) -> None: + """Nested NewType chain uses outermost name for newtype_name.""" + assert id_type_info.newtype_name == "Id" + assert id_type_info.base_type == "NoWhitespaceString" + + def test_plain_type_has_no_newtype_name(self) -> None: + """Plain types without NewType wrapping have newtype_name=None.""" + result = analyze_type(str) + + assert result.newtype_name is None + + def test_newtype_ref_set_for_newtype(self, id_type_info: TypeInfo) -> None: + """newtype_ref points to the outermost NewType callable.""" + assert id_type_info.newtype_ref is Id + + def test_newtype_ref_none_for_plain_type(self) -> None: + """Plain types have newtype_ref=None.""" + result = analyze_type(str) + + assert result.newtype_ref is None + + +class TestNewtypeWrappingList: + """Tests for NewType wrapping a list type.""" + + def test_newtype_wrapping_list(self) -> None: + """NewType wrapping a list sets is_list and preserves newtype_name.""" + TestSources = NewType("TestSources", Annotated[list[str], Field(min_length=1)]) + result = analyze_type(TestSources) + + assert result.is_list is True + assert result.newtype_name == "TestSources" + + def test_scalar_newtype_is_not_list(self) -> None: + """Scalar NewType like int32 has is_list=False.""" + result = analyze_type(int32) + + assert result.is_list is False + + def test_plain_list_has_no_newtype_name(self) -> None: + """Plain list[str] without NewType has newtype_name=None.""" + result = analyze_type(list[str]) + + assert result.newtype_name is None + assert result.is_list is True + + def test_newtype_wrapping_list_of_models(self) -> None: + """list[NewType wrapping list[Model]] records depth 2, outer depth 1.""" + + class _Item(BaseModel): + name: str + + Inner = NewType("Inner", Annotated[list[_Item], Field(min_length=1)]) + result = analyze_type(list[Inner]) + + 
assert result.list_depth == 2 + assert result.newtype_outer_list_depth == 1 + assert result.base_type == "Inner" + assert result.kind == TypeKind.MODEL + assert result.source_type is _Item + + +class TestNewtypeOuterListDepth: + """Tests for newtype_outer_list_depth tracking.""" + + def test_list_of_scalar_newtype_has_outer_depth(self) -> None: + """list[ScalarNewType] records the list layer as outside the NewType.""" + ScalarNT = NewType("ScalarNT", str) + result = analyze_type(list[ScalarNT]) + + assert result.newtype_outer_list_depth == 1 + assert result.list_depth == 1 + + def test_newtype_wrapping_list_has_zero_outer_depth(self) -> None: + """NewType wrapping list[X] records no list layers outside the NewType.""" + ListNT = NewType("ListNT", Annotated[list[str], Field(min_length=1)]) + result = analyze_type(ListNT) + + assert result.newtype_outer_list_depth == 0 + assert result.list_depth == 1 + + @pytest.mark.parametrize( + "annotation", + [ + list[str], # list without NewType + int32, # scalar NewType + str, # plain type + ], + ids=["plain_list", "scalar_newtype", "plain_type"], + ) + def test_zero_outer_depth_without_newtype_boundary( + self, annotation: object + ) -> None: + """Types without a NewType inside a list have newtype_outer_list_depth=0.""" + result = analyze_type(annotation) + + assert result.newtype_outer_list_depth == 0 + + def test_nested_list_of_scalar_newtype_has_outer_depth_2(self) -> None: + """list[list[ScalarNewType]] records two outer list layers.""" + ScalarNT = NewType("ScalarNT", str) + result = analyze_type(list[list[ScalarNT]]) + + assert result.newtype_outer_list_depth == 2 + assert result.list_depth == 2 + + +class TestConstraintProvenance: + """Tests for flattened constraints with provenance tracking.""" + + def test_nested_newtype_flattens_constraints(self, id_type_info: TypeInfo) -> None: + """Id -> NoWhitespaceString -> str flattens all constraints with sources.""" + source_names = { + cs.source_name for cs in 
id_type_info.constraints if cs.source_name + } + assert "Id" in source_names + assert "NoWhitespaceString" in source_names + + def test_nested_newtype_includes_inner_constraints( + self, id_type_info: TypeInfo + ) -> None: + """Inner NewType constraints are collected with provenance.""" + nws_constraints = [ + cs for cs in id_type_info.constraints if cs.source_ref is NoWhitespaceString + ] + constraint_types = {type(cs.constraint) for cs in nws_constraints} + assert NoWhitespaceConstraint in constraint_types + + def test_direct_annotation_has_none_source(self) -> None: + """Constraints from direct Annotated (no NewType) have source_ref=None.""" + result = analyze_type(Annotated[str, "direct"]) + + assert len(result.constraints) == 1 + assert result.constraints[0].source_ref is None + assert result.constraints[0].constraint == "direct" + + def test_single_newtype_constraints_attributed( + self, hex_color_type_info: TypeInfo + ) -> None: + """HexColor constraints are attributed to the HexColor callable.""" + assert all(cs.source_ref is HexColor for cs in hex_color_type_info.constraints) + assert len(hex_color_type_info.constraints) > 0 + + def test_source_ref_is_newtype_callable( + self, hex_color_type_info: TypeInfo + ) -> None: + """source_ref is the actual NewType callable, not a string.""" + cs = hex_color_type_info.constraints[0] + assert cs.source_ref is HexColor + + def test_constraint_preserves_original_object( + self, hex_color_type_info: TypeInfo + ) -> None: + """ConstraintSource.constraint holds the original constraint object.""" + hcc = next( + cs + for cs in hex_color_type_info.constraints + if type(cs.constraint).__name__ == "HexColorConstraint" + ) + assert hcc.constraint.__class__.__name__ == "HexColorConstraint" + + +class TestTypeInfoDescription: + """Tests for TypeInfo.description from Field(description=...) 
metadata.""" + + def test_newtype_with_field_description( + self, hex_color_type_info: TypeInfo + ) -> None: + """Should extract Field description from HexColor.""" + assert hex_color_type_info.description is not None + assert "color" in hex_color_type_info.description.lower() + + def test_newtype_without_field_description(self) -> None: + """Should have None description for types without Field(description=...).""" + result = analyze_type(int) + assert result.description is None + + def test_plain_annotated_with_field_description(self) -> None: + """Should extract description from Annotated with Field(description=...).""" + MyType = Annotated[str, Field(description="A test description")] + result = analyze_type(MyType) + assert result.description == "A test description" + + def test_outermost_description_wins(self, id_type_info: TypeInfo) -> None: + """Outermost FieldInfo.description takes precedence in nested NewTypes.""" + assert id_type_info.description is not None + assert "unique identifier" in id_type_info.description.lower() + + def test_newtype_without_field_has_none_description(self) -> None: + """NewType with constraints but no Field(description=...) 
has None.""" + result = analyze_type(SnakeCaseString) + assert result.description is None + + +class TestAnalyzeTypeAny: + """Tests for typing.Any analysis.""" + + def test_any_returns_primitive(self) -> None: + """Any annotation returns TypeInfo with base_type='Any' and kind=PRIMITIVE.""" + result = analyze_type(Any) + + assert result.base_type == "Any" + assert result.kind == TypeKind.PRIMITIVE + + def test_dict_with_any_value(self) -> None: + """dict[str, Any] analyzes without error.""" + result = analyze_type(dict[str, Any]) + + assert result.is_dict is True + assert result.dict_value_type is not None + assert result.dict_value_type.base_type == "Any" + + +class TestAnalyzeTypeDict: + """Tests for dict type analysis.""" + + @pytest.fixture() + def dict_str_int(self) -> TypeInfo: + return analyze_type(dict[str, int]) + + def test_dict_str_int_sets_is_dict(self, dict_str_int: TypeInfo) -> None: + """dict[str, int] returns TypeInfo with is_dict=True.""" + assert dict_str_int.is_dict is True + assert dict_str_int.is_optional is False + assert dict_str_int.is_list is False + + def test_dict_key_type_analyzed(self, dict_str_int: TypeInfo) -> None: + """dict[str, int] has dict_key_type describing the key.""" + assert dict_str_int.dict_key_type is not None + assert dict_str_int.dict_key_type.base_type == "str" + assert dict_str_int.dict_key_type.kind == TypeKind.PRIMITIVE + + def test_dict_value_type_analyzed(self, dict_str_int: TypeInfo) -> None: + """dict[str, int] has dict_value_type describing the value.""" + assert dict_str_int.dict_value_type is not None + assert dict_str_int.dict_value_type.base_type == "int" + assert dict_str_int.dict_value_type.kind == TypeKind.PRIMITIVE + + def test_optional_dict(self) -> None: + """dict[str, str] | None sets is_dict and is_optional.""" + result = analyze_type(dict[str, str] | None) + + assert result.is_dict is True + assert result.is_optional is True + + def test_newtype_wrapping_dict(self) -> None: + """NewType wrapping 
dict preserves newtype_name and sets is_dict.""" + TestMapping = NewType("TestMapping", dict[str, str]) + result = analyze_type(TestMapping) + + assert result.is_dict is True + assert result.newtype_name == "TestMapping" + + def test_bare_dict_raises_type_error(self) -> None: + """Bare dict without type arguments raises TypeError.""" + with pytest.raises(TypeError, match="Bare dict"): + analyze_type(dict) + + +class TestAnalyzeTypeErrors: + """Tests for error handling.""" + + def test_unsupported_annotation_raises_type_error(self) -> None: + """Unsupported annotation type raises TypeError.""" + with pytest.raises(TypeError, match="Unsupported annotation type"): + analyze_type("not a type") + + def test_multi_type_union_raises_clear_error(self) -> None: + """Multi-type unions like str | int raise UnsupportedUnionError.""" + with pytest.raises( + UnsupportedUnionError, match="Multi-type unions not supported" + ): + analyze_type(str | int) + + def test_multi_type_union_with_none_raises_clear_error(self) -> None: + """Multi-type optional unions like str | int | None raise UnsupportedUnionError.""" + with pytest.raises( + UnsupportedUnionError, match="Multi-type unions not supported" + ): + analyze_type(str | int | None) + + def test_bare_list_raises_type_error(self) -> None: + """Bare list without type argument raises TypeError.""" + with pytest.raises(TypeError, match="Bare list without type argument"): + analyze_type(list) + + +class UnionModelA(BaseModel): + x: int + + +class UnionModelB(BaseModel): + y: str + + +class TestAnalyzeTypeUnion: + """Tests for discriminated union analysis.""" + + def test_all_model_union_returns_union_kind(self) -> None: + """Annotated[Union of BaseModel subclasses] returns TypeKind.UNION.""" + union_type = Annotated[UnionModelA | UnionModelB, Field(description="test")] + result = analyze_type(union_type) + + assert result.kind == TypeKind.UNION + assert result.union_members is not None + assert len(result.union_members) == 2 + assert 
UnionModelA in result.union_members + assert UnionModelB in result.union_members + + def test_annotated_wrapped_members_unwrapped(self) -> None: + """Union members wrapped in Annotated[X, Tag(...)] are unwrapped.""" + union_type = Annotated[ + Annotated[UnionModelA, Tag("a")] | Annotated[UnionModelB, Tag("b")], + Field(description="disc"), + ] + result = analyze_type(union_type) + + assert result.kind == TypeKind.UNION + assert result.union_members is not None + assert len(result.union_members) == 2 + assert UnionModelA in result.union_members + assert UnionModelB in result.union_members + + def test_mixed_model_nonmodel_union_still_raises(self) -> None: + """Union of model + non-model types still raises UnsupportedUnionError.""" + with pytest.raises(UnsupportedUnionError): + analyze_type(UnionModelA | str) + + def test_non_model_multi_union_still_raises(self) -> None: + """Multi-type union of non-models still raises UnsupportedUnionError.""" + with pytest.raises(UnsupportedUnionError): + analyze_type(str | int) + + def test_union_base_type_is_first_member_name(self) -> None: + """UNION TypeInfo base_type is the first member's class name.""" + result = analyze_type( + Annotated[UnionModelA | UnionModelB, Field(description="test")] + ) + assert result.base_type == "UnionModelA" + + def test_optional_union_sets_is_optional(self) -> None: + """Union with None among model members sets is_optional.""" + result = analyze_type( + Annotated[UnionModelA | UnionModelB, Field(description="test")] | None + ) + assert result.kind == TypeKind.UNION + assert result.is_optional is True + + +class TestSingleLiteralValue: + """Tests for single_literal_value convenience accessor.""" + + def test_single_value_literal(self) -> None: + """Literal["x"] returns the literal value.""" + assert single_literal_value(Literal["x"]) == "x" + + def test_single_int_literal(self) -> None: + """Literal[42] returns the integer value.""" + assert single_literal_value(Literal[42]) == 42 + + def 
test_multi_value_literal_returns_none(self) -> None: + """Multi-value Literal returns None (no single default).""" + assert single_literal_value(Literal["a", "b"]) is None + + def test_non_literal_returns_none(self) -> None: + """Non-Literal types return None.""" + assert single_literal_value(str) is None + + def test_unsupported_type_returns_none(self) -> None: + """Types that raise during analysis return None.""" + assert single_literal_value("not a type") is None diff --git a/packages/overture-schema-codegen/tests/test_type_collection.py b/packages/overture-schema-codegen/tests/test_type_collection.py new file mode 100644 index 000000000..154b39e2c --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_type_collection.py @@ -0,0 +1,128 @@ +"""Tests for type collection module.""" + +from codegen_test_support import ( + FeatureWithAddress, + FeatureWithSources, + FeatureWithUrl, + Instrument, + TestSegmentWithSubModel, + has_name, + lookup_by_name, +) +from overture.schema.codegen.extraction.model_extraction import ( + expand_model_tree, + extract_model, +) +from overture.schema.codegen.extraction.specs import ( + EnumSpec, + ModelSpec, + NewTypeSpec, + PydanticTypeSpec, + SupplementarySpec, + TypeIdentity, +) +from overture.schema.codegen.layout.type_collection import ( + collect_all_supplementary_types, +) +from pydantic import BaseModel + + +def _make_feature_with_sub_model(sub_model: type) -> type[BaseModel]: + """Build a feature class whose only field references sub_model.""" + return type( + f"FeatureWith{sub_model.__name__}", + (BaseModel,), + {"__annotations__": {"sub": sub_model}, "sub": None}, + ) + + +def _expanded_supplementary(model_class: type) -> dict[TypeIdentity, SupplementarySpec]: + spec = extract_model(model_class) + expand_model_tree(spec) + return collect_all_supplementary_types([spec]) + + +class TestCollectAllSupplementarySpecs: + """Tests for collect_all_supplementary_types returning specs from expanded trees.""" + + def 
test_returns_enum_specs(self) -> None: + result = _expanded_supplementary(Instrument) + + assert has_name(result, "InstrumentFamily") + assert isinstance(lookup_by_name(result, "InstrumentFamily"), EnumSpec) + + def test_returns_newtype_specs(self) -> None: + result = _expanded_supplementary(Instrument) + + assert has_name(result, "HexColor") + assert isinstance(lookup_by_name(result, "HexColor"), NewTypeSpec) + + def test_returns_model_specs_from_expanded_tree(self) -> None: + result = _expanded_supplementary(FeatureWithAddress) + + assert has_name(result, "Address") + assert isinstance(lookup_by_name(result, "Address"), ModelSpec) + + def test_collects_transitive_types(self) -> None: + """Types referenced by sub-models are also collected.""" + result = _expanded_supplementary(FeatureWithSources) + + # Sources is a semantic NewType; SourceItem is a sub-model + # referenced transitively via the expanded tree + assert has_name(result, "Sources") + assert has_name(result, "SourceItem") + + def test_same_name_different_types_both_collected(self) -> None: + """Two types with the same __name__ from different modules are both collected.""" + ModelA = type("Address", (BaseModel,), {"__annotations__": {"x": str}}) + ModelB = type("Address", (BaseModel,), {"__annotations__": {"y": int}}) + + outer_a = extract_model(_make_feature_with_sub_model(ModelA)) + expand_model_tree(outer_a) + + outer_b = extract_model(_make_feature_with_sub_model(ModelB)) + expand_model_tree(outer_b) + + result = collect_all_supplementary_types([outer_a, outer_b]) + + address_entries = [ + spec for tid, spec in result.items() if tid.name == "Address" + ] + assert len(address_entries) == 2 + + +class TestCollectUnionMemberSubModels: + """Tests for union members with nested sub-model fields.""" + + def test_union_member_with_sub_model_collects_sub_model(self) -> None: + """Sub-models inside union members are collected without RuntimeError.""" + + class FeatureWithUnionSubModel(BaseModel): + segment: 
TestSegmentWithSubModel + + result = _expanded_supplementary(FeatureWithUnionSubModel) + + assert has_name(result, "ContactInfo") + assert isinstance(lookup_by_name(result, "ContactInfo"), ModelSpec) + + +class TestCollectPydanticTypes: + """Tests for Pydantic built-in type collection.""" + + def test_collects_pydantic_type_from_field(self) -> None: + """Pydantic types referenced in fields are collected.""" + result = _expanded_supplementary(FeatureWithUrl) + assert has_name(result, "HttpUrl") + assert isinstance(lookup_by_name(result, "HttpUrl"), PydanticTypeSpec) + + def test_collects_pydantic_type_inside_list(self) -> None: + """Pydantic types wrapped in list[] are collected.""" + result = _expanded_supplementary(FeatureWithUrl) + assert has_name(result, "EmailStr") + assert isinstance(lookup_by_name(result, "EmailStr"), PydanticTypeSpec) + + def test_does_not_collect_builtin_primitives(self) -> None: + """Plain primitives like str are not collected as PydanticTypeSpec.""" + result = _expanded_supplementary(FeatureWithUrl) + assert not has_name(result, "str") + assert not has_name(result, "int") diff --git a/packages/overture-schema-codegen/tests/test_type_placement.py b/packages/overture-schema-codegen/tests/test_type_placement.py new file mode 100644 index 000000000..506a2afec --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_type_placement.py @@ -0,0 +1,236 @@ +"""Tests for type placement module.""" + +from pathlib import PurePosixPath + +import overture.schema.system.primitive as _system_primitive +from codegen_test_support import ( + EMAIL_STR_SPEC, + HTTP_URL_SPEC, + STR_TYPE, + flat_specs_from_discovery, + lookup_by_name, + make_union_spec, +) +from overture.schema.codegen.extraction.model_extraction import expand_model_tree +from overture.schema.codegen.extraction.specs import ( + AnnotatedField, + FeatureSpec, + FieldSpec, + ModelSpec, + SupplementarySpec, + TypeIdentity, +) +from overture.schema.codegen.layout.type_collection import ( + 
collect_all_supplementary_types, +) +from overture.schema.codegen.markdown.link_computation import LinkContext, relative_link +from overture.schema.codegen.markdown.path_assignment import ( + GEOMETRY_PAGE, + PRIMITIVES_PAGE, + build_placement_registry, +) +from overture.schema.codegen.markdown.pipeline import ( + partition_numeric_and_geometry_types, +) +from pydantic import BaseModel + +_NUMERIC_NAMES, _GEOMETRY_NAMES = partition_numeric_and_geometry_types( + _system_primitive +) + +_SCHEMA_ROOT = "overture.schema" + + +def _build_registry( + feature_specs: list[ModelSpec], +) -> tuple[dict[TypeIdentity, PurePosixPath], dict[TypeIdentity, SupplementarySpec]]: + """Build placement registry with standard aggregate names.""" + cache: dict[type, ModelSpec] = {} + for spec in feature_specs: + expand_model_tree(spec, cache) + all_specs = collect_all_supplementary_types(feature_specs) + registry = build_placement_registry( + feature_specs, all_specs, _NUMERIC_NAMES, _GEOMETRY_NAMES, _SCHEMA_ROOT + ) + return registry, all_specs + + +class TestRelativeLink: + """Test relative path computation between pages.""" + + def test_same_directory(self) -> None: + source = PurePosixPath("buildings/building.md") + target = PurePosixPath("buildings/facade_material.md") + assert relative_link(source, target) == "facade_material.md" + + def test_sibling_directory(self) -> None: + source = PurePosixPath("buildings/building.md") + target = PurePosixPath("core/names/names.md") + assert relative_link(source, target) == "../core/names/names.md" + + def test_within_core(self) -> None: + source = PurePosixPath("core/names/names.md") + target = PurePosixPath("core/sources/sources.md") + assert relative_link(source, target) == "../sources/sources.md" + + def test_to_aggregate_page(self) -> None: + source = PurePosixPath("core/names/names.md") + target = PurePosixPath("system/primitive/primitives.md") + assert relative_link(source, target) == "../../system/primitive/primitives.md" + + +class 
TestBuildPlacementRegistry: + """Test the full placement registry builder with module-mirrored paths.""" + + def test_features_at_theme_level(self) -> None: + """Features land directly in their theme directory.""" + specs = flat_specs_from_discovery("buildings") + registry, _ = _build_registry(specs) + + assert lookup_by_name(registry, "Building") == PurePosixPath( + "buildings/building.md" + ) + assert lookup_by_name(registry, "BuildingPart") == PurePosixPath( + "buildings/building_part.md" + ) + + def test_shared_types_mirror_source_modules(self) -> None: + """Core/system types land in directories matching their module path.""" + specs = flat_specs_from_discovery("buildings") + registry, _ = _build_registry(specs) + + names = {tid.name for tid in registry} + if "Names" in names: + assert str(lookup_by_name(registry, "Names")).startswith("core/") + + def test_no_duplicate_paths(self) -> None: + """No two individual types share an output path.""" + specs = flat_specs_from_discovery() + registry, _ = _build_registry(specs) + + aggregate_pages = { + PurePosixPath("system/primitive/primitives.md"), + PurePosixPath("system/primitive/geometry.md"), + } + individual = [p for p in registry.values() if p not in aggregate_pages] + assert len(individual) == len(set(individual)), ( + "Duplicate output paths detected" + ) + + def test_aggregate_pages_at_system_primitive(self) -> None: + """Primitive and geometry aggregate pages under system/primitive/.""" + assert PRIMITIVES_PAGE == PurePosixPath("system/primitive/primitives.md") + assert GEOMETRY_PAGE == PurePosixPath("system/primitive/geometry.md") + + def test_supplementary_types_nested_under_types(self) -> None: + """Supplementary types in a feature directory go under types/.""" + specs = flat_specs_from_discovery("buildings") + registry, _ = _build_registry(specs) + + # BuildingClass is a supplementary type from the buildings module + assert lookup_by_name(registry, "BuildingClass") == PurePosixPath( + 
"buildings/types/building_class.md" + ) + + def test_shared_types_not_nested(self) -> None: + """Core/system supplementary types stay at their module-mirrored path.""" + specs = flat_specs_from_discovery("buildings") + registry, _ = _build_registry(specs) + + # Names is from overture.schema.core -- no features there, no nesting + names = {tid.name for tid in registry} + if "Names" in names: + path = str(lookup_by_name(registry, "Names")) + assert path.startswith("core/") + assert "/types/" not in path + + +class TestPlacementWithUnionSpec: + """Tests for placement registry with UnionSpec.""" + + def test_union_spec_gets_placement(self) -> None: + """UnionSpec is placed alongside ModelSpec in the registry.""" + + class Base(BaseModel): + name: str + + class A(Base): + x: int + + union_spec = make_union_spec( + annotated_fields=[ + AnnotatedField( + field_spec=FieldSpec( + name="name", + type_info=STR_TYPE, + description=None, + is_required=True, + ), + variant_sources=None, + ), + ], + members=[A], + common_base=Base, + entry_point="test.package:TestUnion", + ) + + feature_specs: list[FeatureSpec] = [union_spec] + all_specs = collect_all_supplementary_types(feature_specs) + registry = build_placement_registry( + feature_specs, all_specs, [], [], "test.package" + ) + assert any(tid.name == "TestUnion" for tid in registry) + + +class TestLinkContextWithTypeIdentity: + """Tests for LinkContext using TypeIdentity keys.""" + + def test_same_name_different_identity_separate_paths(self) -> None: + """Two types with the same name but different objects resolve to different paths.""" + obj_a = type("Address", (), {}) + obj_b = type("Address", (), {}) + registry = { + TypeIdentity(obj_a, "Address"): PurePosixPath("places/types/address.md"), + TypeIdentity(obj_b, "Address"): PurePosixPath("addresses/address.md"), + } + ctx = LinkContext(page_path=PurePosixPath("places/place.md"), registry=registry) + assert ctx.resolve_link(TypeIdentity(obj_a, "Address")) == "types/address.md" 
+ assert ( + ctx.resolve_link(TypeIdentity(obj_b, "Address")) + == "../addresses/address.md" + ) + + +class TestPydanticTypePlacement: + """Tests for placement of Pydantic built-in types.""" + + def test_pydantic_type_placed_under_module_dir(self) -> None: + registry = build_placement_registry( + feature_specs=[], + all_specs={HTTP_URL_SPEC.identity: HTTP_URL_SPEC}, + numeric_names=[], + geometry_names=[], + schema_root="overture.schema", + ) + assert lookup_by_name(registry, "HttpUrl") == PurePosixPath( + "pydantic/networks/http_url.md" + ) + + def test_multiple_pydantic_types_same_module(self) -> None: + specs: dict[TypeIdentity, SupplementarySpec] = { + HTTP_URL_SPEC.identity: HTTP_URL_SPEC, + EMAIL_STR_SPEC.identity: EMAIL_STR_SPEC, + } + registry = build_placement_registry( + feature_specs=[], + all_specs=specs, + numeric_names=[], + geometry_names=[], + schema_root="overture.schema", + ) + assert lookup_by_name(registry, "HttpUrl") == PurePosixPath( + "pydantic/networks/http_url.md" + ) + assert lookup_by_name(registry, "EmailStr") == PurePosixPath( + "pydantic/networks/email_str.md" + ) diff --git a/packages/overture-schema-codegen/tests/test_type_registry.py b/packages/overture-schema-codegen/tests/test_type_registry.py new file mode 100644 index 000000000..b9d02d2ac --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_type_registry.py @@ -0,0 +1,143 @@ +"""Tests for type registry.""" + +import pytest +from overture.schema.codegen.extraction.type_analyzer import TypeInfo, TypeKind +from overture.schema.codegen.extraction.type_registry import ( + PRIMITIVE_TYPES, + TypeMapping, + get_type_mapping, + resolve_type_name, +) + + +class TestTypeMapping: + """Tests for TypeMapping dataclass.""" + + def test_typemapping_accepts_markdown(self) -> None: + """TypeMapping should construct with markdown field.""" + mapping = TypeMapping(markdown="int32") + + assert mapping.markdown == "int32" + + def test_for_target_returns_markdown(self) -> None: + 
"""for_target should return markdown representation for markdown target.""" + mapping = TypeMapping(markdown="int32") + + assert mapping.for_target("markdown") == "int32" + + def test_for_target_rejects_unknown_target(self) -> None: + """for_target should raise ValueError for unknown targets.""" + mapping = TypeMapping(markdown="int32") + + with pytest.raises(ValueError, match="Unknown target 'scala'"): + mapping.for_target("scala") + + +class TestPrimitiveTypes: + """Tests for PRIMITIVE_TYPES registry.""" + + def test_registry_contains_expected_types(self) -> None: + """Registry should contain all expected primitive types.""" + expected_types = { + "int8", + "int16", + "int32", + "int64", + "uint8", + "uint16", + "uint32", + "float32", + "float64", + "str", + "bool", + "int", + "float", + "Geometry", + "BBox", + } + + assert set(PRIMITIVE_TYPES.keys()) == expected_types + + def test_bbox_mapping(self) -> None: + """BBox should map to bbox.""" + bbox = PRIMITIVE_TYPES["BBox"] + + assert bbox.markdown == "bbox" + + +class TestGetTypeMapping: + """Tests for get_type_mapping function.""" + + def test_returns_mapping_for_known_type(self) -> None: + """Should return TypeMapping for known primitive type.""" + result = get_type_mapping("int32") + + assert result is not None + assert result.markdown == "int32" + + def test_returns_none_for_unknown_type(self) -> None: + """Should return None for unknown type names.""" + result = get_type_mapping("unknown_type") + + assert result is None + + def test_returns_mapping_for_builtin_int(self) -> None: + """Should map Python int to int64.""" + result = get_type_mapping("int") + + assert result is not None + assert result.markdown == "int64" + + def test_returns_mapping_for_builtin_float(self) -> None: + """Should map Python float to float64.""" + result = get_type_mapping("float") + + assert result is not None + assert result.markdown == "float64" + + +class TestResolveTypeNameNewTypeFallback: + """Tests for resolve_type_name with 
unregistered NewTypes.""" + + def test_unregistered_newtype_falls_back_to_source_type(self) -> None: + """Unregistered NewType resolves to source_type name.""" + ti = TypeInfo( + base_type="Sources", + kind=TypeKind.MODEL, + newtype_name="Sources", + source_type=type("SourceItem", (), {}), + ) + result = resolve_type_name(ti, "markdown") + + assert result == "SourceItem" + + def test_registered_newtype_unaffected(self) -> None: + """Registered NewType (int32) still resolves through the registry.""" + ti = TypeInfo( + base_type="int32", + kind=TypeKind.PRIMITIVE, + newtype_name="int32", + source_type=int, + ) + result = resolve_type_name(ti, "markdown") + + assert result == "int32" + + +class TestResolveTypeName: + """Tests for resolve_type_name with list/optional flags.""" + + def _make_type_info(self, **kwargs: object) -> TypeInfo: + defaults = {"base_type": "str", "kind": TypeKind.PRIMITIVE} + defaults.update(kwargs) + return TypeInfo(**defaults) # type: ignore[arg-type] + + def test_ignores_list_depth(self) -> None: + """resolve_type_name returns the base type regardless of list_depth.""" + ti = self._make_type_info(list_depth=1) + assert resolve_type_name(ti, "markdown") == "string" + + def test_ignores_is_optional(self) -> None: + """resolve_type_name returns the base type regardless of is_optional.""" + ti = self._make_type_info(is_optional=True) + assert resolve_type_name(ti, "markdown") == "string" diff --git a/packages/overture-schema-codegen/tests/test_union_extraction.py b/packages/overture-schema-codegen/tests/test_union_extraction.py new file mode 100644 index 000000000..a8b685c48 --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_union_extraction.py @@ -0,0 +1,91 @@ +"""Tests for union extraction.""" + +import pytest +from codegen_test_support import ( + RailSegment, + RoadSegment, + SegmentBase, + TestSegment, + WaterSegment, +) +from overture.schema.codegen.extraction.specs import FieldSpec, UnionSpec +from 
overture.schema.codegen.extraction.union_extraction import extract_union + + +class TestExtractUnion: + """Tests for extract_union function.""" + + @pytest.fixture + def segment_spec(self) -> UnionSpec: + return extract_union("TestSegment", TestSegment) + + def test_extracts_name_and_description(self, segment_spec: UnionSpec) -> None: + """UnionSpec captures the union name and docstring.""" + assert segment_spec.name == "TestSegment" + assert segment_spec.description == "Test segment union" + + def test_finds_common_base(self, segment_spec: UnionSpec) -> None: + """Identifies SegmentBase as the common base class.""" + assert segment_spec.common_base is SegmentBase + + def test_shared_fields_first(self, segment_spec: UnionSpec) -> None: + """Shared fields from common base come first with variant_sources=None.""" + shared = [ + af for af in segment_spec.annotated_fields if af.variant_sources is None + ] + shared_names = [af.field_spec.name for af in shared] + assert "geometry" in shared_names + assert "subtype" in shared_names + # Shared fields are at the start + first_variant_idx = next( + ( + i + for i, af in enumerate(segment_spec.annotated_fields) + if af.variant_sources is not None + ), + len(segment_spec.annotated_fields), + ) + for af in segment_spec.annotated_fields[:first_variant_idx]: + assert af.variant_sources is None + + def test_variant_specific_fields_have_sources( + self, segment_spec: UnionSpec + ) -> None: + """Variant-only fields carry their source class names.""" + speed = next( + af + for af in segment_spec.annotated_fields + if af.field_spec.name == "speed_limit" + ) + assert speed.variant_sources == ("RoadSegment",) + gauge = next( + af + for af in segment_spec.annotated_fields + if af.field_spec.name == "rail_gauge" + ) + assert gauge.variant_sources == ("RailSegment",) + + def test_heterogeneous_same_name_produces_separate_rows( + self, segment_spec: UnionSpec + ) -> None: + """class_ in Road (str) vs Rail (int): separate rows, not merged.""" 
+ class_fields = [ + af for af in segment_spec.annotated_fields if af.field_spec.name == "class" + ] + assert len(class_fields) == 2 + sources = {af.variant_sources for af in class_fields} + assert ("RoadSegment",) in sources + assert ("RailSegment",) in sources + + def test_members_lists_all_member_classes(self, segment_spec: UnionSpec) -> None: + """UnionSpec.members contains all union member classes.""" + assert set(segment_spec.members) == {RoadSegment, RailSegment, WaterSegment} + + def test_source_annotation_preserved(self, segment_spec: UnionSpec) -> None: + """source_annotation holds the original Annotated[Union[...]].""" + assert segment_spec.source_annotation is TestSegment + + def test_fields_property_returns_plain_list(self, segment_spec: UnionSpec) -> None: + """spec.fields returns list[FieldSpec] without provenance.""" + for f in segment_spec.fields: + assert isinstance(f, FieldSpec) diff --git a/packages/overture-schema-core/README.md b/packages/overture-schema-core/README.md index f4938f871..58f5d3ee9 100644 --- a/packages/overture-schema-core/README.md +++ b/packages/overture-schema-core/README.md @@ -1,6 +1,6 @@ # Overture Schema Core -Core Pydantic models and base classes for Overture Maps schemas, providing foundational types, geometry handling, and a comprehensive scoping system for conditional rule application. +Shared models and conventions for building Overture Maps feature types. Defines the base feature class all themes extend, a scoping framework for expressing conditional values (this speed limit applies *here*, *then*, to *these vehicles*), and common structures for names, sources, and cartographic hints. 
## Installation @@ -8,159 +8,90 @@ Core Pydantic models and base classes for Overture Maps schemas, providing found pip install overture-schema-core ``` -## Key Components +## OvertureFeature -- **Base Classes**: Extensible base models for Overture Maps features -- **Geometry Types**: WKB geometry type hints and utilities -- **Common Structures**: Shared models used across all themes -- **Primitive Data Types**: Validated primitive types with multi-target serialization support -- **Scoping System**: Flexible conditional rule application framework - -## Enhanced Primitive Types - -The enhanced primitive types system provides validated primitive types with automatic -constraint checking and multi-target serialization support. This enables consistent type -definitions that can generate appropriate representations for different targets (Spark, -Parquet, etc.). - -### Available Types - -Built-in Python primitive types (`str`, `int`, `float`, `bool`, `list`, etc.) are -automatically mapped. - -We also provide the following additional types: - -#### Integer Types - -- **`uint8`**: 8-bit unsigned integer (0-255) -- **`uint16`**: 16-bit unsigned integer (0-65535) -- **`uint32`**: 32-bit unsigned integer (0-4294967295) -- **`int8`**: 8-bit signed integer (-128 to 127) -- **`int32`**: 32-bit signed integer (-2³¹ to 2³¹-1) -- **`int64`**: 64-bit signed integer (-2⁶³ to 2⁶³-1) - -#### Floating Point Types - -- **`float32`**: 32-bit floating point number -- **`float64`**: 64-bit floating point number - -### Basic Usage +Every Overture feature type inherits from `OvertureFeature`, which extends `system.Feature` with the fields present on all Overture data: `id`, `theme`, `type`, `version`, `geometry`, and `sources`. 
```python -from pydantic import BaseModel, Field -from overture.schema.core.primitives import ( - uint8, uint32, float32 -) - -class Building(BaseModel): - """Building feature with specific primitive data types.""" - - height: float32 | None = Field( - None, - description="Height of building in meters" - ) - - num_floors: uint8 | None = Field( - None, - description="Number of floors in building" - ) - - area: uint32 | None = Field( - None, - description="Floor area in square meters" - ) +from typing import Literal +from overture.schema.core import OvertureFeature + +class Park(OvertureFeature[Literal["places"], Literal["park"]]): + area_hectares: float | None = None ``` -### Automatic Validation +## Scoping -Enhanced primitive types automatically validate constraints: +Many Overture values only apply under specific conditions -- a speed limit that holds during rush hour, along a sub-segment, in the forward direction. The `@scoped` decorator adds conditional fields to any Pydantic model: ```python -# Valid values -building = Building(height=45.5, num_floors=12, area=2500) +from pydantic import BaseModel +from overture.schema.core.scoping import Scope, scoped +from overture.schema.system.primitive import float32 -# Invalid values raise ValidationError -Building(num_floors=256) # Error: 256 > UInt8 maximum (255) -Building(num_floors=-1) # Error: -1 < UInt8 minimum (0) +@scoped(Scope.GEOMETRIC_RANGE, Scope.TEMPORAL) +class SpeedLimit(BaseModel): + max_speed: float32 ``` -### Type Safety +This produces a model with `between` (geometric range) and `when.during` (temporal) fields, both optional. 
The full set of scopes and the fields they inject: -The enhanced primitive types provide strong type safety guarantees at both static and -runtime levels: +| Scope | Field | +|----------------------------|-------------------| +| `Scope.GEOMETRIC_POSITION` | `at` | +| `Scope.GEOMETRIC_RANGE` | `between` | +| `Scope.HEADING` | `when.heading` | +| `Scope.TEMPORAL` | `when.during` | +| `Scope.TRAVEL_MODE` | `when.mode` | +| `Scope.PURPOSE_OF_USE` | `when.using` | +| `Scope.RECOGNIZED_STATUS` | `when.recognized` | +| `Scope.SIDE` | `side` | +| `Scope.VEHICLE` | `when.vehicle` | -**Static Type Checking**: mypy can distinguish between different primitive types, -*preventing common errors: +Scopes are optional by default. Make them mandatory via `required`: ```python -from overture.schema.core.primitives import uint8, uint32 +@scoped(Scope.TEMPORAL, required=(Scope.GEOMETRIC_POSITION, Scope.HEADING)) +class TrafficSignal(BaseModel): + signal_type: str +``` -def process_floor_count(floors: uint8) -> str: - return f"Building has {floors} floors" +## Names -def process_area(area: uint32) -> str: - return f"Area: {area} sq meters" +Multilingual naming with support for common names, name rules (official, alternate, short variants), and scoping by geometric range, side, or political perspective. Mix `Named` into a feature type to give it a `names` field: -# Type checker prevents mixing incompatible types -floors: uint8 = 12 -area: uint32 = 2500 +```python +from typing import Literal +from overture.schema.core import OvertureFeature +from overture.schema.core.names import Named -process_floor_count(area) # mypy error: Expected UInt8, got UInt32 -process_area(floors) # mypy error: Expected UInt32, got UInt8 +class Lake(OvertureFeature[Literal["base"], Literal["water"]], Named): + pass # inherits names: Names | None from Named ``` +Name rules support geometric range and side scoping for cases like a street whose name changes partway along or differs on each side. 
`NameRule` variants: `common`, `official`, `alternate`, `short`. -### Examples +## Sources -#### Temporal Speed Limit - -```yaml -speed_limits: - - between: [0, 1] - max_speed: {value: 30, unit: km/h} - when: - during: "Mo-Fr 07:00-09:00,17:00-19:00" # Rush hours only -``` +Source attribution tracking. Each `SourceItem` identifies which dataset a feature or property came from, with optional license, record ID, update time, and confidence score. Source items support geometric range scoping for per-segment attribution. -#### Vehicle-Specific Access Restriction - -```yaml -access_restrictions: - - between: [0.2, 0.8] - access_type: denied - when: - vehicle: - - dimension: weight - comparison: greater_than - value: 7.5 - unit: t -``` - -#### Multi-Dimensional Scoping - -```yaml -access_restrictions: - - between: [0, 1] - access_type: denied - when: - mode: [bus] - during: "Mo-Fr 15:00-18:00" - heading: forward - using: [to_deliver] +```python +from overture.schema.core.sources import SourceItem + +sources = [ + SourceItem(property="", dataset="OpenStreetMap"), + SourceItem(property="/geometry", dataset="Microsoft ML Buildings"), + # first 30% of the segment's geometry came from a different source + SourceItem(property="/geometry", dataset="County GIS", between=[0, 0.3]), +] ``` -### Design Principles - -1. **Composability**: Mix-in design allows combining only needed scoping dimensions -2. **Reusability**: Base scope classes work across all rule types and themes -3. **Extensibility**: Easy to add new scoping dimensions or modify existing ones -4. **Type Safety**: Full Pydantic validation for all scoping conditions -5. **Linear Reference Integration**: Seamless integration with geometric positioning +## Cartography -### Rule Complexity Patterns +Rendering hints for map-making: `prominence` (1--100 significance scale), `min_zoom`/`max_zoom` (tile zoom bounds), and `sort_key` (draw order). Mix `CartographicallyHinted` into a model to add a `cartography` field. 
-- **Simple Rules** (flags, dimensions): Geometric scoping only -- **Complex Rules** (speed limits, access): Geometric + conditional scoping -- **Transition Rules**: Full scoping including directional constraints +## Also Included -This scoping system provides the foundation for precise, flexible rule specification across all Overture Maps transportation features. +- **Types** -- domain-specific aliases built on system primitives: `ConfidenceScore` (0.0--1.0), `Level` (z-order), `FeatureVersion`. +- **Units** -- measurement enumerations: `SpeedUnit`, `LengthUnit`, `WeightUnit`. +- **Discovery** -- entry-point-based model registry. Theme packages register models via `overture.models` entry points; `discover_models()` resolves them at runtime. diff --git a/packages/overture-schema-core/pyproject.toml b/packages/overture-schema-core/pyproject.toml index f7639dd34..010441cb1 100644 --- a/packages/overture-schema-core/pyproject.toml +++ b/packages/overture-schema-core/pyproject.toml @@ -4,14 +4,26 @@ build-backend = "hatchling.build" [project] name = "overture-schema-core" +maintainers = [ + {name = "Overture Maps Schema Working Group"}, +] dynamic = ["version"] description = "Core schemas for Overture Maps" license = "MIT" dependencies = [ - "pydantic>=2.0", + "overture-schema-system", + "pydantic>=2.12.0", "shapely>=2.1.1", ] +[project.urls] +Homepage = "https://overturemaps.org" +Source = "https://github.com/OvertureMaps/schema" +Issues = "https://github.com/OvertureMaps/schema/issues" + +[tool.uv.sources] +overture-schema-system = { workspace = true } + [tool.hatch.version] path = "src/overture/schema/core/__about__.py" @@ -21,7 +33,6 @@ packages = ["src/overture"] [dependency-groups] dev = [ "jsonpath-ng>=1.7.0", - "pytest-subtests>=0.14.2", "types-pyyaml>=6.0.12.20250516", "types-shapely>=2.1.0.20250710", ] diff --git a/packages/overture-schema-core/src/overture/schema/core/__about__.py b/packages/overture-schema-core/src/overture/schema/core/__about__.py index 
3dc1f76bc..5e3bb11ca 100644 --- a/packages/overture-schema-core/src/overture/schema/core/__about__.py +++ b/packages/overture-schema-core/src/overture/schema/core/__about__.py @@ -1 +1 @@ -__version__ = "0.1.0" +__version__ = "0.1.1.dev1" diff --git a/packages/overture-schema-core/src/overture/schema/core/cartography.py b/packages/overture-schema-core/src/overture/schema/core/cartography.py index bdf009eb6..71cd60dbf 100644 --- a/packages/overture-schema-core/src/overture/schema/core/cartography.py +++ b/packages/overture-schema-core/src/overture/schema/core/cartography.py @@ -8,12 +8,12 @@ from pydantic import BaseModel, Field from overture.schema.system.model_constraint import no_extra_fields -from overture.schema.system.primitive import uint8 +from overture.schema.system.primitive import int32 Prominence = NewType( "Prominence", Annotated[ - uint8, + int32, Field( ge=1, le=100, @@ -34,7 +34,7 @@ MinZoom = NewType( "MinZoom", Annotated[ - uint8, + int32, Field( ge=0, le=23, @@ -55,7 +55,7 @@ MaxZoom = NewType( "MaxZoom", Annotated[ - uint8, + int32, Field( ge=0, le=23, @@ -76,7 +76,7 @@ SortKey = NewType( "SortKey", Annotated[ - uint8, + int32, Field( description=textwrap.dedent(""" Integer indicating the recommended order in which to draw features. @@ -106,4 +106,9 @@ class CartographicallyHinted(BaseModel): Properties for adding cartographic hints to a model. 
""" - cartography: Annotated[CartographicHints | None, Field(title="cartography")] = None + cartography: Annotated[ + CartographicHints | None, + Field( + description="Cartographic hints useful when including the feature in maps" + ), + ] = None diff --git a/packages/overture-schema-core/src/overture/schema/core/discovery.py b/packages/overture-schema-core/src/overture/schema/core/discovery.py index 15da3abc4..b9290d29a 100644 --- a/packages/overture-schema-core/src/overture/schema/core/discovery.py +++ b/packages/overture-schema-core/src/overture/schema/core/discovery.py @@ -21,15 +21,15 @@ class ModelKey: The theme name (e.g., "buildings", "places"), or None for non-themed models type : str The feature type (e.g., "building", "place") - class_name : str - The fully qualified class name from the entry point value + entry_point : str + The entry point value in "module:Class" format """ namespace: str theme: str | None type: str - class_name: str + entry_point: str def discover_models( @@ -87,7 +87,7 @@ def discover_models( namespace=ns, theme=theme, type=feature_type, - class_name=entry_point.value, + entry_point=entry_point.value, ) models[key] = model_class except Exception as e: diff --git a/packages/overture-schema-core/src/overture/schema/core/models.py b/packages/overture-schema-core/src/overture/schema/core/models.py index 6460d26ba..dbb77a281 100644 --- a/packages/overture-schema-core/src/overture/schema/core/models.py +++ b/packages/overture-schema-core/src/overture/schema/core/models.py @@ -12,7 +12,6 @@ from pydantic_core import core_schema from typing_extensions import Self -from overture.schema.core.sources import Sources from overture.schema.system.feature import Feature from overture.schema.system.field_constraint import UniqueItemsConstraint from overture.schema.system.model_constraint import no_extra_fields @@ -25,6 +24,7 @@ ) from .enums import PerspectiveMode +from .sources import Sources from .types import ( FeatureVersion, Level, @@ -65,9 +65,9 
@@ def __validate_ext_fields__(self) -> Self: maybe_plural = "s" if len(invalid_extra_fields) > 1 else "" raise ValueError( f"invalid extra field name{maybe_plural}: {', '.join(invalid_extra_fields)} " - "(extra fields are temporarily allowed, but only if their names start with 'ext_', " - "but all extra field name support in {self.__class__.name} is on a deprecation path " - "and will be removed)" + f"(extra fields are temporarily allowed, but only if their names start with 'ext_', " + f"but all extra field name support in {self.__class__.__name__} is on a deprecation path " + f"and will be removed)" ) return self diff --git a/packages/overture-schema-core/src/overture/schema/core/names.py b/packages/overture-schema-core/src/overture/schema/core/names.py index e968b24a7..631ec2d8d 100644 --- a/packages/overture-schema-core/src/overture/schema/core/names.py +++ b/packages/overture-schema-core/src/overture/schema/core/names.py @@ -110,7 +110,7 @@ Field(json_schema_extra={"additionalProperties": False}), ], ) -"""A mapping from language to the most commonly used or recognized name in that language.""" +CommonNames.__doc__ = """A mapping from language to the most commonly used or recognized name in that language.""" class NameVariant(str, DocumentedEnum): @@ -224,4 +224,7 @@ class Names(BaseModel): class Named(BaseModel): """Properties defining the names of a model.""" - names: Names | None = None + names: Annotated[ + Names | None, + Field(description="All known names by which the feature is called"), + ] = None diff --git a/packages/overture-schema-core/src/overture/schema/core/scoping/opening_hours.py b/packages/overture-schema-core/src/overture/schema/core/scoping/opening_hours.py index a8efc8c16..8f8acbe6a 100644 --- a/packages/overture-schema-core/src/overture/schema/core/scoping/opening_hours.py +++ b/packages/overture-schema-core/src/overture/schema/core/scoping/opening_hours.py @@ -15,7 +15,7 @@ ), ], ) -""" +OpeningHours.__doc__ = """ Time span or time spans 
during which something is open or active, specified in the OpenStreetMap opening hours specification: https://wiki.openstreetmap.org/wiki/Key:opening_hours/specification. """ diff --git a/packages/overture-schema-core/src/overture/schema/core/scoping/vehicle.py b/packages/overture-schema-core/src/overture/schema/core/scoping/vehicle.py index d21fe24d6..cd8f3169f 100644 --- a/packages/overture-schema-core/src/overture/schema/core/scoping/vehicle.py +++ b/packages/overture-schema-core/src/overture/schema/core/scoping/vehicle.py @@ -9,7 +9,7 @@ from overture.schema.core.unit import LengthUnit, WeightUnit from overture.schema.system.model_constraint import no_extra_fields -from overture.schema.system.primitive import float32, uint8 +from overture.schema.system.primitive import float64, uint8 class VehicleDimension(str, Enum): @@ -57,7 +57,7 @@ class VehicleHeightSelector(BaseModel): dimension: Literal[VehicleDimension.HEIGHT] comparison: VehicleRelation value: Annotated[ - float32, + float64, Field( ge=0, description="Vehicle height selection threshold in the given `unit`" ), @@ -74,7 +74,7 @@ class VehicleLengthSelector(BaseModel): dimension: Literal[VehicleDimension.LENGTH] comparison: VehicleRelation value: Annotated[ - float32, + float64, Field( ge=0, description="Vehicle length selection threshold in the given `unit`" ), @@ -91,7 +91,7 @@ class VehicleWeightSelector(BaseModel): dimension: Literal[VehicleDimension.WEIGHT] comparison: VehicleRelation value: Annotated[ - float32, + float64, Field( ge=0, description="Vehicle weight selection threshold in the given `unit`" ), @@ -108,7 +108,7 @@ class VehicleWidthSelector(BaseModel): dimension: Literal[VehicleDimension.WIDTH] comparison: VehicleRelation value: Annotated[ - float32, + float64, Field( ge=0, description="Vehicle width selection threshold in the given `unit`" ), diff --git a/packages/overture-schema-core/src/overture/schema/core/types.py b/packages/overture-schema-core/src/overture/schema/core/types.py 
index 8cf9e8cc8..8cbc8673f 100644 --- a/packages/overture-schema-core/src/overture/schema/core/types.py +++ b/packages/overture-schema-core/src/overture/schema/core/types.py @@ -4,12 +4,12 @@ Field, ) -from overture.schema.system.primitive import float32, int16, int32 +from overture.schema.system.primitive import float64, int32 ConfidenceScore = NewType( "ConfidenceScore", Annotated[ - float32, + float64, Field(description="Confidence score between 0.0 and 1.0", ge=0.0, le=1.0), ], ) @@ -18,7 +18,7 @@ Level = NewType( "Level", Annotated[ - int16, + int32, Field(description="Z-order of the feature where 0 is visual level"), ], ) diff --git a/packages/overture-schema-core/tests/scoping/test_scoped.py b/packages/overture-schema-core/tests/scoping/test_scoped.py index 4bacbfdfc..313579dba 100644 --- a/packages/overture-schema-core/tests/scoping/test_scoped.py +++ b/packages/overture-schema-core/tests/scoping/test_scoped.py @@ -181,7 +181,7 @@ class SingleScoped(BaseModel): when_field_info = SingleScoped.model_fields["when"] assert when_field_info.is_required() == required - when_class = SingleScoped.When + when_class = SingleScoped.When # type: ignore[attr-defined] assert issubclass(when_class, BaseModel) assert len(when_class.model_fields) == 1 @@ -200,7 +200,7 @@ class MultiScopedWhenAllFieldsOptional(BaseModel): when_field_info = MultiScopedWhenAllFieldsOptional.model_fields["when"] assert not when_field_info.is_required() - when_class = MultiScopedWhenAllFieldsOptional.When + when_class = MultiScopedWhenAllFieldsOptional.When # type: ignore[attr-defined] assert issubclass(when_class, BaseModel) assert len(when_class.model_fields) == 2 @@ -229,7 +229,7 @@ class MultiScopedWhenSomeFieldsRequired(BaseModel): when_field_info = MultiScopedWhenSomeFieldsRequired.model_fields["when"] assert when_field_info.is_required() - when_class = MultiScopedWhenSomeFieldsRequired.When + when_class = MultiScopedWhenSomeFieldsRequired.When # type: ignore[attr-defined] assert 
issubclass(when_class, BaseModel) assert len(when_class.model_fields) == 3 @@ -270,7 +270,7 @@ class Complex(BaseModel): when_field_info = Complex.model_fields["when"] assert when_field_info.is_required() - when_class = Complex.When + when_class = Complex.When # type: ignore[attr-defined] assert issubclass(when_class, BaseModel) assert len(when_class.model_fields) == 3 diff --git a/packages/overture-schema-core/tests/test_models.py b/packages/overture-schema-core/tests/test_models.py index c674ab5e1..c36510bea 100644 --- a/packages/overture-schema-core/tests/test_models.py +++ b/packages/overture-schema-core/tests/test_models.py @@ -58,7 +58,7 @@ def test_feature_json_schema() -> None: "property": {"type": "string"}, "dataset": {"type": "string"}, "license": { - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "type": "string", }, "record_id": {"type": "string"}, diff --git a/packages/overture-schema-divisions-theme/pyproject.toml b/packages/overture-schema-divisions-theme/pyproject.toml index e56b1df6a..2fd1dac53 100644 --- a/packages/overture-schema-divisions-theme/pyproject.toml +++ b/packages/overture-schema-divisions-theme/pyproject.toml @@ -1,8 +1,11 @@ [project] +maintainers = [ + {name = "Overture Maps Schema Working Group"}, +] dependencies = [ "overture-schema-core", "overture-schema-system", - "pydantic>=2.0", + "pydantic>=2.12.0", ] description = "Overture Maps divisions theme shared structures, division, division area and division boundary types" dynamic = ["version"] @@ -11,6 +14,11 @@ name = "overture-schema-divisions-theme" readme = "README.md" requires-python = ">=3.10" +[project.urls] +Homepage = "https://overturemaps.org" +Source = "https://github.com/OvertureMaps/schema" +Issues = "https://github.com/OvertureMaps/schema/issues" + [tool.uv.sources] overture-schema-core = { workspace = true } overture-schema-system = { workspace = true } @@ -29,3 +37,113 @@ packages = ["src/overture"] "overture:divisions:division" = 
"overture.schema.divisions:Division" "overture:divisions:division_area" = "overture.schema.divisions:DivisionArea" "overture:divisions:division_boundary" = "overture.schema.divisions:DivisionBoundary" + +[[examples.Division]] +id = "350e85f6-68ba-4114-9906-c2844815988b" +geometry = "POINT (-175.2551522 -21.1353686)" +country = "TO" +version = 1 +subtype = "locality" +class = "village" +region = "TO-04" +hierarchies = [ + [ + {division_id = "fef8748b-0c91-46ad-9f2d-976d8d2de3e9", subtype = "country", name = "Tonga"}, + {division_id = "4d67561a-2292-41bd-8996-7853d276a42c", subtype = "region", name = "Tongatapu"}, + {division_id = "8730f0cc-d436-4f11-a7d3-49085813ef44", subtype = "county", name = "Vahe Kolomotu'a"}, + {division_id = "350e85f6-68ba-4114-9906-c2844815988b", subtype = "locality", name = "Sia'atoutai"}, + ], +] +parent_division_id = "8730f0cc-d436-4f11-a7d3-49085813ef44" +population = 534 +theme = "divisions" +type = "division" + +[examples.Division.bbox] +xmin = -175.25515747070312 +xmax = -175.255126953125 +ymin = -21.1353702545166 +ymax = -21.13536834716797 + +[[examples.Division.sources]] +property = "" +dataset = "OpenStreetMap" +record_id = "n3173231082@4" +update_time = "2014-12-18T09:17:03Z" + +[examples.Division.cartography] +prominence = 29 + +[examples.Division.names] +primary = "Sia'atoutai" + +[[examples.Division.names.rules]] +variant = "alternate" +value = "Nafualu" + +[examples.Division.local_type] +en = "village" + +[[examples.DivisionArea]] +id = "eb9b112f-ec3c-47f7-b519-6f9f2e6fc2bd" +geometry = "MULTIPOLYGON (((-174.9553949 -21.4730179, -174.9514163 -21.4719978, -174.9520108 -21.4681253, -174.9566122 -21.4687535, -174.9553949 -21.4730179)), ((-174.9634398 -21.3476807, -174.9753507 -21.3833656, -174.9702168 -21.4037277, -174.950488 -21.4269887, -174.9082983 -21.4577763, -174.9004303 -21.4398142, -174.9048159 -21.3698688, -174.9165467 -21.3035402, -174.9126977 -21.2903268, -174.9199765 -21.2834922, -174.9634398 -21.3476807)))" +country 
= "TO" +version = 2 +subtype = "region" +class = "land" +is_land = true +is_territorial = false +region = "TO-01" +division_id = "21597af0-b564-463c-a356-42c29e712b7d" +theme = "divisions" +type = "division_area" + +[examples.DivisionArea.bbox] +xmin = -174.97535705566406 +xmax = -174.90040588378906 +ymin = -21.473018646240234 +ymax = -21.283489227294922 + +[[examples.DivisionArea.sources]] +property = "" +dataset = "OpenStreetMap" +record_id = "r7247527@3" +update_time = "2020-12-30T18:41:56Z" + +[examples.DivisionArea.names] +primary = "ʻEua" + +[[examples.DivisionBoundary]] +id = "2bdf68e4-860d-3d8c-a472-ccf439a5302a" +geometry = "LINESTRING (-147.064823 -15.4231537, -147.0519131 -15.2885069, -147.048482 -15.1511701)" +country = "PF" +version = 1 +subtype = "county" +class = "maritime" +is_land = false +is_territorial = true +division_ids = [ + "ae266459-63a4-4508-8295-0101e27d039b", + "d4a6873d-885a-4f2a-bc0f-37e9d9e874e4" +] +is_disputed = false +theme = "divisions" +type = "division_boundary" + +[examples.DivisionBoundary.bbox] +xmin = -147.06483459472656 +xmax = -147.04847717285156 +ymin = -15.4231538772583 +ymax = -15.151169776916504 + +[[examples.DivisionBoundary.sources]] +property = "" +dataset = "OpenStreetMap" +record_id = "r6063055@9" +update_time = "2023-07-20T00:28:40Z" + +[[examples.DivisionBoundary.sources]] +property = "" +dataset = "OpenStreetMap" +record_id = "r6063063@12" +update_time = "2023-07-20T00:28:40Z" diff --git a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/__about__.py b/packages/overture-schema-divisions-theme/src/overture/schema/divisions/__about__.py index 3dc1f76bc..5e3bb11ca 100644 --- a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/__about__.py +++ b/packages/overture-schema-divisions-theme/src/overture/schema/divisions/__about__.py @@ -1 +1 @@ -__version__ = "0.1.0" +__version__ = "0.1.1.dev1" diff --git 
a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/__init__.py b/packages/overture-schema-divisions-theme/src/overture/schema/divisions/__init__.py index ea08ff2e5..00e8cabc6 100644 --- a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/__init__.py +++ b/packages/overture-schema-divisions-theme/src/overture/schema/divisions/__init__.py @@ -6,8 +6,23 @@ __path__ = __import__("pkgutil").extend_path(__path__, __name__) -from .division import Division -from .division_area import DivisionArea -from .division_boundary import DivisionBoundary +from ._common import AdminLevel, DivisionSubtype +from .division import CapitalOfDivisionItem, Division, DivisionClass, Norms +from .division_area import AreaClass, DivisionArea +from .division_boundary import BoundaryClass, DivisionBoundary -__all__ = ["Division", "DivisionArea", "DivisionBoundary"] +# Exclude from `__all__`: internal implementation details, and types that are effectively annotated +# primitives, such as `AdminLevel`, where a person working with one of the feature types likely +# would not need to import that type because they'll just set the field to a Python primitive value +# directly. (e.g., `my_division.admin_level = 4`). 
+__all__ = [ + "AreaClass", + "BoundaryClass", + "CapitalOfDivisionItem", + "Division", + "DivisionArea", + "DivisionClass", + "DivisionSubtype", + "DivisionBoundary", + "Norms", +] diff --git a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/_common.py b/packages/overture-schema-divisions-theme/src/overture/schema/divisions/_common.py new file mode 100644 index 000000000..a2aaa9da0 --- /dev/null +++ b/packages/overture-schema-divisions-theme/src/overture/schema/divisions/_common.py @@ -0,0 +1,115 @@ +import textwrap +from typing import Annotated, NewType + +from pydantic import Field + +from overture.schema.system.doc import DocumentedEnum +from overture.schema.system.model_constraint import FieldEqCondition +from overture.schema.system.primitive import int32 + +AdminLevel = NewType( + "AdminLevel", + Annotated[ + int32, + Field( + description=textwrap.dedent(""" + Integer representing the division's position in its country's administrative + hierarchy, where lower numbers correspond to higher level administrative units. + """).strip(), + ge=0, + le=16, + ), + ], +) + + +class DivisionSubtype(str, DocumentedEnum): + """ + Category of the division from a finite, hierarchical, ordered list of categories (e.g., country, + region, locality, etc.) similar to a Who's on First placetype. + """ + + COUNTRY = ( + "country", + "Largest unit of independent sovereignty, e.g., the United States, France.", + ) + + DEPENDENCY = ( + "dependency", + textwrap.dedent(""" + A place that is not exactly a sub-region of a country but is dependent on a parent + country for defence, passport control, etc., e.g., Puerto Rico. + """).strip(), + ) + + MACROREGION = ( + "macroregion", + textwrap.dedent(""" + A bundle of regions, e.g., England, Scotland, Île-de-France. These exist mainly in + Europe. + """).strip(), + ) + + REGION = ( + "region", + textwrap.dedent(""" + A state, province, region, etc. 
Largest sub-country administrative unit in most + countries, except those that have dependencies or macro-regions. + """).strip(), + ) + + MACROCOUNTY = ( + "macrocounty", + "A bundle of counties, e.g. Inverness. These exist mainly in Europe.", + ) + + COUNTY = ( + "county", + textwrap.dedent(""" + Largest sub-region administrative unit in most countries, unless they have + macrocounties. + """).strip(), + ) + + LOCALADMIN = ( + "localadmin", + textwrap.dedent(""" + An administrative unit existing in some parts of the world that contains localities + or populated places, e.g. département de Paris. Often the contained places do not + have independent authority. Often, but not exclusively, found in Europe. + """).strip(), + ) + + LOCALITY = ( + "locality", + "A populated place that may or may not have its own administrative authority.", + ) + + BOROUGH = ( + "borough", + "A local government unit subordinate to a locality.", + ) + + MACROHOOD = ( + "macrohood", + textwrap.dedent(""" + A super-neighborhood that contains smaller divisions of type neighborhood, e.g. + BoCaCa (Boerum Hill, Cobble Hill, and Carroll Gardens). + """).strip(), + ) + + NEIGHBORHOOD = ( + "neighborhood", + textwrap.dedent(""" + A neighborhood. Most neighborhoods will be just this, unless there's enough granular + detail to justify introducing macrohood or microhood divisions. + """).strip(), + ) + + MICROHOOD = ( + "microhood", + "A mini-neighborhood that is contained within a division of type neighborhood.", + ) + + +IS_COUNTRY = FieldEqCondition("subtype", DivisionSubtype.COUNTRY) diff --git a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division.py b/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division.py new file mode 100644 index 000000000..865caca4c --- /dev/null +++ b/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division.py @@ -0,0 +1,418 @@ +""" +The `Division` feature type model and supporting types. 
+""" + +from __future__ import annotations + +import textwrap +from typing import Annotated, Literal, NewType + +from pydantic import BaseModel, ConfigDict, Field + +from overture.schema.core import ( + OvertureFeature, +) +from overture.schema.core.cartography import CartographicallyHinted +from overture.schema.core.models import ( + Perspectives, +) +from overture.schema.core.names import CommonNames, Named, Names +from overture.schema.core.scoping.side import Side +from overture.schema.system.doc import DocumentedEnum +from overture.schema.system.field_constraint import ( + UniqueItemsConstraint, +) +from overture.schema.system.model_constraint import ( + FieldEqCondition, + forbid_if, + no_extra_fields, + require_if, +) +from overture.schema.system.primitive import ( + Geometry, + GeometryType, + GeometryTypeConstraint, + int32, +) +from overture.schema.system.ref import Id, Reference, Relationship +from overture.schema.system.string import ( + CountryCodeAlpha2, + RegionCode, + StrippedString, + WikidataId, +) + +from ._common import ( + IS_COUNTRY, + AdminLevel, + DivisionSubtype, +) + + +@no_extra_fields +class Norms(BaseModel): + """Local norms and standards.""" + + # Optional + + driving_side: Annotated[ + Side | None, + Field( + description="Side of the road on which vehicles drive in the division.", + ), + ] = None + + +class DivisionClass(str, DocumentedEnum): + """ + Further classification of a division that is more specific than its `subtype`. + + A division's `class` adds detail to the broad classification found in `DivisionSubtype`. + """ + + MEGACITY = ( + "megacity", + textwrap.dedent(""" + A very large city or metropolitan area, typically having a population of 10 million or + more. Example: Tokyo, Japan. + """).strip(), + ) + + CITY = ( + "city", + "A large, permanent human settlement. 
Example: Guadalajara, Mexico.", + ) + + TOWN = ( + "town", + textwrap.dedent(""" + A medium-sized permanent human settlement that is smaller than a city, but larger than a + village. Example: Walldürn, Germany. + """).strip(), + ) + + VILLAGE = ( + "village", + textwrap.dedent(""" + A smallish permanent human settlement that is smaller than a town, but larger than a + hamlet. Example: Wadi El Karm, Lebanon. + """).strip(), + ) + + HAMLET = ( + "hamlet", + "A very small, isolated human settlement in a rural area. Example: Tjarnabyggð, Iceland.", + ) + + +@no_extra_fields +class CapitalOfDivisionItem(BaseModel): + """A division of which the owning division is the capital, together with its subtype.""" + + model_config = ConfigDict(frozen=True) + + # Required + + division_id: Annotated[ + Id, + Field(description="ID of the division whose capital is the current division."), + Reference(Relationship.CAPITAL_OF, Division), + ] + subtype: DivisionSubtype + + +@no_extra_fields +class HierarchyItem(BaseModel): + """One division in a hierarchy.""" + + model_config = ConfigDict(frozen=True) + + # Required + + division_id: Annotated[ + Id, + Field( + description=textwrap.dedent(""" + ID of a division that is an ancestor of the current division. + + In the context of division hierarchies, the ancestor divisions of a division include + the division itself, and any other division that is an ancestor of the division's parent. 
+ """).strip() + ), + Reference(Relationship.DESCENDANT_OF, Division), + ] + subtype: DivisionSubtype + name: Annotated[ + StrippedString, Field(min_length=1, description="Primary name of the division") + ] + + +Hierarchy = NewType( + "Hierarchy", + Annotated[ + list[HierarchyItem], + Field( + min_length=1, + description=textwrap.dedent(""" + A hierarchy of divisions, with the first entry being a country; each subsequent + entry, if any, being a division that is a direct child of the previous entry; and + the last entry representing the division that contains the hierarchy. + + For example, a hierarchy for the United States is simply [United States]. A + hierarchy for the U.S. state of New Hampshire would be + [United States, New Hampshire], and a hierarchy for the city of Concord, NH would be + [United States, New Hampshire, Merrimack County, Concord]. + """).strip(), + ), + UniqueItemsConstraint(), + ], +) + + +@forbid_if(["parent_division_id"], IS_COUNTRY) +@require_if(["parent_division_id"], ~IS_COUNTRY) +@require_if(["admin_level"], FieldEqCondition("subtype", DivisionSubtype.COUNTRY)) +@require_if(["admin_level"], FieldEqCondition("subtype", DivisionSubtype.DEPENDENCY)) +@require_if(["admin_level"], FieldEqCondition("subtype", DivisionSubtype.MACROREGION)) +@require_if(["admin_level"], FieldEqCondition("subtype", DivisionSubtype.REGION)) +@require_if(["admin_level"], FieldEqCondition("subtype", DivisionSubtype.MACROCOUNTY)) +@require_if(["admin_level"], FieldEqCondition("subtype", DivisionSubtype.COUNTY)) +class Division( + OvertureFeature[Literal["divisions"], Literal["division"]], + Named, + CartographicallyHinted, +): + """ + Divisions are recognized official or non-official organizations of people as seen from a given + political perspective. + + Examples include countries, provinces, cities, towns, neighborhoods, etc. 
+ """ + + model_config = ConfigDict(title="division") + + # Core + geometry: Annotated[ + Geometry, + GeometryTypeConstraint(GeometryType.POINT), + Field( + description=textwrap.dedent(""" + Approximate location of a position commonly associated with the real-world entity + modeled by the division feature. + """).strip(), + ), + ] + + # Required + + names: Annotated[ + Names, Field(description="All known names by which the division is called") + ] + subtype: Annotated[ + DivisionSubtype, + Field( + description=textwrap.dedent(""" + A broad classification of the division (e.g., country, region, locality, etc.). + """).strip() + ), + ] + country: Annotated[ + CountryCodeAlpha2, + Field( + description=textwrap.dedent(""" + ISO 3166-1 alpha-2 country code of the country or country-like entity, that this + division represents or belongs to. + + If the entity this division represents has a country code, the country property + contains it. If it does not, the country property contains the country code of the + first division encountered by traversing the parent_division_id chain to the root. + + For example: + - The country value for the United States is 'US' + - The country value for New York City is 'US' + - The country value for Puerto Rico, a dependency of the US, is 'PR'. + - The country value for San Juan, Puerto Rico is 'PR'. + + If an entity has an internationally-recognized ISO 3166-1 alpha-2 country code, it + should always be used. In cases where the schema requires the code but no + internationally-recognized code is available, a synthetic code may be used provided + it does not conflict with any internationally-recognized codes. + """).strip(), + ), + ] + hierarchies: Annotated[ + list[Hierarchy], + Field( + min_length=1, + description=textwrap.dedent(""" + Hierarchies in which this division participates. + + Every division participates in at least one hierarchy. Most participate in only one. 
+ Some divisions may participate in more than one hierarchy, for example if they are + claimed by different parent divisions from different political perspectives; or if + there are other real-world reasons why the division or one of its ancestors has + multiple parents. + + The first hierarchy in the list is the default hierarchy, and the second-to-last + entry in the default hierarchy (if there is such an entry) always corresponds to the + `parent_division_id` property. The ordering of hierarchies after the first one is + arbitrary. + """).strip(), + ), + UniqueItemsConstraint(), + ] + parent_division_id: Annotated[ + Id | None, + Field( + description=textwrap.dedent(""" + Division ID of this division's parent division. + + Not allowed for top-level divisions (countries) and required for all other + divisions. + + The default parent division is the parent division as seen from the default + political perspective, if there is one, and is otherwise chosen somewhat + arbitrarily. The hierarchies property can be used to inspect the exhaustive list of + parent divisions. + """).strip() + ), + Reference(Relationship.CHILD_OF, Division), + ] = None + admin_level: AdminLevel | None = None + + # Optional + + class_: Annotated[ + DivisionClass | None, + Field( + alias="class", + description=textwrap.dedent(""" + A more specific classification of the division than is provided by `subtype`. + """).strip(), + ), + ] = None + local_type: Annotated[ + CommonNames | None, + Field( + description=textwrap.dedent(""" + Local name for the subtype property, optionally localized. + + For example, the Canadian province of Quebec has the subtype `"region"`, but in the + local administrative hierarchy it is referred to as a province. Similarly, the + Canadian Yukon territory also has subtype `"region"`, but is locally called a + territory. + + This property is localized using a standard Overture names structure. 
So for + example, in Switzerland the top-level administrative subdivision corresponding to + subtype 'region' is the canton, which may be translated in each of Switzerland's + official languages as, 'canton' in French, 'kanton' in German, 'cantone' in Italian, + and 'chantun' in Romansh. + """).strip(), + ), + ] = None + region: Annotated[ + RegionCode | None, + Field( + description=textwrap.dedent(""" + ISO 3166-2 principal subdivision code of the subdivision-like entity this division + represents or belongs to. + + If the entity this division represents has a principal subdivision code, the region + property contains it. If it does not, the region property contains the principal + subdivision code of the first division encountered by traversing the + `parent_division_id` chain to the root. + + For example: + - The region value for the United States is omitted. + - The region value for the U.S. state of New York is 'US-NY'. + - The region value for New York City is 'US-NY', which it inherits from the state + of New York. + - The region value for Puerto Rico is 'US-PR'. + """).strip(), + ), + ] = None + perspectives: Annotated[ + Perspectives | None, + Field( + description=textwrap.dedent(""" + Political perspectives from which this division is considered to be an accurate + representation. + + If this property is absent, then this division is not known to be disputed from + any political perspective. Consequently, there is only one division feature + representing the entire real world entity. + + If this property is present, it means the division represents one of several + alternative perspectives on the same real-world entity. + + There are two modes of perspective: + + 1. `accepted_by` means the representation of the division is accepted by the + listed entities and would be included on a map drawn from their perspective. + + 2. 
`disputed_by` means the representation of the division is disputed by the + listed entities and would be excluded from a map drawn from their perspective. + + When drawing a map from the perspective of a given country, one would start by + gathering all the undisputed divisions (with no `perspectives` property), and then + adding to that first all divisions explicitly accepted by the country, and second + all divisions not explicitly disputed by the country. + """).strip(), + ), + ] = None + # If we decide to include default language, it will go here. But is it really generally-useful information? + norms: Annotated[ + Norms | None, + Field( + description=textwrap.dedent(""" + Collects information about local norms and rules within the division that are + generally useful for mapping and map-related use cases. + + If the norms property or a desired sub-property of the norms property is missing + on a division, but at least one of its ancestor divisions has the norms property + and the desired sub-property, then the value from the nearest ancestor division + may be assumed. + """).strip(), + ), + ] = None + population: Annotated[ + int32 | None, Field(ge=0, description="Population of the division") + ] = None + capital_division_ids: Annotated[ + list[ + Annotated[ + Id, + Reference(Relationship.CAPITALLED_BY, Division), + ] + ] + | None, + Field( + min_length=1, + description=textwrap.dedent(""" + Division IDs of this division's capital divisions. If present, this property will + refer to the division IDs of the capital cities, county seats, etc. of a division. + """).strip(), + ), + UniqueItemsConstraint(), + ] = None + capital_of_divisions: Annotated[ + list[CapitalOfDivisionItem] | None, + Field( + min_length=1, + description="Division IDs and subtypes of divisions this division is a capital of.", + ), + UniqueItemsConstraint(), + ] = None + wikidata: WikidataId | None = None + + +# Materialize forward references to `Division`. 
+def __materialize_forward_refs(model_class: type[BaseModel]) -> None: + rebuilt: bool | None = model_class.model_rebuild() + assert rebuilt, ( + f"expected `{model_class.__name__}` to be rebuilt to materialize forward references to `{Division.__name__}`, but it wasn't" + ) + + +__materialize_forward_refs(CapitalOfDivisionItem) +__materialize_forward_refs(HierarchyItem) diff --git a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division/__init__.py b/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division/__init__.py deleted file mode 100644 index e111fa7f8..000000000 --- a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .models import Division - -__all__ = ["Division"] diff --git a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division/models.py b/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division/models.py deleted file mode 100644 index a3ad0a5ca..000000000 --- a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division/models.py +++ /dev/null @@ -1,203 +0,0 @@ -"""Division models for Overture Maps divisions theme.""" - -from typing import Annotated, Literal - -from pydantic import BaseModel, ConfigDict, Field - -from overture.schema.core import ( - OvertureFeature, -) -from overture.schema.core.cartography import CartographicallyHinted -from overture.schema.core.models import ( - Perspectives, -) -from overture.schema.core.names import CommonNames, Named, Names -from overture.schema.core.scoping.side import Side -from overture.schema.system.field_constraint import ( - UniqueItemsConstraint, -) -from overture.schema.system.model_constraint import ( - FieldEqCondition, - forbid_if, - no_extra_fields, - require_if, -) -from overture.schema.system.primitive import ( - Geometry, - GeometryType, - GeometryTypeConstraint, - int32, -) -from overture.schema.system.ref 
import Id -from overture.schema.system.string import CountryCodeAlpha2, RegionCode, WikidataId - -from ..enums import IS_COUNTRY, DivisionClass, PlaceType -from ..models import CapitalOfDivisionItem -from ..types import AdminLevel, Hierarchy - - -@no_extra_fields -class Norms(BaseModel): - """Local norms and standards.""" - - # Optional - - driving_side: Annotated[ - Side | None, - Field( - description="Side of the road on which vehicles drive in the division.", - ), - ] = None - - -@forbid_if(["parent_division_id"], IS_COUNTRY) -@require_if(["parent_division_id"], ~IS_COUNTRY) -@require_if(["admin_level"], FieldEqCondition("subtype", PlaceType.COUNTRY)) -@require_if(["admin_level"], FieldEqCondition("subtype", PlaceType.DEPENDENCY)) -@require_if(["admin_level"], FieldEqCondition("subtype", PlaceType.MACROREGION)) -@require_if(["admin_level"], FieldEqCondition("subtype", PlaceType.REGION)) -@require_if(["admin_level"], FieldEqCondition("subtype", PlaceType.MACROCOUNTY)) -@require_if(["admin_level"], FieldEqCondition("subtype", PlaceType.COUNTY)) -class Division( - OvertureFeature[Literal["divisions"], Literal["division"]], - Named, - CartographicallyHinted, -): - """Divisions are recognized official or non-official organizations of people as seen - from a given political perspective. - - Examples include countries, provinces, cities, towns, neighborhoods, etc. - """ - - model_config = ConfigDict(title="division") - - # Core - geometry: Annotated[ - Geometry, - GeometryTypeConstraint(GeometryType.POINT), - Field( - description="""Approximate location of a position commonly associated with the real-world entity modeled by the division feature.""", - ), - ] - - # Required - - names: Names - subtype: PlaceType - country: Annotated[ - CountryCodeAlpha2, - Field( - description="""ISO 3166-1 alpha-2 country code of the country or country-like entity, that this division represents or belongs to. 
- -If the entity this division represents has a country code, the country property contains it. If it does not, the country property contains the country code of the first division encountered by traversing the parent_division_id chain to the root. - -For example: - - The country value for the United States is 'US' - - The country value for New York City is 'US' - - The country value for Puerto Rico, a dependency of the US, - is 'PR'. - - The country value for San Juan, Puerto Rico is 'PR'. - -If an entity has an internationally-recognized ISO 3166-1 alpha-2 country code, it should always be used. In cases where the schema requires the code but no internationally-recognized code is available, a synthetic code may be used provided it does not conflict with any internationally-recognized codes.""", - ), - ] - hierarchies: Annotated[ - list[Hierarchy], - Field( - min_length=1, - description="""Hierarchies in which this division participates. - -Every division participates in at least one hierarchy. Most participate in only one. Some divisions may participate in more than one hierarchy, for example if they are claimed by different parent divisions from different political perspectives; or if there are other real-world reasons why the division or one of its ancestors has multiple parents. - -The first hierarchy in the list is the default hierarchy, and the second-to-last entry in the default hierarchy (if there is such an entry) always corresponds to the `parent_division_id' property. The ordering of hierarchies after the first one is arbitrary.""", - ), - UniqueItemsConstraint(), - ] - parent_division_id: Annotated[ - Id | None, - Field( - min_length=1, - description="""Division ID of this division's parent division. - -Not allowed for top-level divisions (countries) and required for all other divisions. - -The default parent division is the parent division as seen from the default political perspective, if there is one, and is otherwise chosen somewhat arbitrarily. 
The hierarchies property can be used to inspect the exhaustive list of parent divisions.""", - ), - ] = None - admin_level: AdminLevel | None = None - - # Optional - - class_: Annotated[DivisionClass | None, Field(alias="class")] = None - local_type: Annotated[ - CommonNames | None, - Field( - description="""Local name for the subtype property, optionally localized. - -For example, the Canadian province of Quebec has the subtype 'region', but in the local administrative hierarchy it is referred to as a 'province'. Similarly, the Canadian Yukon territory also has subtype 'region', but is locally called a 'territory'. - -This property is localized using a standard Overture names structure. So for example, in Switzerland the top-level administrative subdivision corresponding to subtype 'region' is the canton, which is may be translated in each of Switzerland's official languages as, 'canton' in French, 'kanton' in German, 'cantone' in Italian, and 'chantun' in Romansh.""", - ), - ] = None - region: Annotated[ - RegionCode | None, - Field( - description="""ISO 3166-2 principal subdivision code of the subdivision-like entity this division represents or belongs to. - -If the entity this division represents has a principal subdivision code, the region property contains it. If it does not, the region property contains the principal subdivision code of the first division encountered by traversing the parent_division_id chain to the root. - -For example: - - The region value for the United States is omitted. - - The region value for the U.S. state of New York is 'US-NY'. - - The region value for New York City is 'US-NY', which it - inherits from the state of New York. - - The region value for Puerto Rico is 'US-PR'.""", - ), - ] = None - perspectives: Annotated[ - Perspectives | None, - Field( - description="""Political perspectives from which this division is considered to be an accurate representation. 
- -If this property is absent, then this division is not known to be disputed from any political perspective. Consequently, there is only one division feature representing the entire real world entity. - -If this property is present, it means the division represents one of several alternative perspectives on the same real-world entity. - -There are two modes of perspective: - -1. `accepted_by` means the representation of the division is accepted by the listed entities and would be included on a map drawn from their perspective. - -2. `disputed_by` means the representation of the division is disputed by the listed entities and would be excluded from a map drawn from their perspective. - -When drawing a map from the perspective of a given country, one would start by gathering all the undisputed divisions (with no `perspectives` property), and then adding to that first all divisions explicitly accepted by the country, and second all divisions not explicitly disputed by the country.""", - ), - ] = None - # If we decide to include default language, it will go here. But is it really generally-useful information? - norms: Annotated[ - Norms | None, - Field( - description="""Collects information about local norms and rules within the division that are generally useful for mapping and map-related use cases. - -If the norms property or a desired sub-property of the norms property is missing on a division, but at least one of its ancestor divisions has the norms property and the desired sub-property, then the value from the nearest ancestor division may be assumed.""", - ), - ] = None - population: Annotated[ - int32 | None, Field(ge=0, description="Population of the division") - ] = None - capital_division_ids: Annotated[ - list[Id] | None, - Field( - min_length=1, - description="""Division IDs of this division's capital divisions. If present, this property will refer to the division IDs of the capital cities, county seats, etc. 
of a division.""", - ), - UniqueItemsConstraint(), - ] = None - capital_of_divisions: Annotated[ - list[CapitalOfDivisionItem] | None, - Field( - min_length=1, - description="Division IDs and subtypes of divisions this division is a capital of.", - ), - UniqueItemsConstraint(), - ] = None - wikidata: WikidataId | None = None diff --git a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division_area.py b/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division_area.py new file mode 100644 index 000000000..677906abb --- /dev/null +++ b/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division_area.py @@ -0,0 +1,156 @@ +"""Division area models for Overture Maps divisions theme.""" + +import textwrap +from typing import Annotated, Literal + +from pydantic import ConfigDict, Field + +from overture.schema.core import ( + OvertureFeature, +) +from overture.schema.core.names import ( + Named, + Names, +) +from overture.schema.system.doc import DocumentedEnum +from overture.schema.system.model_constraint import ( + FieldEqCondition, + radio_group, + require_if, +) +from overture.schema.system.primitive import ( + Geometry, + GeometryType, + GeometryTypeConstraint, +) +from overture.schema.system.ref import Id, Reference, Relationship +from overture.schema.system.string import CountryCodeAlpha2, RegionCode + +from ._common import AdminLevel, DivisionSubtype +from .division import Division + + +class AreaClass(str, DocumentedEnum): + """ + Further classification of a division area that is more specific than its `subtype`. + + A division area's `class` adds detail to the broad classification found in `DivisionSubtype`. + """ + + LAND = ("land", "The area does not extend beyond the coastline.") + MARITIME = ( + "maritime", + textwrap.dedent(""" + The area extends beyond the coastline, in most cases to the extent of the division's + territorial sea, if it has one. 
+ """).strip(), + ) + + +@require_if(["admin_level"], FieldEqCondition("subtype", DivisionSubtype.COUNTRY)) +@require_if(["admin_level"], FieldEqCondition("subtype", DivisionSubtype.DEPENDENCY)) +@require_if(["admin_level"], FieldEqCondition("subtype", DivisionSubtype.MACROREGION)) +@require_if(["admin_level"], FieldEqCondition("subtype", DivisionSubtype.REGION)) +@require_if(["admin_level"], FieldEqCondition("subtype", DivisionSubtype.MACROCOUNTY)) +@require_if(["admin_level"], FieldEqCondition("subtype", DivisionSubtype.COUNTY)) +@radio_group("is_land", "is_territorial") +class DivisionArea( + OvertureFeature[Literal["divisions"], Literal["division_area"]], Named +): + """ + Division areas are polygon features that represent the land or maritime area covered by a + division. + + Each division area belongs to a division which it references by ID, and for which the division + area provides an area polygon. For ease of use, every division area repeats the subtype, names, + country, and region properties of the division it belongs to. + """ + + model_config = ConfigDict(title="division_area") + + # Core + geometry: Annotated[ + Geometry, + GeometryTypeConstraint(GeometryType.POLYGON, GeometryType.MULTI_POLYGON), + Field( + description=textwrap.dedent(""" + The area covered by the division with which this area feature is + associated. + """).strip(), + ), + ] + + # Required + + names: Annotated[ + Names, + Field(description="All known names by which the owning division is called"), + ] + subtype: Annotated[ + DivisionSubtype, + Field( + description=textwrap.dedent(""" + A broad classification of the division this area belongs to (e.g., country, region, + locality, etc.). + + This value is the same as the owning division's `subtype`. + """).strip() + ), + ] + class_: Annotated[ + AreaClass, + Field( + alias="class", + description=textwrap.dedent(""" + A more specific classification of the division area than is provided by `subtype`. 
+ """).strip(), + ), + ] + is_land: Annotated[ + bool | None, + Field( + description=textwrap.dedent(""" + Flag indicating whether or not the feature geometry represents the land-clipped, + non-maritime boundary. The geometry can be used for map rendering, cartographic + display, and similar purposes. + """).strip(), + strict=True, + ), + ] = None + is_territorial: Annotated[ + bool | None, + Field( + description=textwrap.dedent(""" + Flag indicating whether or not the feature geometry represents Overture's best + approximation of the division's territorial boundary. For coastal places, this will + tend to include the water area. The geometry can be used for data processing, + reverse-geocoding, and similar purposes. + """).strip(), + strict=True, + ), + ] = None + division_id: Annotated[ + Id, + Field( + description="Division ID of the division this area belongs to.", + ), + Reference(Relationship.BELONGS_TO, Division), + ] + country: Annotated[ + CountryCodeAlpha2, + Field( + description="ISO 3166-1 alpha-2 country code of the division this area belongs to.", + ), + ] + + # Optional + + region: Annotated[ + RegionCode | None, + Field( + description=textwrap.dedent(""" + ISO 3166-2 principal subdivision code of the division this area belongs to. 
+ """).strip(), + ), + ] = None + admin_level: AdminLevel | None = None diff --git a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division_area/__init__.py b/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division_area/__init__.py deleted file mode 100644 index e8f19fb17..000000000 --- a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division_area/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .models import DivisionArea - -__all__ = ["DivisionArea"] diff --git a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division_area/enums.py b/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division_area/enums.py deleted file mode 100644 index a145d0a96..000000000 --- a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division_area/enums.py +++ /dev/null @@ -1,8 +0,0 @@ -from enum import Enum - - -class AreaClass(str, Enum): - """Area and boundary class designations.""" - - LAND = "land" # The area does not extend beyond the coastline. - MARITIME = "maritime" # The area extends beyond the coastline, in most cases to the extent of the division's territorial sea, if it has one. 
diff --git a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division_area/models.py b/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division_area/models.py deleted file mode 100644 index 06b38bb68..000000000 --- a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division_area/models.py +++ /dev/null @@ -1,107 +0,0 @@ -"""Division area models for Overture Maps divisions theme.""" - -from typing import Annotated, Literal - -from pydantic import ConfigDict, Field - -from overture.schema.core import ( - OvertureFeature, -) -from overture.schema.core.names import ( - Named, - Names, -) -from overture.schema.system.model_constraint import ( - FieldEqCondition, - radio_group, - require_if, -) -from overture.schema.system.primitive import ( - Geometry, - GeometryType, - GeometryTypeConstraint, -) -from overture.schema.system.ref import Id, Reference, Relationship -from overture.schema.system.string import CountryCodeAlpha2, RegionCode - -from ..division.models import Division -from ..enums import PlaceType -from ..types import AdminLevel -from .enums import AreaClass - - -@require_if(["admin_level"], FieldEqCondition("subtype", PlaceType.COUNTRY)) -@require_if(["admin_level"], FieldEqCondition("subtype", PlaceType.DEPENDENCY)) -@require_if(["admin_level"], FieldEqCondition("subtype", PlaceType.MACROREGION)) -@require_if(["admin_level"], FieldEqCondition("subtype", PlaceType.REGION)) -@require_if(["admin_level"], FieldEqCondition("subtype", PlaceType.MACROCOUNTY)) -@require_if(["admin_level"], FieldEqCondition("subtype", PlaceType.COUNTY)) -@radio_group("is_land", "is_territorial") -class DivisionArea( - OvertureFeature[Literal["divisions"], Literal["division_area"]], Named -): - """Division areas are polygons that represent the land or maritime area covered by a - division. - - Each division area belongs to a division which it references by ID, and for which - the division area provides an area polygon. 
For ease of use, every division area - repeats the subtype, names, country, and region properties of the division it - belongs to. - """ - - model_config = ConfigDict(title="division_area") - - # Core - geometry: Annotated[ - Geometry, - GeometryTypeConstraint(GeometryType.POLYGON, GeometryType.MULTI_POLYGON), - Field( - description="The area covered by the division with which this area feature is associated", - ), - ] - - # Required - - names: Names - subtype: PlaceType - class_: Annotated[ - AreaClass, - Field(alias="class"), - ] - is_land: Annotated[ - bool | None, - Field( - description="""A boolean to indicate whether or not the feature geometry represents the land-clipped, non-maritime boundary. The geometry can be used for map rendering, cartographic display, and similar purposes.""", - strict=True, - ), - ] = None - is_territorial: Annotated[ - bool | None, - Field( - description="""A boolean to indicate whether or not the feature geometry represents Overture's best approximation of this place's maritime boundary. For coastal places, this would tend to include the water area. 
The geometry can be used for data processing, reverse-geocoding, and similar purposes.""", - strict=True, - ), - ] = None - division_id: Annotated[ - Id, - Field( - description="Division ID of the division this area belongs to.", - ), - Reference(Relationship.BELONGS_TO, Division), - ] - country: Annotated[ - CountryCodeAlpha2, - Field( - description="ISO 3166-1 alpha-2 country code of the division this area belongs to.", - ), - ] - - # Optional - - region: Annotated[ - RegionCode | None, - Field( - description="ISO 3166-2 principal subdivision code of the division this area belongs to.", - ), - ] = None - admin_level: AdminLevel | None = None diff --git a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division_boundary.py b/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division_boundary.py new file mode 100644 index 000000000..0fcf751f9 --- /dev/null +++ b/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division_boundary.py @@ -0,0 +1,219 @@ +"""Division boundary models for Overture Maps divisions theme.""" + +import textwrap +from typing import Annotated, Literal + +from pydantic import ConfigDict, Field + +from overture.schema.core import ( + OvertureFeature, +) +from overture.schema.core.models import Perspectives +from overture.schema.system.doc import DocumentedEnum +from overture.schema.system.field_constraint import UniqueItemsConstraint +from overture.schema.system.model_constraint import ( + FieldEqCondition, + forbid_if, + radio_group, + require_if, +) +from overture.schema.system.primitive import ( + Geometry, + GeometryType, + GeometryTypeConstraint, +) +from overture.schema.system.ref import Id, Reference, Relationship +from overture.schema.system.string import CountryCodeAlpha2, RegionCode + +from ._common import IS_COUNTRY, AdminLevel, DivisionSubtype +from .division import Division + + +class BoundaryClass(str, DocumentedEnum): + """ + The kind of boundary: land or maritime. 
+ """ + + LAND = ( + "land", + textwrap.dedent(""" + None of the boundary geometry extends beyond the coastline of either associated + division. + """).strip(), + ) + MARITIME = ( + "maritime", + textwrap.dedent(""" + All the boundary geometry extends beyond the coastline of both associated divisions. + """).strip(), + ) + + +@forbid_if(["country"], IS_COUNTRY) +@require_if(["country"], ~IS_COUNTRY) +@require_if(["admin_level"], FieldEqCondition("subtype", DivisionSubtype.COUNTRY)) +@require_if(["admin_level"], FieldEqCondition("subtype", DivisionSubtype.DEPENDENCY)) +@require_if(["admin_level"], FieldEqCondition("subtype", DivisionSubtype.MACROREGION)) +@require_if(["admin_level"], FieldEqCondition("subtype", DivisionSubtype.REGION)) +@require_if(["admin_level"], FieldEqCondition("subtype", DivisionSubtype.MACROCOUNTY)) +@require_if(["admin_level"], FieldEqCondition("subtype", DivisionSubtype.COUNTY)) +@radio_group("is_land", "is_territorial") +class DivisionBoundary( + OvertureFeature[Literal["divisions"], Literal["division_boundary"]] +): + """ + Boundaries represent borders between divisions of the same subtype. + + Some boundaries may be disputed by the divisions on one or both sides. + """ + + model_config = ConfigDict(title="boundary") + + # Core + geometry: Annotated[ + Geometry, + GeometryTypeConstraint( + GeometryType.LINE_STRING, GeometryType.MULTI_LINE_STRING + ), + Field( + description="Boundary line or lines", + ), + ] + + # Required + + subtype: Annotated[ + DivisionSubtype, + Field( + description=textwrap.dedent(""" + A broad classification of the divisions this boundary separates (e.g., country, + region, locality, etc.). 
+ """).strip() + ), + ] + class_: Annotated[ + BoundaryClass, + Field( + alias="class", + description="The kind of boundary: land or maritime.", + ), + ] + is_land: Annotated[ + bool | None, + Field( + description=textwrap.dedent(""" + Flag indicating whether or not the feature geometry represents the land-clipped, + non-maritime boundary. The geometry can be used for map rendering, cartographic + display, and similar purposes. + """).strip(), + strict=True, + ), + ] = None + is_territorial: Annotated[ + bool | None, + Field( + description=textwrap.dedent(""" + Flag indicating whether or not the feature geometry represents Overture's best + approximation of the division's territorial boundary. For coastal places, this will + tend to include the water area. The geometry can be used for data processing, + reverse-geocoding, and similar purposes. + """).strip(), + strict=True, + ), + ] = None + division_ids: Annotated[ + list[ + Annotated[ + Id, + Reference(Relationship.BOUNDARY_OF, Division), + ] + ], + Field( + min_length=2, + max_length=2, + description=textwrap.dedent(""" + Identifies the two divisions to the left and right, respectively, of the + boundary line. The left- and right-hand sides of the boundary are considered + from the perspective of a person standing on the line facing in the direction + in which the geometry is oriented, i.e. facing toward the end of the line. + + The first array element is the Overture ID of the left division. The second + element is the Overture ID of the right division. + """).strip(), + ), + UniqueItemsConstraint(), + ] + country: Annotated[ + CountryCodeAlpha2 | None, + Field( + description=textwrap.dedent(""" + ISO 3166-1 alpha-2 country code of the country or country-like entity that + both sides of the boundary share. 
+ + This property will be present on boundaries between two regions, counties, + or similar entities within the same country, but will not be present on + boundaries between two countries or country-like entities. + """).strip(), + ), + ] = None + + # Optional + + region: Annotated[ + RegionCode | None, + Field( + description=textwrap.dedent(""" + ISO 3166-2 principal subdivision code of the subdivision-like entity that + both sides of the boundary share. + + This property will be present on boundaries between two counties, localadmins + or similar entities within the same principal subdivision, but will not be + present on boundaries between different principal subdivisions or countries. + """).strip(), + ), + ] = None + admin_level: AdminLevel | None = None + is_disputed: Annotated[ + bool | None, + Field( + description=textwrap.dedent(""" + Flag indicating whether this boundary is either disputed outright or is a "best + guess" in a case where the boundary between two divisions is unclear. + + If the boundary is disputed outright, this flag is true and the entities disputing + it are listed in the `perspectives` property. If the boundary is simply a "best + guess", this flag is true but no disputing entities are listed in `perspectives`. + """).strip(), + strict=True, + ), + ] = None + perspectives: Annotated[ + Perspectives | None, + Field( + description=textwrap.dedent(""" + Political perspectives from which this division boundary is considered to be + an accurate representation. + + If this property is absent, then this boundary is not known to be disputed + from any political perspective. Consequently, there is only one boundary + feature representing the entire real world entity. + + If this property is present, it means the boundary represents one of several + alternative perspectives on the same real-world entity. + + There are two modes of perspective: + + 1. 
`accepted_by` means the representation of the boundary is accepted by the + listed entities and would be included on a map drawn from their perspective. + + 2. `disputed_by` means the representation of the boundary is disputed by the + listed entities and would be excluded from a map drawn from their + perspective. + + When drawing a map from the perspective of a given country, one would start by + gathering all the undisputed boundaries (those with no `perspectives` value); and + then adding to that: first, all boundaries explicitly accepted by the country, and + second, all boundaries not explicitly disputed by the country. + """).strip(), + ), + ] = None diff --git a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division_boundary/__init__.py b/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division_boundary/__init__.py deleted file mode 100644 index 877a778af..000000000 --- a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division_boundary/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .models import DivisionBoundary - -__all__ = ["DivisionBoundary"] diff --git a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division_boundary/enums.py b/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division_boundary/enums.py deleted file mode 100644 index 08fb0ba14..000000000 --- a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division_boundary/enums.py +++ /dev/null @@ -1,11 +0,0 @@ -from enum import Enum - - -class BoundaryClass(str, Enum): - # None of the boundary geometry extends beyond the - # coastline of either associated division. - LAND = "land" - - # All the boundary geometry extends beyond the - # coastline of both associated divisions. 
- MARITIME = "maritime" diff --git a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division_boundary/models.py b/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division_boundary/models.py deleted file mode 100644 index 86ea6e7cf..000000000 --- a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/division_boundary/models.py +++ /dev/null @@ -1,157 +0,0 @@ -"""Division boundary models for Overture Maps divisions theme.""" - -from typing import Annotated, Literal - -from pydantic import ConfigDict, Field - -from overture.schema.core import ( - OvertureFeature, -) -from overture.schema.core.models import Perspectives -from overture.schema.system.field_constraint import UniqueItemsConstraint -from overture.schema.system.model_constraint import ( - FieldEqCondition, - forbid_if, - radio_group, - require_if, -) -from overture.schema.system.primitive import ( - Geometry, - GeometryType, - GeometryTypeConstraint, -) -from overture.schema.system.ref import Id, Reference, Relationship -from overture.schema.system.string import CountryCodeAlpha2, RegionCode - -from ..division import Division -from ..enums import IS_COUNTRY, PlaceType -from ..types import AdminLevel -from .enums import BoundaryClass - - -@forbid_if(["country"], IS_COUNTRY) -@require_if(["country"], ~IS_COUNTRY) -@require_if(["admin_level"], FieldEqCondition("subtype", PlaceType.COUNTRY)) -@require_if(["admin_level"], FieldEqCondition("subtype", PlaceType.DEPENDENCY)) -@require_if(["admin_level"], FieldEqCondition("subtype", PlaceType.MACROREGION)) -@require_if(["admin_level"], FieldEqCondition("subtype", PlaceType.REGION)) -@require_if(["admin_level"], FieldEqCondition("subtype", PlaceType.MACROCOUNTY)) -@require_if(["admin_level"], FieldEqCondition("subtype", PlaceType.COUNTY)) -@radio_group("is_land", "is_territorial") -class DivisionBoundary( - OvertureFeature[Literal["divisions"], Literal["division_boundary"]] -): - """Boundaries represent 
borders between divisions of the same subtype. - - Some boundaries may be disputed by the divisions on one or both sides. - """ - - model_config = ConfigDict(title="boundary") - - # Core - geometry: Annotated[ - Geometry, - GeometryTypeConstraint( - GeometryType.LINE_STRING, GeometryType.MULTI_LINE_STRING - ), - Field( - description="Boundary line or lines", - ), - ] - - # Required - - subtype: PlaceType - class_: Annotated[BoundaryClass, Field(alias="class")] - is_land: Annotated[ - bool | None, - Field( - description="""A boolean to indicate whether or not the feature geometry represents the -land-clipped, non-maritime boundary. The geometry can be used for map -rendering, cartographic display, and similar purposes.""", - strict=True, - ), - ] = None - is_territorial: Annotated[ - bool | None, - Field( - description="""A boolean to indicate whether or not the feature geometry represents -Overture's best approximation of this place's maritime boundary. For -coastal places, this would tend to include the water area. The geometry -can be used for data processing, reverse-geocoding, and similar purposes.""", - strict=True, - ), - ] = None - division_ids: Annotated[ - list[ - Annotated[ - Id, - Reference(Relationship.BOUNDARY_OF, Division), - ] - ], - Field( - min_length=2, - max_length=2, - description="""Identifies the two divisions to the left and right, respectively, of the boundary line. The left- and right-hand sides of the boundary are considered from the perspective of a person standing on the line facing in the direction in which the geometry is oriented, i.e. facing toward the end of the line. - -The first array element is the Overture ID of the left division. The second element is the Overture ID of the right division.""", - ), - UniqueItemsConstraint(), - ] - country: Annotated[ - CountryCodeAlpha2 | None, - Field( - description="""ISO 3166-1 alpha-2 country code of the country or country-like -entity that both sides of the boundary share. 
- -This property will be present on boundaries between two regions, counties, -or similar entities within the same country, but will not be present on boundaries -between two countries or country-like entities.""", - ), - ] = None - - # Optional - - region: Annotated[ - RegionCode | None, - Field( - description="""ISO 3166-2 principal subdivision code of the subdivision-like -entity that both sides of the boundary share. - -This property will be present on boundaries between two counties, localadmins -or similar entities within the same principal subdivision, but will not be -present on boundaries between different principal subdivisions or countries.""", - ), - ] = None - admin_level: AdminLevel | None = None - is_disputed: Annotated[ - bool | None, - Field( - description="""Indicator if there are entities disputing this division boundary. -Information about entities disputing this boundary should be included in perspectives -property. - -This property should also be true if boundary between two entities is unclear -and this is "best guess". So having it true and no perspectives gives map creators -reason not to fully trust the boundary, but use it if they have no other.""", - strict=True, - ), - ] = None - perspectives: Annotated[ - Perspectives | None, - Field( - description="""Political perspectives from which this division boundary is considered to be an accurate representation. - -If this property is absent, then this boundary is not known to be disputed from any political perspective. Consequently, there is only one boundary feature representing the entire real world entity. - -If this property is present, it means the boundary represents one of several alternative perspectives on the same real-world entity. - -There are two modes of perspective: - - 1. `accepted_by` means the representation of the boundary is accepted by the listed entities and would be included on a map drawn from their perspective. - - 2. 
`disputed_by` means the representation of the boundary is disputed by the listed entities and would be excluded from a map drawn from their perspective. - -When drawing a map from the perspective of a given country, one would start by gathering all the undisputed boundary (with no `perspectives` property), and then adding to that first all boundary explicitly accepted by the country, and second all boundary not explicitly disputed by the country.""", - ), - ] = None diff --git a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/enums.py b/packages/overture-schema-divisions-theme/src/overture/schema/divisions/enums.py deleted file mode 100644 index 8634f0f5c..000000000 --- a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/enums.py +++ /dev/null @@ -1,80 +0,0 @@ -from enum import Enum - -from overture.schema.system.model_constraint import FieldEqCondition - - -class PlaceType(str, Enum): - """ - Category of the division from a finite, hierarchical, ordered list of categories (e.g., country, - region, locality, etc.) similar to a Who's on First placetype. - """ - - # Largest unit of independent sovereignty, e.g. the United States, France. - COUNTRY = "country" - - # A place that is not exactly a sub-region of a country but is dependent on a parent country for - # defence, passport control, etc., e.g. Puerto Rico. - DEPENDENCY = "dependency" - - # A bundle of regions, e.g. England, Scotland, Île-de-France. These exist mainly in Europe. - MACROREGION = "macroregion" - - # A state, province, region, etc. Largest sub-country administrative unit in most countries, - # except those that have dependencies or macro-regions. - REGION = "region" - - # A bundle of counties, e.g. Inverness. These exist mainly in Europe. - MACROCOUNTY = "macrocounty" - - # Largest sub-region administrative unit in most countries, unless they have macrocounties. 
- COUNTY = "county" - - # An administrative unit existing in some parts of the world that contains localities or - # populated places, e.g. département de Paris. Often the contained places do not have - # independent authority. Often, but not exclusively, found in Europe. - LOCALADMIN = "localadmin" - - # A populated place that may or may not have its own administrative authority. - LOCALITY = "locality" - - # A local government unit subordinate to a locality. - BOROUGH = "borough" - - # A super-neighborhood that contains smaller divisions of type neighborhood, e.g. BoCaCa (Boerum - # Hill, Cobble Hill, and Carroll Gardens). - MACROHOOD = "macrohood" - - # A neighborhood. Most neighborhoods will be just this, unless there's enough granular detail to - # justify incroducing macrohood or microhood divisions. - NEIGHBORHOOD = "neighborhood" - - # A mini-neighborhood that is contained within a division of type neighborhood. - MICROHOOD = "microhood" - - -class DivisionClass(str, Enum): - """Division-specific class designations.""" - - # A extensive, large human settlement. - # Example: Tokyo, Japan. - MEGACITY = "megacity" - - # A relatively large, permanent human settlement. - # Example: Guadalajara, Mexico. - CITY = "city" - - # A medium-sized human settlement that is smaller than a city, but larger than a village. - # Example: Walldürn, Germany. - TOWN = "town" - - # A smaller human settlement that is smaller than a town, but larger than a hamlet. - # Example: Wadi El Karm, Lebanon. - VILLAGE = "village" - - # A small, isolated human settlement in a rural area - # Example: Tjarnabyggð, Iceland. 
- HAMLET = "hamlet" - - -# TODO - vic - Migrate this into a better home -IS_COUNTRY = FieldEqCondition("subtype", PlaceType.COUNTRY) diff --git a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/models.py b/packages/overture-schema-divisions-theme/src/overture/schema/divisions/models.py deleted file mode 100644 index 4f43e4965..000000000 --- a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/models.py +++ /dev/null @@ -1,39 +0,0 @@ -from typing import Annotated, NewType - -from pydantic import BaseModel, ConfigDict, Field - -from overture.schema.divisions.enums import PlaceType -from overture.schema.system.model_constraint import no_extra_fields -from overture.schema.system.ref import Id -from overture.schema.system.string import StrippedString - -DivisionId = NewType( - "DivisionId", Annotated[Id, Field(min_length=1, description="ID of the division")] -) - - -@no_extra_fields -class HierarchyItem(BaseModel): - """One division in a hierarchy.""" - - model_config = ConfigDict(frozen=True) - - # Required - - division_id: DivisionId - subtype: PlaceType - name: Annotated[ - StrippedString, Field(min_length=1, description="Primary name of the division") - ] - - -@no_extra_fields -class CapitalOfDivisionItem(BaseModel): - """One division that has capital.""" - - model_config = ConfigDict(frozen=True) - - # Required - - division_id: DivisionId - subtype: PlaceType diff --git a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/types.py b/packages/overture-schema-divisions-theme/src/overture/schema/divisions/types.py deleted file mode 100644 index 621cfc416..000000000 --- a/packages/overture-schema-divisions-theme/src/overture/schema/divisions/types.py +++ /dev/null @@ -1,32 +0,0 @@ -from typing import Annotated, NewType - -from pydantic import Field - -from overture.schema.system.field_constraint import UniqueItemsConstraint -from overture.schema.system.primitive import uint8 - -from .models import 
HierarchyItem - -AdminLevel = NewType( - "AdminLevel", - Annotated[ - uint8, - Field( - description="Integer representing the division's position in its country's administrative hierarchy, where lower numbers correspond to higher level administrative units.", - ), - ], -) - -Hierarchy = NewType( - "Hierarchy", - Annotated[ - list[HierarchyItem], - Field( - min_length=1, - description="""A hierarchy of divisions, with the first entry being a country; each subsequent entry, if any, being a division that is a direct child of the previous entry; and the last entry representing the division that contains the hierarchy. - -For example, a hierarchy for the United States is simply [United States]. A hierarchy for the U.S. state of New Hampshire would be [United States, New Hampshire], and a hierarchy for the city of Concord, NH would be [United States, New Hampshire, Merrimack County, Concord].""", - ), - UniqueItemsConstraint(), - ], -) diff --git a/packages/overture-schema-divisions-theme/tests/division_area_baseline_schema.json b/packages/overture-schema-divisions-theme/tests/division_area_baseline_schema.json index f3e8c2abd..8f800c551 100644 --- a/packages/overture-schema-divisions-theme/tests/division_area_baseline_schema.json +++ b/packages/overture-schema-divisions-theme/tests/division_area_baseline_schema.json @@ -1,7 +1,7 @@ { "$defs": { "AreaClass": { - "description": "Area and boundary class designations.", + "description": "Further classification of a division area that is more specific than its `subtype`.\n\nA division area's `class` adds detail to the broad classification found in `DivisionSubtype`.", "enum": [ "land", "maritime" @@ -9,6 +9,25 @@ "title": "AreaClass", "type": "string" }, + "DivisionSubtype": { + "description": "Category of the division from a finite, hierarchical, ordered list of categories (e.g., country,\nregion, locality, etc.) 
similar to a Who's on First placetype.", + "enum": [ + "country", + "dependency", + "macroregion", + "region", + "macrocounty", + "county", + "localadmin", + "locality", + "borough", + "macrohood", + "neighborhood", + "microhood" + ], + "title": "DivisionSubtype", + "type": "string" + }, "NameRule": { "additionalProperties": false, "description": "A rule that can be evaluated to determine the name in advanced scenarios.\n\nName rules are used for cases where the primary name is not sufficient; the common name is not\nthe right fit for the use case and another variant is needed; or where the name only applies in\ncertain specific circumstances.\n\nExamples might include:\n- An official, alternate, or short name.\n- A name that only applies to part of a linear path like a road segment (geometric range\n scoping).\n- A name that only applies to the left or right side of a linear path like a road segment (side\n scoping).\n- A name that is only accepted by some political perspectives.", @@ -42,7 +61,7 @@ "value": { "description": "The actual name value.", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Value", "type": "string" }, @@ -78,7 +97,7 @@ "patternProperties": { "^(?:(?:[A-Za-z]{2,3}(?:-[A-Za-z]{3}){0,3}?)|(?:[A-Za-z]{4,8}))(?:-[A-Za-z]{4})?(?:-[A-Za-z]{2}|[0-9]{3})?(?:-(?:[A-Za-z0-9]{5,8}|[0-9][A-Za-z0-9]{3}))*(?:-[A-WY-Za-wy-z0-9](?:-[A-Za-z0-9]{2,8})+)*$": { "description": "String with no leading/trailing whitespace", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "type": "string" } }, @@ -91,7 +110,7 @@ "primary": { "description": "The most commonly used name.", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Primary", "type": "string" }, @@ -149,25 +168,6 @@ "title": "Perspectives", "type": "object" }, - "PlaceType": { - "description": "Category of the division from a finite, hierarchical, ordered list of categories (e.g., country,\nregion, locality, etc.) 
similar to a Who's on First placetype.", - "enum": [ - "country", - "dependency", - "macroregion", - "region", - "macrocounty", - "county", - "localadmin", - "locality", - "borough", - "macrohood", - "neighborhood", - "microhood" - ], - "title": "PlaceType", - "type": "string" - }, "Side": { "description": "The side, left or right, on which something appears relative to a facing or heading direction\n(*e.g.*, the side of a road relative to the road orientation), or relative to the direction of\ntravel of a person or vehicle.", "enum": [ @@ -207,7 +207,7 @@ }, "license": { "description": "Source data license name.\n\nThis should be a valid SPDX license identifier when available.\n\nIf omitted, contact the data provider for more license information.", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "License", "type": "string" }, @@ -237,7 +237,7 @@ } }, "additionalProperties": false, - "description": "Division areas are polygons that represent the land or maritime area covered by a\ndivision.\n\nEach division area belongs to a division which it references by ID, and for which\nthe division area provides an area polygon. For ease of use, every division area\nrepeats the subtype, names, country, and region properties of the division it\nbelongs to.", + "description": "Division areas are polygon features that represent the land or maritime area covered by a\ndivision.\n\nEach division area belongs to a division which it references by ID, and for which the division\narea provides an area polygon. 
For ease of use, every division area repeats the subtype, names,\ncountry, and region properties of the division it belongs to.", "properties": { "bbox": { "description": "An optional bounding box for the feature", @@ -250,7 +250,7 @@ "type": "array" }, "geometry": { - "description": "The area covered by the division with which this area feature is associated", + "description": "The area covered by the division with which this area feature is\nassociated.", "oneOf": [ { "properties": { @@ -350,6 +350,13 @@ } }, "then": { + "properties": { + "admin_level": { + "not": { + "type": "null" + } + } + }, "required": [ "admin_level" ] @@ -364,6 +371,13 @@ } }, "then": { + "properties": { + "admin_level": { + "not": { + "type": "null" + } + } + }, "required": [ "admin_level" ] @@ -378,6 +392,13 @@ } }, "then": { + "properties": { + "admin_level": { + "not": { + "type": "null" + } + } + }, "required": [ "admin_level" ] @@ -392,6 +413,13 @@ } }, "then": { + "properties": { + "admin_level": { + "not": { + "type": "null" + } + } + }, "required": [ "admin_level" ] @@ -406,6 +434,13 @@ } }, "then": { + "properties": { + "admin_level": { + "not": { + "type": "null" + } + } + }, "required": [ "admin_level" ] @@ -420,6 +455,13 @@ } }, "then": { + "properties": { + "admin_level": { + "not": { + "type": "null" + } + } + }, "required": [ "admin_level" ] @@ -456,14 +498,15 @@ }, "properties": { "admin_level": { - "description": "Integer representing the division's position in its country's administrative hierarchy, where lower numbers correspond to higher level administrative units.", - "maximum": 255, + "description": "Integer representing the division's position in its country's administrative\nhierarchy, where lower numbers correspond to higher level administrative units.", + "maximum": 16, "minimum": 0, "title": "Admin Level", "type": "integer" }, "class": { - "$ref": "#/$defs/AreaClass" + "$ref": "#/$defs/AreaClass", + "description": "A more specific classification of the division 
area than is provided by `subtype`." }, "country": { "description": "ISO 3166-1 alpha-2 country code of the division this area belongs to.", @@ -481,17 +524,18 @@ "type": "string" }, "is_land": { - "description": "A boolean to indicate whether or not the feature geometry represents the land-clipped, non-maritime boundary. The geometry can be used for map rendering, cartographic display, and similar purposes.", + "description": "Flag indicating whether or not the feature geometry represents the land-clipped,\nnon-maritime boundary. The geometry can be used for map rendering, cartographic\ndisplay, and similar purposes.", "title": "Is Land", "type": "boolean" }, "is_territorial": { - "description": "A boolean to indicate whether or not the feature geometry represents Overture's best approximation of this place's maritime boundary. For coastal places, this would tend to include the water area. The geometry can be used for data processing, reverse-geocoding, and similar purposes.", + "description": "Flag indicating whether or not the feature geometry represents Overture's best\napproximation of the division's territorial boundary. For coastal places, this will\ntend to include the water area. The geometry can be used for data processing,\nreverse-geocoding, and similar purposes.", "title": "Is Territorial", "type": "boolean" }, "names": { - "$ref": "#/$defs/Names" + "$ref": "#/$defs/Names", + "description": "All known names by which the owning division is called" }, "region": { "description": "ISO 3166-2 principal subdivision code of the division this area belongs to.", @@ -512,7 +556,8 @@ "uniqueItems": true }, "subtype": { - "$ref": "#/$defs/PlaceType" + "$ref": "#/$defs/DivisionSubtype", + "description": "A broad classification of the division this area belongs to (e.g., country, region,\nlocality, etc.).\n\nThis value is the same as the owning division's `subtype`." 
}, "theme": { "const": "divisions", diff --git a/packages/overture-schema-divisions-theme/tests/division_baseline_schema.json b/packages/overture-schema-divisions-theme/tests/division_baseline_schema.json index fbd34ad41..8097ee0df 100644 --- a/packages/overture-schema-divisions-theme/tests/division_baseline_schema.json +++ b/packages/overture-schema-divisions-theme/tests/division_baseline_schema.json @@ -2,17 +2,17 @@ "$defs": { "CapitalOfDivisionItem": { "additionalProperties": false, - "description": "One division that has capital.", + "description": "A division of which the owning division is the capital, together with its subtype.", "properties": { "division_id": { - "description": "ID of the division", + "description": "ID of the division whose capital is the current division.", "minLength": 1, "pattern": "^\\S+$", "title": "Division Id", "type": "string" }, "subtype": { - "$ref": "#/$defs/PlaceType" + "$ref": "#/$defs/DivisionSubtype" } }, "required": [ @@ -49,8 +49,8 @@ }, "sort_key": { "description": "Integer indicating the recommended order in which to draw features.\n\nFeatures with a lower number should be drawn \"in front\" of features with a higher\nnumber.", - "maximum": 255, - "minimum": 0, + "maximum": 2147483647, + "minimum": -2147483648, "title": "Sort Key", "type": "integer" } @@ -59,7 +59,7 @@ "type": "object" }, "DivisionClass": { - "description": "Division-specific class designations.", + "description": "Further classification of a division that is more specific than its `subtype`.\n\nA division's `class` adds detail to the broad classification found in `DivisionSubtype`.", "enum": [ "megacity", "city", @@ -70,12 +70,31 @@ "title": "DivisionClass", "type": "string" }, + "DivisionSubtype": { + "description": "Category of the division from a finite, hierarchical, ordered list of categories (e.g., country,\nregion, locality, etc.) 
similar to a Who's on First placetype.", + "enum": [ + "country", + "dependency", + "macroregion", + "region", + "macrocounty", + "county", + "localadmin", + "locality", + "borough", + "macrohood", + "neighborhood", + "microhood" + ], + "title": "DivisionSubtype", + "type": "string" + }, "HierarchyItem": { "additionalProperties": false, "description": "One division in a hierarchy.", "properties": { "division_id": { - "description": "ID of the division", + "description": "ID of a division that is an ancestor of the current division.\n\nIn the context of division hierarchies, the ancestor divisions of a division include\nthe division itself, and any other division that is an ancestor of the division's parent.", "minLength": 1, "pattern": "^\\S+$", "title": "Division Id", @@ -84,12 +103,12 @@ "name": { "description": "Primary name of the division", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Name", "type": "string" }, "subtype": { - "$ref": "#/$defs/PlaceType" + "$ref": "#/$defs/DivisionSubtype" } }, "required": [ @@ -133,7 +152,7 @@ "value": { "description": "The actual name value.", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Value", "type": "string" }, @@ -169,7 +188,7 @@ "patternProperties": { "^(?:(?:[A-Za-z]{2,3}(?:-[A-Za-z]{3}){0,3}?)|(?:[A-Za-z]{4,8}))(?:-[A-Za-z]{4})?(?:-[A-Za-z]{2}|[0-9]{3})?(?:-(?:[A-Za-z0-9]{5,8}|[0-9][A-Za-z0-9]{3}))*(?:-[A-WY-Za-wy-z0-9](?:-[A-Za-z0-9]{2,8})+)*$": { "description": "String with no leading/trailing whitespace", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "type": "string" } }, @@ -182,7 +201,7 @@ "primary": { "description": "The most commonly used name.", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Primary", "type": "string" }, @@ -252,25 +271,6 @@ "title": "Perspectives", "type": "object" }, - "PlaceType": { - "description": "Category of the division from a finite, 
hierarchical, ordered list of categories (e.g., country,\nregion, locality, etc.) similar to a Who's on First placetype.", - "enum": [ - "country", - "dependency", - "macroregion", - "region", - "macrocounty", - "county", - "localadmin", - "locality", - "borough", - "macrohood", - "neighborhood", - "microhood" - ], - "title": "PlaceType", - "type": "string" - }, "Side": { "description": "The side, left or right, on which something appears relative to a facing or heading direction\n(*e.g.*, the side of a road relative to the road orientation), or relative to the direction of\ntravel of a person or vehicle.", "enum": [ @@ -310,7 +310,7 @@ }, "license": { "description": "Source data license name.\n\nThis should be a valid SPDX license identifier when available.\n\nIf omitted, contact the data provider for more license information.", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "License", "type": "string" }, @@ -340,7 +340,7 @@ } }, "additionalProperties": false, - "description": "Divisions are recognized official or non-official organizations of people as seen\nfrom a given political perspective.\n\nExamples include countries, provinces, cities, towns, neighborhoods, etc.", + "description": "Divisions are recognized official or non-official organizations of people as seen from a given\npolitical perspective.\n\nExamples include countries, provinces, cities, towns, neighborhoods, etc.", "properties": { "bbox": { "description": "An optional bounding box for the feature", @@ -353,7 +353,7 @@ "type": "array" }, "geometry": { - "description": "Approximate location of a position commonly associated with the real-world entity modeled by the division feature.", + "description": "Approximate location of a position commonly associated with the real-world entity\nmodeled by the division feature.", "properties": { "bbox": { "items": { @@ -401,6 +401,13 @@ } }, "then": { + "properties": { + "admin_level": { + "not": { + "type": "null" + } + } + }, 
"required": [ "admin_level" ] @@ -415,6 +422,13 @@ } }, "then": { + "properties": { + "admin_level": { + "not": { + "type": "null" + } + } + }, "required": [ "admin_level" ] @@ -429,6 +443,13 @@ } }, "then": { + "properties": { + "admin_level": { + "not": { + "type": "null" + } + } + }, "required": [ "admin_level" ] @@ -443,6 +464,13 @@ } }, "then": { + "properties": { + "admin_level": { + "not": { + "type": "null" + } + } + }, "required": [ "admin_level" ] @@ -457,6 +485,13 @@ } }, "then": { + "properties": { + "admin_level": { + "not": { + "type": "null" + } + } + }, "required": [ "admin_level" ] @@ -471,6 +506,13 @@ } }, "then": { + "properties": { + "admin_level": { + "not": { + "type": "null" + } + } + }, "required": [ "admin_level" ] @@ -487,6 +529,13 @@ } }, "then": { + "properties": { + "parent_division_id": { + "not": { + "type": "null" + } + } + }, "required": [ "parent_division_id" ] @@ -502,6 +551,13 @@ }, "then": { "not": { + "properties": { + "parent_division_id": { + "not": { + "type": "null" + } + } + }, "required": [ "parent_division_id" ] @@ -523,14 +579,14 @@ }, "properties": { "admin_level": { - "description": "Integer representing the division's position in its country's administrative hierarchy, where lower numbers correspond to higher level administrative units.", - "maximum": 255, + "description": "Integer representing the division's position in its country's administrative\nhierarchy, where lower numbers correspond to higher level administrative units.", + "maximum": 16, "minimum": 0, "title": "Admin Level", "type": "integer" }, "capital_division_ids": { - "description": "Division IDs of this division's capital divisions. If present, this property will refer to the division IDs of the capital cities, county seats, etc. of a division.", + "description": "Division IDs of this division's capital divisions. If present, this property will\nrefer to the division IDs of the capital cities, county seats, etc. 
of a division.", "items": { "description": "A unique identifier", "minLength": 1, @@ -554,13 +610,14 @@ }, "cartography": { "$ref": "#/$defs/CartographicHints", - "title": "cartography" + "description": "Cartographic hints useful when including the feature in maps" }, "class": { - "$ref": "#/$defs/DivisionClass" + "$ref": "#/$defs/DivisionClass", + "description": "A more specific classification of the division than is provided by `subtype`." }, "country": { - "description": "ISO 3166-1 alpha-2 country code of the country or country-like entity, that this division represents or belongs to.\n\nIf the entity this division represents has a country code, the country property contains it. If it does not, the country property contains the country code of the first division encountered by traversing the parent_division_id chain to the root.\n\nFor example:\n - The country value for the United States is 'US'\n - The country value for New York City is 'US'\n - The country value for Puerto Rico, a dependency of the US,\n is 'PR'.\n - The country value for San Juan, Puerto Rico is 'PR'.\n\nIf an entity has an internationally-recognized ISO 3166-1 alpha-2 country code, it should always be used. In cases where the schema requires the code but no internationally-recognized code is available, a synthetic code may be used provided it does not conflict with any internationally-recognized codes.", + "description": "ISO 3166-1 alpha-2 country code of the country or country-like entity, that this\ndivision represents or belongs to.\n\nIf the entity this division represents has a country code, the country property\ncontains it. 
If it does not, the country property contains the country code of the\nfirst division encountered by traversing the parent_division_id chain to the root.\n\nFor example:\n- The country value for the United States is 'US'\n- The country value for New York City is 'US'\n- The country value for Puerto Rico, a dependency of the US, is 'PR'.\n- The country value for San Juan, Puerto Rico is 'PR'.\n\nIf an entity has an internationally-recognized ISO 3166-1 alpha-2 country code, it\nshould always be used. In cases where the schema requires the code but no\ninternationally-recognized code is available, a synthetic code may be used provided\nit does not conflict with any internationally-recognized codes.", "maxLength": 2, "minLength": 2, "pattern": "^[A-Z]{2}$", @@ -568,9 +625,9 @@ "type": "string" }, "hierarchies": { - "description": "Hierarchies in which this division participates.\n\nEvery division participates in at least one hierarchy. Most participate in only one. Some divisions may participate in more than one hierarchy, for example if they are claimed by different parent divisions from different political perspectives; or if there are other real-world reasons why the division or one of its ancestors has multiple parents.\n\nThe first hierarchy in the list is the default hierarchy, and the second-to-last entry in the default hierarchy (if there is such an entry) always corresponds to the `parent_division_id' property. The ordering of hierarchies after the first one is arbitrary.", + "description": "Hierarchies in which this division participates.\n\nEvery division participates in at least one hierarchy. 
Most participate in only one.\nSome divisions may participate in more than one hierarchy, for example if they are\nclaimed by different parent divisions from different political perspectives; or if\nthere are other real-world reasons why the division or one of its ancestors has\nmultiple parents.\n\nThe first hierarchy in the list is the default hierarchy, and the second-to-last\nentry in the default hierarchy (if there is such an entry) always corresponds to the\n`parent_division_id` property. The ordering of hierarchies after the first one is\narbitrary.", "items": { - "description": "A hierarchy of divisions, with the first entry being a country; each subsequent entry, if any, being a division that is a direct child of the previous entry; and the last entry representing the division that contains the hierarchy.\n\nFor example, a hierarchy for the United States is simply [United States]. A hierarchy for the U.S. state of New Hampshire would be [United States, New Hampshire], and a hierarchy for the city of Concord, NH would be [United States, New Hampshire, Merrimack County, Concord].", + "description": "A hierarchy of divisions, with the first entry being a country; each subsequent\nentry, if any, being a division that is a direct child of the previous entry; and\nthe last entry representing the division that contains the hierarchy.\n\nFor example, a hierarchy for the United States is simply [United States]. A\nhierarchy for the U.S. state of New Hampshire would be\n[United States, New Hampshire], and a hierarchy for the city of Concord, NH would be\n[United States, New Hampshire, Merrimack County, Concord].", "items": { "$ref": "#/$defs/HierarchyItem" }, @@ -585,11 +642,11 @@ }, "local_type": { "additionalProperties": false, - "description": "Local name for the subtype property, optionally localized.\n\nFor example, the Canadian province of Quebec has the subtype 'region', but in the local administrative hierarchy it is referred to as a 'province'. 
Similarly, the Canadian Yukon territory also has subtype 'region', but is locally called a 'territory'.\n\nThis property is localized using a standard Overture names structure. So for example, in Switzerland the top-level administrative subdivision corresponding to subtype 'region' is the canton, which is may be translated in each of Switzerland's official languages as, 'canton' in French, 'kanton' in German, 'cantone' in Italian, and 'chantun' in Romansh.", + "description": "Local name for the subtype property, optionally localized.\n\nFor example, the Canadian province of Quebec has the subtype `\"region\"`, but in the\nlocal administrative hierarchy it is referred to as a province. Similarly, the\nCanadian Yukon territory also has subtype `\"region\"`, but is locally called a\nterritory.\n\nThis property is localized using a standard Overture names structure. So for\nexample, in Switzerland the top-level administrative subdivision corresponding to\nsubtype 'region' is the canton, which may be translated in each of Switzerland's\nofficial languages as, 'canton' in French, 'kanton' in German, 'cantone' in Italian,\nand 'chantun' in Romansh.", "patternProperties": { "^(?:(?:[A-Za-z]{2,3}(?:-[A-Za-z]{3}){0,3}?)|(?:[A-Za-z]{4,8}))(?:-[A-Za-z]{4})?(?:-[A-Za-z]{2}|[0-9]{3})?(?:-(?:[A-Za-z0-9]{5,8}|[0-9][A-Za-z0-9]{3}))*(?:-[A-WY-Za-wy-z0-9](?:-[A-Za-z0-9]{2,8})+)*$": { "description": "String with no leading/trailing whitespace", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "type": "string" } }, @@ -600,14 +657,15 @@ "type": "object" }, "names": { - "$ref": "#/$defs/Names" + "$ref": "#/$defs/Names", + "description": "All known names by which the division is called" }, "norms": { "$ref": "#/$defs/Norms", - "description": "Collects information about local norms and rules within the division that are generally useful for mapping and map-related use cases.\n\nIf the norms property or a desired sub-property of the norms property is missing on a division, 
but at least one of its ancestor divisions has the norms property and the desired sub-property, then the value from the nearest ancestor division may be assumed." + "description": "Collects information about local norms and rules within the division that are\ngenerally useful for mapping and map-related use cases.\n\nIf the norms property or a desired sub-property of the norms property is missing\non a division, but at least one of its ancestor divisions has the norms property\nand the desired sub-property, then the value from the nearest ancestor division\nmay be assumed." }, "parent_division_id": { - "description": "Division ID of this division's parent division.\n\nNot allowed for top-level divisions (countries) and required for all other divisions.\n\nThe default parent division is the parent division as seen from the default political perspective, if there is one, and is otherwise chosen somewhat arbitrarily. The hierarchies property can be used to inspect the exhaustive list of parent divisions.", + "description": "Division ID of this division's parent division.\n\nNot allowed for top-level divisions (countries) and required for all other\ndivisions.\n\nThe default parent division is the parent division as seen from the default\npolitical perspective, if there is one, and is otherwise chosen somewhat\narbitrarily. The hierarchies property can be used to inspect the exhaustive list of\nparent divisions.", "minLength": 1, "pattern": "^\\S+$", "title": "Parent Division Id", @@ -615,7 +673,7 @@ }, "perspectives": { "$ref": "#/$defs/Perspectives", - "description": "Political perspectives from which this division is considered to be an accurate representation.\n\nIf this property is absent, then this division is not known to be disputed from any political perspective. 
Consequently, there is only one division feature representing the entire real world entity.\n\nIf this property is present, it means the division represents one of several alternative perspectives on the same real-world entity.\n\nThere are two modes of perspective:\n\n1. `accepted_by` means the representation of the division is accepted by the listed entities and would be included on a map drawn from their perspective.\n\n2. `disputed_by` means the representation of the division is disputed by the listed entities and would be excluded from a map drawn from their perspective.\n\nWhen drawing a map from the perspective of a given country, one would start by gathering all the undisputed divisions (with no `perspectives` property), and then adding to that first all divisions explicitly accepted by the country, and second all divisions not explicitly disputed by the country." + "description": "Political perspectives from which this division is considered to be an accurate\nrepresentation.\n\nIf this property is absent, then this division is not known to be disputed from\nany political perspective. Consequently, there is only one division feature\nrepresenting the entire real world entity.\n\nIf this property is present, it means the division represents one of several\nalternative perspectives on the same real-world entity.\n\nThere are two modes of perspective:\n\n1. `accepted_by` means the representation of the division is accepted by the\n listed entities and would be included on a map drawn from their perspective.\n\n2. 
`disputed_by` means the representation of the division is disputed by the\n listed entities and would be excluded from a map drawn from their perspective.\n\nWhen drawing a map from the perspective of a given country, one would start by\ngathering all the undisputed divisions (with no `perspectives` property), and then\nadding to that first all divisions explicitly accepted by the country, and second\nall divisions not explicitly disputed by the country." }, "population": { "description": "Population of the division", @@ -625,7 +683,7 @@ "type": "integer" }, "region": { - "description": "ISO 3166-2 principal subdivision code of the subdivision-like entity this division represents or belongs to.\n\nIf the entity this division represents has a principal subdivision code, the region property contains it. If it does not, the region property contains the principal subdivision code of the first division encountered by traversing the parent_division_id chain to the root.\n\nFor example:\n - The region value for the United States is omitted.\n - The region value for the U.S. state of New York is 'US-NY'.\n - The region value for New York City is 'US-NY', which it\n inherits from the state of New York.\n - The region value for Puerto Rico is 'US-PR'.", + "description": "ISO 3166-2 principal subdivision code of the subdivision-like entity this division\nrepresents or belongs to.\n\nIf the entity this division represents has a principal subdivision code, the region\nproperty contains it. If it does not, the region property contains the principal\nsubdivision code of the first division encountered by traversing the\n`parent_division_id` chain to the root.\n\nFor example:\n- The region value for the United States is omitted.\n- The region value for the U.S. 
state of New York is 'US-NY'.\n- The region value for New York City is 'US-NY', which it inherits from the state\n of New York.\n- The region value for Puerto Rico is 'US-PR'.", "maxLength": 6, "minLength": 4, "pattern": "^[A-Z]{2}-[A-Z0-9]{1,3}$", @@ -643,7 +701,8 @@ "uniqueItems": true }, "subtype": { - "$ref": "#/$defs/PlaceType" + "$ref": "#/$defs/DivisionSubtype", + "description": "A broad classification of the division (e.g., country, region, locality, etc.)." }, "theme": { "const": "divisions", diff --git a/packages/overture-schema-divisions-theme/tests/division_boundary_baseline_schema.json b/packages/overture-schema-divisions-theme/tests/division_boundary_baseline_schema.json index 336d2d484..82b9b38b9 100644 --- a/packages/overture-schema-divisions-theme/tests/division_boundary_baseline_schema.json +++ b/packages/overture-schema-divisions-theme/tests/division_boundary_baseline_schema.json @@ -1,6 +1,7 @@ { "$defs": { "BoundaryClass": { + "description": "The kind of boundary: land or maritime.", "enum": [ "land", "maritime" @@ -8,6 +9,25 @@ "title": "BoundaryClass", "type": "string" }, + "DivisionSubtype": { + "description": "Category of the division from a finite, hierarchical, ordered list of categories (e.g., country,\nregion, locality, etc.) similar to a Who's on First placetype.", + "enum": [ + "country", + "dependency", + "macroregion", + "region", + "macrocounty", + "county", + "localadmin", + "locality", + "borough", + "macrohood", + "neighborhood", + "microhood" + ], + "title": "DivisionSubtype", + "type": "string" + }, "PerspectiveMode": { "description": "Perspective mode for disputed names.", "enum": [ @@ -47,25 +67,6 @@ "title": "Perspectives", "type": "object" }, - "PlaceType": { - "description": "Category of the division from a finite, hierarchical, ordered list of categories (e.g., country,\nregion, locality, etc.) 
similar to a Who's on First placetype.", - "enum": [ - "country", - "dependency", - "macroregion", - "region", - "macrocounty", - "county", - "localadmin", - "locality", - "borough", - "macrohood", - "neighborhood", - "microhood" - ], - "title": "PlaceType", - "type": "string" - }, "SourceItem": { "additionalProperties": false, "description": "Specifies the source of the data used for a feature or one of its properties.", @@ -96,7 +97,7 @@ }, "license": { "description": "Source data license name.\n\nThis should be a valid SPDX license identifier when available.\n\nIf omitted, contact the data provider for more license information.", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "License", "type": "string" }, @@ -231,6 +232,13 @@ } }, "then": { + "properties": { + "admin_level": { + "not": { + "type": "null" + } + } + }, "required": [ "admin_level" ] @@ -245,6 +253,13 @@ } }, "then": { + "properties": { + "admin_level": { + "not": { + "type": "null" + } + } + }, "required": [ "admin_level" ] @@ -259,6 +274,13 @@ } }, "then": { + "properties": { + "admin_level": { + "not": { + "type": "null" + } + } + }, "required": [ "admin_level" ] @@ -273,6 +295,13 @@ } }, "then": { + "properties": { + "admin_level": { + "not": { + "type": "null" + } + } + }, "required": [ "admin_level" ] @@ -287,6 +316,13 @@ } }, "then": { + "properties": { + "admin_level": { + "not": { + "type": "null" + } + } + }, "required": [ "admin_level" ] @@ -301,6 +337,13 @@ } }, "then": { + "properties": { + "admin_level": { + "not": { + "type": "null" + } + } + }, "required": [ "admin_level" ] @@ -317,6 +360,13 @@ } }, "then": { + "properties": { + "country": { + "not": { + "type": "null" + } + } + }, "required": [ "country" ] @@ -332,6 +382,13 @@ }, "then": { "not": { + "properties": { + "country": { + "not": { + "type": "null" + } + } + }, "required": [ "country" ] @@ -369,8 +426,8 @@ }, "properties": { "admin_level": { - "description": "Integer representing the division's 
position in its country's administrative hierarchy, where lower numbers correspond to higher level administrative units.", - "maximum": 255, + "description": "Integer representing the division's position in its country's administrative\nhierarchy, where lower numbers correspond to higher level administrative units.", + "maximum": 16, "minimum": 0, "title": "Admin Level", "type": "integer" @@ -379,7 +436,7 @@ "$ref": "#/$defs/BoundaryClass" }, "country": { - "description": "ISO 3166-1 alpha-2 country code of the country or country-like\nentity that both sides of the boundary share.\n\nThis property will be present on boundaries between two regions, counties,\nor similar entities within the same country, but will not be present on boundaries\nbetween two countries or country-like entities.", + "description": "ISO 3166-1 alpha-2 country code of the country or country-like entity that\nboth sides of the boundary share.\n\nThis property will be present on boundaries between two regions, counties,\nor similar entities within the same country, but will not be present on\nboundaries between two countries or country-like entities.", "maxLength": 2, "minLength": 2, "pattern": "^[A-Z]{2}$", @@ -387,7 +444,7 @@ "type": "string" }, "division_ids": { - "description": "Identifies the two divisions to the left and right, respectively, of the boundary line. The left- and right-hand sides of the boundary are considered from the perspective of a person standing on the line facing in the direction in which the geometry is oriented, i.e. facing toward the end of the line.\n\nThe first array element is the Overture ID of the left division. The second element is the Overture ID of the right division.", + "description": "Identifies the two divisions to the left and right, respectively, of the\nboundary line. The left- and right-hand sides of the boundary are considered\nfrom the perspective of a person standing on the line facing in the direction\nin which the geometry is oriented, i.e. 
facing toward the end of the line.\n\nThe first array element is the Overture ID of the left division. The second\nelement is the Overture ID of the right division.", "items": { "description": "A unique identifier", "minLength": 1, @@ -401,26 +458,26 @@ "uniqueItems": true }, "is_disputed": { - "description": "Indicator if there are entities disputing this division boundary.\nInformation about entities disputing this boundary should be included in perspectives\nproperty.\n\nThis property should also be true if boundary between two entities is unclear\nand this is \"best guess\". So having it true and no perspectives gives map creators\nreason not to fully trust the boundary, but use it if they have no other.", + "description": "Flag indicating whether this boundary is either disputed outright or is a \"best\nguess\" in a case where the boundary between two divisions is unclear.\n\nIf the boundary is disputed outright, this flag is true and the entities disputing\nit are listed in the `perspectives` property. If the boundary is simply a \"best\nguess\", this flag is true but no disputing entities are listed in `perspectives`.", "title": "Is Disputed", "type": "boolean" }, "is_land": { - "description": "A boolean to indicate whether or not the feature geometry represents the\nland-clipped, non-maritime boundary. The geometry can be used for map\nrendering, cartographic display, and similar purposes.", + "description": "Flag indicating whether or not the feature geometry represents the land-clipped,\nnon-maritime boundary. The geometry can be used for map rendering, cartographic\ndisplay, and similar purposes.", "title": "Is Land", "type": "boolean" }, "is_territorial": { - "description": "A boolean to indicate whether or not the feature geometry represents\nOverture's best approximation of this place's maritime boundary. For\ncoastal places, this would tend to include the water area. 
The geometry\ncan be used for data processing, reverse-geocoding, and similar purposes.", + "description": "Flag indicating whether or not the feature geometry represents Overture's best\napproximation of the division's territorial boundary. For coastal places, this will\ntend to include the water area. The geometry can be used for data processing,\nreverse-geocoding, and similar purposes.", "title": "Is Territorial", "type": "boolean" }, "perspectives": { "$ref": "#/$defs/Perspectives", - "description": "Political perspectives from which this division boundary is considered to be an accurate representation.\n\nIf this property is absent, then this boundary is not known to be disputed from any political perspective. Consequently, there is only one boundary feature representing the entire real world entity.\n\nIf this property is present, it means the boundary represents one of several alternative perspectives on the same real-world entity.\n\nThere are two modes of perspective:\n\n 1. `accepted_by` means the representation of the boundary is accepted by the listed entities and would be included on a map drawn from their perspective.\n\n 2. `disputed_by` means the representation of the boundary is disputed by the listed entities and would be excluded from a map drawn from their perspective.\n\nWhen drawing a map from the perspective of a given country, one would start by gathering all the undisputed boundary (with no `perspectives` property), and then adding to that first all boundary explicitly accepted by the country, and second all boundary not explicitly disputed by the country." + "description": "Political perspectives from which this division boundary is considered to be\nan accurate representation.\n\nIf this property is absent, then this boundary is not known to be disputed\nfrom any political perspective. 
Consequently, there is only one boundary\nfeature representing the entire real world entity.\n\nIf this property is present, it means the boundary represents one of several\nalternative perspectives on the same real-world entity.\n\nThere are two modes of perspective:\n\n1. `accepted_by` means the representation of the boundary is accepted by the\n listed entities and would be included on a map drawn from their perspective.\n\n2. `disputed_by` means the representation of the boundary is disputed by the\n listed entities and would be excluded from a map drawn from their\n perspective.\n\nWhen drawing a map from the perspective of a given country, one would start by\ngathering all the undisputed boundaries (those with no `perspectives` value); and\nthen adding to that: first, all boundaries explicitly accepted by the country, and\nsecond, all boundaries not explicitly disputed by the country." }, "region": { - "description": "ISO 3166-2 principal subdivision code of the subdivision-like\nentity that both sides of the boundary share.\n\nThis property will be present on boundaries between two counties, localadmins\nor similar entities within the same principal subdivision, but will not be\npresent on boundaries between different principal subdivisions or countries.", + "description": "ISO 3166-2 principal subdivision code of the subdivision-like entity that\nboth sides of the boundary share.\n\nThis property will be present on boundaries between two counties, localadmins\nor similar entities within the same principal subdivision, but will not be\npresent on boundaries between different principal subdivisions or countries.", "maxLength": 6, "minLength": 4, "pattern": "^[A-Z]{2}-[A-Z0-9]{1,3}$", @@ -438,7 +495,8 @@ "uniqueItems": true }, "subtype": { - "$ref": "#/$defs/PlaceType" + "$ref": "#/$defs/DivisionSubtype", + "description": "A broad classification of the divisions this boundary separates (e.g., country,\nregion, locality, etc.)." 
}, "theme": { "const": "divisions", diff --git a/packages/overture-schema-places-theme/pyproject.toml b/packages/overture-schema-places-theme/pyproject.toml index 40877a435..48abbcbfa 100644 --- a/packages/overture-schema-places-theme/pyproject.toml +++ b/packages/overture-schema-places-theme/pyproject.toml @@ -1,9 +1,11 @@ [project] +maintainers = [ + {name = "Overture Maps Schema Working Group"}, +] dependencies = [ "overture-schema-core", "overture-schema-system", - "pydantic>=2.0", - "pydantic[email]", + "pydantic[email]>=2.12.0", ] description = "Overture Maps places theme with place type models" dynamic = ["version"] @@ -12,6 +14,11 @@ name = "overture-schema-places-theme" readme = "README.md" requires-python = ">=3.10" +[project.urls] +Homepage = "https://overturemaps.org" +Source = "https://github.com/OvertureMaps/schema" +Issues = "https://github.com/OvertureMaps/schema/issues" + [tool.uv.sources] overture-schema-core = { workspace = true } overture-schema-system = { workspace = true } @@ -28,3 +35,51 @@ packages = ["src/overture"] [project.entry-points."overture.models"] "overture:places:place" = "overture.schema.places:Place" + +[[examples.Place]] +id = "99003ee6-e75b-4dd6-8a8a-53a5a716c50d" +geometry = "POINT (-150.46875 -79.1713346)" +version = 1 +confidence = 0.7337175792507205 +websites = [ + "https://www.superhotel.co.jp/s_hotels/beppu/" +] +socials = [ + "https://www.facebook.com/107663894904826" +] +phones = [ + "+81977009000" +] +operating_status = "open" +theme = "places" +type = "place" + +[examples.Place.bbox] +xmin = -150.46875 +xmax = -150.46875 +ymin = -79.17134094238281 +ymax = -79.17133331298828 + +[[examples.Place.sources]] +property = "" +dataset = "meta" +record_id = "107663894904826" +update_time = "2025-06-30T07:00:00.000Z" +confidence = 0.7337175792507205 + +[examples.Place.names] +primary = "スーパーホテル別府駅前" + +[examples.Place.categories] +primary = "hotel" + +[examples.Place.brand] + +[examples.Place.brand.names] +primary = "SUPER 
HOTEL" + +[[examples.Place.addresses]] +freeform = "秋田県横手市駅前町13−8" +locality = "横手市" +postcode = "013-0036" +country = "JP" diff --git a/packages/overture-schema-places-theme/src/overture/schema/places/__about__.py b/packages/overture-schema-places-theme/src/overture/schema/places/__about__.py index 3dc1f76bc..5e3bb11ca 100644 --- a/packages/overture-schema-places-theme/src/overture/schema/places/__about__.py +++ b/packages/overture-schema-places-theme/src/overture/schema/places/__about__.py @@ -1 +1 @@ -__version__ = "0.1.0" +__version__ = "0.1.1.dev1" diff --git a/packages/overture-schema-places-theme/src/overture/schema/places/place.py b/packages/overture-schema-places-theme/src/overture/schema/places/place.py index b7ac4d415..f086933c3 100644 --- a/packages/overture-schema-places-theme/src/overture/schema/places/place.py +++ b/packages/overture-schema-places-theme/src/overture/schema/places/place.py @@ -205,10 +205,10 @@ class Place(OvertureFeature[Literal["places"], Literal["place"]], Named): ), ] - # Required + # Optional operating_status: Annotated[ - OperatingStatus, + OperatingStatus | None, Field( description=textwrap.dedent(""" An indication of whether a place is: in continued operation, in a temporary @@ -221,9 +221,7 @@ class Place(OvertureFeature[Literal["places"], Literal["place"]], Named): set to 0. 
""").strip(), ), - ] - - # Optional + ] = None categories: Categories | None = None basic_category: Annotated[ diff --git a/packages/overture-schema-places-theme/tests/place_baseline_schema.json b/packages/overture-schema-places-theme/tests/place_baseline_schema.json index b4d37759b..b8752ff24 100644 --- a/packages/overture-schema-places-theme/tests/place_baseline_schema.json +++ b/packages/overture-schema-places-theme/tests/place_baseline_schema.json @@ -44,7 +44,8 @@ "description": "A brand associated with a place.\n\nA location with multiple brands is modeled as multiple separate places, each with its own brand.", "properties": { "names": { - "$ref": "#/$defs/Names" + "$ref": "#/$defs/Names", + "description": "All known names by which the feature is called" }, "wikidata": { "description": "Wikidata identifier (Q followed by digits)", @@ -117,7 +118,7 @@ "value": { "description": "The actual name value.", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Value", "type": "string" }, @@ -153,7 +154,7 @@ "patternProperties": { "^(?:(?:[A-Za-z]{2,3}(?:-[A-Za-z]{3}){0,3}?)|(?:[A-Za-z]{4,8}))(?:-[A-Za-z]{4})?(?:-[A-Za-z]{2}|[0-9]{3})?(?:-(?:[A-Za-z0-9]{5,8}|[0-9][A-Za-z0-9]{3}))*(?:-[A-WY-Za-wy-z0-9](?:-[A-Za-z0-9]{2,8})+)*$": { "description": "String with no leading/trailing whitespace", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "type": "string" } }, @@ -166,7 +167,7 @@ "primary": { "description": "The most commonly used name.", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Primary", "type": "string" }, @@ -273,7 +274,7 @@ }, "license": { "description": "Source data license name.\n\nThis should be a valid SPDX license identifier when available.\n\nIf omitted, contact the data provider for more license information.", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "License", "type": "string" }, @@ -450,7 +451,8 @@ "uniqueItems": true }, 
"names": { - "$ref": "#/$defs/Names" + "$ref": "#/$defs/Names", + "description": "All known names by which the feature is called" }, "operating_status": { "$ref": "#/$defs/OperatingStatus", @@ -528,8 +530,7 @@ "required": [ "theme", "type", - "version", - "operating_status" + "version" ], "type": "object" }, diff --git a/packages/overture-schema-system/README.md b/packages/overture-schema-system/README.md index 33764e38e..238984eb0 100644 --- a/packages/overture-schema-system/README.md +++ b/packages/overture-schema-system/README.md @@ -1 +1,117 @@ -todo: README +# Overture Schema System + +Write Pydantic models once, get validated data that serializes correctly to JSON, Parquet, and Spark. This package provides the primitive types, constraint decorators, and GeoJSON-aware base class that make Pydantic models portable across serialization targets. + +## Installation + +```bash +pip install overture-schema-system +``` + +## Feature + +GeoJSON-compatible Pydantic base model. Subclasses serialize to the GeoJSON format automatically -- `geometry` and `id` at the top level, everything else under `properties` -- and validate from it: + +```python +from overture.schema.system.feature import Feature +from overture.schema.system.primitive import Geometry, float32 + +class Mountain(Feature): + name: str + max_elevation: float32 + +m = Mountain( + geometry=Geometry.from_wkt("POINT(86.9252 27.9888)"), + name="Mount Everest", + max_elevation=8848.86, +) +``` + +## Primitive Types + +Using `int` and `float` in a Pydantic model produces valid Python but loses information downstream -- an `int` field becomes a 64-bit integer in Parquet, Arrow, and Spark StructTypes, even when the domain is 0--255. The primitive types (`uint8`, `int32`, `float32`, etc.) 
carry range constraints and map to the correct wire type so data round-trips cleanly between Python, Parquet files, PostgreSQL, and JSON Schema: + +```python +from pydantic import BaseModel +from overture.schema.system.primitive import uint8, float32 + +class Building(BaseModel): + height: float32 | None = None + num_floors: uint8 | None = None +``` + +Integer types: `uint8`, `uint16`, `uint32`, `int8`, `int16`, `int32`, `int64`. Float types: `float32`, `float64`. Geometry types: `Geometry`, `BBox`, `GeometryType`, `GeometryTypeConstraint`. + +## String Types + +Validated string types that carry their constraints into generated JSON Schemas and downstream code generation. Using `CountryCodeAlpha2` instead of `str` means Pydantic rejects `"USA"` at validation time, JSON Schema gets the right pattern, and codegen tools produce typed output: + +```python +from overture.schema.system.string import CountryCodeAlpha2, LanguageTag +``` + +Available types: `CountryCodeAlpha2`, `RegionCode`, `LanguageTag`, `HexColor`, `JsonPointer`, `PhoneNumber`, `StrippedString`, `SnakeCaseString`, `NoWhitespaceString`, `WikidataId`. + +## Field Constraints + +Annotations for Pydantic fields that enforce domain rules beyond what the type alone expresses. Each constraint produces the corresponding JSON Schema keywords (e.g., `pattern`, `uniqueItems`) and is introspectable by code generation tools -- unlike Pydantic's `@field_validator`, which runs in Python only. Apply via `Annotated`: + +```python +from typing import Annotated +from pydantic import BaseModel, Field +from overture.schema.system.field_constraint import UniqueItemsConstraint, PatternConstraint + +OsmIdConstraint = PatternConstraint( + pattern=r"^[nwr]\d+$", + error_message="invalid OSM ID format: {value}. 
Must be n123, w123, or r123.", +) + +class MyModel(BaseModel): + osm_id: Annotated[str, OsmIdConstraint] + tags: Annotated[list[str], UniqueItemsConstraint()] = Field(min_length=1) +``` + +Built-in constraints include `PatternConstraint`, `StrippedConstraint`, `UniqueItemsConstraint`, and all the string-type constraints (`CountryCodeAlpha2Constraint`, `HexColorConstraint`, etc.). All produce error messages with domain context. + +## Model Constraints + +Class-level decorators for cross-field validation -- relationships between fields that no single field annotation can express. Each decorator produces corresponding JSON Schema constructs (`if`/`then`, `anyOf`, etc.) and is introspectable for code generation: + +```python +from pydantic import BaseModel +from overture.schema.system.model_constraint import require_any_of + +@require_any_of("email", "phone") +class Contact(BaseModel): + email: str | None = None + phone: str | None = None +``` + +- `@require_any_of("a", "b", ...)` -- at least one field must be non-None +- `@radio_group("a", "b", ...)` -- at most one field may be truthy +- `@require_if("target", condition)` -- field required when condition holds +- `@forbid_if("target", condition)` -- field forbidden when condition holds +- `@min_fields_set(n, "a", "b", ...)` -- at least *n* fields must be set +- `@no_extra_fields` -- reject unrecognized fields (equivalent to `model_config = ConfigDict(extra="forbid")`) + +## References + +Foreign-key-style annotations that describe relationships between models. 
These carry no runtime enforcement but provide metadata for code generation and documentation tools: + +```python +from typing import Annotated +from overture.schema.system.ref import Id, Identified, Reference, Relationship + +class Park(Identified): + pass + +class ParkBench(Identified): + park_id: Annotated[Id, Reference(Relationship.BELONGS_TO, Park)] +``` + +## Also Included + +- **Optionality** -- `Omitable[T]` models JSON Schema's "may be absent but not null" semantics, which Pydantic's `T | None` conflates with nullable. +- **DocumentedEnum** -- base class for enumerations whose members carry their own docstrings, enabling code generation tools to produce documented output. +- **Metadata** -- internal key-value store used by model constraints to attach data to classes. +- **JSON Schema** -- schema generator that treats `T | None = None` as "omit when unset" rather than Pydantic's default "nullable with null default." Also handles unions of models. diff --git a/packages/overture-schema-system/pyproject.toml b/packages/overture-schema-system/pyproject.toml index f1f4f8212..d81c649ce 100644 --- a/packages/overture-schema-system/pyproject.toml +++ b/packages/overture-schema-system/pyproject.toml @@ -4,21 +4,29 @@ build-backend = "hatchling.build" [project] name = "overture-schema-system" +maintainers = [ + {name = "Overture Maps Schema Working Group"}, +] dynamic = ["version"] description = "Foundational types at the base of the Overture Maps schema system" readme = "README.md" requires-python = ">=3.10" license = "MIT" dependencies = [ - "pydantic>=2.0.0", + "pydantic>=2.12.0", "shapely>=2.0.0", ] +[project.urls] +Homepage = "https://overturemaps.org" +Source = "https://github.com/OvertureMaps/schema" +Issues = "https://github.com/OvertureMaps/schema/issues" + [dependency-groups] dev = [ - "pytest>=7.0", - "ruff", - "mypy", + "pytest>=9.0.0", + "ruff>=0.13.0", + "mypy>=1.17.0", ] [tool.hatch.version] diff --git 
a/packages/overture-schema-system/src/overture/schema/system/__about__.py b/packages/overture-schema-system/src/overture/schema/system/__about__.py index 3dc1f76bc..5e3bb11ca 100644 --- a/packages/overture-schema-system/src/overture/schema/system/__about__.py +++ b/packages/overture-schema-system/src/overture/schema/system/__about__.py @@ -1 +1 @@ -__version__ = "0.1.0" +__version__ = "0.1.1.dev1" diff --git a/packages/overture-schema-system/src/overture/schema/system/__init__.py b/packages/overture-schema-system/src/overture/schema/system/__init__.py index c2ade1b8e..1e4b8be72 100644 --- a/packages/overture-schema-system/src/overture/schema/system/__init__.py +++ b/packages/overture-schema-system/src/overture/schema/system/__init__.py @@ -122,15 +122,19 @@ MyModel(foo=42, bar=None) >>> MyModel(bar="hello") # validates OK MyModel(foo=None, bar='hello') ->>> MyModel(foo=None, bar=None) # validates OK because foo and bar are explicitly set to `None` -MyModel(foo=None, bar=None) >>> >>> try: ... MyModel() ... except ValidationError as e: -... assert "at least one of these fields must be explicitly set, but none are: foo, bar" in str(e) -... print("Validation failed") -Validation failed +... assert "at least one of these fields must be set to a value other than None, but none are: foo, bar" in str(e) +... print("Validation failed (no fields set)") +Validation failed (no fields set) +>>> try: +... MyModel(foo=None, bar=None) +... except ValidationError as e: +... assert "at least one of these fields must be set to a value other than None, but none are: foo, bar" in str(e) +... print("Validation failed (all fields None)") +Validation failed (all fields None) Describe a foreign key relationship between two models where one model has a field that contains the unique identifier of another model. 
diff --git a/packages/overture-schema-system/src/overture/schema/system/_json_schema.py b/packages/overture-schema-system/src/overture/schema/system/_json_schema.py index c9afce006..64bc288ac 100644 --- a/packages/overture-schema-system/src/overture/schema/system/_json_schema.py +++ b/packages/overture-schema-system/src/overture/schema/system/_json_schema.py @@ -331,6 +331,30 @@ def try_move(key: str, src: JsonSchemaValue, dst: JsonSchemaValue) -> None: pass +def required_non_null(aliases: list[str]) -> JsonSchemaValue: + """ + Build a JSON Schema requiring listed properties to be present and non-null. + + Combines `"required"` (property must exist) with a per-property + constraint `{"not": {"type": "null"}}` (value must not be null). + + Parameters + ---------- + aliases : list[str] + Non-empty list of JSON Schema property names to constrain + + Returns + ------- + JsonSchemaValue + Schema requiring each property to be present and non-null + """ + _verify_operands_not_empty(str, aliases) + return { + "required": aliases, + "properties": {a: {"not": {"type": "null"}} for a in aliases}, + } + + T = TypeVar("T", JsonSchemaValue, str) diff --git a/packages/overture-schema-system/src/overture/schema/system/feature.py b/packages/overture-schema-system/src/overture/schema/system/feature.py index de3f14df6..82715014f 100644 --- a/packages/overture-schema-system/src/overture/schema/system/feature.py +++ b/packages/overture-schema-system/src/overture/schema/system/feature.py @@ -30,6 +30,33 @@ from overture.schema.system.ref import Id +def resolve_discriminator_field_name(discriminator: object) -> str | None: + """Resolve a Pydantic discriminator value to its field name string. + + Handles the three forms a discriminator can take: + - A plain string (used directly as the field name). + - A `pydantic.Discriminator` whose `.discriminator` attribute is a string. 
+ - A `pydantic.Discriminator` whose `.discriminator` is a callable + produced by `Feature.field_discriminator`, which stores the field name + as `_field_name` on the callable. + + Returns None if *discriminator* is None or its field name cannot be + determined. + """ + if discriminator is None: + return None + if isinstance(discriminator, str): + return discriminator + inner = getattr(discriminator, "discriminator", None) + if isinstance(inner, str): + return inner + if callable(inner): + field_name = getattr(inner, "_field_name", None) + if isinstance(field_name, str): + return field_name + return None + + class Feature(BaseModel): """ A feature is something you can point to on a map—like a building, road, lake, or park—with the @@ -206,7 +233,10 @@ def field_discriminator( Returns ------- Discriminator - Discriminator that enables discriminated unions that include features + Discriminator that enables discriminated unions that include features. + The inner callable carries a `_field_name` attribute set to *field*, + allowing introspection code to recover the discriminator field name + without hardcoding it. 
Raises ------ @@ -296,6 +326,7 @@ def get_discriminator_value(data: object) -> Any: else getattr(data, field, None) ) + get_discriminator_value._field_name = field # type: ignore[attr-defined] return Discriminator(get_discriminator_value) @model_serializer(mode="wrap") diff --git a/packages/overture-schema-system/src/overture/schema/system/field_constraint/collection.py b/packages/overture-schema-system/src/overture/schema/system/field_constraint/collection.py index a39159217..f8a699d91 100644 --- a/packages/overture-schema-system/src/overture/schema/system/field_constraint/collection.py +++ b/packages/overture-schema-system/src/overture/schema/system/field_constraint/collection.py @@ -43,7 +43,7 @@ def _is_collection_type(source: type[Any]) -> bool: class UniqueItemsConstraint(CollectionConstraint): - """Ensures all items in a collection are unique.""" + """All items must be unique.""" def validate(self, value: list[Any] | None, info: ValidationInfo) -> None: # Skip validation for None values (used with optional fields) diff --git a/packages/overture-schema-system/src/overture/schema/system/field_constraint/string.py b/packages/overture-schema-system/src/overture/schema/system/field_constraint/string.py index 8c2d90415..a057a3127 100644 --- a/packages/overture-schema-system/src/overture/schema/system/field_constraint/string.py +++ b/packages/overture-schema-system/src/overture/schema/system/field_constraint/string.py @@ -3,7 +3,7 @@ """ import re -from typing import Any +from typing import Any, NoReturn from pydantic import ( GetCoreSchemaHandler, @@ -23,6 +23,23 @@ class StringConstraint(FieldConstraint): """Base class for string-based constraints.""" + def _raise_validation_error( + self, value: str, info: ValidationInfo, message: str + ) -> NoReturn: + context = info.context or {} + loc = context.get("loc_prefix", ()) + ("value",) + raise ValidationError.from_exception_data( + title=self.__class__.__name__, + line_errors=[ + InitErrorDetails( + 
type="value_error", + loc=loc, + input=value, + ctx={"error": message}, + ) + ], + ) + def __get_pydantic_core_schema__( self, source: type[Any], handler: GetCoreSchemaHandler ) -> core_schema.CoreSchema: @@ -38,26 +55,45 @@ def validate_string(value: str, info: ValidationInfo) -> str: class PatternConstraint(StringConstraint): - """Generic pattern-based string constraint.""" + """Generic pattern-based string constraint. + + Parameters + ---------- + pattern : str + Regular expression to match against. + error_message : str + Error message template. Use `{value}` to interpolate the failing + value (the only available placeholder). + flags : int + Regex flags passed to `re.compile`. + description : str or None + JSON Schema `description` annotation. + min_length : int or None + JSON Schema `minLength` annotation. + max_length : int or None + JSON Schema `maxLength` annotation. + """ - def __init__(self, pattern: str, error_message: str, flags: int = 0): + def __init__( + self, + pattern: str, + error_message: str, + flags: int = 0, + *, + description: str | None = None, + min_length: int | None = None, + max_length: int | None = None, + ): self.pattern = re.compile(pattern, flags) self.error_message = error_message + self.description = description + self.min_length = min_length + self.max_length = max_length def validate(self, value: str, info: ValidationInfo) -> None: if not self.pattern.match(value): - context = info.context or {} - loc = context.get("loc_prefix", ()) + ("value",) - raise ValidationError.from_exception_data( - title=self.__class__.__name__, - line_errors=[ - InitErrorDetails( - type="value_error", - loc=loc, - input=value, - ctx={"error": self.error_message.format(value=value)}, - ) - ], + self._raise_validation_error( + value, info, self.error_message.format(value=value) ) def __get_pydantic_json_schema__( @@ -65,6 +101,12 @@ def __get_pydantic_json_schema__( ) -> dict[str, Any]: json_schema = handler(core_schema) json_schema["pattern"] = 
self.pattern.pattern + if self.description is not None: + json_schema["description"] = self.description + if self.min_length is not None: + json_schema["minLength"] = self.min_length + if self.max_length is not None: + json_schema["maxLength"] = self.max_length return json_schema @@ -73,72 +115,28 @@ def __get_pydantic_json_schema__( ######################################################################## -class CountryCodeAlpha2Constraint(StringConstraint): +class CountryCodeAlpha2Constraint(PatternConstraint): """Allows only ISO 3166-1 alpha-2 country codes.""" def __init__(self) -> None: - self.pattern = re.compile(r"^[A-Z]{2}$") - - def validate(self, value: str, info: ValidationInfo) -> None: - if not self.pattern.match(value): - context = info.context or {} - loc = context.get("loc_prefix", ()) + ("value",) - raise ValidationError.from_exception_data( - title=self.__class__.__name__, - line_errors=[ - InitErrorDetails( - type="value_error", - loc=loc, - input=value, - ctx={ - "error": f"Invalid ISO 3166-1 alpha-2 country code: {value}" - }, - ) - ], - ) - - def __get_pydantic_json_schema__( - self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler - ) -> dict[str, Any]: - json_schema = handler(core_schema) - json_schema["pattern"] = self.pattern.pattern - json_schema["minLength"] = 2 - json_schema["maxLength"] = 2 - json_schema["description"] = "ISO 3166-1 alpha-2 country code" - return json_schema + super().__init__( + pattern=r"^[A-Z]{2}$", + error_message="Invalid ISO 3166-1 alpha-2 country code: {value}", + description="ISO 3166-1 alpha-2 country code", + min_length=2, + max_length=2, + ) -class HexColorConstraint(StringConstraint): +class HexColorConstraint(PatternConstraint): """Allows only hexadecimal color codes (e.g., #FF0000 or #FFF).""" def __init__(self) -> None: - self.pattern = re.compile(r"^#[0-9A-Fa-f]{3}([0-9A-Fa-f]{3})?$") - - def validate(self, value: str, info: ValidationInfo) -> None: - if not self.pattern.match(value): - 
context = info.context or {} - loc = context.get("loc_prefix", ()) + ("value",) - raise ValidationError.from_exception_data( - title=self.__class__.__name__, - line_errors=[ - InitErrorDetails( - type="value_error", - loc=loc, - input=value, - ctx={ - "error": f"Invalid hexadecimal color format: {value}. Must be in format #RGB or #RRGGBB (e.g., #FFF or #FF0000)" - }, - ) - ], - ) - - def __get_pydantic_json_schema__( - self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler - ) -> dict[str, Any]: - json_schema = handler(core_schema) - json_schema["pattern"] = self.pattern.pattern - json_schema["description"] = "Hexadecimal color code in format #RGB or #RRGGBB" - return json_schema + super().__init__( + pattern=r"^#[0-9A-Fa-f]{3}([0-9A-Fa-f]{3})?$", + error_message="Invalid hexadecimal color format: {value}. Must be in format #RGB or #RRGGBB (e.g., #FFF or #FF0000)", + description="Hexadecimal color code in format #RGB or #RRGGBB", + ) class JsonPointerConstraint(StringConstraint): @@ -150,20 +148,10 @@ def validate(self, value: str, info: ValidationInfo) -> None: return if not value.startswith("/"): - context = info.context or {} - loc = context.get("loc_prefix", ()) + ("value",) - raise ValidationError.from_exception_data( - title=self.__class__.__name__, - line_errors=[ - InitErrorDetails( - type="value_error", - loc=loc, - input=value, - ctx={ - "error": f"JSON Pointer must start with '/' or be empty string: {value}" - }, - ) - ], + self._raise_validation_error( + value, + info, + f"JSON Pointer must start with '/' or be empty string: {value}", ) def __get_pydantic_json_schema__( @@ -174,7 +162,7 @@ def __get_pydantic_json_schema__( return json_schema -class LanguageTagConstraint(StringConstraint): +class LanguageTagConstraint(PatternConstraint): """ Allows only `BCP-47`_ language tags. 
@@ -188,237 +176,92 @@ class LanguageTagConstraint(StringConstraint): """ def __init__(self) -> None: - # In understanding the regular expression, remark that '(:?' indicates a non-capturing - # group, and that all the top-level or non-nested groups represent top-level components of - # `langtag` referenced in the syntax section of https://www.rfc-editor.org/rfc/bcp/bcp47.txt. - # In particular, the top-level groups in left-to-right order represent: - # - # 1. language - # 2. ["-" script] - # 3. ["-" region] - # 4. *("-" variant) - # 5. *("-" extension) - self.pattern = re.compile( - r"^(?:(?:[A-Za-z]{2,3}(?:-[A-Za-z]{3}){0,3}?)|(?:[A-Za-z]{4,8}))(?:-[A-Za-z]{4})?(?:-[A-Za-z]{2}|[0-9]{3})?(?:-(?:[A-Za-z0-9]{5,8}|[0-9][A-Za-z0-9]{3}))*(?:-[A-WY-Za-wy-z0-9](?:-[A-Za-z0-9]{2,8})+)*$" + # Top-level groups in the pattern (left-to-right) correspond to BCP-47 langtag components: + # 1. language, 2. ["-" script], 3. ["-" region], 4. *("-" variant), 5. *("-" extension) + # See: https://www.rfc-editor.org/rfc/bcp/bcp47.txt + super().__init__( + pattern=r"^(?:(?:[A-Za-z]{2,3}(?:-[A-Za-z]{3}){0,3}?)|(?:[A-Za-z]{4,8}))(?:-[A-Za-z]{4})?(?:-[A-Za-z]{2}|[0-9]{3})?(?:-(?:[A-Za-z0-9]{5,8}|[0-9][A-Za-z0-9]{3}))*(?:-[A-WY-Za-wy-z0-9](?:-[A-Za-z0-9]{2,8})+)*$", + error_message="Invalid IETF BCP-47 language tag: {value}", + description="IETF BCP-47 language tag", ) - def validate(self, value: str, info: ValidationInfo) -> None: - if not self.pattern.match(value): - context = info.context or {} - loc = context.get("loc_prefix", ()) + ("value",) - raise ValidationError.from_exception_data( - title=self.__class__.__name__, - line_errors=[ - InitErrorDetails( - type="value_error", - loc=loc, - input=value, - ctx={"error": f"Invalid IETF BCP-47 language tag: {value}"}, - ) - ], - ) - def __get_pydantic_json_schema__( - self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler - ) -> dict[str, Any]: - json_schema = handler(core_schema) - json_schema["pattern"] = 
self.pattern.pattern - json_schema["description"] = "IETF BCP-47 language tag" - return json_schema - - -class NoWhitespaceConstraint(StringConstraint): +class NoWhitespaceConstraint(PatternConstraint): """Allows only strings that contain no whitespace characters.""" def __init__(self) -> None: - self.pattern = re.compile(r"^\S+$") - - def validate(self, value: str, info: ValidationInfo) -> None: - if not self.pattern.match(value): - context = info.context or {} - loc = context.get("loc_prefix", ()) + ("value",) - raise ValidationError.from_exception_data( - title=self.__class__.__name__, - line_errors=[ - InitErrorDetails( - type="value_error", - loc=loc, - input=value, - ctx={ - "error": f"String cannot contain whitespace characters: '{value}'" - }, - ) - ], - ) - - def __get_pydantic_json_schema__( - self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler - ) -> dict[str, Any]: - json_schema = handler(core_schema) - json_schema["pattern"] = self.pattern.pattern - json_schema["description"] = "String without whitespace characters" - return json_schema + super().__init__( + pattern=r"^\S+$", + error_message="String cannot contain whitespace characters: '{value}'", + description="String without whitespace characters", + ) -class SnakeCaseConstraint(StringConstraint): +class SnakeCaseConstraint(PatternConstraint): """Allows only strings that look like snake case identifiers, *e.g.* `"foo_bar"`.""" def __init__(self) -> None: - self.pattern = re.compile(r"^[a-z0-9]+(_[a-z0-9]+)*$") - - def validate(self, value: str, info: ValidationInfo) -> None: - if not self.pattern.match(value): - context = info.context or {} - loc = context.get("loc_prefix", ()) + ("value",) - raise ValidationError.from_exception_data( - title=self.__class__.__name__, - line_errors=[ - InitErrorDetails( - type="value_error", - loc=loc, - input=value, - ctx={ - "error": f"Invalid category format: {value}. 
Must be snake_case (lowercase letters, numbers, underscores)" - }, - ) - ], - ) + super().__init__( + pattern=r"^[a-z0-9]+(_[a-z0-9]+)*$", + error_message="Invalid category format: {value}. Must be snake_case (lowercase letters, numbers, underscores)", + description="Category in snake_case format", + ) - def __get_pydantic_json_schema__( - self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler - ) -> dict[str, Any]: - json_schema = handler(core_schema) - json_schema["pattern"] = self.pattern.pattern - json_schema["description"] = "Category in snake_case format" - return json_schema +class StrippedConstraint(PatternConstraint): + r"""Allows only strings that have no leading/trailing whitespace. -class StrippedConstraint(StringConstraint): - """Allows only strings that have no leading/trailing whitespace.""" + Uses ``\Z`` (absolute end-of-string) instead of ``$`` because + Python's ``$`` matches before a trailing ``\n``. ECMA regex (used by + JSON Schema) treats ``$`` as absolute end-of-string, so the JSON + schema output swaps ``\Z`` back to ``$``. 
+ """ - def validate(self, value: str, info: ValidationInfo) -> None: - if value != value.strip(): - context = info.context or {} - loc = context.get("loc_prefix", ()) + ("value",) - raise ValidationError.from_exception_data( - title=self.__class__.__name__, - line_errors=[ - InitErrorDetails( - type="value_error", - loc=loc, - input=value, - ctx={ - "error": f"String cannot have leading or trailing whitespace: {repr(value)}" - }, - ) - ], - ) + def __init__(self) -> None: + super().__init__( + pattern=r"^(\S(.*\S)?)?\Z", + error_message="String cannot have leading or trailing whitespace: {value}", + description="String with no leading/trailing whitespace", + ) def __get_pydantic_json_schema__( self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler ) -> dict[str, Any]: - json_schema = handler(core_schema) - json_schema["pattern"] = r"^(\S.*)?\S$" - json_schema["description"] = "String with no leading/trailing whitespace" + json_schema = super().__get_pydantic_json_schema__(core_schema, handler) + json_schema["pattern"] = self.pattern.pattern.replace(r"\Z", "$") return json_schema -class PhoneNumberConstraint(StringConstraint): - """Constraint for international phone numbers.""" +class PhoneNumberConstraint(PatternConstraint): + """Allows only international phone numbers.""" def __init__(self) -> None: - self.pattern = re.compile(r"^\+\d{1,3}[\s\-\(\)0-9]+$") - - def validate(self, value: str, info: ValidationInfo) -> None: - if not self.pattern.match(value): - context = info.context or {} - loc = context.get("loc_prefix", ()) + ("value",) - raise ValidationError.from_exception_data( - title=self.__class__.__name__, - line_errors=[ - InitErrorDetails( - type="value_error", - loc=loc, - input=value, - ctx={ - "error": f"Invalid phone number format: {value}. 
Must start with + and country code" - }, - ) - ], - ) - - def __get_pydantic_json_schema__( - self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler - ) -> dict[str, Any]: - json_schema = handler(core_schema) - json_schema["pattern"] = self.pattern.pattern - json_schema["description"] = ( - "International phone number (+ followed by country code and number)" + super().__init__( + pattern=r"^\+\d{1,3}[\s\-\(\)0-9]+$", + error_message="Invalid phone number format: {value}. Must start with + and country code", + description="International phone number (+ followed by country code and number)", ) - return json_schema -class RegionCodeConstraint(StringConstraint): - """ISO 3166-2 principal subdivision code constraint.""" +class RegionCodeConstraint(PatternConstraint): + """Allows only ISO 3166-2 principal subdivision codes.""" def __init__(self) -> None: - self.pattern = re.compile(r"^[A-Z]{2}-[A-Z0-9]{1,3}$") - - def validate(self, value: str, info: ValidationInfo) -> None: - if not self.pattern.match(value): - context = info.context or {} - loc = context.get("loc_prefix", ()) + ("value",) - raise ValidationError.from_exception_data( - title=self.__class__.__name__, - line_errors=[ - InitErrorDetails( - type="value_error", - loc=loc, - input=value, - ctx={"error": f"Invalid ISO 3166-2 subdivision code: {value}"}, - ) - ], - ) - - def __get_pydantic_json_schema__( - self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler - ) -> dict[str, Any]: - json_schema = handler(core_schema) - json_schema["pattern"] = self.pattern.pattern - json_schema["minLength"] = 4 - json_schema["maxLength"] = 6 - json_schema["description"] = "ISO 3166-2 subdivision code" - return json_schema + super().__init__( + pattern=r"^[A-Z]{2}-[A-Z0-9]{1,3}$", + error_message="Invalid ISO 3166-2 subdivision code: {value}", + description="ISO 3166-2 subdivision code", + min_length=4, + max_length=6, + ) -class WikidataIdConstraint(StringConstraint): - """Constraint for 
Wikidata identifiers (Q followed by digits).""" +class WikidataIdConstraint(PatternConstraint): + """Allows only Wikidata identifiers (Q followed by digits).""" def __init__(self) -> None: - self.pattern = re.compile(r"^Q\d+$") - - def validate(self, value: str, info: ValidationInfo) -> None: - if not self.pattern.match(value): - context = info.context or {} - loc = context.get("loc_prefix", ()) + ("value",) - raise ValidationError.from_exception_data( - title=self.__class__.__name__, - line_errors=[ - InitErrorDetails( - type="value_error", - loc=loc, - input=value, - ctx={ - "error": f"Invalid Wikidata identifier: {value}. Must be Q followed by digits (e.g., Q123)" - }, - ) - ], - ) - - def __get_pydantic_json_schema__( - self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler - ) -> dict[str, Any]: - json_schema = handler(core_schema) - json_schema["pattern"] = self.pattern.pattern - json_schema["description"] = "Wikidata identifier (Q followed by digits)" - return json_schema + super().__init__( + pattern=r"^Q\d+$", + error_message="Invalid Wikidata identifier: {value}. Must be Q followed by digits (e.g., Q123)", + description="Wikidata identifier (Q followed by digits)", + ) diff --git a/packages/overture-schema-system/src/overture/schema/system/model_constraint/forbid_if.py b/packages/overture-schema-system/src/overture/schema/system/model_constraint/forbid_if.py index ab0a8a329..c301edbd4 100644 --- a/packages/overture-schema-system/src/overture/schema/system/model_constraint/forbid_if.py +++ b/packages/overture-schema-system/src/overture/schema/system/model_constraint/forbid_if.py @@ -1,5 +1,5 @@ """ -Prohibit every field in a group of fields from having a value explicitly set, but only if a +Prohibit every field in a group of fields from having a non-`None` value, but only if a condition is true. 
""" @@ -8,7 +8,7 @@ from pydantic import BaseModel, ConfigDict from typing_extensions import override -from .._json_schema import get_static_json_schema_extra, put_if +from .._json_schema import get_static_json_schema_extra, put_if, required_non_null from .model_constraint import ( Condition, OptionalFieldGroupConstraint, @@ -22,12 +22,11 @@ def forbid_if( ) -> Callable[[type[BaseModel]], type[BaseModel]]: """ Decorate a Pydantic model class with a constraint forbidding any of the named fields from - holding an explicitly-assigned value, but only if a field value condition is true. + holding a non-`None` value, but only if a field value condition is true. To ensure parity between Python and JSON Schema validation, a field's value must be explicitly - set to violate the constraint. This means in particular that fields whose value was set by - Pydantic using a default value do not count as having a set value, and fields containing the - value `None`, if this value was explicitly set rather than being inherited by default, do count. + set to a non-`None` value to violate the constraint. Fields containing the value `None`, + whether set explicitly or by default, are always compliant. Parameters ---------- @@ -56,11 +55,13 @@ def forbid_if( MyModel(foo='something', bar=42, baz='qux') >>> MyModel(foo='special value') # validates OK because bar/baz are omitted MyModel(foo='special value', bar=None, baz=None) + >>> MyModel(foo='special value', bar=None) # validates OK because None is compliant + MyModel(foo='special value', bar=None, baz=None) >>> >>> try: ... MyModel(foo='special value', bar=42) ... except ValidationError as e: - ... assert 'at least one field has an explicit value when it should not: bar' in str(e) + ... assert 'at least one field is set to a value other than None when it must not be: bar' in str(e) ... 
print('Validation failed') Validation failed """ @@ -123,13 +124,15 @@ def validate_instance(self, model_instance: BaseModel) -> None: return present_fields = [ - f for f in self.field_names if f in model_instance.model_fields_set + f + for f in self.field_names + if self._field_has_non_none_value(model_instance, f) ] if present_fields: raise ValueError( - f"at least one field has an explicit value when it should not: {', '.join(present_fields)} - " - f"these field value(s) are forbidden because {self.__condition} is true " + f"at least one field is set to a value other than None when it must not be: {', '.join(present_fields)} - " + f"these field(s) are forbidden because {self.__condition} is true " f"(`{self.name}`)" ) @@ -139,12 +142,9 @@ def edit_config(self, model_class: type[BaseModel], config: ConfigDict) -> None: json_schema = get_static_json_schema_extra(config) + aliases = [apply_alias(model_class, f) for f in self.field_names] put_if( json_schema, self.__condition.json_schema(model_class), - { - "not": { - "required": [apply_alias(model_class, f) for f in self.field_names] - } - }, + {"not": required_non_null(aliases)}, ) diff --git a/packages/overture-schema-system/src/overture/schema/system/model_constraint/model_constraint.py b/packages/overture-schema-system/src/overture/schema/system/model_constraint/model_constraint.py index 9b8e0c924..ae279da7a 100644 --- a/packages/overture-schema-system/src/overture/schema/system/model_constraint/model_constraint.py +++ b/packages/overture-schema-system/src/overture/schema/system/model_constraint/model_constraint.py @@ -56,10 +56,6 @@ def __init__(self, name: str | None = None): ) self.__name = name - def __validate_instance(self, model_instance: BaseModel) -> BaseModel: - self.validate_instance(model_instance) - return model_instance - @property def name(self) -> str: """Returns the name of the constraint, e.g. 
"FooConstraint" or "@foo".""" @@ -126,6 +122,15 @@ def decorate(self, model_class: type[BaseModel]) -> type[BaseModel]: metadata = Metadata.retrieve_from(model_class, Metadata()).copy() # type: ignore[union-attr] model_constraints = (*ModelConstraint.get_model_constraints(model_class), self) metadata[_MODEL_CONSTRAINT_KEY] = model_constraints + # Capture the constraint in a closure rather than passing a bound method. + # Some Pydantic versions unwrap bound methods passed through __validators__ + # and rebind `self` to the model instance, breaking the dispatch. + constraint = self + + def _after_validator(model_instance: BaseModel) -> BaseModel: + constraint.validate_instance(model_instance) + return model_instance + new_model_class = create_model( model_class.__name__, __config__=config, @@ -135,7 +140,7 @@ def decorate(self, model_class: type[BaseModel]) -> type[BaseModel]: __validators__={ self.name: cast( Callable[..., Any], - model_validator(mode="after")(self.__validate_instance), + model_validator(mode="after")(_after_validator), ) }, __metadata__=metadata, @@ -334,6 +339,13 @@ class OptionalFieldGroupConstraint(FieldGroupConstraint): def __init__(self, name: str | None, field_names: tuple[str, ...]): super().__init__(name, field_names) + @staticmethod + def _field_has_non_none_value(model_instance: BaseModel, field_name: str) -> bool: + return ( + field_name in model_instance.model_fields_set + and getattr(model_instance, field_name) is not None + ) + @override def validate_class(self, model_class: type[BaseModel]) -> None: super().validate_class(model_class) diff --git a/packages/overture-schema-system/src/overture/schema/system/model_constraint/require_any_of.py b/packages/overture-schema-system/src/overture/schema/system/model_constraint/require_any_of.py index e131c0a66..f05e86043 100644 --- a/packages/overture-schema-system/src/overture/schema/system/model_constraint/require_any_of.py +++ 
b/packages/overture-schema-system/src/overture/schema/system/model_constraint/require_any_of.py @@ -1,28 +1,26 @@ """ -Require at least one named field to have a value explicitly set. +Require at least one named field to have a non-`None` value. """ from collections.abc import Callable from pydantic import BaseModel, ConfigDict -from pydantic.json_schema import JsonDict from typing_extensions import override -from .._json_schema import get_static_json_schema_extra, put_any_of +from .._json_schema import get_static_json_schema_extra, put_any_of, required_non_null from .model_constraint import OptionalFieldGroupConstraint, apply_alias def require_any_of(*field_names: str) -> Callable[[type[BaseModel]], type[BaseModel]]: """ Decorate a Pydantic model class with a constraint requiring that at least one of the named - fields has a value explicitly set. + fields has a non-`None` value. This function is the decorator version of the `RequireAnyOfConstraint` class. To ensure parity between Python and JSON Schema validation, a field's value must be explicitly - set to satisfy the constraint. This means in particular that fields whose value was set by - Pydantic using a default value do not count as having a set value, and fields containing the - value `None`, if this value was explicitly set rather than being inherited by default, do count. + set to a non-`None` value to satisfy the constraint. Fields whose value was set by Pydantic + using a default value violate the constraint, as do fields explicitly set to `None`. Parameters ---------- @@ -49,16 +47,21 @@ def require_any_of(*field_names: str) -> Callable[[type[BaseModel]], type[BaseMo MyModel(foo=42, bar=None) >>> MyModel(bar="hello") # validates OK MyModel(foo=None, bar='hello') - >>> MyModel(foo=None, bar=None) # validates OK - MyModel(foo=None, bar=None) >>> >>> try: ... MyModel() ... except ValidationError as e: - ... assert "at least one of these fields must be explicitly set, but none are: foo, bar" \ + ... 
assert "at least one of these fields must be set to a value other than None, but none are: foo, bar" \ in str(e) - ... print("Validation failed") - Validation failed + ... print("Validation failed (no fields set)") + Validation failed (no fields set) + >>> try: + ... MyModel(foo=None, bar=None) + ... except ValidationError as e: + ... assert "at least one of these fields must be set to a value other than None, but none are: foo, bar" \ + in str(e) + ... print("Validation failed (all fields None)") + Validation failed (all fields None) """ model_constraint = RequireAnyOfConstraint._create_internal( f"@{require_any_of.__name__}", *field_names @@ -97,11 +100,11 @@ def __validate_field_names(field_names: tuple[str, ...]) -> tuple[str, ...]: def validate_instance(self, model_instance: BaseModel) -> None: super().validate_instance(model_instance) - if not ( - any(f for f in self.field_names if f in model_instance.model_fields_set) + if not any( + self._field_has_non_none_value(model_instance, f) for f in self.field_names ): raise ValueError( - f"at least one of these fields must be explicitly set, but none are: {', '.join(self.field_names)} (`{self.name}`)" + f"at least one of these fields must be set to a value other than None, but none are: {', '.join(self.field_names)} (`{self.name}`)" ) @override @@ -110,7 +113,10 @@ def edit_config(self, model_class: type[BaseModel], config: ConfigDict) -> None: json_schema = get_static_json_schema_extra(config) - def required(field_name: str) -> JsonDict: - return {"required": [apply_alias(model_class, field_name)]} - - put_any_of(json_schema, [required(f) for f in self.field_names]) + put_any_of( + json_schema, + [ + required_non_null([apply_alias(model_class, f)]) + for f in self.field_names + ], + ) diff --git a/packages/overture-schema-system/src/overture/schema/system/model_constraint/require_if.py b/packages/overture-schema-system/src/overture/schema/system/model_constraint/require_if.py index 4cb5b138e..fbc354d2f 100644 --- 
a/packages/overture-schema-system/src/overture/schema/system/model_constraint/require_if.py +++ b/packages/overture-schema-system/src/overture/schema/system/model_constraint/require_if.py @@ -1,5 +1,5 @@ """ -Require every field in a group of fields to have a value explicitly set, but only if a condition is +Require every field in a group of fields to have a non-`None` value, but only if a condition is true. """ @@ -8,7 +8,7 @@ from pydantic import BaseModel, ConfigDict from typing_extensions import override -from .._json_schema import get_static_json_schema_extra, put_if +from .._json_schema import get_static_json_schema_extra, put_if, required_non_null from .model_constraint import ( Condition, OptionalFieldGroupConstraint, @@ -22,12 +22,12 @@ def require_if( ) -> Callable[[type[BaseModel]], type[BaseModel]]: """ Decorate a Pydantic model class with a constraint requiring all of the named fields to have a - value explicitly set, but only if a condition is true. + non-`None` value, but only if a condition is true. To ensure parity between Python and JSON Schema validation, a field's value must be explicitly - set to satisfy the constraint. This means in particular that fields whose value was set by - Pydantic using a default value do not count as having a set value, and fields containing the - value `None`, if this value was explicitly set rather than being inherited by default, do count. + set to a non-`None` value to satisfy the constraint. Fields whose value was set by Pydantic + using a default are treated as absent and violate the constraint, as do fields explicitly set + to `None`. Parameters ---------- @@ -61,7 +61,16 @@ def require_if( ... MyModel(foo='special value') ... except ValidationError as e: ... assert ( - ... 'at least one field is missing an explicit value when it should have one: bar, baz' + ... 'at least one field is not set to a value other than None: bar, baz' + ... ) in str(e) + ... 
print('Validation failed') + Validation failed + >>> + >>> try: + ... MyModel(foo='special value', bar=None, baz=None) + ... except ValidationError as e: + ... assert ( + ... 'at least one field is not set to a value other than None: bar, baz' ... ) in str(e) ... print('Validation failed') Validation failed @@ -125,13 +134,15 @@ def validate_instance(self, model_instance: BaseModel) -> None: return missing_fields = [ - f for f in self.field_names if f not in model_instance.model_fields_set + f + for f in self.field_names + if not self._field_has_non_none_value(model_instance, f) ] if missing_fields: raise ValueError( - f"at least one field is missing an explicit value when it should have one: {', '.join(missing_fields)} - " - f"these field value(s) are required because {self.__condition} is true` (`{self.name}`)" + f"at least one field is not set to a value other than None: {', '.join(missing_fields)} - " + f"these field(s) are required because {self.__condition} is true (`{self.name}`)" ) @override @@ -140,8 +151,9 @@ def edit_config(self, model_class: type[BaseModel], config: ConfigDict) -> None: json_schema = get_static_json_schema_extra(config) + aliases = [apply_alias(model_class, f) for f in self.field_names] put_if( json_schema, self.__condition.json_schema(model_class), - {"required": [apply_alias(model_class, f) for f in self.field_names]}, + required_non_null(aliases), ) diff --git a/packages/overture-schema-system/src/overture/schema/system/primitive/__init__.py b/packages/overture-schema-system/src/overture/schema/system/primitive/__init__.py index 96bfd0250..2b27ef89e 100644 --- a/packages/overture-schema-system/src/overture/schema/system/primitive/__init__.py +++ b/packages/overture-schema-system/src/overture/schema/system/primitive/__init__.py @@ -24,7 +24,7 @@ ) uint8 = NewType("uint8", Annotated[int, Field(ge=0, le=255)]) # type: ignore [type-arg] -""" +uint8.__doc__ = """ Portable 8-bit unsigned integer. 
This is an `int` at runtime, but using `uint8` for Pydantic model fields instead of `int` makes them @@ -32,7 +32,7 @@ """ uint16 = NewType("uint16", Annotated[int, Field(ge=0, le=65535)]) # type: ignore[type-arg] -""" +uint16.__doc__ = """ Portable 16-bit unsigned integer. This is an `int` at runtime, but using `uint16` for Pydantic model fields instead of `int` makes @@ -40,7 +40,7 @@ """ uint32 = NewType("uint32", Annotated[int, Field(ge=0, le=4294967295)]) # type: ignore[type-arg] -""" +uint32.__doc__ = """ Portable 32-bit unsigned integer. This is an `int` at runtime, but using `uint32` for Pydantic model fields instead of `int` makes @@ -48,7 +48,7 @@ """ int8 = NewType("int8", Annotated[int, Field(ge=-128, le=127)]) # type: ignore[type-arg] -""" +int8.__doc__ = """ Portable 8-bit signed integer. This is an `int` at runtime, but using `int8` for Pydantic model fields instead of `int` makes them @@ -56,7 +56,7 @@ """ int16 = NewType("int16", Annotated[int, Field(ge=-32768, le=32767)]) # type: ignore[type-arg] -""" +int16.__doc__ = """ Portable 16-bit signed integer. This is an `int` at runtime, but using `int16` for Pydantic model fields instead of `int` makes them @@ -64,7 +64,7 @@ """ int32 = NewType("int32", Annotated[int, Field(ge=-(2**31), le=2**31 - 1)]) # type: ignore[type-arg] -""" +int32.__doc__ = """ Portable 32-bit signed integer. This is an `int` at runtime, but using `int32` for Pydantic model fields instead of `int` makes them @@ -72,7 +72,7 @@ """ int64 = NewType("int64", Annotated[int, Field(ge=-(2**63), le=2**63 - 1)]) # type: ignore[type-arg] -""" +int64.__doc__ = """ Portable 64-bit signed integer. This is an `int` at runtime, but using `int64` for Pydantic model fields instead of `int` makes them @@ -80,7 +80,7 @@ """ float32 = NewType("float32", float) # type: ignore[type-arg] -""" +float32.__doc__ = """ Portable IEEE 32-bit floating point number. 
This is a `float` at runtime, but using `float32` for Pydantic model fields instead of `float` makes @@ -88,7 +88,7 @@ """ float64 = NewType("float64", float) # type: ignore[type-arg] -""" +float64.__doc__ = """ Portable IEEE 64-bit floating point number. This is a `float` at runtime, but using `float64` for Pydantic model fields instead of `float` makes diff --git a/packages/overture-schema-system/src/overture/schema/system/primitive/bbox.py b/packages/overture-schema-system/src/overture/schema/system/primitive/bbox.py index ac0eef391..c078290c5 100644 --- a/packages/overture-schema-system/src/overture/schema/system/primitive/bbox.py +++ b/packages/overture-schema-system/src/overture/schema/system/primitive/bbox.py @@ -237,7 +237,7 @@ def validator( elif isinstance(value, tuple | list): return cls.from_geo_json(value) elif isinstance(value, dict): - BBox(**value) + return BBox(**value) else: raise TypeError( f"expected `BBox` or `tuple` or `list`; got `{type(value).__name__}` with value {repr(value)}" diff --git a/packages/overture-schema-system/src/overture/schema/system/ref/id.py b/packages/overture-schema-system/src/overture/schema/system/ref/id.py index eb467f142..2ffa5dad8 100644 --- a/packages/overture-schema-system/src/overture/schema/system/ref/id.py +++ b/packages/overture-schema-system/src/overture/schema/system/ref/id.py @@ -18,7 +18,7 @@ ), ], ) -""" +Id.__doc__ = """ A unique identifier. 
""" diff --git a/packages/overture-schema-system/src/overture/schema/system/ref/ref.py b/packages/overture-schema-system/src/overture/schema/system/ref/ref.py index 86c8d19d6..ff712104b 100644 --- a/packages/overture-schema-system/src/overture/schema/system/ref/ref.py +++ b/packages/overture-schema-system/src/overture/schema/system/ref/ref.py @@ -23,7 +23,14 @@ def __init__(self, value: str, doc: str) -> None: BELONGS_TO = "belongs_to", "The relator belongs to the relatee" BOUNDARY_OF = "boundary_of", "The relator is a boundary of the relatee" + CAPITAL_OF = "capital_of", "The relator is a capital of the relatee" + CAPITALLED_BY = "capitalled_by", "The relator has the relatee as its capital" + CHILD_OF = "child_of", "The relator is a child of the relatee" CONNECTS_TO = "connects_to", "The relator connects to the relatee" + DESCENDANT_OF = ( + "descendant_of", + "The relator is a hierarchical descendant of the relatee", + ) @dataclass(frozen=True, slots=True) diff --git a/packages/overture-schema-system/src/overture/schema/system/string.py b/packages/overture-schema-system/src/overture/schema/system/string.py index cac9112ca..6533a192a 100644 --- a/packages/overture-schema-system/src/overture/schema/system/string.py +++ b/packages/overture-schema-system/src/overture/schema/system/string.py @@ -35,7 +35,7 @@ Field(description="An ISO 3166-1 alpha-2 country code"), ], ) # type: ignore [type-arg] -""" +CountryCodeAlpha2.__doc__ = """ An ISO-3166-1 alpha-2 country code. """ @@ -49,7 +49,7 @@ ), ], ) # type: ignore [type-arg] -""" +HexColor.__doc__ = """ A color represented as an #RRGGBB or #RGB hexadecimal string. For example: @@ -67,7 +67,7 @@ Field(description="A JSON Pointer (as described in RFC-6901)"), ], ) # type: ignore [type-arg] -""" +JsonPointer.__doc__ = """ A JSON Pointer As described in `the JSON Pointer specification, RFC-6901`_. @@ -91,7 +91,7 @@ ), ], ) # type: ignore [type-arg] -""" +LanguageTag.__doc__ = """ A BCP-47 language tag. 
As described in `Tags for Identifying Languages, BCP-47`_. @@ -114,7 +114,7 @@ Field(description="A string that contains no whitespace characters"), ], ) # type: ignore [type-arg] -""" +NoWhitespaceString.__doc__ = """ A string that contains no whitespace characters. """ @@ -124,7 +124,7 @@ str, PhoneNumberConstraint(), Field(description="An international phone number") ], ) # type: ignore [type-arg] -""" +PhoneNumber.__doc__ = """ An international phone number. """ @@ -136,12 +136,12 @@ Field(description="An ISO 3166-2 principal subdivision code"), ], ) # type: ignore [type-arg] -""" +RegionCode.__doc__ = """ An ISO 3166-2 principal subdivision code. """ SnakeCaseString = NewType("SnakeCaseString", Annotated[str, SnakeCaseConstraint()]) -""" +SnakeCaseString.__doc__ = """ A string that looks like a snake case identifier, like a Python variable name (*e.g.*, `foo_bar`). """ @@ -155,7 +155,7 @@ ), ], ) # type: ignore [type-arg] -""" +StrippedString.__doc__ = """ A string without leading or trailing whitespace. """ @@ -167,7 +167,7 @@ Field(description="A wikidata ID, as found on https://www.wikidata.org/"), ], ) # type: ignore [type-arg] -""" +WikidataId.__doc__ = """ A wikidata ID, as found on https://www.wikidata.org/. 
- `"Q42"` diff --git a/packages/overture-schema-system/tests/field_constraint/test_string_constraints.py b/packages/overture-schema-system/tests/field_constraint/test_string_constraints.py index 6047e896c..14a1ebae1 100644 --- a/packages/overture-schema-system/tests/field_constraint/test_string_constraints.py +++ b/packages/overture-schema-system/tests/field_constraint/test_string_constraints.py @@ -17,6 +17,72 @@ WikidataIdConstraint, ) +PATTERN_CONSTRAINT_CASES = [ + ( + LanguageTagConstraint, + ["en", "en-US", "en-GB", "zh-CN", "fr-CA", "es-MX"], + ["invalid-tag-format", "123", "en_US", "toolongcode"], + "Invalid IETF BCP-47 language tag", + ), + ( + CountryCodeAlpha2Constraint, + ["US", "GB", "CA", "FR", "DE", "JP", "CN", "BR"], + ["USA", "123", "invalid", "gb", "us"], + "Invalid ISO 3166-1 alpha-2 country code", + ), + ( + RegionCodeConstraint, + ["US-CA", "GB-ENG", "CA-ON", "FR-75", "DE-BY"], + ["US", "123-45", "invalid-region", "us-ca"], + "Invalid ISO 3166-2 subdivision code", + ), + ( + WikidataIdConstraint, + ["Q1", "Q123", "Q999999", "Q1234567890"], + ["q123", "P123", "Q", "123", "Q12abc"], + "Invalid Wikidata identifier", + ), + ( + PhoneNumberConstraint, + ["+1-555-123-4567", "+44-20-7946-0958", "+33-1-42-86-83-26", "+81-3-1234-5678"], + ["555-123-4567", "1-555-123-4567", "not-a-phone"], + "Invalid phone number format", + ), + ( + HexColorConstraint, + ["#FFFFFF", "#000000", "#FF0000", "#ffffff", "#FFF", "#fff", "#ABC", "#123"], + ["FFFFFF", "#FF", "#FFFFFFF", "#GGGGGG", "red", "#", "#FFFF"], + "Invalid hexadecimal color format", + ), + ( + NoWhitespaceConstraint, + ["hello", "identifier123", "snake_case_id", "kebab-case-id", "camelCaseId"], + [ + "hello world", + "id with spaces", + "tab\tcharacter", + "new\nline", + "carriage\rreturn", + ], + "cannot contain whitespace", + ), + ( + SnakeCaseConstraint, + ["restaurant", "gas_station", "shopping_mall", "coffee_shop", "bank_atm"], + ["Restaurant", "gas-station", "shopping mall", "category!"], + "Invalid 
category format", + ), + ( + StrippedConstraint, + ["hello", "hello world", "text with internal spaces", ""], + [" hello", "hello ", "\thello", "hello\n", " hello world "], + "leading or trailing whitespace", + ), +] + + +PATTERN_CONSTRAINT_IDS = [cls.__name__ for cls, *_ in PATTERN_CONSTRAINT_CASES] + class TestStringConstraints: """Test all string-based constraints.""" @@ -28,7 +94,6 @@ def test_pattern_constraint_valid(self) -> None: class TestModel(BaseModel): code: Annotated[str, constraint] - # Valid values model = TestModel(code="US") assert model.code == "US" @@ -42,7 +107,6 @@ def test_pattern_constraint_invalid(self) -> None: class TestModel(BaseModel): code: Annotated[str, constraint] - # Invalid values with pytest.raises(ValidationError) as exc_info: TestModel(code="usa") assert "Must be 2 uppercase letters" in str(exc_info.value) @@ -50,85 +114,46 @@ class TestModel(BaseModel): with pytest.raises(ValidationError): TestModel(code="123") - def test_language_tag_constraint_valid(self) -> None: - """Test LanguageTagConstraint with valid language tags.""" - + @pytest.mark.parametrize( + "constraint_cls,valid,invalid,error_substr", + PATTERN_CONSTRAINT_CASES, + ids=PATTERN_CONSTRAINT_IDS, + ) + def test_subclass_valid( + self, + constraint_cls: type, + valid: list[str], + invalid: list[str], + error_substr: str, + ) -> None: class TestModel(BaseModel): - language: Annotated[str, LanguageTagConstraint()] - - # Valid language tags - valid_tags = ["en", "en-US", "en-GB", "zh-CN", "fr-CA", "es-MX"] - - for tag in valid_tags: - model = TestModel(language=tag) - assert model.language == tag - - def test_language_tag_constraint_invalid(self) -> None: - """Test LanguageTagConstraint with invalid language tags.""" - + value: Annotated[str, constraint_cls()] + + for v in valid: + model = TestModel(value=v) + assert model.value == v + + @pytest.mark.parametrize( + "constraint_cls,valid,invalid,error_substr", + PATTERN_CONSTRAINT_CASES, + ids=PATTERN_CONSTRAINT_IDS, + ) 
+ def test_subclass_invalid( + self, + constraint_cls: type, + valid: list[str], + invalid: list[str], + error_substr: str, + ) -> None: class TestModel(BaseModel): - language: Annotated[str, LanguageTagConstraint()] + value: Annotated[str, constraint_cls()] - invalid_tags = ["invalid-tag-format", "123", "en_US", "toolongcode"] - - for tag in invalid_tags: + for v in invalid: with pytest.raises(ValidationError) as exc_info: - TestModel(language=tag) - assert "Invalid IETF BCP-47 language tag" in str(exc_info.value) - - def test_country_code_constraint_valid(self) -> None: - """Test CountryCodeAlpha2Constraint with valid ISO 3166-1 alpha-2 codes.""" - - class TestModel(BaseModel): - country: Annotated[str, CountryCodeAlpha2Constraint()] - - valid_codes = ["US", "GB", "CA", "FR", "DE", "JP", "CN", "BR"] - - for code in valid_codes: - model = TestModel(country=code) - assert model.country == code - - def test_country_code_constraint_invalid(self) -> None: - """Test CountryCodeAlpha2Constraint with invalid country codes.""" - - class TestModel(BaseModel): - country: Annotated[str, CountryCodeAlpha2Constraint()] - - invalid_codes = ["USA", "123", "invalid", "gb", "us"] - - for code in invalid_codes: - with pytest.raises(ValidationError) as exc_info: - TestModel(country=code) - assert "Invalid ISO 3166-1 alpha-2 country code" in str(exc_info.value) - - def test_region_code_constraint_valid(self) -> None: - """Test RegionCodeConstraint with valid ISO 3166-2 codes.""" - - class TestModel(BaseModel): - region: Annotated[str, RegionCodeConstraint()] - - valid_codes = ["US-CA", "GB-ENG", "CA-ON", "FR-75", "DE-BY"] - - for code in valid_codes: - model = TestModel(region=code) - assert model.region == code - - def test_region_code_constraint_invalid(self) -> None: - """Test RegionCodeConstraint with invalid region codes.""" - - class TestModel(BaseModel): - region: Annotated[str, RegionCodeConstraint()] - - invalid_codes = ["US", "123-45", "invalid-region", "us-ca"] - - for 
code in invalid_codes: - with pytest.raises(ValidationError) as exc_info: - TestModel(region=code) - assert "Invalid ISO 3166-2 subdivision code" in str(exc_info.value) + TestModel(value=v) + assert error_substr in str(exc_info.value) def test_json_pointer_constraint_valid(self) -> None: - """Test JsonPointerConstraint with valid JSON pointers.""" - class TestModel(BaseModel): pointer: Annotated[str, JsonPointerConstraint()] @@ -138,8 +163,8 @@ class TestModel(BaseModel): "/foo/bar", "/0", "/foo/0/bar", - "/~0", # Represents ~ - "/~1", # Represents / + "/~0", + "/~1", ] for ptr in valid_pointers: @@ -147,210 +172,17 @@ class TestModel(BaseModel): assert model.pointer == ptr def test_json_pointer_constraint_invalid(self) -> None: - """Test JsonPointerConstraint with invalid JSON pointers.""" - class TestModel(BaseModel): pointer: Annotated[str, JsonPointerConstraint()] - invalid_pointers = [ - "foo", # Must start with / - "foo/bar", # Must start with / - ] - - for ptr in invalid_pointers: + for ptr in ["foo", "foo/bar"]: with pytest.raises(ValidationError) as exc_info: TestModel(pointer=ptr) assert "JSON Pointer must start" in str(exc_info.value) - def test_whitespace_constraint_valid(self) -> None: - """Test WhitespaceConstraint with valid strings (no leading/trailing - whitespace).""" - - class TestModel(BaseModel): - text: Annotated[str, StrippedConstraint()] - - valid_strings = [ - "hello", - "hello world", - "text with internal spaces", - "", # Empty string is valid - ] - - for text in valid_strings: - model = TestModel(text=text) - assert model.text == text - - def test_whitespace_constraint_invalid(self) -> None: - """Test WhitespaceConstraint with invalid strings (leading/trailing - whitespace).""" - - class TestModel(BaseModel): - text: Annotated[str, StrippedConstraint()] - - invalid_strings = [ - " hello", # Leading space - "hello ", # Trailing space - "\thello", # Leading tab - "hello\n", # Trailing newline - " hello world ", # Both leading and trailing 
- ] - - for text in invalid_strings: - with pytest.raises(ValidationError) as exc_info: - TestModel(text=text) - assert "cannot have leading or trailing whitespace" in str(exc_info.value) - - def test_wikidata_constraint_valid(self) -> None: - """Test WikidataConstraint with valid Wikidata identifiers.""" - - class TestModel(BaseModel): - wikidata_id: Annotated[str, WikidataIdConstraint()] - - valid_ids = ["Q1", "Q123", "Q999999", "Q1234567890"] - - for wid in valid_ids: - model = TestModel(wikidata_id=wid) - assert model.wikidata_id == wid - - def test_wikidata_constraint_invalid(self) -> None: - """Test WikidataConstraint with invalid Wikidata identifiers.""" - - class TestModel(BaseModel): - wikidata_id: Annotated[str, WikidataIdConstraint()] - - invalid_ids = [ - "q123", # Lowercase q - "P123", # Property instead of item - "Q", # Missing number - "123", # Missing Q prefix - "Q12abc", # Non-numeric suffix - ] - - for wid in invalid_ids: - with pytest.raises(ValidationError) as exc_info: - TestModel(wikidata_id=wid) - assert "Invalid Wikidata identifier" in str(exc_info.value) - - def test_phone_number_constraint_valid(self) -> None: - """Test PhoneNumberConstraint with valid international phone numbers.""" - - class TestModel(BaseModel): - phone: Annotated[str, PhoneNumberConstraint()] - - valid_phones = [ - "+1-555-123-4567", - "+44-20-7946-0958", - "+33-1-42-86-83-26", - "+81-3-1234-5678", - "+86-10-8888-8888", - ] - - for phone in valid_phones: - model = TestModel(phone=phone) - assert model.phone == phone - - def test_phone_number_constraint_invalid(self) -> None: - """Test PhoneNumberConstraint with invalid phone numbers.""" - - class TestModel(BaseModel): - phone: Annotated[str, PhoneNumberConstraint()] - - invalid_phones = [ - "555-123-4567", # Missing country code - "1-555-123-4567", # Missing + - "not-a-phone", # Not a phone number - ] - - for phone in invalid_phones: - with pytest.raises(ValidationError) as exc_info: - TestModel(phone=phone) - assert 
"Invalid phone number format" in str(exc_info.value) - - def test_hex_color_constraint_valid(self) -> None: - """Test HexColorConstraint with valid hex colors.""" - - class TestModel(BaseModel): - color: Annotated[str, HexColorConstraint()] - - valid_colors = [ - "#FFFFFF", - "#000000", - "#FF0000", - "#00FF00", - "#0000FF", - "#ABCDEF", - "#123456", - "#ffffff", # lowercase - "#abcdef", # lowercase - "#FFF", # 3-character uppercase - "#fff", # 3-character lowercase - "#ABC", # 3-character mixed case - "#123", # 3-character numbers - ] - - for color in valid_colors: - model = TestModel(color=color) - assert model.color == color - - def test_hex_color_constraint_invalid(self) -> None: - """Test HexColorConstraint with invalid hex colors.""" - - class TestModel(BaseModel): - color: Annotated[str, HexColorConstraint()] - - invalid_colors = [ - "FFFFFF", # Missing # - "#FF", # Too short (2 chars) - "#FFFFFFF", # Too long (7 chars) - "#GGGGGG", # Invalid hex characters - "red", # Not hex - "#", # Just hash - "#FFFF", # Invalid length (4 chars) - ] - - for color in invalid_colors: - with pytest.raises(ValidationError) as exc_info: - TestModel(color=color) - # Just check that validation fails - message may vary - assert len(exc_info.value.errors()) > 0 - - def test_no_whitespace_constraint_valid(self) -> None: - """Test NoWhitespaceConstraint with valid strings (no whitespace).""" - - class TestModel(BaseModel): - identifier: Annotated[str, NoWhitespaceConstraint()] - - valid_identifiers = [ - "hello", - "identifier123", - "snake_case_id", - "kebab-case-id", - "camelCaseId", - ] - - for ident in valid_identifiers: - model = TestModel(identifier=ident) - assert model.identifier == ident - - def test_no_whitespace_constraint_invalid(self) -> None: - """Test NoWhitespaceConstraint with invalid strings (containing whitespace).""" - - class TestModel(BaseModel): - identifier: Annotated[str, NoWhitespaceConstraint()] - - invalid_identifiers = [ - "hello world", - "id with 
spaces", - "tab\tcharacter", - "new\nline", - "carriage\rreturn", - ] - - for ident in invalid_identifiers: - with pytest.raises(ValidationError) as exc_info: - TestModel(identifier=ident) - # Just check that validation fails - message may vary - assert len(exc_info.value.errors()) > 0 + def test_stripped_constraint_pattern_string(self) -> None: + """Codegen extracts the regex via constraint.pattern.pattern.""" + assert StrippedConstraint().pattern.pattern == r"^(\S(.*\S)?)?\Z" class TestJsonSchemaGeneration: @@ -375,6 +207,23 @@ class TestModel(BaseModel): # Check descriptions assert "IETF BCP-47 language tag" in props["language"].get("description", "") + def test_stripped_constraint_json_schema_pattern(self) -> None: + """StrippedConstraint's JSON schema pattern accepts empty string + and rejects leading/trailing whitespace.""" + import re + + class TestModel(BaseModel): + text: Annotated[str, StrippedConstraint()] + + schema = TestModel.model_json_schema() + pattern = re.compile(schema["properties"]["text"]["pattern"]) + + assert pattern.match("") is not None + assert pattern.match("a") is not None + assert pattern.match("a b c") is not None + assert pattern.match(" leading") is None + assert pattern.match("trailing ") is None + class TestErrorHandling: """Test error handling and validation context.""" @@ -424,38 +273,59 @@ class TestModel(BaseModel): error = exc_info.value assert error.error_count() >= 1 - def test_snake_case_constraint_valid(self) -> None: - """Test CategoryPatternConstraint with valid snake_case patterns.""" + +class TestPatternConstraintHierarchy: + """Test that pattern-based constraints extend PatternConstraint.""" + + @pytest.mark.parametrize( + "constraint_cls", + [ + CountryCodeAlpha2Constraint, + HexColorConstraint, + LanguageTagConstraint, + NoWhitespaceConstraint, + SnakeCaseConstraint, + PhoneNumberConstraint, + RegionCodeConstraint, + StrippedConstraint, + WikidataIdConstraint, + ], + ) + def 
test_pattern_constraints_are_pattern_constraint_instances( + self, constraint_cls: type + ) -> None: + assert isinstance(constraint_cls(), PatternConstraint) + + def test_pattern_constraint_with_description_kwargs(self) -> None: + """Bare PatternConstraint with description/length kwargs emits correct JSON schema.""" + constraint = PatternConstraint( + r"^[A-Z]{2}$", + "Must be 2 uppercase letters", + description="Two letter code", + min_length=2, + max_length=2, + ) class TestModel(BaseModel): - category: Annotated[str, SnakeCaseConstraint()] - - valid_categories = [ - "restaurant", - "gas_station", - "shopping_mall", - "coffee_shop", - "bank_atm", - ] + code: Annotated[str, constraint] - for cat in valid_categories: - model = TestModel(category=cat) - assert model.category == cat + schema = TestModel.model_json_schema() + props = schema["properties"]["code"] + assert props["pattern"] == "^[A-Z]{2}$" + assert props["description"] == "Two letter code" + assert props["minLength"] == 2 + assert props["maxLength"] == 2 - def test_snake_case_constraint_invalid(self) -> None: - """Test CategoryPatternConstraint with invalid category patterns.""" + def test_pattern_constraint_without_optional_kwargs(self) -> None: + """Bare PatternConstraint without optional kwargs omits them from JSON schema.""" + constraint = PatternConstraint(r"^[A-Z]+$", "Must be uppercase") class TestModel(BaseModel): - category: Annotated[str, SnakeCaseConstraint()] - - invalid_categories = [ - "Restaurant", # Capital letter - "gas-station", # Hyphen instead of underscore - "shopping mall", # Space instead of underscore - "category!", # Special character - ] + code: Annotated[str, constraint] - for cat in invalid_categories: - with pytest.raises(ValidationError) as exc_info: - TestModel(category=cat) - assert "Invalid category format" in str(exc_info.value) + schema = TestModel.model_json_schema() + props = schema["properties"]["code"] + assert props["pattern"] == "^[A-Z]+$" + assert "description" 
not in props + assert "minLength" not in props + assert "maxLength" not in props diff --git a/packages/overture-schema-system/tests/model_constraint/test_forbid_if.py b/packages/overture-schema-system/tests/model_constraint/test_forbid_if.py index 5f1f06d00..090993442 100644 --- a/packages/overture-schema-system/tests/model_constraint/test_forbid_if.py +++ b/packages/overture-schema-system/tests/model_constraint/test_forbid_if.py @@ -6,6 +6,7 @@ from util import assert_subset from overture.schema.system import create_model +from overture.schema.system._json_schema import required_non_null from overture.schema.system.model_constraint import ( Condition, FieldEqCondition, @@ -16,7 +17,7 @@ @pytest.mark.parametrize("field_names", [[], ()]) -def test_error_not_enough_field_names(field_names: list[str]): +def test_error_not_enough_field_names(field_names: list[str]) -> None: with pytest.raises(ValueError, match="`field_names` cannot be empty, but it is"): forbid_if(field_names, FieldEqCondition("foo", 42)) @@ -77,7 +78,8 @@ class TestModel(BaseModel): constraint.validate_class(TestModel) with pytest.raises( - ValueError, match="at least one field has an explicit value when it should not" + ValueError, + match="at least one field is set to a value other than None when it must not be", ): constraint.validate_instance(model_instance) @@ -105,6 +107,18 @@ class TestModel(BaseModel): constraint.validate_instance(TestModel(baz=42)) +def test_valid_model_instance_fields_explicitly_none_when_condition_true() -> None: + """Setting a forbidden field to None does not violate the prohibition.""" + + class TestModel(BaseModel): + foo: int | None = None + bar: int | None = None + baz: int + + constraint = ForbidIfConstraint(["foo", "bar"], FieldEqCondition("baz", 42)) + constraint.validate_instance(TestModel(foo=None, bar=None, baz=42)) + + @pytest.mark.parametrize("field_names", [["foo"], ["bar"], ["foo", "bar"]]) def test_valid_model_instance_condition_false(field_names: list[str]) 
-> None: class TestModel(BaseModel): @@ -126,7 +140,7 @@ class TestModel(BaseModel): actual = TestModel.model_json_schema() expect = { "if": {"properties": {"qux": {"const": 42}}}, - "then": {"not": {"required": ["foo", "baz"]}}, + "then": {"not": required_non_null(["foo", "baz"])}, } assert expect == TestModel.model_config["json_schema_extra"] assert_subset(expect, actual, "expect", "actual") @@ -139,7 +153,7 @@ class TestModel(BaseModel): None, { "if": {"not": {"properties": {"corge": {"const": 42}}}}, - "then": {"not": {"required": ["bar", "baz"]}}, + "then": {"not": required_non_null(["bar", "baz"])}, }, ), ( @@ -147,7 +161,7 @@ class TestModel(BaseModel): { "random": "value", "if": {"not": {"properties": {"corge": {"const": 42}}}}, - "then": {"not": {"required": ["bar", "baz"]}}, + "then": {"not": required_non_null(["bar", "baz"])}, }, ), ( @@ -157,7 +171,7 @@ class TestModel(BaseModel): {"if": 123}, { "if": {"not": {"properties": {"corge": {"const": 42}}}}, - "then": {"not": {"required": ["bar", "baz"]}}, + "then": {"not": required_non_null(["bar", "baz"])}, }, ] }, diff --git a/packages/overture-schema-system/tests/model_constraint/test_min_fields_set.py b/packages/overture-schema-system/tests/model_constraint/test_min_fields_set.py index 3c4929188..0618edae3 100644 --- a/packages/overture-schema-system/tests/model_constraint/test_min_fields_set.py +++ b/packages/overture-schema-system/tests/model_constraint/test_min_fields_set.py @@ -3,6 +3,7 @@ import pytest from pydantic import BaseModel, ConfigDict +from pydantic.json_schema import JsonDict from util import assert_subset from overture.schema.system import create_model @@ -195,7 +196,7 @@ class TestModel(BaseModel): def test_model_json_schema_already_set_same() -> None: - expect = {"minProperties": 3, "hello": "world"} + expect: JsonDict = {"minProperties": 3, "hello": "world"} @min_fields_set(3) class TestModel(BaseModel): @@ -211,7 +212,7 @@ class TestModel(BaseModel): def 
test_model_json_schema_error_already_set_different() -> None: - expect = {"minProperties": 1, "hello": "world"} + expect: JsonDict = {"minProperties": 1, "hello": "world"} with pytest.raises( RuntimeError, diff --git a/packages/overture-schema-system/tests/model_constraint/test_multi_constraint.py b/packages/overture-schema-system/tests/model_constraint/test_multi_constraint.py index d7ff99887..1a046778d 100644 --- a/packages/overture-schema-system/tests/model_constraint/test_multi_constraint.py +++ b/packages/overture-schema-system/tests/model_constraint/test_multi_constraint.py @@ -19,7 +19,7 @@ ) -def test_many_constraints(): +def test_many_constraints() -> None: @forbid_if(["corge", "garply"], FieldEqCondition("qux", "hello")) @min_fields_set(3) @no_extra_fields diff --git a/packages/overture-schema-system/tests/model_constraint/test_no_extra_fields.py b/packages/overture-schema-system/tests/model_constraint/test_no_extra_fields.py index ad50cc0c9..045502cf3 100644 --- a/packages/overture-schema-system/tests/model_constraint/test_no_extra_fields.py +++ b/packages/overture-schema-system/tests/model_constraint/test_no_extra_fields.py @@ -9,7 +9,7 @@ def test_error_invalid_model_class() -> None: - expect_pattern = r"can't apply `@?\w+` to model class `TestModel`: existing `model_config\['extra'\]` is already set to '\w+'" + expect_pattern = r"can't apply `@?\w+` to model class `\w+`: existing `model_config\['extra'\]` is already set to '\w+'" with pytest.raises(TypeError, match=expect_pattern): @@ -19,10 +19,10 @@ class TestModel(BaseModel): with pytest.raises(TypeError, match=expect_pattern): - class TestModel(BaseModel): + class TestModelAllow(BaseModel): model_config = ConfigDict(extra="allow") - NoExtraFieldsConstraint().validate_class(TestModel) + NoExtraFieldsConstraint().validate_class(TestModelAllow) @pytest.mark.parametrize( diff --git a/packages/overture-schema-system/tests/model_constraint/test_radio_group.py 
b/packages/overture-schema-system/tests/model_constraint/test_radio_group.py index f57b50605..353484739 100644 --- a/packages/overture-schema-system/tests/model_constraint/test_radio_group.py +++ b/packages/overture-schema-system/tests/model_constraint/test_radio_group.py @@ -14,7 +14,7 @@ @pytest.mark.parametrize("field_names", [[], (), ["foo"], ("bar",)]) -def test_error_not_enough_field_names(field_names: list[str]): +def test_error_not_enough_field_names(field_names: list[str]) -> None: with pytest.raises( ValueError, match="`field_names` must contain at least two items" ): @@ -108,7 +108,7 @@ class TestModel(BaseModel): def test_model_json_schema_no_model_config() -> None: @radio_group("foo", "baz", "qux") class TestModel(BaseModel): - foo: bool = Field(default=None, alias="bar") + foo: bool | None = Field(default=None, alias="bar") baz: bool qux: bool = Field(alias="corge") diff --git a/packages/overture-schema-system/tests/model_constraint/test_require_any_of.py b/packages/overture-schema-system/tests/model_constraint/test_require_any_of.py index 6c6b12779..6c28dec04 100644 --- a/packages/overture-schema-system/tests/model_constraint/test_require_any_of.py +++ b/packages/overture-schema-system/tests/model_constraint/test_require_any_of.py @@ -3,6 +3,7 @@ from pydantic.json_schema import JsonDict from util import assert_subset +from overture.schema.system._json_schema import required_non_null from overture.schema.system.model_constraint import ( ModelConstraint, RequireAnyOfConstraint, @@ -25,7 +26,9 @@ def test_error_duplicate_field_names(field_names: list[str]) -> None: def test_error_invalid_model_class() -> None: - expect = "specifies one or more fields that are not in the model class `TestModel`: foo, bar" + expect = ( + r"specifies one or more fields that are not in the model class `\w+`: foo, bar" + ) with pytest.raises(TypeError, match=expect): @@ -35,13 +38,14 @@ class TestModel(BaseModel): with pytest.raises(TypeError, match=expect): - class 
TestModel(BaseModel): + class TestModel2(BaseModel): baz: int - RequireAnyOfConstraint("foo", "bar").validate_class(TestModel) + RequireAnyOfConstraint("foo", "bar").validate_class(TestModel2) -def test_error_invalid_model_instance() -> None: +@pytest.mark.parametrize("kwargs", [{}, {"foo": None, "bar": None}]) +def test_error_no_non_null_value(kwargs: dict[str, object]) -> None: @require_any_of("foo", "bar") class TestModel(BaseModel): foo: int | None = None @@ -49,9 +53,9 @@ class TestModel(BaseModel): with pytest.raises( ValidationError, - match="at least one of these fields must be explicitly set, but none are: foo, bar", + match="at least one of these fields must be set to a value other than None, but none are: foo, bar", ): - TestModel() + TestModel(**kwargs) @pytest.mark.parametrize("foo,bar", [(42, "hello"), (42, None), (None, "hello")]) @@ -71,7 +75,12 @@ class TestModel(BaseModel): bar: str | None = Field(default=None, alias="baz") actual = TestModel.model_json_schema() - expect = {"anyOf": [{"required": ["foo"]}, {"required": ["baz"]}]} + expect = { + "anyOf": [ + required_non_null(["foo"]), + required_non_null(["baz"]), + ] + } assert expect == TestModel.model_config["json_schema_extra"] assert_subset(expect, actual, "expect", "actual") @@ -79,13 +88,26 @@ class TestModel(BaseModel): @pytest.mark.parametrize( "base_json_schema,expect", [ - (None, {"anyOf": [{"required": ["foo"]}, {"required": ["baz"]}]}), + ( + None, + { + "anyOf": [ + required_non_null(["foo"]), + required_non_null(["baz"]), + ] + }, + ), ( {"anyOf": "anything"}, { "allOf": [ {"anyOf": "anything"}, - {"anyOf": [{"required": ["foo"]}, {"required": ["baz"]}]}, + { + "anyOf": [ + required_non_null(["foo"]), + required_non_null(["baz"]), + ] + }, ] }, ), diff --git a/packages/overture-schema-system/tests/model_constraint/test_require_if.py b/packages/overture-schema-system/tests/model_constraint/test_require_if.py index 82a08a886..2f6f16471 100644 --- 
a/packages/overture-schema-system/tests/model_constraint/test_require_if.py +++ b/packages/overture-schema-system/tests/model_constraint/test_require_if.py @@ -6,6 +6,7 @@ from util import assert_subset from overture.schema.system import create_model +from overture.schema.system._json_schema import required_non_null from overture.schema.system.model_constraint import ( Condition, FieldEqCondition, @@ -16,7 +17,7 @@ @pytest.mark.parametrize("field_names", [[], ()]) -def test_error_not_enough_field_names(field_names: list[str]): +def test_error_not_enough_field_names(field_names: list[str]) -> None: with pytest.raises(ValueError, match="`field_names` cannot be empty, but it is"): require_if(field_names, FieldEqCondition("foo", 42)) @@ -78,7 +79,7 @@ class TestModel(BaseModel): constraint.validate_class(TestModel) with pytest.raises( ValueError, - match="at least one field is missing an explicit value when it should have one:", + match="at least one field is not set to a value other than None:", ): constraint.validate_instance(model_instance) @@ -107,7 +108,7 @@ class TestModel(BaseModel): @pytest.mark.parametrize("field_names", [["foo"], ["bar"], ["foo", "bar"]]) -def test_valid_model_instance_condition_false(field_names) -> None: +def test_valid_model_instance_condition_false(field_names: list[str]) -> None: class TestModel(BaseModel): foo: int | None = None bar: int | None = None @@ -117,6 +118,20 @@ class TestModel(BaseModel): constraint.validate_instance(TestModel(bar=41, baz=42)) +def test_error_fields_explicitly_none_when_condition_true() -> None: + class TestModel(BaseModel): + foo: int | None = None + bar: int | None = None + baz: int + + constraint = RequireIfConstraint(["foo", "bar"], FieldEqCondition("baz", 42)) + with pytest.raises( + ValueError, + match="at least one field is not set to a value other than None", + ): + constraint.validate_instance(TestModel(foo=None, bar=None, baz=42)) + + def test_model_json_schema_no_model_config() -> None: 
@require_if(["foo", "baz"], FieldEqCondition("qux", 42)) class TestModel(BaseModel): @@ -127,7 +142,7 @@ class TestModel(BaseModel): actual = TestModel.model_json_schema() expect = { "if": {"properties": {"corge": {"const": 42}}}, - "then": {"required": ["bar", "baz"]}, + "then": required_non_null(["bar", "baz"]), } assert expect == TestModel.model_config["json_schema_extra"] assert_subset(expect, actual, "expect", "actual") @@ -140,7 +155,7 @@ class TestModel(BaseModel): None, { "if": {"not": {"properties": {"qux": {"const": 42}}}}, - "then": {"required": ["foo", "baz"]}, + "then": required_non_null(["foo", "baz"]), }, ), ( @@ -148,7 +163,7 @@ class TestModel(BaseModel): { "random": "value", "if": {"not": {"properties": {"qux": {"const": 42}}}}, - "then": {"required": ["foo", "baz"]}, + "then": required_non_null(["foo", "baz"]), }, ), ( @@ -158,7 +173,7 @@ class TestModel(BaseModel): {"if": 123}, { "if": {"not": {"properties": {"qux": {"const": 42}}}}, - "then": {"required": ["foo", "baz"]}, + "then": required_non_null(["foo", "baz"]), }, ] }, diff --git a/packages/overture-schema-system/tests/primitive/test_bbox.py b/packages/overture-schema-system/tests/primitive/test_bbox.py index 530793bf5..31146720a 100644 --- a/packages/overture-schema-system/tests/primitive/test_bbox.py +++ b/packages/overture-schema-system/tests/primitive/test_bbox.py @@ -195,6 +195,7 @@ class TestModel(BaseModel): [ ((1, 2, 3, 4), BBox(xmin=1, ymin=2, xmax=3, ymax=4)), (BBox(0, -1, -2, 3), BBox(0, -1, -2, 3)), + ({"xmin": -1, "ymin": -2, "xmax": 1, "ymax": 2}, BBox(-1, -2, 1, 2)), ], ) def test_pydantic_validation_success(input: Any, expect: BBox) -> None: diff --git a/packages/overture-schema-system/tests/primitive/test_geom.py b/packages/overture-schema-system/tests/primitive/test_geom.py index c43f783e0..68dc5dd5b 100644 --- a/packages/overture-schema-system/tests/primitive/test_geom.py +++ b/packages/overture-schema-system/tests/primitive/test_geom.py @@ -1,12 +1,11 @@ import re -from 
collections.abc import Iterator from dataclasses import dataclass -from itertools import chain, combinations +from itertools import combinations from typing import Annotated, Any import pytest +from _pytest.subtests import Subtests from pydantic import BaseModel, ValidationError -from pytest_subtests import SubTests from shapely import wkt from overture.schema.system.primitive import ( @@ -261,21 +260,26 @@ class GeometryTypeCase: ) -def powerset( - iterable: tuple[GeometryTypeCase, ...], -) -> Iterator[tuple[GeometryTypeCase, ...]]: - s = list(iterable) - return chain.from_iterable(combinations(s, r) for r in range(len(s) + 1)) +def _representative_subsets( + cases: tuple[GeometryTypeCase, ...], +) -> tuple[tuple[GeometryTypeCase, ...], ...]: + """Select subsets that cover the constraint behavior without combinatorial explosion. + Singletons test each type accepted/rejected individually. Pairs test + composition. The full set tests unconstrained acceptance. + """ + singletons = [(c,) for c in cases] + pairs = list(combinations(cases, 2)) + full = [cases] + return tuple(singletons + pairs + full) -TEST_GEOMETRY_TYPE_CASE_SUBSETS = tuple( - s for s in powerset(TEST_GEOMETRY_TYPE_CASES) if len(s) > 0 -) + +TEST_GEOMETRY_TYPE_CASE_SUBSETS = _representative_subsets(TEST_GEOMETRY_TYPE_CASES) @pytest.mark.parametrize("geometry_type_case_subset", TEST_GEOMETRY_TYPE_CASE_SUBSETS) def test_geometry_type_constraint_on_allowed_geometry( - geometry_type_case_subset: tuple[GeometryTypeCase, ...], subtests: SubTests + geometry_type_case_subset: tuple[GeometryTypeCase, ...], subtests: Subtests ) -> None: allowed_types = tuple(g.geometry_type for g in geometry_type_case_subset) @@ -291,7 +295,7 @@ class ConstrainedModel(BaseModel): @pytest.mark.parametrize("geometry_type_case_subset", TEST_GEOMETRY_TYPE_CASE_SUBSETS) def test_geometry_type_constraint_on_disallowed_geometry( - geometry_type_case_subset: tuple[GeometryTypeCase, ...], subtests: SubTests + geometry_type_case_subset: 
tuple[GeometryTypeCase, ...], subtests: Subtests ) -> None: allowed_types = tuple(g.geometry_type for g in geometry_type_case_subset) @@ -313,7 +317,7 @@ class ConstrainedModel(BaseModel): @pytest.mark.parametrize("geometry_type_case_subset", TEST_GEOMETRY_TYPE_CASE_SUBSETS) def test_geometry_type_constraint_on_geometry_counterexamples( - geometry_type_case_subset: tuple[GeometryTypeCase, ...], subtests: SubTests + geometry_type_case_subset: tuple[GeometryTypeCase, ...], subtests: Subtests ) -> None: allowed_types = tuple(g.geometry_type for g in geometry_type_case_subset) diff --git a/packages/overture-schema-system/tests/ref/test_id.py b/packages/overture-schema-system/tests/ref/test_id.py index 288aa2254..59abf9e14 100644 --- a/packages/overture-schema-system/tests/ref/test_id.py +++ b/packages/overture-schema-system/tests/ref/test_id.py @@ -10,12 +10,12 @@ class TestIdentifiedFeature: class IdentifiedFeature(Identified, Feature): pass - def test_id_required_in_model_fields(self): + def test_id_required_in_model_fields(self) -> None: id_field = TestIdentifiedFeature.IdentifiedFeature.model_fields["id"] assert id_field.is_required() - def test_description_same_in_model_fields(self): + def test_description_same_in_model_fields(self) -> None: base_id_field = Identified.model_fields["id"] derived_id_field = TestIdentifiedFeature.IdentifiedFeature.model_fields["id"] diff --git a/packages/overture-schema-system/tests/test___json_schema.py b/packages/overture-schema-system/tests/test___json_schema.py index 85769b805..e820f17e8 100644 --- a/packages/overture-schema-system/tests/test___json_schema.py +++ b/packages/overture-schema-system/tests/test___json_schema.py @@ -14,6 +14,7 @@ put_one_of, put_properties, put_required, + required_non_null, try_move, ) @@ -561,3 +562,44 @@ def test_try_move_missing_key() -> None: assert {"foo": "bar"} == src assert {} == dst + + +#################################################################################################### +# 
required_non_null # +#################################################################################################### + + +@pytest.mark.parametrize( + "aliases,expect", + [ + ( + ["foo"], + { + "required": ["foo"], + "properties": {"foo": {"not": {"type": "null"}}}, + }, + ), + ( + ["foo", "bar"], + { + "required": ["foo", "bar"], + "properties": { + "foo": {"not": {"type": "null"}}, + "bar": {"not": {"type": "null"}}, + }, + }, + ), + ], +) +def test_required_non_null_success(aliases: list[str], expect: JsonSchemaValue) -> None: + assert expect == required_non_null(aliases) + + +def test_required_non_null_error_empty() -> None: + with pytest.raises(ValueError, match="`operands` cannot be empty"): + required_non_null([]) + + +def test_required_non_null_error_not_list() -> None: + with pytest.raises(TypeError, match="`operands` must be a `list`"): + required_non_null(cast(list[str], "foo")) diff --git a/packages/overture-schema-system/tests/test_feature.py b/packages/overture-schema-system/tests/test_feature.py index 49dfbe85f..fc4a31751 100644 --- a/packages/overture-schema-system/tests/test_feature.py +++ b/packages/overture-schema-system/tests/test_feature.py @@ -5,6 +5,7 @@ from typing import Annotated, Any, Literal, cast import pytest +from _pytest.subtests import Subtests from pydantic import ( BaseModel, ConfigDict, @@ -15,7 +16,6 @@ create_model, ) from pydantic.json_schema import JsonSchemaValue, JsonValue -from pytest_subtests import SubTests from util import assert_subset from overture.schema.system.feature import Feature, _FieldLevel, _maybe_refactor_schema @@ -37,7 +37,7 @@ class TestFieldDiscriminator: @pytest.mark.parametrize("field", ["hello", "type", "properties"]) - def test_validation_success_simple(self, field: str, subtests: SubTests) -> None: + def test_validation_success_simple(self, field: str, subtests: Subtests) -> None: """ Test the discriminated union success case for a discriminator that is a simple string. 
@@ -150,7 +150,7 @@ def test_validation_success_simple(self, field: str, subtests: SubTests) -> None actual = tap.validate_python(expect) assert expect == actual - def test_validation_success_convert(self, subtests: SubTests) -> None: + def test_validation_success_convert(self, subtests: Subtests) -> None: """ Test the discriminated union success case where the discriminator value is of a variety of types. @@ -226,7 +226,7 @@ class TestEnum(str, Enum): model1_actual = tap.validate_python(model1_expect) assert model1_expect == model1_actual - def test_validation_success_missing_discriminator(self, subtests: SubTests) -> None: + def test_validation_success_missing_discriminator(self, subtests: Subtests) -> None: """ Tests a union of discriminated unions against an input that doesn't contain the contain the discriminator field of the first union, but does contain the discriminator field @@ -332,6 +332,18 @@ class BarModel(BaseModel): ): tap.validate_json(json.dumps(data)) + def test_field_discriminator_attaches_field_name(self) -> None: + """The callable returned by field_discriminator carries _field_name for introspection.""" + + class A(Feature): + kind: Literal["a"] + + class B(Feature): + kind: Literal["b"] + + disc = Feature.field_discriminator("kind", A, B) + assert disc.discriminator._field_name == "kind" # type: ignore[union-attr] + def test_error_field_not_str(self) -> None: with pytest.raises( TypeError, match="`field` must be a `str`, but 42 has type `int`" @@ -1510,8 +1522,14 @@ class PropertiesObjectConstraintFeature(Feature): "properties": { "required": ["baz"], "anyOf": [ - {"required": ["foo"]}, - {"required": ["bar"]}, + { + "required": ["foo"], + "properties": {"foo": {"not": {"type": "null"}}}, + }, + { + "required": ["bar"], + "properties": {"bar": {"not": {"type": "null"}}}, + }, ], "if": { "properties": { @@ -1521,7 +1539,10 @@ class PropertiesObjectConstraintFeature(Feature): }, }, "then": { - "not": {"required": ["bar"]}, + "not": { + 
"required": ["bar"], + "properties": {"bar": {"not": {"type": "null"}}}, + }, }, }, }, @@ -1556,12 +1577,18 @@ class MixedConstraintFeature(Feature): "properties", ], "anyOf": [ - {"required": ["bbox"]}, + { + "required": ["bbox"], + "properties": {"bbox": {"not": {"type": "null"}}}, + }, { "properties": { "properties": { "type": "object", - "required": ["foo"], + "properties": { + "foo": {"not": {"type": "null"}}, + "properties": {"type": "object", "required": ["foo"]}, + }, } }, }, @@ -1569,7 +1596,13 @@ class MixedConstraintFeature(Feature): "properties": { "properties": { "type": "object", - "required": ["garply"], + "properties": { + "garply": {"not": {"type": "null"}}, + "properties": { + "type": "object", + "required": ["garply"], + }, + }, }, }, }, @@ -1591,9 +1624,17 @@ class MixedConstraintFeature(Feature): "then": { "required": ["id"], "properties": { + "id": {"not": {"type": "null"}}, "properties": { "type": "object", - "required": ["foo", "qux"], + "properties": { + "foo": {"not": {"type": "null"}}, + "qux": {"not": {"type": "null"}}, + "properties": { + "type": "object", + "required": ["foo", "qux"], + }, + }, }, }, }, @@ -1616,7 +1657,14 @@ class MixedConstraintFeature(Feature): "properties": { "properties": { "type": "object", - "required": ["foo", "type"], + "properties": { + "foo": {"not": {"type": "null"}}, + "type": {"not": {"type": "null"}}, + "properties": { + "type": "object", + "required": ["foo", "type"], + }, + }, } } }, diff --git a/packages/overture-schema-system/tests/util.py b/packages/overture-schema-system/tests/util.py index 130e07b47..f9725f5ba 100644 --- a/packages/overture-schema-system/tests/util.py +++ b/packages/overture-schema-system/tests/util.py @@ -1,16 +1,19 @@ +from collections.abc import Mapping from typing import cast -def subset_conflicts(a: dict[str, object], b: dict[str, object]) -> dict[str, object]: +def subset_conflicts( + a: Mapping[str, object], b: Mapping[str, object] +) -> dict[str, object]: """ Returns 
conflict items that prevent `a` from being a subset of `b`. Parameters ---------- - a : dict[str, object] + a : Mapping[str, object] Candidate subset of `b` - b : dict[str, object] - Candidate supserset of `a` + b : Mapping[str, object] + Candidate superset of `a` Returns ------- @@ -44,7 +47,10 @@ def subset_conflicts(a: dict[str, object], b: dict[str, object]) -> dict[str, ob def assert_subset( - a: dict[str, object], b: dict[str, object], a_name: str = "a", b_name: str = "b" + a: Mapping[str, object], + b: Mapping[str, object], + a_name: str = "a", + b_name: str = "b", ) -> None: conflicts = subset_conflicts(a, b) if conflicts: diff --git a/packages/overture-schema-transportation-theme/pyproject.toml b/packages/overture-schema-transportation-theme/pyproject.toml index 2be7ed8ea..40db4638a 100644 --- a/packages/overture-schema-transportation-theme/pyproject.toml +++ b/packages/overture-schema-transportation-theme/pyproject.toml @@ -1,7 +1,11 @@ [project] +maintainers = [ + {name = "Overture Maps Schema Working Group"}, +] dependencies = [ "overture-schema-core", - "pydantic>=2.0", + "overture-schema-system", + "pydantic>=2.12.0", ] description = "Overture Maps transportation theme with shared structures and connector and segment types" dynamic = ["version"] @@ -10,8 +14,14 @@ name = "overture-schema-transportation-theme" readme = "README.md" requires-python = ">=3.10" +[project.urls] +Homepage = "https://overturemaps.org" +Source = "https://github.com/OvertureMaps/schema" +Issues = "https://github.com/OvertureMaps/schema/issues" + [tool.uv.sources] overture-schema-core = { workspace = true } +overture-schema-system = { workspace = true } [build-system] @@ -27,3 +37,63 @@ packages = ["src/overture"] [project.entry-points."overture.models"] "overture:transportation:connector" = "overture.schema.transportation:Connector" "overture:transportation:segment" = "overture.schema.transportation:Segment" + +[[examples.Connector]] +id = "39542bee-230f-4b91-b7e5-a9b58e0c59b1" 
+geometry = "POINT (-176.5472979 -43.9679472)" +version = 1 +theme = "transportation" +type = "connector" + +[examples.Connector.bbox] +xmin = -176.54730224609375 +xmax = -176.54727172851562 +ymin = -43.96794891357422 +ymax = -43.96794128417969 + +[[examples.Connector.sources]] +property = "" +dataset = "OpenStreetMap" + +[[examples.Segment]] +id = "1bc62f3b-08b5-42b8-89fe-36f685f60455" +geometry = "LINESTRING (-176.5636191 -43.954404, -176.5643637 -43.9538145, -176.5647264 -43.9535274, -176.5649947 -43.953251)" +version = 1 +subtype = "road" +class = "residential" +theme = "transportation" +type = "segment" + +[examples.Segment.bbox] +xmin = -176.5650177001953 +xmax = -176.56361389160156 +ymin = -43.954410552978516 +ymax = -43.953250885009766 + +[[examples.Segment.sources]] +property = "" +dataset = "OpenStreetMap" +record_id = "w53435546@6" +update_time = "2021-05-03T06:37:03Z" + +[examples.Segment.names] +primary = "Meteorological Lane" + +[[examples.Segment.names.rules]] +variant = "common" +value = "Meteorological Lane" + +[[examples.Segment.connectors]] +connector_id = "15b2c131-9137-4add-88c6-2acd3fa61355" +at = 0.0 + +[[examples.Segment.connectors]] +connector_id = "23ae2702-ef77-4d2e-b39d-77360b696d20" +at = 0.523536154 + +[[examples.Segment.connectors]] +connector_id = "8e944ce1-4b81-49eb-a823-7d98779c855c" +at = 1.0 + +[[examples.Segment.road_surface]] +value = "gravel" diff --git a/packages/overture-schema-transportation-theme/src/overture/schema/transportation/__about__.py b/packages/overture-schema-transportation-theme/src/overture/schema/transportation/__about__.py index 3dc1f76bc..5e3bb11ca 100644 --- a/packages/overture-schema-transportation-theme/src/overture/schema/transportation/__about__.py +++ b/packages/overture-schema-transportation-theme/src/overture/schema/transportation/__about__.py @@ -1 +1 @@ -__version__ = "0.1.0" +__version__ = "0.1.1.dev1" diff --git a/packages/overture-schema-transportation-theme/tests/connector_baseline_schema.json 
b/packages/overture-schema-transportation-theme/tests/connector_baseline_schema.json index b633d1219..a51faf3f0 100644 --- a/packages/overture-schema-transportation-theme/tests/connector_baseline_schema.json +++ b/packages/overture-schema-transportation-theme/tests/connector_baseline_schema.json @@ -30,7 +30,7 @@ }, "license": { "description": "Source data license name.\n\nThis should be a valid SPDX license identifier when available.\n\nIf omitted, contact the data provider for more license information.", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "License", "type": "string" }, diff --git a/packages/overture-schema-transportation-theme/tests/segment_baseline_schema.json b/packages/overture-schema-transportation-theme/tests/segment_baseline_schema.json index 96d10a548..4ec108313 100644 --- a/packages/overture-schema-transportation-theme/tests/segment_baseline_schema.json +++ b/packages/overture-schema-transportation-theme/tests/segment_baseline_schema.json @@ -84,7 +84,7 @@ "value": { "description": "Names the object that is reached", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Value", "type": "string" } @@ -100,11 +100,25 @@ "additionalProperties": false, "anyOf": [ { + "properties": { + "labels": { + "not": { + "type": "null" + } + } + }, "required": [ "labels" ] }, { + "properties": { + "symbols": { + "not": { + "type": "null" + } + } + }, "required": [ "symbols" ] @@ -236,8 +250,8 @@ }, "value": { "description": "Z-order of the feature where 0 is visual level", - "maximum": 32767, - "minimum": -32768, + "maximum": 2147483647, + "minimum": -2147483648, "title": "Value", "type": "integer" } @@ -281,7 +295,7 @@ "value": { "description": "The actual name value.", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Value", "type": "string" }, @@ -317,7 +331,7 @@ "patternProperties": { 
"^(?:(?:[A-Za-z]{2,3}(?:-[A-Za-z]{3}){0,3}?)|(?:[A-Za-z]{4,8}))(?:-[A-Za-z]{4})?(?:-[A-Za-z]{2}|[0-9]{3})?(?:-(?:[A-Za-z0-9]{5,8}|[0-9][A-Za-z0-9]{3}))*(?:-[A-WY-Za-wy-z0-9](?:-[A-Za-z0-9]{2,8})+)*$": { "description": "String with no leading/trailing whitespace", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "type": "string" } }, @@ -330,7 +344,7 @@ "primary": { "description": "The most commonly used name.", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Primary", "type": "string" }, @@ -605,7 +619,8 @@ "type": "array" }, "names": { - "$ref": "#/$defs/Names" + "$ref": "#/$defs/Names", + "description": "All known names by which the feature is called" }, "rail_flags": { "description": "Set of boolean attributes applicable to railways. May be specified either as a single flag array of flag values, or as an array of flag rules.", @@ -882,7 +897,8 @@ "type": "array" }, "names": { - "$ref": "#/$defs/Names" + "$ref": "#/$defs/Names", + "description": "All known names by which the feature is called" }, "prohibited_transitions": { "description": "Rules preventing transitions from this segment to another segment.", @@ -1040,28 +1056,28 @@ "name": { "description": "Full name of the route", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Name", "type": "string" }, "network": { "description": "Name of the highway system this route belongs to", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Network", "type": "string" }, "ref": { "description": "Code or number used to reference the route", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Ref", "type": "string" }, "symbol": { "description": "URL or description of route signage", "minLength": 1, - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "Symbol", "type": "string" }, @@ -1140,7 +1156,7 @@ }, "license": { "description": 
"Source data license name.\n\nThis should be a valid SPDX license identifier when available.\n\nIf omitted, contact the data provider for more license information.", - "pattern": "^(\\S.*)?\\S$", + "pattern": "^(\\S(.*\\S)?)?$", "title": "License", "type": "string" }, @@ -1194,11 +1210,25 @@ "additionalProperties": false, "anyOf": [ { + "properties": { + "max_speed": { + "not": { + "type": "null" + } + } + }, "required": [ "max_speed" ] }, { + "properties": { + "min_speed": { + "not": { + "type": "null" + } + } + }, "required": [ "min_speed" ] @@ -1597,7 +1627,8 @@ "type": "array" }, "names": { - "$ref": "#/$defs/Names" + "$ref": "#/$defs/Names", + "description": "All known names by which the feature is called" }, "routes": { "description": "Routes this segment belongs to", @@ -1715,31 +1746,73 @@ "additionalProperties": false, "anyOf": [ { + "properties": { + "heading": { + "not": { + "type": "null" + } + } + }, "required": [ "heading" ] }, { + "properties": { + "during": { + "not": { + "type": "null" + } + } + }, "required": [ "during" ] }, { + "properties": { + "mode": { + "not": { + "type": "null" + } + } + }, "required": [ "mode" ] }, { + "properties": { + "using": { + "not": { + "type": "null" + } + } + }, "required": [ "using" ] }, { + "properties": { + "recognized": { + "not": { + "type": "null" + } + } + }, "required": [ "recognized" ] }, { + "properties": { + "vehicle": { + "not": { + "type": "null" + } + } + }, "required": [ "vehicle" ] @@ -1836,31 +1909,73 @@ "additionalProperties": false, "anyOf": [ { + "properties": { + "heading": { + "not": { + "type": "null" + } + } + }, "required": [ "heading" ] }, { + "properties": { + "during": { + "not": { + "type": "null" + } + } + }, "required": [ "during" ] }, { + "properties": { + "mode": { + "not": { + "type": "null" + } + } + }, "required": [ "mode" ] }, { + "properties": { + "using": { + "not": { + "type": "null" + } + } + }, "required": [ "using" ] }, { + "properties": { + "recognized": { + "not": { + 
"type": "null" + } + } + }, "required": [ "recognized" ] }, { + "properties": { + "vehicle": { + "not": { + "type": "null" + } + } + }, "required": [ "vehicle" ] @@ -1942,31 +2057,73 @@ "additionalProperties": false, "anyOf": [ { + "properties": { + "heading": { + "not": { + "type": "null" + } + } + }, "required": [ "heading" ] }, { + "properties": { + "during": { + "not": { + "type": "null" + } + } + }, "required": [ "during" ] }, { + "properties": { + "mode": { + "not": { + "type": "null" + } + } + }, "required": [ "mode" ] }, { + "properties": { + "using": { + "not": { + "type": "null" + } + } + }, "required": [ "using" ] }, { + "properties": { + "recognized": { + "not": { + "type": "null" + } + } + }, "required": [ "recognized" ] }, { + "properties": { + "vehicle": { + "not": { + "type": "null" + } + } + }, "required": [ "vehicle" ] diff --git a/packages/overture-schema/pyproject.toml b/packages/overture-schema/pyproject.toml index 8c57c8421..432116454 100644 --- a/packages/overture-schema/pyproject.toml +++ b/packages/overture-schema/pyproject.toml @@ -1,4 +1,7 @@ [project] +maintainers = [ + {name = "Overture Maps Schema Working Group"}, +] dependencies = [ "overture-schema-addresses-theme", "overture-schema-base-theme", @@ -7,7 +10,7 @@ dependencies = [ "overture-schema-places-theme", "overture-schema-transportation-theme", "overture-schema-core", - "pydantic>=2.0", + "pydantic>=2.12.0", "pyyaml>=6.0.2", "overture-schema-cli", ] @@ -18,6 +21,11 @@ name = "overture-schema" readme = "README.md" requires-python = ">=3.10" +[project.urls] +Homepage = "https://overturemaps.org" +Source = "https://github.com/OvertureMaps/schema" +Issues = "https://github.com/OvertureMaps/schema/issues" + [tool.uv.sources] overture-schema-addresses-theme = { workspace = true } overture-schema-base-theme = { workspace = true } @@ -34,8 +42,8 @@ requires = ["hatchling"] [dependency-groups] dev = [ - "pyyaml", - "deepdiff", + "pyyaml>=6.0.2", + "deepdiff>=8.6.0", "yamlcore>=0.0.4", ] 
diff --git a/packages/overture-schema/src/overture/schema/__about__.py b/packages/overture-schema/src/overture/schema/__about__.py index 3dc1f76bc..5e3bb11ca 100644 --- a/packages/overture-schema/src/overture/schema/__about__.py +++ b/packages/overture-schema/src/overture/schema/__about__.py @@ -1 +1 @@ -__version__ = "0.1.0" +__version__ = "0.1.1.dev1" diff --git a/pyproject.toml b/pyproject.toml index 92201b713..7ac1e5553 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,10 +6,12 @@ name = "overture-schema-workspace" requires-python = ">=3.10" version = "0.0.0" +[tool.uv] +exclude-newer = "1 week" + [tool.uv.workspace] members = ["packages/*"] - [tool.ruff] line-length = 88 target-version = "py310" @@ -50,17 +52,20 @@ dev = [ "mypy>=1.17.0", "pdoc>=15.0.4", "pydocstyle>=6.3.0", - "pytest>=8.4.1", + "pytest>=9.0.0", "pytest-cov>=7.0.0", - "ruff>=0.12.4", + "ruff>=0.13.0", ] [tool.pytest.ini_options] +verbosity_subtests = 0 pythonpath = [ "packages/overture-schema-addresses-theme/tests", "packages/overture-schema-annex/tests", "packages/overture-schema-base-theme/tests", "packages/overture-schema-buildings-theme/tests", + "packages/overture-schema-cli/tests", + "packages/overture-schema-codegen/tests", "packages/overture-schema-core/tests", "packages/overture-schema-divisions-theme/tests", "packages/overture-schema-places-theme/tests", diff --git a/schema/places/place.yaml b/schema/places/place.yaml index 3c741aea2..0ae5351c0 100644 --- a/schema/places/place.yaml +++ b/schema/places/place.yaml @@ -137,4 +137,3 @@ properties: This is not an indication of 'opening hours' or that the place is open/closed at the current time-of-day or day-of-week. 
type: string enum: ["open", "permanently_closed", "temporarily_closed"] - required: [operating_status] diff --git a/uv.lock b/uv.lock index 7748ad79c..7a10a557a 100644 --- a/uv.lock +++ b/uv.lock @@ -2,10 +2,15 @@ version = 1 revision = 3 requires-python = ">=3.10" resolution-markers = [ - "python_full_version >= '3.11'", + "python_full_version >= '3.15'", + "python_full_version >= '3.11' and python_full_version < '3.15'", "python_full_version < '3.11'", ] +[options] +exclude-newer = "0001-01-01T00:00:00Z" # This has no effect and is included for backwards compatibility when using relative exclude-newer values. +exclude-newer-span = "P1W" + [manifest] members = [ "overture-schema", @@ -14,6 +19,7 @@ members = [ "overture-schema-base-theme", "overture-schema-buildings-theme", "overture-schema-cli", + "overture-schema-codegen", "overture-schema-core", "overture-schema-divisions-theme", "overture-schema-places-theme", @@ -31,25 +37,16 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, ] -[[package]] -name = "attrs" -version = "25.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, -] - [[package]] name = "click" -version = "8.3.0" 
+version = "8.3.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/63/f9e1ea081ce35720d8b92acde70daaedace594dc93b693c869e0d5910718/click-8.3.3.tar.gz", hash = "sha256:398329ad4837b2ff7cbe1dd166a4c0f8900c3ca3a218de04466f38f6497f18a2", size = 328061, upload-time = "2026-04-22T15:11:27.506Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" }, + { url = "https://files.pythonhosted.org/packages/ae/44/c1221527f6a71a01ec6fbad7fa78f1d50dfa02217385cf0fa3eec7087d59/click-8.3.3-py3-none-any.whl", hash = "sha256:a2bf429bb3033c89fa4936ffb35d5cb471e3719e1f3c8a7c3fff0b8314305613", size = 110502, upload-time = "2026-04-22T15:11:25.044Z" }, ] [[package]] @@ -63,101 +60,115 @@ wheels = [ [[package]] name = "coverage" -version = "7.11.0" +version = "7.13.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1c/38/ee22495420457259d2f3390309505ea98f98a5eed40901cf62196abad006/coverage-7.11.0.tar.gz", hash = "sha256:167bd504ac1ca2af7ff3b81d245dfea0292c5032ebef9d66cc08a7d28c1b8050", size = 811905, upload-time = "2025-10-15T15:15:08.542Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/e0/70553e3000e345daff267cec284ce4cbf3fc141b6da229ac52775b5428f1/coverage-7.13.5.tar.gz", hash = "sha256:c81f6515c4c40141f83f502b07bbfa5c240ba25bbe73da7b33f1e5b6120ff179", 
size = 915967, upload-time = "2026-03-17T10:33:18.341Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/12/95/c49df0aceb5507a80b9fe5172d3d39bf23f05be40c23c8d77d556df96cec/coverage-7.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eb53f1e8adeeb2e78962bade0c08bfdc461853c7969706ed901821e009b35e31", size = 215800, upload-time = "2025-10-15T15:12:19.824Z" }, - { url = "https://files.pythonhosted.org/packages/dc/c6/7bb46ce01ed634fff1d7bb53a54049f539971862cc388b304ff3c51b4f66/coverage-7.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d9a03ec6cb9f40a5c360f138b88266fd8f58408d71e89f536b4f91d85721d075", size = 216198, upload-time = "2025-10-15T15:12:22.549Z" }, - { url = "https://files.pythonhosted.org/packages/94/b2/75d9d8fbf2900268aca5de29cd0a0fe671b0f69ef88be16767cc3c828b85/coverage-7.11.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0d7f0616c557cbc3d1c2090334eddcbb70e1ae3a40b07222d62b3aa47f608fab", size = 242953, upload-time = "2025-10-15T15:12:24.139Z" }, - { url = "https://files.pythonhosted.org/packages/65/ac/acaa984c18f440170525a8743eb4b6c960ace2dbad80dc22056a437fc3c6/coverage-7.11.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e44a86a47bbdf83b0a3ea4d7df5410d6b1a0de984fbd805fa5101f3624b9abe0", size = 244766, upload-time = "2025-10-15T15:12:25.974Z" }, - { url = "https://files.pythonhosted.org/packages/d8/0d/938d0bff76dfa4a6b228c3fc4b3e1c0e2ad4aa6200c141fcda2bd1170227/coverage-7.11.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:596763d2f9a0ee7eec6e643e29660def2eef297e1de0d334c78c08706f1cb785", size = 246625, upload-time = "2025-10-15T15:12:27.387Z" }, - { url = "https://files.pythonhosted.org/packages/38/54/8f5f5e84bfa268df98f46b2cb396b1009734cfb1e5d6adb663d284893b32/coverage-7.11.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:ef55537ff511b5e0a43edb4c50a7bf7ba1c3eea20b4f49b1490f1e8e0e42c591", size = 243568, upload-time = "2025-10-15T15:12:28.799Z" }, - { url = "https://files.pythonhosted.org/packages/68/30/8ba337c2877fe3f2e1af0ed7ff4be0c0c4aca44d6f4007040f3ca2255e99/coverage-7.11.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9cbabd8f4d0d3dc571d77ae5bdbfa6afe5061e679a9d74b6797c48d143307088", size = 244665, upload-time = "2025-10-15T15:12:30.297Z" }, - { url = "https://files.pythonhosted.org/packages/cc/fb/c6f1d6d9a665536b7dde2333346f0cc41dc6a60bd1ffc10cd5c33e7eb000/coverage-7.11.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e24045453384e0ae2a587d562df2a04d852672eb63051d16096d3f08aa4c7c2f", size = 242681, upload-time = "2025-10-15T15:12:32.326Z" }, - { url = "https://files.pythonhosted.org/packages/be/38/1b532319af5f991fa153c20373291dc65c2bf532af7dbcffdeef745c8f79/coverage-7.11.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:7161edd3426c8d19bdccde7d49e6f27f748f3c31cc350c5de7c633fea445d866", size = 242912, upload-time = "2025-10-15T15:12:34.079Z" }, - { url = "https://files.pythonhosted.org/packages/67/3d/f39331c60ef6050d2a861dc1b514fa78f85f792820b68e8c04196ad733d6/coverage-7.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d4ed4de17e692ba6415b0587bc7f12bc80915031fc9db46a23ce70fc88c9841", size = 243559, upload-time = "2025-10-15T15:12:35.809Z" }, - { url = "https://files.pythonhosted.org/packages/4b/55/cb7c9df9d0495036ce582a8a2958d50c23cd73f84a23284bc23bd4711a6f/coverage-7.11.0-cp310-cp310-win32.whl", hash = "sha256:765c0bc8fe46f48e341ef737c91c715bd2a53a12792592296a095f0c237e09cf", size = 218266, upload-time = "2025-10-15T15:12:37.429Z" }, - { url = "https://files.pythonhosted.org/packages/68/a8/b79cb275fa7bd0208767f89d57a1b5f6ba830813875738599741b97c2e04/coverage-7.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:24d6f3128f1b2d20d84b24f4074475457faedc3d4613a7e66b5e769939c7d969", size = 219169, upload-time = "2025-10-15T15:12:39.25Z" }, - { url 
= "https://files.pythonhosted.org/packages/49/3a/ee1074c15c408ddddddb1db7dd904f6b81bc524e01f5a1c5920e13dbde23/coverage-7.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d58ecaa865c5b9fa56e35efc51d1014d4c0d22838815b9fce57a27dd9576847", size = 215912, upload-time = "2025-10-15T15:12:40.665Z" }, - { url = "https://files.pythonhosted.org/packages/70/c4/9f44bebe5cb15f31608597b037d78799cc5f450044465bcd1ae8cb222fe1/coverage-7.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b679e171f1c104a5668550ada700e3c4937110dbdd153b7ef9055c4f1a1ee3cc", size = 216310, upload-time = "2025-10-15T15:12:42.461Z" }, - { url = "https://files.pythonhosted.org/packages/42/01/5e06077cfef92d8af926bdd86b84fb28bf9bc6ad27343d68be9b501d89f2/coverage-7.11.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ca61691ba8c5b6797deb221a0d09d7470364733ea9c69425a640f1f01b7c5bf0", size = 246706, upload-time = "2025-10-15T15:12:44.001Z" }, - { url = "https://files.pythonhosted.org/packages/40/b8/7a3f1f33b35cc4a6c37e759137533119560d06c0cc14753d1a803be0cd4a/coverage-7.11.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:aef1747ede4bd8ca9cfc04cc3011516500c6891f1b33a94add3253f6f876b7b7", size = 248634, upload-time = "2025-10-15T15:12:45.768Z" }, - { url = "https://files.pythonhosted.org/packages/7a/41/7f987eb33de386bc4c665ab0bf98d15fcf203369d6aacae74f5dd8ec489a/coverage-7.11.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a1839d08406e4cba2953dcc0ffb312252f14d7c4c96919f70167611f4dee2623", size = 250741, upload-time = "2025-10-15T15:12:47.222Z" }, - { url = "https://files.pythonhosted.org/packages/23/c1/a4e0ca6a4e83069fb8216b49b30a7352061ca0cb38654bd2dc96b7b3b7da/coverage-7.11.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e0eb0a2dcc62478eb5b4cbb80b97bdee852d7e280b90e81f11b407d0b81c4287", size = 246837, upload-time = 
"2025-10-15T15:12:48.904Z" }, - { url = "https://files.pythonhosted.org/packages/5d/03/ced062a17f7c38b4728ff76c3acb40d8465634b20b4833cdb3cc3a74e115/coverage-7.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bc1fbea96343b53f65d5351d8fd3b34fd415a2670d7c300b06d3e14a5af4f552", size = 248429, upload-time = "2025-10-15T15:12:50.73Z" }, - { url = "https://files.pythonhosted.org/packages/97/af/a7c6f194bb8c5a2705ae019036b8fe7f49ea818d638eedb15fdb7bed227c/coverage-7.11.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:214b622259dd0cf435f10241f1333d32caa64dbc27f8790ab693428a141723de", size = 246490, upload-time = "2025-10-15T15:12:52.646Z" }, - { url = "https://files.pythonhosted.org/packages/ab/c3/aab4df02b04a8fde79068c3c41ad7a622b0ef2b12e1ed154da986a727c3f/coverage-7.11.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:258d9967520cca899695d4eb7ea38be03f06951d6ca2f21fb48b1235f791e601", size = 246208, upload-time = "2025-10-15T15:12:54.586Z" }, - { url = "https://files.pythonhosted.org/packages/30/d8/e282ec19cd658238d60ed404f99ef2e45eed52e81b866ab1518c0d4163cf/coverage-7.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cf9e6ff4ca908ca15c157c409d608da77a56a09877b97c889b98fb2c32b6465e", size = 247126, upload-time = "2025-10-15T15:12:56.485Z" }, - { url = "https://files.pythonhosted.org/packages/d1/17/a635fa07fac23adb1a5451ec756216768c2767efaed2e4331710342a3399/coverage-7.11.0-cp311-cp311-win32.whl", hash = "sha256:fcc15fc462707b0680cff6242c48625da7f9a16a28a41bb8fd7a4280920e676c", size = 218314, upload-time = "2025-10-15T15:12:58.365Z" }, - { url = "https://files.pythonhosted.org/packages/2a/29/2ac1dfcdd4ab9a70026edc8d715ece9b4be9a1653075c658ee6f271f394d/coverage-7.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:865965bf955d92790f1facd64fe7ff73551bd2c1e7e6b26443934e9701ba30b9", size = 219203, upload-time = "2025-10-15T15:12:59.902Z" }, - { url = 
"https://files.pythonhosted.org/packages/03/21/5ce8b3a0133179115af4c041abf2ee652395837cb896614beb8ce8ddcfd9/coverage-7.11.0-cp311-cp311-win_arm64.whl", hash = "sha256:5693e57a065760dcbeb292d60cc4d0231a6d4b6b6f6a3191561e1d5e8820b745", size = 217879, upload-time = "2025-10-15T15:13:01.35Z" }, - { url = "https://files.pythonhosted.org/packages/c4/db/86f6906a7c7edc1a52b2c6682d6dd9be775d73c0dfe2b84f8923dfea5784/coverage-7.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9c49e77811cf9d024b95faf86c3f059b11c0c9be0b0d61bc598f453703bd6fd1", size = 216098, upload-time = "2025-10-15T15:13:02.916Z" }, - { url = "https://files.pythonhosted.org/packages/21/54/e7b26157048c7ba555596aad8569ff903d6cd67867d41b75287323678ede/coverage-7.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a61e37a403a778e2cda2a6a39abcc895f1d984071942a41074b5c7ee31642007", size = 216331, upload-time = "2025-10-15T15:13:04.403Z" }, - { url = "https://files.pythonhosted.org/packages/b9/19/1ce6bf444f858b83a733171306134a0544eaddf1ca8851ede6540a55b2ad/coverage-7.11.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c79cae102bb3b1801e2ef1511fb50e91ec83a1ce466b2c7c25010d884336de46", size = 247825, upload-time = "2025-10-15T15:13:05.92Z" }, - { url = "https://files.pythonhosted.org/packages/71/0b/d3bcbbc259fcced5fb67c5d78f6e7ee965f49760c14afd931e9e663a83b2/coverage-7.11.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:16ce17ceb5d211f320b62df002fa7016b7442ea0fd260c11cec8ce7730954893", size = 250573, upload-time = "2025-10-15T15:13:07.471Z" }, - { url = "https://files.pythonhosted.org/packages/58/8d/b0ff3641a320abb047258d36ed1c21d16be33beed4152628331a1baf3365/coverage-7.11.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:80027673e9d0bd6aef86134b0771845e2da85755cf686e7c7c59566cf5a89115", size = 251706, upload-time = "2025-10-15T15:13:09.4Z" }, - { url = 
"https://files.pythonhosted.org/packages/59/c8/5a586fe8c7b0458053d9c687f5cff515a74b66c85931f7fe17a1c958b4ac/coverage-7.11.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4d3ffa07a08657306cd2215b0da53761c4d73cb54d9143b9303a6481ec0cd415", size = 248221, upload-time = "2025-10-15T15:13:10.964Z" }, - { url = "https://files.pythonhosted.org/packages/d0/ff/3a25e3132804ba44cfa9a778cdf2b73dbbe63ef4b0945e39602fc896ba52/coverage-7.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a3b6a5f8b2524fd6c1066bc85bfd97e78709bb5e37b5b94911a6506b65f47186", size = 249624, upload-time = "2025-10-15T15:13:12.5Z" }, - { url = "https://files.pythonhosted.org/packages/c5/12/ff10c8ce3895e1b17a73485ea79ebc1896a9e466a9d0f4aef63e0d17b718/coverage-7.11.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fcc0a4aa589de34bc56e1a80a740ee0f8c47611bdfb28cd1849de60660f3799d", size = 247744, upload-time = "2025-10-15T15:13:14.554Z" }, - { url = "https://files.pythonhosted.org/packages/16/02/d500b91f5471b2975947e0629b8980e5e90786fe316b6d7299852c1d793d/coverage-7.11.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:dba82204769d78c3fd31b35c3d5f46e06511936c5019c39f98320e05b08f794d", size = 247325, upload-time = "2025-10-15T15:13:16.438Z" }, - { url = "https://files.pythonhosted.org/packages/77/11/dee0284fbbd9cd64cfce806b827452c6df3f100d9e66188e82dfe771d4af/coverage-7.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:81b335f03ba67309a95210caf3eb43bd6fe75a4e22ba653ef97b4696c56c7ec2", size = 249180, upload-time = "2025-10-15T15:13:17.959Z" }, - { url = "https://files.pythonhosted.org/packages/59/1b/cdf1def928f0a150a057cab03286774e73e29c2395f0d30ce3d9e9f8e697/coverage-7.11.0-cp312-cp312-win32.whl", hash = "sha256:037b2d064c2f8cc8716fe4d39cb705779af3fbf1ba318dc96a1af858888c7bb5", size = 218479, upload-time = "2025-10-15T15:13:19.608Z" }, - { url = 
"https://files.pythonhosted.org/packages/ff/55/e5884d55e031da9c15b94b90a23beccc9d6beee65e9835cd6da0a79e4f3a/coverage-7.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:d66c0104aec3b75e5fd897e7940188ea1892ca1d0235316bf89286d6a22568c0", size = 219290, upload-time = "2025-10-15T15:13:21.593Z" }, - { url = "https://files.pythonhosted.org/packages/23/a8/faa930cfc71c1d16bc78f9a19bb73700464f9c331d9e547bfbc1dbd3a108/coverage-7.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:d91ebeac603812a09cf6a886ba6e464f3bbb367411904ae3790dfe28311b15ad", size = 217924, upload-time = "2025-10-15T15:13:23.39Z" }, - { url = "https://files.pythonhosted.org/packages/60/7f/85e4dfe65e400645464b25c036a26ac226cf3a69d4a50c3934c532491cdd/coverage-7.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cc3f49e65ea6e0d5d9bd60368684fe52a704d46f9e7fc413918f18d046ec40e1", size = 216129, upload-time = "2025-10-15T15:13:25.371Z" }, - { url = "https://files.pythonhosted.org/packages/96/5d/dc5fa98fea3c175caf9d360649cb1aa3715e391ab00dc78c4c66fabd7356/coverage-7.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f39ae2f63f37472c17b4990f794035c9890418b1b8cca75c01193f3c8d3e01be", size = 216380, upload-time = "2025-10-15T15:13:26.976Z" }, - { url = "https://files.pythonhosted.org/packages/b2/f5/3da9cc9596708273385189289c0e4d8197d37a386bdf17619013554b3447/coverage-7.11.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7db53b5cdd2917b6eaadd0b1251cf4e7d96f4a8d24e174bdbdf2f65b5ea7994d", size = 247375, upload-time = "2025-10-15T15:13:28.923Z" }, - { url = "https://files.pythonhosted.org/packages/65/6c/f7f59c342359a235559d2bc76b0c73cfc4bac7d61bb0df210965cb1ecffd/coverage-7.11.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10ad04ac3a122048688387828b4537bc9cf60c0bf4869c1e9989c46e45690b82", size = 249978, upload-time = "2025-10-15T15:13:30.525Z" }, - { url = 
"https://files.pythonhosted.org/packages/e7/8c/042dede2e23525e863bf1ccd2b92689692a148d8b5fd37c37899ba882645/coverage-7.11.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4036cc9c7983a2b1f2556d574d2eb2154ac6ed55114761685657e38782b23f52", size = 251253, upload-time = "2025-10-15T15:13:32.174Z" }, - { url = "https://files.pythonhosted.org/packages/7b/a9/3c58df67bfa809a7bddd786356d9c5283e45d693edb5f3f55d0986dd905a/coverage-7.11.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7ab934dd13b1c5e94b692b1e01bd87e4488cb746e3a50f798cb9464fd128374b", size = 247591, upload-time = "2025-10-15T15:13:34.147Z" }, - { url = "https://files.pythonhosted.org/packages/26/5b/c7f32efd862ee0477a18c41e4761305de6ddd2d49cdeda0c1116227570fd/coverage-7.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59a6e5a265f7cfc05f76e3bb53eca2e0dfe90f05e07e849930fecd6abb8f40b4", size = 249411, upload-time = "2025-10-15T15:13:38.425Z" }, - { url = "https://files.pythonhosted.org/packages/76/b5/78cb4f1e86c1611431c990423ec0768122905b03837e1b4c6a6f388a858b/coverage-7.11.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:df01d6c4c81e15a7c88337b795bb7595a8596e92310266b5072c7e301168efbd", size = 247303, upload-time = "2025-10-15T15:13:40.464Z" }, - { url = "https://files.pythonhosted.org/packages/87/c9/23c753a8641a330f45f221286e707c427e46d0ffd1719b080cedc984ec40/coverage-7.11.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:8c934bd088eed6174210942761e38ee81d28c46de0132ebb1801dbe36a390dcc", size = 247157, upload-time = "2025-10-15T15:13:42.087Z" }, - { url = "https://files.pythonhosted.org/packages/c5/42/6e0cc71dc8a464486e944a4fa0d85bdec031cc2969e98ed41532a98336b9/coverage-7.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a03eaf7ec24078ad64a07f02e30060aaf22b91dedf31a6b24d0d98d2bba7f48", size = 248921, upload-time = "2025-10-15T15:13:43.715Z" }, - { url = 
"https://files.pythonhosted.org/packages/e8/1c/743c2ef665e6858cccb0f84377dfe3a4c25add51e8c7ef19249be92465b6/coverage-7.11.0-cp313-cp313-win32.whl", hash = "sha256:695340f698a5f56f795b2836abe6fb576e7c53d48cd155ad2f80fd24bc63a040", size = 218526, upload-time = "2025-10-15T15:13:45.336Z" }, - { url = "https://files.pythonhosted.org/packages/ff/d5/226daadfd1bf8ddbccefbd3aa3547d7b960fb48e1bdac124e2dd13a2b71a/coverage-7.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:2727d47fce3ee2bac648528e41455d1b0c46395a087a229deac75e9f88ba5a05", size = 219317, upload-time = "2025-10-15T15:13:47.401Z" }, - { url = "https://files.pythonhosted.org/packages/97/54/47db81dcbe571a48a298f206183ba8a7ba79200a37cd0d9f4788fcd2af4a/coverage-7.11.0-cp313-cp313-win_arm64.whl", hash = "sha256:0efa742f431529699712b92ecdf22de8ff198df41e43aeaaadf69973eb93f17a", size = 217948, upload-time = "2025-10-15T15:13:49.096Z" }, - { url = "https://files.pythonhosted.org/packages/e5/8b/cb68425420154e7e2a82fd779a8cc01549b6fa83c2ad3679cd6c088ebd07/coverage-7.11.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:587c38849b853b157706407e9ebdca8fd12f45869edb56defbef2daa5fb0812b", size = 216837, upload-time = "2025-10-15T15:13:51.09Z" }, - { url = "https://files.pythonhosted.org/packages/33/55/9d61b5765a025685e14659c8d07037247de6383c0385757544ffe4606475/coverage-7.11.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b971bdefdd75096163dd4261c74be813c4508477e39ff7b92191dea19f24cd37", size = 217061, upload-time = "2025-10-15T15:13:52.747Z" }, - { url = "https://files.pythonhosted.org/packages/52/85/292459c9186d70dcec6538f06ea251bc968046922497377bf4a1dc9a71de/coverage-7.11.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:269bfe913b7d5be12ab13a95f3a76da23cf147be7fa043933320ba5625f0a8de", size = 258398, upload-time = "2025-10-15T15:13:54.45Z" }, - { url = 
"https://files.pythonhosted.org/packages/1f/e2/46edd73fb8bf51446c41148d81944c54ed224854812b6ca549be25113ee0/coverage-7.11.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:dadbcce51a10c07b7c72b0ce4a25e4b6dcb0c0372846afb8e5b6307a121eb99f", size = 260574, upload-time = "2025-10-15T15:13:56.145Z" }, - { url = "https://files.pythonhosted.org/packages/07/5e/1df469a19007ff82e2ca8fe509822820a31e251f80ee7344c34f6cd2ec43/coverage-7.11.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9ed43fa22c6436f7957df036331f8fe4efa7af132054e1844918866cd228af6c", size = 262797, upload-time = "2025-10-15T15:13:58.635Z" }, - { url = "https://files.pythonhosted.org/packages/f9/50/de216b31a1434b94d9b34a964c09943c6be45069ec704bfc379d8d89a649/coverage-7.11.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9516add7256b6713ec08359b7b05aeff8850c98d357784c7205b2e60aa2513fa", size = 257361, upload-time = "2025-10-15T15:14:00.409Z" }, - { url = "https://files.pythonhosted.org/packages/82/1e/3f9f8344a48111e152e0fd495b6fff13cc743e771a6050abf1627a7ba918/coverage-7.11.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb92e47c92fcbcdc692f428da67db33337fa213756f7adb6a011f7b5a7a20740", size = 260349, upload-time = "2025-10-15T15:14:02.188Z" }, - { url = "https://files.pythonhosted.org/packages/65/9b/3f52741f9e7d82124272f3070bbe316006a7de1bad1093f88d59bfc6c548/coverage-7.11.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d06f4fc7acf3cabd6d74941d53329e06bab00a8fe10e4df2714f0b134bfc64ef", size = 258114, upload-time = "2025-10-15T15:14:03.907Z" }, - { url = "https://files.pythonhosted.org/packages/0b/8b/918f0e15f0365d50d3986bbd3338ca01178717ac5678301f3f547b6619e6/coverage-7.11.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:6fbcee1a8f056af07ecd344482f711f563a9eb1c2cad192e87df00338ec3cdb0", size = 256723, upload-time = "2025-10-15T15:14:06.324Z" }, - { url 
= "https://files.pythonhosted.org/packages/44/9e/7776829f82d3cf630878a7965a7d70cc6ca94f22c7d20ec4944f7148cb46/coverage-7.11.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dbbf012be5f32533a490709ad597ad8a8ff80c582a95adc8d62af664e532f9ca", size = 259238, upload-time = "2025-10-15T15:14:08.002Z" }, - { url = "https://files.pythonhosted.org/packages/9a/b8/49cf253e1e7a3bedb85199b201862dd7ca4859f75b6cf25ffa7298aa0760/coverage-7.11.0-cp313-cp313t-win32.whl", hash = "sha256:cee6291bb4fed184f1c2b663606a115c743df98a537c969c3c64b49989da96c2", size = 219180, upload-time = "2025-10-15T15:14:09.786Z" }, - { url = "https://files.pythonhosted.org/packages/ac/e1/1a541703826be7ae2125a0fb7f821af5729d56bb71e946e7b933cc7a89a4/coverage-7.11.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a386c1061bf98e7ea4758e4313c0ab5ecf57af341ef0f43a0bf26c2477b5c268", size = 220241, upload-time = "2025-10-15T15:14:11.471Z" }, - { url = "https://files.pythonhosted.org/packages/d5/d1/5ee0e0a08621140fd418ec4020f595b4d52d7eb429ae6a0c6542b4ba6f14/coverage-7.11.0-cp313-cp313t-win_arm64.whl", hash = "sha256:f9ea02ef40bb83823b2b04964459d281688fe173e20643870bb5d2edf68bc836", size = 218510, upload-time = "2025-10-15T15:14:13.46Z" }, - { url = "https://files.pythonhosted.org/packages/f4/06/e923830c1985ce808e40a3fa3eb46c13350b3224b7da59757d37b6ce12b8/coverage-7.11.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c770885b28fb399aaf2a65bbd1c12bf6f307ffd112d6a76c5231a94276f0c497", size = 216110, upload-time = "2025-10-15T15:14:15.157Z" }, - { url = "https://files.pythonhosted.org/packages/42/82/cdeed03bfead45203fb651ed756dfb5266028f5f939e7f06efac4041dad5/coverage-7.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a3d0e2087dba64c86a6b254f43e12d264b636a39e88c5cc0a01a7c71bcfdab7e", size = 216395, upload-time = "2025-10-15T15:14:16.863Z" }, - { url = 
"https://files.pythonhosted.org/packages/fc/ba/e1c80caffc3199aa699813f73ff097bc2df7b31642bdbc7493600a8f1de5/coverage-7.11.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:73feb83bb41c32811973b8565f3705caf01d928d972b72042b44e97c71fd70d1", size = 247433, upload-time = "2025-10-15T15:14:18.589Z" }, - { url = "https://files.pythonhosted.org/packages/80/c0/5b259b029694ce0a5bbc1548834c7ba3db41d3efd3474489d7efce4ceb18/coverage-7.11.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c6f31f281012235ad08f9a560976cc2fc9c95c17604ff3ab20120fe480169bca", size = 249970, upload-time = "2025-10-15T15:14:20.307Z" }, - { url = "https://files.pythonhosted.org/packages/8c/86/171b2b5e1aac7e2fd9b43f7158b987dbeb95f06d1fbecad54ad8163ae3e8/coverage-7.11.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9570ad567f880ef675673992222746a124b9595506826b210fbe0ce3f0499cd", size = 251324, upload-time = "2025-10-15T15:14:22.419Z" }, - { url = "https://files.pythonhosted.org/packages/1a/7e/7e10414d343385b92024af3932a27a1caf75c6e27ee88ba211221ff1a145/coverage-7.11.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8badf70446042553a773547a61fecaa734b55dc738cacf20c56ab04b77425e43", size = 247445, upload-time = "2025-10-15T15:14:24.205Z" }, - { url = "https://files.pythonhosted.org/packages/c4/3b/e4f966b21f5be8c4bf86ad75ae94efa0de4c99c7bbb8114476323102e345/coverage-7.11.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a09c1211959903a479e389685b7feb8a17f59ec5a4ef9afde7650bd5eabc2777", size = 249324, upload-time = "2025-10-15T15:14:26.234Z" }, - { url = "https://files.pythonhosted.org/packages/00/a2/8479325576dfcd909244d0df215f077f47437ab852ab778cfa2f8bf4d954/coverage-7.11.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:5ef83b107f50db3f9ae40f69e34b3bd9337456c5a7fe3461c7abf8b75dd666a2", size = 247261, upload-time = 
"2025-10-15T15:14:28.42Z" }, - { url = "https://files.pythonhosted.org/packages/7b/d8/3a9e2db19d94d65771d0f2e21a9ea587d11b831332a73622f901157cc24b/coverage-7.11.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:f91f927a3215b8907e214af77200250bb6aae36eca3f760f89780d13e495388d", size = 247092, upload-time = "2025-10-15T15:14:30.784Z" }, - { url = "https://files.pythonhosted.org/packages/b3/b1/bbca3c472544f9e2ad2d5116b2379732957048be4b93a9c543fcd0207e5f/coverage-7.11.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:cdbcd376716d6b7fbfeedd687a6c4be019c5a5671b35f804ba76a4c0a778cba4", size = 248755, upload-time = "2025-10-15T15:14:32.585Z" }, - { url = "https://files.pythonhosted.org/packages/89/49/638d5a45a6a0f00af53d6b637c87007eb2297042186334e9923a61aa8854/coverage-7.11.0-cp314-cp314-win32.whl", hash = "sha256:bab7ec4bb501743edc63609320aaec8cd9188b396354f482f4de4d40a9d10721", size = 218793, upload-time = "2025-10-15T15:14:34.972Z" }, - { url = "https://files.pythonhosted.org/packages/30/cc/b675a51f2d068adb3cdf3799212c662239b0ca27f4691d1fff81b92ea850/coverage-7.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:3d4ba9a449e9364a936a27322b20d32d8b166553bfe63059bd21527e681e2fad", size = 219587, upload-time = "2025-10-15T15:14:37.047Z" }, - { url = "https://files.pythonhosted.org/packages/93/98/5ac886876026de04f00820e5094fe22166b98dcb8b426bf6827aaf67048c/coverage-7.11.0-cp314-cp314-win_arm64.whl", hash = "sha256:ce37f215223af94ef0f75ac68ea096f9f8e8c8ec7d6e8c346ee45c0d363f0479", size = 218168, upload-time = "2025-10-15T15:14:38.861Z" }, - { url = "https://files.pythonhosted.org/packages/14/d1/b4145d35b3e3ecf4d917e97fc8895bcf027d854879ba401d9ff0f533f997/coverage-7.11.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:f413ce6e07e0d0dc9c433228727b619871532674b45165abafe201f200cc215f", size = 216850, upload-time = "2025-10-15T15:14:40.651Z" }, - { url = 
"https://files.pythonhosted.org/packages/ca/d1/7f645fc2eccd318369a8a9948acc447bb7c1ade2911e31d3c5620544c22b/coverage-7.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:05791e528a18f7072bf5998ba772fe29db4da1234c45c2087866b5ba4dea710e", size = 217071, upload-time = "2025-10-15T15:14:42.755Z" }, - { url = "https://files.pythonhosted.org/packages/54/7d/64d124649db2737ceced1dfcbdcb79898d5868d311730f622f8ecae84250/coverage-7.11.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cacb29f420cfeb9283b803263c3b9a068924474ff19ca126ba9103e1278dfa44", size = 258570, upload-time = "2025-10-15T15:14:44.542Z" }, - { url = "https://files.pythonhosted.org/packages/6c/3f/6f5922f80dc6f2d8b2c6f974835c43f53eb4257a7797727e6ca5b7b2ec1f/coverage-7.11.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314c24e700d7027ae3ab0d95fbf8d53544fca1f20345fd30cd219b737c6e58d3", size = 260738, upload-time = "2025-10-15T15:14:46.436Z" }, - { url = "https://files.pythonhosted.org/packages/0e/5f/9e883523c4647c860b3812b417a2017e361eca5b635ee658387dc11b13c1/coverage-7.11.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:630d0bd7a293ad2fc8b4b94e5758c8b2536fdf36c05f1681270203e463cbfa9b", size = 262994, upload-time = "2025-10-15T15:14:48.3Z" }, - { url = "https://files.pythonhosted.org/packages/07/bb/43b5a8e94c09c8bf51743ffc65c4c841a4ca5d3ed191d0a6919c379a1b83/coverage-7.11.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e89641f5175d65e2dbb44db15fe4ea48fade5d5bbb9868fdc2b4fce22f4a469d", size = 257282, upload-time = "2025-10-15T15:14:50.236Z" }, - { url = "https://files.pythonhosted.org/packages/aa/e5/0ead8af411411330b928733e1d201384b39251a5f043c1612970310e8283/coverage-7.11.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c9f08ea03114a637dab06cedb2e914da9dc67fa52c6015c018ff43fdde25b9c2", size = 260430, upload-time = 
"2025-10-15T15:14:52.413Z" }, - { url = "https://files.pythonhosted.org/packages/ae/66/03dd8bb0ba5b971620dcaac145461950f6d8204953e535d2b20c6b65d729/coverage-7.11.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ce9f3bde4e9b031eaf1eb61df95c1401427029ea1bfddb8621c1161dcb0fa02e", size = 258190, upload-time = "2025-10-15T15:14:54.268Z" }, - { url = "https://files.pythonhosted.org/packages/45/ae/28a9cce40bf3174426cb2f7e71ee172d98e7f6446dff936a7ccecee34b14/coverage-7.11.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:e4dc07e95495923d6fd4d6c27bf70769425b71c89053083843fd78f378558996", size = 256658, upload-time = "2025-10-15T15:14:56.436Z" }, - { url = "https://files.pythonhosted.org/packages/5c/7c/3a44234a8599513684bfc8684878fd7b126c2760f79712bb78c56f19efc4/coverage-7.11.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:424538266794db2861db4922b05d729ade0940ee69dcf0591ce8f69784db0e11", size = 259342, upload-time = "2025-10-15T15:14:58.538Z" }, - { url = "https://files.pythonhosted.org/packages/e1/e6/0108519cba871af0351725ebdb8660fd7a0fe2ba3850d56d32490c7d9b4b/coverage-7.11.0-cp314-cp314t-win32.whl", hash = "sha256:4c1eeb3fb8eb9e0190bebafd0462936f75717687117339f708f395fe455acc73", size = 219568, upload-time = "2025-10-15T15:15:00.382Z" }, - { url = "https://files.pythonhosted.org/packages/c9/76/44ba876e0942b4e62fdde23ccb029ddb16d19ba1bef081edd00857ba0b16/coverage-7.11.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b56efee146c98dbf2cf5cffc61b9829d1e94442df4d7398b26892a53992d3547", size = 220687, upload-time = "2025-10-15T15:15:02.322Z" }, - { url = "https://files.pythonhosted.org/packages/b9/0c/0df55ecb20d0d0ed5c322e10a441775e1a3a5d78c60f0c4e1abfe6fcf949/coverage-7.11.0-cp314-cp314t-win_arm64.whl", hash = "sha256:b5c2705afa83f49bd91962a4094b6b082f94aef7626365ab3f8f4bd159c5acf3", size = 218711, upload-time = "2025-10-15T15:15:04.575Z" }, - { url = 
"https://files.pythonhosted.org/packages/5f/04/642c1d8a448ae5ea1369eac8495740a79eb4e581a9fb0cbdce56bbf56da1/coverage-7.11.0-py3-none-any.whl", hash = "sha256:4b7589765348d78fb4e5fb6ea35d07564e387da2fc5efff62e0222971f155f68", size = 207761, upload-time = "2025-10-15T15:15:06.439Z" }, + { url = "https://files.pythonhosted.org/packages/69/33/e8c48488c29a73fd089f9d71f9653c1be7478f2ad6b5bc870db11a55d23d/coverage-7.13.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0723d2c96324561b9aa76fb982406e11d93cdb388a7a7da2b16e04719cf7ca5", size = 219255, upload-time = "2026-03-17T10:29:51.081Z" }, + { url = "https://files.pythonhosted.org/packages/da/bd/b0ebe9f677d7f4b74a3e115eec7ddd4bcf892074963a00d91e8b164a6386/coverage-7.13.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52f444e86475992506b32d4e5ca55c24fc88d73bcbda0e9745095b28ef4dc0cf", size = 219772, upload-time = "2026-03-17T10:29:52.867Z" }, + { url = "https://files.pythonhosted.org/packages/48/cc/5cb9502f4e01972f54eedd48218bb203fe81e294be606a2bc93970208013/coverage-7.13.5-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:704de6328e3d612a8f6c07000a878ff38181ec3263d5a11da1db294fa6a9bdf8", size = 246532, upload-time = "2026-03-17T10:29:54.688Z" }, + { url = "https://files.pythonhosted.org/packages/7d/d8/3217636d86c7e7b12e126e4f30ef1581047da73140614523af7495ed5f2d/coverage-7.13.5-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a1a6d79a14e1ec1832cabc833898636ad5f3754a678ef8bb4908515208bf84f4", size = 248333, upload-time = "2026-03-17T10:29:56.221Z" }, + { url = "https://files.pythonhosted.org/packages/2b/30/2002ac6729ba2d4357438e2ed3c447ad8562866c8c63fc16f6dfc33afe56/coverage-7.13.5-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79060214983769c7ba3f0cee10b54c97609dca4d478fa1aa32b914480fd5738d", size = 250211, upload-time = "2026-03-17T10:29:57.938Z" }, + { url = 
"https://files.pythonhosted.org/packages/6c/85/552496626d6b9359eb0e2f86f920037c9cbfba09b24d914c6e1528155f7d/coverage-7.13.5-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:356e76b46783a98c2a2fe81ec79df4883a1e62895ea952968fb253c114e7f930", size = 252125, upload-time = "2026-03-17T10:29:59.388Z" }, + { url = "https://files.pythonhosted.org/packages/44/21/40256eabdcbccdb6acf6b381b3016a154399a75fe39d406f790ae84d1f3c/coverage-7.13.5-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0cef0cdec915d11254a7f549c1170afecce708d30610c6abdded1f74e581666d", size = 247219, upload-time = "2026-03-17T10:30:01.199Z" }, + { url = "https://files.pythonhosted.org/packages/b1/e8/96e2a6c3f21a0ea77d7830b254a1542d0328acc8d7bdf6a284ba7e529f77/coverage-7.13.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:dc022073d063b25a402454e5712ef9e007113e3a676b96c5f29b2bda29352f40", size = 248248, upload-time = "2026-03-17T10:30:03.317Z" }, + { url = "https://files.pythonhosted.org/packages/da/ba/8477f549e554827da390ec659f3c38e4b6d95470f4daafc2d8ff94eaa9c2/coverage-7.13.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9b74db26dfea4f4e50d48a4602207cd1e78be33182bc9cbf22da94f332f99878", size = 246254, upload-time = "2026-03-17T10:30:04.832Z" }, + { url = "https://files.pythonhosted.org/packages/55/59/bc22aef0e6aa179d5b1b001e8b3654785e9adf27ef24c93dc4228ebd5d68/coverage-7.13.5-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ad146744ca4fd09b50c482650e3c1b1f4dfa1d4792e0a04a369c7f23336f0400", size = 250067, upload-time = "2026-03-17T10:30:06.535Z" }, + { url = "https://files.pythonhosted.org/packages/de/1b/c6a023a160806a5137dca53468fd97530d6acad24a22003b1578a9c2e429/coverage-7.13.5-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:c555b48be1853fe3997c11c4bd521cdd9a9612352de01fa4508f16ec341e6fe0", size = 246521, upload-time = "2026-03-17T10:30:08.486Z" }, + { url = 
"https://files.pythonhosted.org/packages/2d/3f/3532c85a55aa2f899fa17c186f831cfa1aa434d88ff792a709636f64130e/coverage-7.13.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7034b5c56a58ae5e85f23949d52c14aca2cfc6848a31764995b7de88f13a1ea0", size = 247126, upload-time = "2026-03-17T10:30:09.966Z" }, + { url = "https://files.pythonhosted.org/packages/aa/2e/b9d56af4a24ef45dfbcda88e06870cb7d57b2b0bfa3a888d79b4c8debd76/coverage-7.13.5-cp310-cp310-win32.whl", hash = "sha256:eb7fdf1ef130660e7415e0253a01a7d5a88c9c4d158bcf75cbbd922fd65a5b58", size = 221860, upload-time = "2026-03-17T10:30:11.393Z" }, + { url = "https://files.pythonhosted.org/packages/9f/cc/d938417e7a4d7f0433ad4edee8bb2acdc60dc7ac5af19e2a07a048ecbee3/coverage-7.13.5-cp310-cp310-win_amd64.whl", hash = "sha256:3e1bb5f6c78feeb1be3475789b14a0f0a5b47d505bfc7267126ccbd50289999e", size = 222788, upload-time = "2026-03-17T10:30:12.886Z" }, + { url = "https://files.pythonhosted.org/packages/4b/37/d24c8f8220ff07b839b2c043ea4903a33b0f455abe673ae3c03bbdb7f212/coverage-7.13.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66a80c616f80181f4d643b0f9e709d97bcea413ecd9631e1dedc7401c8e6695d", size = 219381, upload-time = "2026-03-17T10:30:14.68Z" }, + { url = "https://files.pythonhosted.org/packages/35/8b/cd129b0ca4afe886a6ce9d183c44d8301acbd4ef248622e7c49a23145605/coverage-7.13.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:145ede53ccbafb297c1c9287f788d1bc3efd6c900da23bf6931b09eafc931587", size = 219880, upload-time = "2026-03-17T10:30:16.231Z" }, + { url = "https://files.pythonhosted.org/packages/55/2f/e0e5b237bffdb5d6c530ce87cc1d413a5b7d7dfd60fb067ad6d254c35c76/coverage-7.13.5-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0672854dc733c342fa3e957e0605256d2bf5934feeac328da9e0b5449634a642", size = 250303, upload-time = "2026-03-17T10:30:17.748Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/be/b1afb692be85b947f3401375851484496134c5554e67e822c35f28bf2fbc/coverage-7.13.5-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ec10e2a42b41c923c2209b846126c6582db5e43a33157e9870ba9fb70dc7854b", size = 252218, upload-time = "2026-03-17T10:30:19.804Z" }, + { url = "https://files.pythonhosted.org/packages/da/69/2f47bb6fa1b8d1e3e5d0c4be8ccb4313c63d742476a619418f85740d597b/coverage-7.13.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be3d4bbad9d4b037791794ddeedd7d64a56f5933a2c1373e18e9e568b9141686", size = 254326, upload-time = "2026-03-17T10:30:21.321Z" }, + { url = "https://files.pythonhosted.org/packages/d5/d0/79db81da58965bd29dabc8f4ad2a2af70611a57cba9d1ec006f072f30a54/coverage-7.13.5-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4d2afbc5cc54d286bfb54541aa50b64cdb07a718227168c87b9e2fb8f25e1743", size = 256267, upload-time = "2026-03-17T10:30:23.094Z" }, + { url = "https://files.pythonhosted.org/packages/e5/32/d0d7cc8168f91ddab44c0ce4806b969df5f5fdfdbb568eaca2dbc2a04936/coverage-7.13.5-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3ad050321264c49c2fa67bb599100456fc51d004b82534f379d16445da40fb75", size = 250430, upload-time = "2026-03-17T10:30:25.311Z" }, + { url = "https://files.pythonhosted.org/packages/4d/06/a055311d891ddbe231cd69fdd20ea4be6e3603ffebddf8704b8ca8e10a3c/coverage-7.13.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7300c8a6d13335b29bb76d7651c66af6bd8658517c43499f110ddc6717bfc209", size = 252017, upload-time = "2026-03-17T10:30:27.284Z" }, + { url = "https://files.pythonhosted.org/packages/d6/f6/d0fd2d21e29a657b5f77a2fe7082e1568158340dceb941954f776dce1b7b/coverage-7.13.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:eb07647a5738b89baab047f14edd18ded523de60f3b30e75c2acc826f79c839a", size = 250080, upload-time 
= "2026-03-17T10:30:29.481Z" }, + { url = "https://files.pythonhosted.org/packages/4e/ab/0d7fb2efc2e9a5eb7ddcc6e722f834a69b454b7e6e5888c3a8567ecffb31/coverage-7.13.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:9adb6688e3b53adffefd4a52d72cbd8b02602bfb8f74dcd862337182fd4d1a4e", size = 253843, upload-time = "2026-03-17T10:30:31.301Z" }, + { url = "https://files.pythonhosted.org/packages/ba/6f/7467b917bbf5408610178f62a49c0ed4377bb16c1657f689cc61470da8ce/coverage-7.13.5-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7c8d4bc913dd70b93488d6c496c77f3aff5ea99a07e36a18f865bca55adef8bd", size = 249802, upload-time = "2026-03-17T10:30:33.358Z" }, + { url = "https://files.pythonhosted.org/packages/75/2c/1172fb689df92135f5bfbbd69fc83017a76d24ea2e2f3a1154007e2fb9f8/coverage-7.13.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0e3c426ffc4cd952f54ee9ffbdd10345709ecc78a3ecfd796a57236bfad0b9b8", size = 250707, upload-time = "2026-03-17T10:30:35.2Z" }, + { url = "https://files.pythonhosted.org/packages/67/21/9ac389377380a07884e3b48ba7a620fcd9dbfaf1d40565facdc6b36ec9ef/coverage-7.13.5-cp311-cp311-win32.whl", hash = "sha256:259b69bb83ad9894c4b25be2528139eecba9a82646ebdda2d9db1ba28424a6bf", size = 221880, upload-time = "2026-03-17T10:30:36.775Z" }, + { url = "https://files.pythonhosted.org/packages/af/7f/4cd8a92531253f9d7c1bbecd9fa1b472907fb54446ca768c59b531248dc5/coverage-7.13.5-cp311-cp311-win_amd64.whl", hash = "sha256:258354455f4e86e3e9d0d17571d522e13b4e1e19bf0f8596bcf9476d61e7d8a9", size = 222816, upload-time = "2026-03-17T10:30:38.891Z" }, + { url = "https://files.pythonhosted.org/packages/12/a6/1d3f6155fb0010ca68eba7fe48ca6c9da7385058b77a95848710ecf189b1/coverage-7.13.5-cp311-cp311-win_arm64.whl", hash = "sha256:bff95879c33ec8da99fc9b6fe345ddb5be6414b41d6d1ad1c8f188d26f36e028", size = 221483, upload-time = "2026-03-17T10:30:40.463Z" }, + { url = 
"https://files.pythonhosted.org/packages/a0/c3/a396306ba7db865bf96fc1fb3b7fd29bcbf3d829df642e77b13555163cd6/coverage-7.13.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:460cf0114c5016fa841214ff5564aa4864f11948da9440bc97e21ad1f4ba1e01", size = 219554, upload-time = "2026-03-17T10:30:42.208Z" }, + { url = "https://files.pythonhosted.org/packages/a6/16/a68a19e5384e93f811dccc51034b1fd0b865841c390e3c931dcc4699e035/coverage-7.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0e223ce4b4ed47f065bfb123687686512e37629be25cc63728557ae7db261422", size = 219908, upload-time = "2026-03-17T10:30:43.906Z" }, + { url = "https://files.pythonhosted.org/packages/29/72/20b917c6793af3a5ceb7fb9c50033f3ec7865f2911a1416b34a7cfa0813b/coverage-7.13.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6e3370441f4513c6252bf042b9c36d22491142385049243253c7e48398a15a9f", size = 251419, upload-time = "2026-03-17T10:30:45.545Z" }, + { url = "https://files.pythonhosted.org/packages/8c/49/cd14b789536ac6a4778c453c6a2338bc0a2fb60c5a5a41b4008328b9acc1/coverage-7.13.5-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:03ccc709a17a1de074fb1d11f217342fb0d2b1582ed544f554fc9fc3f07e95f5", size = 254159, upload-time = "2026-03-17T10:30:47.204Z" }, + { url = "https://files.pythonhosted.org/packages/9d/00/7b0edcfe64e2ed4c0340dac14a52ad0f4c9bd0b8b5e531af7d55b703db7c/coverage-7.13.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3f4818d065964db3c1c66dc0fbdac5ac692ecbc875555e13374fdbe7eedb4376", size = 255270, upload-time = "2026-03-17T10:30:48.812Z" }, + { url = "https://files.pythonhosted.org/packages/93/89/7ffc4ba0f5d0a55c1e84ea7cee39c9fc06af7b170513d83fbf3bbefce280/coverage-7.13.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:012d5319e66e9d5a218834642d6c35d265515a62f01157a45bcc036ecf947256", size = 257538, 
upload-time = "2026-03-17T10:30:50.77Z" }, + { url = "https://files.pythonhosted.org/packages/81/bd/73ddf85f93f7e6fa83e77ccecb6162d9415c79007b4bc124008a4995e4a7/coverage-7.13.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8dd02af98971bdb956363e4827d34425cb3df19ee550ef92855b0acb9c7ce51c", size = 251821, upload-time = "2026-03-17T10:30:52.5Z" }, + { url = "https://files.pythonhosted.org/packages/a0/81/278aff4e8dec4926a0bcb9486320752811f543a3ce5b602cc7a29978d073/coverage-7.13.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f08fd75c50a760c7eb068ae823777268daaf16a80b918fa58eea888f8e3919f5", size = 253191, upload-time = "2026-03-17T10:30:54.543Z" }, + { url = "https://files.pythonhosted.org/packages/70/ee/fe1621488e2e0a58d7e94c4800f0d96f79671553488d401a612bebae324b/coverage-7.13.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:843ea8643cf967d1ac7e8ecd4bb00c99135adf4816c0c0593fdcc47b597fcf09", size = 251337, upload-time = "2026-03-17T10:30:56.663Z" }, + { url = "https://files.pythonhosted.org/packages/37/a6/f79fb37aa104b562207cc23cb5711ab6793608e246cae1e93f26b2236ed9/coverage-7.13.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:9d44d7aa963820b1b971dbecd90bfe5fe8f81cff79787eb6cca15750bd2f79b9", size = 255404, upload-time = "2026-03-17T10:30:58.427Z" }, + { url = "https://files.pythonhosted.org/packages/75/f0/ed15262a58ec81ce457ceb717b7f78752a1713556b19081b76e90896e8d4/coverage-7.13.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:7132bed4bd7b836200c591410ae7d97bf7ae8be6fc87d160b2bd881df929e7bf", size = 250903, upload-time = "2026-03-17T10:31:00.093Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e9/9129958f20e7e9d4d56d51d42ccf708d15cac355ff4ac6e736e97a9393d2/coverage-7.13.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a698e363641b98843c517817db75373c83254781426e94ada3197cabbc2c919c", size = 252780, upload-time = "2026-03-17T10:31:01.916Z" }, + { url = 
"https://files.pythonhosted.org/packages/a4/d7/0ad9b15812d81272db94379fe4c6df8fd17781cc7671fdfa30c76ba5ff7b/coverage-7.13.5-cp312-cp312-win32.whl", hash = "sha256:bdba0a6b8812e8c7df002d908a9a2ea3c36e92611b5708633c50869e6d922fdf", size = 222093, upload-time = "2026-03-17T10:31:03.642Z" }, + { url = "https://files.pythonhosted.org/packages/29/3d/821a9a5799fac2556bcf0bd37a70d1d11fa9e49784b6d22e92e8b2f85f18/coverage-7.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:d2c87e0c473a10bffe991502eac389220533024c8082ec1ce849f4218dded810", size = 222900, upload-time = "2026-03-17T10:31:05.651Z" }, + { url = "https://files.pythonhosted.org/packages/d4/fa/2238c2ad08e35cf4f020ea721f717e09ec3152aea75d191a7faf3ef009a8/coverage-7.13.5-cp312-cp312-win_arm64.whl", hash = "sha256:bf69236a9a81bdca3bff53796237aab096cdbf8d78a66ad61e992d9dac7eb2de", size = 221515, upload-time = "2026-03-17T10:31:07.293Z" }, + { url = "https://files.pythonhosted.org/packages/74/8c/74fedc9663dcf168b0a059d4ea756ecae4da77a489048f94b5f512a8d0b3/coverage-7.13.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ec4af212df513e399cf11610cc27063f1586419e814755ab362e50a85ea69c1", size = 219576, upload-time = "2026-03-17T10:31:09.045Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c9/44fb661c55062f0818a6ffd2685c67aa30816200d5f2817543717d4b92eb/coverage-7.13.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:941617e518602e2d64942c88ec8499f7fbd49d3f6c4327d3a71d43a1973032f3", size = 219942, upload-time = "2026-03-17T10:31:10.708Z" }, + { url = "https://files.pythonhosted.org/packages/5f/13/93419671cee82b780bab7ea96b67c8ef448f5f295f36bf5031154ec9a790/coverage-7.13.5-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:da305e9937617ee95c2e39d8ff9f040e0487cbf1ac174f777ed5eddd7a7c1f26", size = 250935, upload-time = "2026-03-17T10:31:12.392Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/68/1666e3a4462f8202d836920114fa7a5ee9275d1fa45366d336c551a162dd/coverage-7.13.5-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:78e696e1cc714e57e8b25760b33a8b1026b7048d270140d25dafe1b0a1ee05a3", size = 253541, upload-time = "2026-03-17T10:31:14.247Z" }, + { url = "https://files.pythonhosted.org/packages/4e/5e/3ee3b835647be646dcf3c65a7c6c18f87c27326a858f72ab22c12730773d/coverage-7.13.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:02ca0eed225b2ff301c474aeeeae27d26e2537942aa0f87491d3e147e784a82b", size = 254780, upload-time = "2026-03-17T10:31:16.193Z" }, + { url = "https://files.pythonhosted.org/packages/44/b3/cb5bd1a04cfcc49ede6cd8409d80bee17661167686741e041abc7ee1b9a9/coverage-7.13.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:04690832cbea4e4663d9149e05dba142546ca05cb1848816760e7f58285c970a", size = 256912, upload-time = "2026-03-17T10:31:17.89Z" }, + { url = "https://files.pythonhosted.org/packages/1b/66/c1dceb7b9714473800b075f5c8a84f4588f887a90eb8645282031676e242/coverage-7.13.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0590e44dd2745c696a778f7bab6aa95256de2cbc8b8cff4f7db8ff09813d6969", size = 251165, upload-time = "2026-03-17T10:31:19.605Z" }, + { url = "https://files.pythonhosted.org/packages/b7/62/5502b73b97aa2e53ea22a39cf8649ff44827bef76d90bf638777daa27a9d/coverage-7.13.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d7cfad2d6d81dd298ab6b89fe72c3b7b05ec7544bdda3b707ddaecff8d25c161", size = 252908, upload-time = "2026-03-17T10:31:21.312Z" }, + { url = "https://files.pythonhosted.org/packages/7d/37/7792c2d69854397ca77a55c4646e5897c467928b0e27f2d235d83b5d08c6/coverage-7.13.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e092b9499de38ae0fbfbc603a74660eb6ff3e869e507b50d85a13b6db9863e15", size = 250873, upload-time 
= "2026-03-17T10:31:23.565Z" }, + { url = "https://files.pythonhosted.org/packages/a3/23/bc866fb6163be52a8a9e5d708ba0d3b1283c12158cefca0a8bbb6e247a43/coverage-7.13.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:48c39bc4a04d983a54a705a6389512883d4a3b9862991b3617d547940e9f52b1", size = 255030, upload-time = "2026-03-17T10:31:25.58Z" }, + { url = "https://files.pythonhosted.org/packages/7d/8b/ef67e1c222ef49860701d346b8bbb70881bef283bd5f6cbba68a39a086c7/coverage-7.13.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2d3807015f138ffea1ed9afeeb8624fd781703f2858b62a8dd8da5a0994c57b6", size = 250694, upload-time = "2026-03-17T10:31:27.316Z" }, + { url = "https://files.pythonhosted.org/packages/46/0d/866d1f74f0acddbb906db212e096dee77a8e2158ca5e6bb44729f9d93298/coverage-7.13.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee2aa19e03161671ec964004fb74b2257805d9710bf14a5c704558b9d8dbaf17", size = 252469, upload-time = "2026-03-17T10:31:29.472Z" }, + { url = "https://files.pythonhosted.org/packages/7a/f5/be742fec31118f02ce42b21c6af187ad6a344fed546b56ca60caacc6a9a0/coverage-7.13.5-cp313-cp313-win32.whl", hash = "sha256:ce1998c0483007608c8382f4ff50164bfc5bd07a2246dd272aa4043b75e61e85", size = 222112, upload-time = "2026-03-17T10:31:31.526Z" }, + { url = "https://files.pythonhosted.org/packages/66/40/7732d648ab9d069a46e686043241f01206348e2bbf128daea85be4d6414b/coverage-7.13.5-cp313-cp313-win_amd64.whl", hash = "sha256:631efb83f01569670a5e866ceb80fe483e7c159fac6f167e6571522636104a0b", size = 222923, upload-time = "2026-03-17T10:31:33.633Z" }, + { url = "https://files.pythonhosted.org/packages/48/af/fea819c12a095781f6ccd504890aaddaf88b8fab263c4940e82c7b770124/coverage-7.13.5-cp313-cp313-win_arm64.whl", hash = "sha256:f4cd16206ad171cbc2470dbea9103cf9a7607d5fe8c242fdf1edf36174020664", size = 221540, upload-time = "2026-03-17T10:31:35.445Z" }, + { url = 
"https://files.pythonhosted.org/packages/23/d2/17879af479df7fbbd44bd528a31692a48f6b25055d16482fdf5cdb633805/coverage-7.13.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0428cbef5783ad91fe240f673cc1f76b25e74bbfe1a13115e4aa30d3f538162d", size = 220262, upload-time = "2026-03-17T10:31:37.184Z" }, + { url = "https://files.pythonhosted.org/packages/5b/4c/d20e554f988c8f91d6a02c5118f9abbbf73a8768a3048cb4962230d5743f/coverage-7.13.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e0b216a19534b2427cc201a26c25da4a48633f29a487c61258643e89d28200c0", size = 220617, upload-time = "2026-03-17T10:31:39.245Z" }, + { url = "https://files.pythonhosted.org/packages/29/9c/f9f5277b95184f764b24e7231e166dfdb5780a46d408a2ac665969416d61/coverage-7.13.5-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:972a9cd27894afe4bc2b1480107054e062df08e671df7c2f18c205e805ccd806", size = 261912, upload-time = "2026-03-17T10:31:41.324Z" }, + { url = "https://files.pythonhosted.org/packages/d5/f6/7f1ab39393eeb50cfe4747ae8ef0e4fc564b989225aa1152e13a180d74f8/coverage-7.13.5-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4b59148601efcd2bac8c4dbf1f0ad6391693ccf7a74b8205781751637076aee3", size = 263987, upload-time = "2026-03-17T10:31:43.724Z" }, + { url = "https://files.pythonhosted.org/packages/a0/d7/62c084fb489ed9c6fbdf57e006752e7c516ea46fd690e5ed8b8617c7d52e/coverage-7.13.5-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:505d7083c8b0c87a8fa8c07370c285847c1f77739b22e299ad75a6af6c32c5c9", size = 266416, upload-time = "2026-03-17T10:31:45.769Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f6/df63d8660e1a0bff6125947afda112a0502736f470d62ca68b288ea762d8/coverage-7.13.5-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:60365289c3741e4db327e7baff2a4aaacf22f788e80fa4683393891b70a89fbd", size = 267558, 
upload-time = "2026-03-17T10:31:48.293Z" }, + { url = "https://files.pythonhosted.org/packages/5b/02/353ca81d36779bd108f6d384425f7139ac3c58c750dcfaafe5d0bee6436b/coverage-7.13.5-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1b88c69c8ef5d4b6fe7dea66d6636056a0f6a7527c440e890cf9259011f5e606", size = 261163, upload-time = "2026-03-17T10:31:50.125Z" }, + { url = "https://files.pythonhosted.org/packages/2c/16/2e79106d5749bcaf3aee6d309123548e3276517cd7851faa8da213bc61bf/coverage-7.13.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5b13955d31d1633cf9376908089b7cebe7d15ddad7aeaabcbe969a595a97e95e", size = 263981, upload-time = "2026-03-17T10:31:51.961Z" }, + { url = "https://files.pythonhosted.org/packages/29/c7/c29e0c59ffa6942030ae6f50b88ae49988e7e8da06de7ecdbf49c6d4feae/coverage-7.13.5-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:f70c9ab2595c56f81a89620e22899eea8b212a4041bd728ac6f4a28bf5d3ddd0", size = 261604, upload-time = "2026-03-17T10:31:53.872Z" }, + { url = "https://files.pythonhosted.org/packages/40/48/097cdc3db342f34006a308ab41c3a7c11c3f0d84750d340f45d88a782e00/coverage-7.13.5-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:084b84a8c63e8d6fc7e3931b316a9bcafca1458d753c539db82d31ed20091a87", size = 265321, upload-time = "2026-03-17T10:31:55.997Z" }, + { url = "https://files.pythonhosted.org/packages/bb/1f/4994af354689e14fd03a75f8ec85a9a68d94e0188bbdab3fc1516b55e512/coverage-7.13.5-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:ad14385487393e386e2ea988b09d62dd42c397662ac2dabc3832d71253eee479", size = 260502, upload-time = "2026-03-17T10:31:58.308Z" }, + { url = "https://files.pythonhosted.org/packages/22/c6/9bb9ef55903e628033560885f5c31aa227e46878118b63ab15dc7ba87797/coverage-7.13.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:7f2c47b36fe7709a6e83bfadf4eefb90bd25fbe4014d715224c4316f808e59a2", size = 262688, upload-time = "2026-03-17T10:32:00.141Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/4f/f5df9007e50b15e53e01edea486814783a7f019893733d9e4d6caad75557/coverage-7.13.5-cp313-cp313t-win32.whl", hash = "sha256:67e9bc5449801fad0e5dff329499fb090ba4c5800b86805c80617b4e29809b2a", size = 222788, upload-time = "2026-03-17T10:32:02.246Z" }, + { url = "https://files.pythonhosted.org/packages/e1/98/aa7fccaa97d0f3192bec013c4e6fd6d294a6ed44b640e6bb61f479e00ed5/coverage-7.13.5-cp313-cp313t-win_amd64.whl", hash = "sha256:da86cdcf10d2519e10cabb8ac2de03da1bcb6e4853790b7fbd48523332e3a819", size = 223851, upload-time = "2026-03-17T10:32:04.416Z" }, + { url = "https://files.pythonhosted.org/packages/3d/8b/e5c469f7352651e5f013198e9e21f97510b23de957dd06a84071683b4b60/coverage-7.13.5-cp313-cp313t-win_arm64.whl", hash = "sha256:0ecf12ecb326fe2c339d93fc131816f3a7367d223db37817208905c89bded911", size = 222104, upload-time = "2026-03-17T10:32:06.65Z" }, + { url = "https://files.pythonhosted.org/packages/8e/77/39703f0d1d4b478bfd30191d3c14f53caf596fac00efb3f8f6ee23646439/coverage-7.13.5-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:fbabfaceaeb587e16f7008f7795cd80d20ec548dc7f94fbb0d4ec2e038ce563f", size = 219621, upload-time = "2026-03-17T10:32:08.589Z" }, + { url = "https://files.pythonhosted.org/packages/e2/3e/51dff36d99ae14639a133d9b164d63e628532e2974d8b1edb99dd1ebc733/coverage-7.13.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9bb2a28101a443669a423b665939381084412b81c3f8c0fcfbac57f4e30b5b8e", size = 219953, upload-time = "2026-03-17T10:32:10.507Z" }, + { url = "https://files.pythonhosted.org/packages/6a/6c/1f1917b01eb647c2f2adc9962bd66c79eb978951cab61bdc1acab3290c07/coverage-7.13.5-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bd3a2fbc1c6cccb3c5106140d87cc6a8715110373ef42b63cf5aea29df8c217a", size = 250992, upload-time = "2026-03-17T10:32:12.41Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/e5/06b1f88f42a5a99df42ce61208bdec3bddb3d261412874280a19796fc09c/coverage-7.13.5-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6c36ddb64ed9d7e496028d1d00dfec3e428e0aabf4006583bb1839958d280510", size = 253503, upload-time = "2026-03-17T10:32:14.449Z" }, + { url = "https://files.pythonhosted.org/packages/80/28/2a148a51e5907e504fa7b85490277734e6771d8844ebcc48764a15e28155/coverage-7.13.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:380e8e9084d8eb38db3a9176a1a4f3c0082c3806fa0dc882d1d87abc3c789247", size = 254852, upload-time = "2026-03-17T10:32:16.56Z" }, + { url = "https://files.pythonhosted.org/packages/61/77/50e8d3d85cc0b7ebe09f30f151d670e302c7ff4a1bf6243f71dd8b0981fa/coverage-7.13.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e808af52a0513762df4d945ea164a24b37f2f518cbe97e03deaa0ee66139b4d6", size = 257161, upload-time = "2026-03-17T10:32:19.004Z" }, + { url = "https://files.pythonhosted.org/packages/3b/c4/b5fd1d4b7bf8d0e75d997afd3925c59ba629fc8616f1b3aae7605132e256/coverage-7.13.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e301d30dd7e95ae068671d746ba8c34e945a82682e62918e41b2679acd2051a0", size = 251021, upload-time = "2026-03-17T10:32:21.344Z" }, + { url = "https://files.pythonhosted.org/packages/f8/66/6ea21f910e92d69ef0b1c3346ea5922a51bad4446c9126db2ae96ee24c4c/coverage-7.13.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:800bc829053c80d240a687ceeb927a94fd108bbdc68dfbe505d0d75ab578a882", size = 252858, upload-time = "2026-03-17T10:32:23.506Z" }, + { url = "https://files.pythonhosted.org/packages/9e/ea/879c83cb5d61aa2a35fb80e72715e92672daef8191b84911a643f533840c/coverage-7.13.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:0b67af5492adb31940ee418a5a655c28e48165da5afab8c7fa6fd72a142f8740", size = 250823, upload-time 
= "2026-03-17T10:32:25.516Z" }, + { url = "https://files.pythonhosted.org/packages/8a/fb/616d95d3adb88b9803b275580bdeee8bd1b69a886d057652521f83d7322f/coverage-7.13.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c9136ff29c3a91e25b1d1552b5308e53a1e0653a23e53b6366d7c2dcbbaf8a16", size = 255099, upload-time = "2026-03-17T10:32:27.944Z" }, + { url = "https://files.pythonhosted.org/packages/1c/93/25e6917c90ec1c9a56b0b26f6cad6408e5f13bb6b35d484a0d75c9cf000d/coverage-7.13.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:cff784eef7f0b8f6cb28804fbddcfa99f89efe4cc35fb5627e3ac58f91ed3ac0", size = 250638, upload-time = "2026-03-17T10:32:29.914Z" }, + { url = "https://files.pythonhosted.org/packages/fc/7b/dc1776b0464145a929deed214aef9fb1493f159b59ff3c7eeeedf91eddd0/coverage-7.13.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:68a4953be99b17ac3c23b6efbc8a38330d99680c9458927491d18700ef23ded0", size = 252295, upload-time = "2026-03-17T10:32:31.981Z" }, + { url = "https://files.pythonhosted.org/packages/ea/fb/99cbbc56a26e07762a2740713f3c8f9f3f3106e3a3dd8cc4474954bccd34/coverage-7.13.5-cp314-cp314-win32.whl", hash = "sha256:35a31f2b1578185fbe6aa2e74cea1b1d0bbf4c552774247d9160d29b80ed56cc", size = 222360, upload-time = "2026-03-17T10:32:34.233Z" }, + { url = "https://files.pythonhosted.org/packages/8d/b7/4758d4f73fb536347cc5e4ad63662f9d60ba9118cb6785e9616b2ce5d7fa/coverage-7.13.5-cp314-cp314-win_amd64.whl", hash = "sha256:2aa055ae1857258f9e0045be26a6d62bdb47a72448b62d7b55f4820f361a2633", size = 223174, upload-time = "2026-03-17T10:32:36.369Z" }, + { url = "https://files.pythonhosted.org/packages/2c/f2/24d84e1dfe70f8ac9fdf30d338239860d0d1d5da0bda528959d0ebc9da28/coverage-7.13.5-cp314-cp314-win_arm64.whl", hash = "sha256:1b11eef33edeae9d142f9b4358edb76273b3bfd30bc3df9a4f95d0e49caf94e8", size = 221739, upload-time = "2026-03-17T10:32:38.736Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/5b/4a168591057b3668c2428bff25dd3ebc21b629d666d90bcdfa0217940e84/coverage-7.13.5-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:10a0c37f0b646eaff7cce1874c31d1f1ccb297688d4c747291f4f4c70741cc8b", size = 220351, upload-time = "2026-03-17T10:32:41.196Z" }, + { url = "https://files.pythonhosted.org/packages/f5/21/1fd5c4dbfe4a58b6b99649125635df46decdfd4a784c3cd6d410d303e370/coverage-7.13.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b5db73ba3c41c7008037fa731ad5459fc3944cb7452fc0aa9f822ad3533c583c", size = 220612, upload-time = "2026-03-17T10:32:43.204Z" }, + { url = "https://files.pythonhosted.org/packages/d6/fe/2a924b3055a5e7e4512655a9d4609781b0d62334fa0140c3e742926834e2/coverage-7.13.5-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:750db93a81e3e5a9831b534be7b1229df848b2e125a604fe6651e48aa070e5f9", size = 261985, upload-time = "2026-03-17T10:32:45.514Z" }, + { url = "https://files.pythonhosted.org/packages/d7/0d/c8928f2bd518c45990fe1a2ab8db42e914ef9b726c975facc4282578c3eb/coverage-7.13.5-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ddb4f4a5479f2539644be484da179b653273bca1a323947d48ab107b3ed1f29", size = 264107, upload-time = "2026-03-17T10:32:47.971Z" }, + { url = "https://files.pythonhosted.org/packages/ef/ae/4ae35bbd9a0af9d820362751f0766582833c211224b38665c0f8de3d487f/coverage-7.13.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8a7a2049c14f413163e2bdabd37e41179b1d1ccb10ffc6ccc4b7a718429c607", size = 266513, upload-time = "2026-03-17T10:32:50.1Z" }, + { url = "https://files.pythonhosted.org/packages/9c/20/d326174c55af36f74eac6ae781612d9492f060ce8244b570bb9d50d9d609/coverage-7.13.5-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1c85e0b6c05c592ea6d8768a66a254bfb3874b53774b12d4c89c481eb78cb90", size = 267650, 
upload-time = "2026-03-17T10:32:52.391Z" }, + { url = "https://files.pythonhosted.org/packages/7a/5e/31484d62cbd0eabd3412e30d74386ece4a0837d4f6c3040a653878bfc019/coverage-7.13.5-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:777c4d1eff1b67876139d24288aaf1817f6c03d6bae9c5cc8d27b83bcfe38fe3", size = 261089, upload-time = "2026-03-17T10:32:54.544Z" }, + { url = "https://files.pythonhosted.org/packages/e9/d8/49a72d6de146eebb0b7e48cc0f4bc2c0dd858e3d4790ab2b39a2872b62bd/coverage-7.13.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6697e29b93707167687543480a40f0db8f356e86d9f67ddf2e37e2dfd91a9dab", size = 263982, upload-time = "2026-03-17T10:32:56.803Z" }, + { url = "https://files.pythonhosted.org/packages/06/3b/0351f1bd566e6e4dd39e978efe7958bde1d32f879e85589de147654f57bb/coverage-7.13.5-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:8fdf453a942c3e4d99bd80088141c4c6960bb232c409d9c3558e2dbaa3998562", size = 261579, upload-time = "2026-03-17T10:32:59.466Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ce/796a2a2f4017f554d7810f5c573449b35b1e46788424a548d4d19201b222/coverage-7.13.5-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:32ca0c0114c9834a43f045a87dcebd69d108d8ffb666957ea65aa132f50332e2", size = 265316, upload-time = "2026-03-17T10:33:01.847Z" }, + { url = "https://files.pythonhosted.org/packages/3d/16/d5ae91455541d1a78bc90abf495be600588aff8f6db5c8b0dae739fa39c9/coverage-7.13.5-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:8769751c10f339021e2638cd354e13adeac54004d1941119b2c96fe5276d45ea", size = 260427, upload-time = "2026-03-17T10:33:03.945Z" }, + { url = "https://files.pythonhosted.org/packages/48/11/07f413dba62db21fb3fad5d0de013a50e073cc4e2dc4306e770360f6dfc8/coverage-7.13.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cec2d83125531bd153175354055cdb7a09987af08a9430bd173c937c6d0fba2a", size = 262745, upload-time = "2026-03-17T10:33:06.285Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/15/d792371332eb4663115becf4bad47e047d16234b1aff687b1b18c58d60ae/coverage-7.13.5-cp314-cp314t-win32.whl", hash = "sha256:0cd9ed7a8b181775459296e402ca4fb27db1279740a24e93b3b41942ebe4b215", size = 223146, upload-time = "2026-03-17T10:33:08.756Z" }, + { url = "https://files.pythonhosted.org/packages/db/51/37221f59a111dca5e85be7dbf09696323b5b9f13ff65e0641d535ed06ea8/coverage-7.13.5-cp314-cp314t-win_amd64.whl", hash = "sha256:301e3b7dfefecaca37c9f1aa6f0049b7d4ab8dd933742b607765d757aca77d43", size = 224254, upload-time = "2026-03-17T10:33:11.174Z" }, + { url = "https://files.pythonhosted.org/packages/54/83/6acacc889de8987441aa7d5adfbdbf33d288dad28704a67e574f1df9bcbb/coverage-7.13.5-cp314-cp314t-win_arm64.whl", hash = "sha256:9dacc2ad679b292709e0f5fc1ac74a6d4d5562e424058962c7bb0c658ad25e45", size = 222276, upload-time = "2026-03-17T10:33:13.466Z" }, + { url = "https://files.pythonhosted.org/packages/9e/ee/a4cf96b8ce1e566ed238f0659ac2d3f007ed1d14b181bcb684e19561a69a/coverage-7.13.5-py3-none-any.whl", hash = "sha256:34b02417cf070e173989b3db962f7ed56d2f644307b2cf9d5a0f258e13084a61", size = 211346, upload-time = "2026-03-17T10:33:15.691Z" }, ] [package.optional-dependencies] @@ -167,14 +178,14 @@ toml = [ [[package]] name = "deepdiff" -version = "8.6.1" +version = "9.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "orderly-set" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/19/76/36c9aab3d5c19a94091f7c6c6e784efca50d87b124bf026c36e94719f33c/deepdiff-8.6.1.tar.gz", hash = "sha256:ec56d7a769ca80891b5200ec7bd41eec300ced91ebcc7797b41eb2b3f3ff643a", size = 634054, upload-time = "2025-09-03T19:40:41.461Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/20/63dd34163ed07393968128dc8c7ab948c96e47c4ce76976ea533de64909d/deepdiff-9.0.0.tar.gz", hash = "sha256:4872005306237b5b50829803feff58a1dfd20b2b357a55de22e7ded65b2008a7", size = 151952, upload-time = "2026-03-30T05:52:23.769Z" } 
wheels = [ - { url = "https://files.pythonhosted.org/packages/f7/e6/efe534ef0952b531b630780e19cabd416e2032697019d5295defc6ef9bd9/deepdiff-8.6.1-py3-none-any.whl", hash = "sha256:ee8708a7f7d37fb273a541fa24ad010ed484192cd0c4ffc0fa0ed5e2d4b9e78b", size = 91378, upload-time = "2025-09-03T19:40:39.679Z" }, + { url = "https://files.pythonhosted.org/packages/dc/c4/da7089cd7aa4ab554f56e18a7fb08dcfed8fd2ae91fa528f5b1be207a148/deepdiff-9.0.0-py3-none-any.whl", hash = "sha256:b1ae0dd86290d86a03de5fbee728fde43095c1472ae4974bdab23ab4656305bd", size = 170540, upload-time = "2026-03-30T05:52:22.008Z" }, ] [[package]] @@ -201,32 +212,32 @@ wheels = [ [[package]] name = "exceptiongroup" -version = "1.3.0" +version = "1.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, + { url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = 
"sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740, upload-time = "2025-11-21T23:01:53.443Z" }, ] [[package]] name = "idna" -version = "3.11" +version = "3.13" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ce/cc/762dfb036166873f0059f3b7de4565e1b5bc3d6f28a414c13da27e442f99/idna-3.13.tar.gz", hash = "sha256:585ea8fe5d69b9181ec1afba340451fba6ba764af97026f92a91d4eef164a242", size = 194210, upload-time = "2026-04-22T16:42:42.314Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, + { url = "https://files.pythonhosted.org/packages/5d/13/ad7d7ca3808a898b4612b6fe93cde56b53f3034dcde235acb1f0e1df24c6/idna-3.13-py3-none-any.whl", hash = "sha256:892ea0cde124a99ce773decba204c5552b69c3c67ffd5f232eb7696135bc8bb3", size = 68629, upload-time = "2026-04-22T16:42:40.909Z" }, ] [[package]] name = "iniconfig" -version = "2.1.0" +version = "2.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = 
"sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, ] [[package]] @@ -243,14 +254,96 @@ wheels = [ [[package]] name = "jsonpath-ng" -version = "1.7.0" +version = "1.8.0" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "ply" }, +sdist = { url = "https://files.pythonhosted.org/packages/32/58/250751940d75c8019659e15482d548a4aa3b6ce122c515102a4bfdac50e3/jsonpath_ng-1.8.0.tar.gz", hash = "sha256:54252968134b5e549ea5b872f1df1168bd7defe1a52fed5a358c194e1943ddc3", size = 74513, upload-time = "2026-02-24T14:42:06.182Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/99/33c7d78a3fb70d545fd5411ac67a651c81602cc09c9cf0df383733f068c5/jsonpath_ng-1.8.0-py3-none-any.whl", hash = "sha256:b8dde192f8af58d646fc031fac9c99fe4d00326afc4148f1f043c601a8cfe138", size = 67844, upload-time = "2026-02-28T00:53:19.637Z" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6d/86/08646239a313f895186ff0a4573452038eed8c86f54380b3ebac34d32fb2/jsonpath-ng-1.7.0.tar.gz", hash = "sha256:f6f5f7fd4e5ff79c785f1573b394043b39849fb2bb47bcead935d12b00beab3c", size = 37838, upload-time = "2024-10-11T15:41:42.404Z" } + +[[package]] +name = "librt" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/eb/6b/3d5c13fb3e3c4f43206c8f9dfed13778c2ed4f000bacaa0b7ce3c402a265/librt-0.9.0.tar.gz", hash = "sha256:a0951822531e7aee6e0dfb556b30d5ee36bbe234faf60c20a16c01be3530869d", size = 184368, upload-time = "2026-04-09T16:06:26.173Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/35/5a/73ecb3d82f8615f32ccdadeb9356726d6cae3a4bbc840b437ceb95708063/jsonpath_ng-1.7.0-py3-none-any.whl", hash = "sha256:f3d7f9e848cba1b6da28c55b1c26ff915dc9e0b1ba7e752a53d6da8d5cbd00b6", size = 30105, upload-time = "2024-11-20T17:58:30.418Z" }, + { url = "https://files.pythonhosted.org/packages/f3/4a/c64265d71b84030174ff3ac2cd16d8b664072afab8c41fccd8e2ee5a6f8d/librt-0.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f8e12706dcb8ff6b3ed57514a19e45c49ad00bcd423e87b2b2e4b5f64578443", size = 67529, upload-time = "2026-04-09T16:04:27.373Z" }, + { url = "https://files.pythonhosted.org/packages/23/b1/30ca0b3a8bdac209a00145c66cf42e5e7da2cc056ffc6ebc5c7b430ddd34/librt-0.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4e3dda8345307fd7306db0ed0cb109a63a2c85ba780eb9dc2d09b2049a931f9c", size = 70248, upload-time = "2026-04-09T16:04:28.758Z" }, + { url = "https://files.pythonhosted.org/packages/fa/fc/c6018dc181478d6ac5aa24a5846b8185101eb90894346db239eb3ea53209/librt-0.9.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:de7dac64e3eb832ffc7b840eb8f52f76420cde1b845be51b2a0f6b870890645e", size = 202184, upload-time = "2026-04-09T16:04:29.893Z" }, + { url = "https://files.pythonhosted.org/packages/bf/58/d69629f002203370ef41ea69ff71c49a2c618aec39b226ff49986ecd8623/librt-0.9.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:22a904cbdb678f7cb348c90d543d3c52f581663d687992fee47fd566dcbf5285", size = 212926, upload-time = "2026-04-09T16:04:31.126Z" }, + { url = 
"https://files.pythonhosted.org/packages/cc/55/01d859f57824e42bd02465c77bec31fa5ef9d8c2bcee702ccf8ef1b9f508/librt-0.9.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:224b9727eb8bc188bc3bcf29d969dba0cd61b01d9bac80c41575520cc4baabb2", size = 225664, upload-time = "2026-04-09T16:04:32.352Z" }, + { url = "https://files.pythonhosted.org/packages/9b/02/32f63ad0ef085a94a70315291efe1151a48b9947af12261882f8445b2a30/librt-0.9.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e94cbc6ad9a6aeea46d775cbb11f361022f778a9cc8cc90af653d3a594b057ce", size = 219534, upload-time = "2026-04-09T16:04:33.667Z" }, + { url = "https://files.pythonhosted.org/packages/6a/5a/9d77111a183c885acf3b3b6e4c00f5b5b07b5817028226499a55f1fedc59/librt-0.9.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7bc30ad339f4e1a01d4917d645e522a0bc0030644d8973f6346397c93ba1503f", size = 227322, upload-time = "2026-04-09T16:04:34.945Z" }, + { url = "https://files.pythonhosted.org/packages/d5/e7/05d700c93063753e12ab230b972002a3f8f3b9c95d8a980c2f646c8b6963/librt-0.9.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:56d65b583cf43b8cf4c8fbe1e1da20fa3076cc32a1149a141507af1062718236", size = 223407, upload-time = "2026-04-09T16:04:36.22Z" }, + { url = "https://files.pythonhosted.org/packages/c0/26/26c3124823c67c987456977c683da9a27cc874befc194ddcead5f9988425/librt-0.9.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:0a1be03168b2691ba61927e299b352a6315189199ca18a57b733f86cb3cc8d38", size = 221302, upload-time = "2026-04-09T16:04:37.62Z" }, + { url = "https://files.pythonhosted.org/packages/50/2b/c7cc2be5cf4ff7b017d948a789256288cb33a517687ff1995e72a7eea79f/librt-0.9.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:63c12efcd160e1d14da11af0c46c0217473e1e0d2ae1acbccc83f561ea4c2a7b", size = 243893, upload-time = "2026-04-09T16:04:38.909Z" }, + { url = 
"https://files.pythonhosted.org/packages/62/d3/da553d37417a337d12660450535d5fd51373caffbedf6962173c87867246/librt-0.9.0-cp310-cp310-win32.whl", hash = "sha256:e9002e98dcb1c0a66723592520decd86238ddcef168b37ff6cfb559200b4b774", size = 55375, upload-time = "2026-04-09T16:04:40.148Z" }, + { url = "https://files.pythonhosted.org/packages/9b/5a/46fa357bab8311b6442a83471591f2f9e5b15ecc1d2121a43725e0c529b8/librt-0.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:9fcb461fbf70654a52a7cc670e606f04449e2374c199b1825f754e16dacfedd8", size = 62581, upload-time = "2026-04-09T16:04:41.452Z" }, + { url = "https://files.pythonhosted.org/packages/e2/1e/2ec7afcebcf3efea593d13aee18bbcfdd3a243043d848ebf385055e9f636/librt-0.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:90904fac73c478f4b83f4ed96c99c8208b75e6f9a8a1910548f69a00f1eaa671", size = 67155, upload-time = "2026-04-09T16:04:42.933Z" }, + { url = "https://files.pythonhosted.org/packages/18/77/72b85afd4435268338ad4ec6231b3da8c77363f212a0227c1ff3b45e4d35/librt-0.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:789fff71757facc0738e8d89e3b84e4f0251c1c975e85e81b152cdaca927cc2d", size = 69916, upload-time = "2026-04-09T16:04:44.042Z" }, + { url = "https://files.pythonhosted.org/packages/27/fb/948ea0204fbe2e78add6d46b48330e58d39897e425560674aee302dca81c/librt-0.9.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1bf465d1e5b0a27713862441f6467b5ab76385f4ecf8f1f3a44f8aa3c695b4b6", size = 199635, upload-time = "2026-04-09T16:04:45.5Z" }, + { url = "https://files.pythonhosted.org/packages/ac/cd/894a29e251b296a27957856804cfd21e93c194aa131de8bb8032021be07e/librt-0.9.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f819e0c6413e259a17a7c0d49f97f405abadd3c2a316a3b46c6440b7dbbedbb1", size = 211051, upload-time = "2026-04-09T16:04:47.016Z" }, + { url = 
"https://files.pythonhosted.org/packages/18/8f/dcaed0bc084a35f3721ff2d081158db569d2c57ea07d35623ddaca5cfc8e/librt-0.9.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e0785c2fb4a81e1aece366aa3e2e039f4a4d7d21aaaded5227d7f3c703427882", size = 224031, upload-time = "2026-04-09T16:04:48.207Z" }, + { url = "https://files.pythonhosted.org/packages/03/44/88f6c1ed1132cd418601cc041fbd92fed28b3a09f39de81978e0822d13ff/librt-0.9.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:80b25c7b570a86c03b5da69e665809deb39265476e8e21d96a9328f9762f9990", size = 218069, upload-time = "2026-04-09T16:04:50.025Z" }, + { url = "https://files.pythonhosted.org/packages/a3/90/7d02e981c2db12188d82b4410ff3e35bfdb844b26aecd02233626f46af2b/librt-0.9.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d4d16b608a1c43d7e33142099a75cd93af482dadce0bf82421e91cad077157f4", size = 224857, upload-time = "2026-04-09T16:04:51.684Z" }, + { url = "https://files.pythonhosted.org/packages/ef/c3/c77e706b7215ca32e928d47535cf13dbc3d25f096f84ddf8fbc06693e229/librt-0.9.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:194fc1a32e1e21fe809d38b5faea66cc65eaa00217c8901fbdb99866938adbdb", size = 219865, upload-time = "2026-04-09T16:04:52.949Z" }, + { url = "https://files.pythonhosted.org/packages/52/d1/32b0c1a0eb8461c70c11656c46a29f760b7c7edf3c36d6f102470c17170f/librt-0.9.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:8c6bc1384d9738781cfd41d09ad7f6e8af13cfea2c75ece6bd6d2566cdea2076", size = 218451, upload-time = "2026-04-09T16:04:54.174Z" }, + { url = "https://files.pythonhosted.org/packages/74/d1/adfd0f9c44761b1d49b1bec66173389834c33ee2bd3c7fd2e2367f1942d4/librt-0.9.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:15cb151e52a044f06e54ac7f7b47adbfc89b5c8e2b63e1175a9d587c43e8942a", size = 241300, upload-time = "2026-04-09T16:04:55.452Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/b0/9074b64407712f0003c27f5b1d7655d1438979155f049720e8a1abd9b1a1/librt-0.9.0-cp311-cp311-win32.whl", hash = "sha256:f100bfe2acf8a3689af9d0cc660d89f17286c9c795f9f18f7b62dd1a6b247ae6", size = 55668, upload-time = "2026-04-09T16:04:56.689Z" }, + { url = "https://files.pythonhosted.org/packages/24/19/40b77b77ce80b9389fb03971431b09b6b913911c38d412059e0b3e2a9ef2/librt-0.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:0b73e4266307e51c95e09c0750b7ec383c561d2e97d58e473f6f6a209952fbb8", size = 62976, upload-time = "2026-04-09T16:04:57.733Z" }, + { url = "https://files.pythonhosted.org/packages/70/9d/9fa7a64041e29035cb8c575af5f0e3840be1b97b4c4d9061e0713f171849/librt-0.9.0-cp311-cp311-win_arm64.whl", hash = "sha256:bc5518873822d2faa8ebdd2c1a4d7c8ef47b01a058495ab7924cb65bdbf5fc9a", size = 53502, upload-time = "2026-04-09T16:04:58.806Z" }, + { url = "https://files.pythonhosted.org/packages/bf/90/89ddba8e1c20b0922783cd93ed8e64f34dc05ab59c38a9c7e313632e20ff/librt-0.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9b3e3bc363f71bda1639a4ee593cb78f7fbfeacc73411ec0d4c92f00730010a4", size = 68332, upload-time = "2026-04-09T16:05:00.09Z" }, + { url = "https://files.pythonhosted.org/packages/a8/40/7aa4da1fb08bdeeb540cb07bfc8207cb32c5c41642f2594dbd0098a0662d/librt-0.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0a09c2f5869649101738653a9b7ab70cf045a1105ac66cbb8f4055e61df78f2d", size = 70581, upload-time = "2026-04-09T16:05:01.213Z" }, + { url = "https://files.pythonhosted.org/packages/48/ac/73a2187e1031041e93b7e3a25aae37aa6f13b838c550f7e0f06f66766212/librt-0.9.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5ca8e133d799c948db2ab1afc081c333a825b5540475164726dcbf73537e5c2f", size = 203984, upload-time = "2026-04-09T16:05:02.542Z" }, + { url = 
"https://files.pythonhosted.org/packages/5e/3d/23460d571e9cbddb405b017681df04c142fb1b04cbfce77c54b08e28b108/librt-0.9.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:603138ee838ee1583f1b960b62d5d0007845c5c423feb68e44648b1359014e27", size = 215762, upload-time = "2026-04-09T16:05:04.127Z" }, + { url = "https://files.pythonhosted.org/packages/de/1e/42dc7f8ab63e65b20640d058e63e97fd3e482c1edbda3570d813b4d0b927/librt-0.9.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f4003f70c56a5addd6aa0897f200dd59afd3bf7bcd5b3cce46dd21f925743bc2", size = 230288, upload-time = "2026-04-09T16:05:05.883Z" }, + { url = "https://files.pythonhosted.org/packages/dc/08/ca812b6d8259ad9ece703397f8ad5c03af5b5fedfce64279693d3ce4087c/librt-0.9.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:78042f6facfd98ecb25e9829c7e37cce23363d9d7c83bc5f72702c5059eb082b", size = 224103, upload-time = "2026-04-09T16:05:07.148Z" }, + { url = "https://files.pythonhosted.org/packages/b6/3f/620490fb2fa66ffd44e7f900254bc110ebec8dac6c1b7514d64662570e6f/librt-0.9.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a361c9434a64d70a7dbb771d1de302c0cc9f13c0bffe1cf7e642152814b35265", size = 232122, upload-time = "2026-04-09T16:05:08.386Z" }, + { url = "https://files.pythonhosted.org/packages/e9/83/12864700a1b6a8be458cf5d05db209b0d8e94ae281e7ec261dbe616597b4/librt-0.9.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:dd2c7e082b0b92e1baa4da28163a808672485617bc855cc22a2fd06978fa9084", size = 225045, upload-time = "2026-04-09T16:05:09.707Z" }, + { url = "https://files.pythonhosted.org/packages/fd/1b/845d339c29dc7dbc87a2e992a1ba8d28d25d0e0372f9a0a2ecebde298186/librt-0.9.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:7e6274fd33fc5b2a14d41c9119629d3ff395849d8bcbc80cf637d9e8d2034da8", size = 227372, upload-time = "2026-04-09T16:05:10.942Z" }, + { url = 
"https://files.pythonhosted.org/packages/8d/fe/277985610269d926a64c606f761d58d3db67b956dbbf40024921e95e7fcb/librt-0.9.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5093043afb226ecfa1400120d1ebd4442b4f99977783e4f4f7248879009b227f", size = 248224, upload-time = "2026-04-09T16:05:12.254Z" }, + { url = "https://files.pythonhosted.org/packages/92/1b/ee486d244b8de6b8b5dbaefabe6bfdd4a72e08f6353edf7d16d27114da8d/librt-0.9.0-cp312-cp312-win32.whl", hash = "sha256:9edcc35d1cae9fd5320171b1a838c7da8a5c968af31e82ecc3dff30b4be0957f", size = 55986, upload-time = "2026-04-09T16:05:13.529Z" }, + { url = "https://files.pythonhosted.org/packages/89/7a/ba1737012308c17dc6d5516143b5dce9a2c7ba3474afd54e11f44a4d1ef3/librt-0.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:3cc2917258e131ae5f958a4d872e07555b51cb7466a43433218061c74ef33745", size = 63260, upload-time = "2026-04-09T16:05:14.68Z" }, + { url = "https://files.pythonhosted.org/packages/36/e4/01752c113da15127f18f7bf11142f5640038f062407a611c059d0036c6aa/librt-0.9.0-cp312-cp312-win_arm64.whl", hash = "sha256:90e6d5420fc8a300518d4d2288154ff45005e920425c22cbbfe8330f3f754bd9", size = 53694, upload-time = "2026-04-09T16:05:16.095Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d7/1b3e26fffde1452d82f5666164858a81c26ebe808e7ae8c9c88628981540/librt-0.9.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f29b68cd9714531672db62cc54f6e8ff981900f824d13fa0e00749189e13778e", size = 68367, upload-time = "2026-04-09T16:05:17.243Z" }, + { url = "https://files.pythonhosted.org/packages/a5/5b/c61b043ad2e091fbe1f2d35d14795e545d0b56b03edaa390fa1dcee3d160/librt-0.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7d5c8a5929ac325729f6119802070b561f4db793dffc45e9ac750992a4ed4d22", size = 70595, upload-time = "2026-04-09T16:05:18.471Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/22/2448471196d8a73370aa2f23445455dc42712c21404081fcd7a03b9e0749/librt-0.9.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:756775d25ec8345b837ab52effee3ad2f3b2dfd6bbee3e3f029c517bd5d8f05a", size = 204354, upload-time = "2026-04-09T16:05:19.593Z" }, + { url = "https://files.pythonhosted.org/packages/ac/5e/39fc4b153c78cfd2c8a2dcb32700f2d41d2312aa1050513183be4540930d/librt-0.9.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2b8f5d00b49818f4e2b1667db994488b045835e0ac16fe2f924f3871bd2b8ac5", size = 216238, upload-time = "2026-04-09T16:05:20.868Z" }, + { url = "https://files.pythonhosted.org/packages/d7/42/bc2d02d0fa7badfa63aa8d6dcd8793a9f7ef5a94396801684a51ed8d8287/librt-0.9.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c81aef782380f0f13ead670aae01825eb653b44b046aa0e5ebbb79f76ed4aa11", size = 230589, upload-time = "2026-04-09T16:05:22.305Z" }, + { url = "https://files.pythonhosted.org/packages/c8/7b/e2d95cc513866373692aa5edf98080d5602dd07cabfb9e5d2f70df2f25f7/librt-0.9.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:66b58fed90a545328e80d575467244de3741e088c1af928f0b489ebec3ef3858", size = 224610, upload-time = "2026-04-09T16:05:23.647Z" }, + { url = "https://files.pythonhosted.org/packages/31/d5/6cec4607e998eaba57564d06a1295c21b0a0c8de76e4e74d699e627bd98c/librt-0.9.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e78fb7419e07d98c2af4b8567b72b3eaf8cb05caad642e9963465569c8b2d87e", size = 232558, upload-time = "2026-04-09T16:05:25.025Z" }, + { url = "https://files.pythonhosted.org/packages/95/8c/27f1d8d3aaf079d3eb26439bf0b32f1482340c3552e324f7db9dca858671/librt-0.9.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c3786f0f4490a5cd87f1ed6cefae833ad6b1060d52044ce0434a2e85893afd0", size = 225521, upload-time = "2026-04-09T16:05:26.311Z" }, 
+ { url = "https://files.pythonhosted.org/packages/6b/d8/1e0d43b1c329b416017619469b3c3801a25a6a4ef4a1c68332aeaa6f72ca/librt-0.9.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:8494cfc61e03542f2d381e71804990b3931175a29b9278fdb4a5459948778dc2", size = 227789, upload-time = "2026-04-09T16:05:27.624Z" }, + { url = "https://files.pythonhosted.org/packages/2c/b4/d3d842e88610fcd4c8eec7067b0c23ef2d7d3bff31496eded6a83b0f99be/librt-0.9.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:07cf11f769831186eeac424376e6189f20ace4f7263e2134bdb9757340d84d4d", size = 248616, upload-time = "2026-04-09T16:05:29.181Z" }, + { url = "https://files.pythonhosted.org/packages/ec/28/527df8ad0d1eb6c8bdfa82fc190f1f7c4cca5a1b6d7b36aeabf95b52d74d/librt-0.9.0-cp313-cp313-win32.whl", hash = "sha256:850d6d03177e52700af605fd60db7f37dcb89782049a149674d1a9649c2138fd", size = 56039, upload-time = "2026-04-09T16:05:30.709Z" }, + { url = "https://files.pythonhosted.org/packages/f3/a7/413652ad0d92273ee5e30c000fc494b361171177c83e57c060ecd3c21538/librt-0.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:a5af136bfba820d592f86c67affcef9b3ff4d4360ac3255e341e964489b48519", size = 63264, upload-time = "2026-04-09T16:05:31.881Z" }, + { url = "https://files.pythonhosted.org/packages/a4/0a/92c244309b774e290ddb15e93363846ae7aa753d9586b8aad511c5e6145b/librt-0.9.0-cp313-cp313-win_arm64.whl", hash = "sha256:4c4d0440a3a8e31d962340c3e1cc3fc9ee7febd34c8d8f770d06adb947779ea5", size = 53728, upload-time = "2026-04-09T16:05:33.31Z" }, + { url = "https://files.pythonhosted.org/packages/cd/c1/184e539543f06ea2912f4b92a5ffaede4f9b392689e3f00acbf8134bee92/librt-0.9.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:3f05d145df35dca5056a8bc3838e940efebd893a54b3e19b2dda39ceaa299bcb", size = 67830, upload-time = "2026-04-09T16:05:34.517Z" }, + { url = "https://files.pythonhosted.org/packages/f3/ad/23399bdcb7afca819acacdef31b37ee59de261bd66b503a7995c03c4b0dc/librt-0.9.0-cp314-cp314-macosx_11_0_arm64.whl", hash = 
"sha256:1c587494461ebd42229d0f1739f3aa34237dd9980623ecf1be8d3bcba79f4499", size = 70280, upload-time = "2026-04-09T16:05:35.649Z" }, + { url = "https://files.pythonhosted.org/packages/9f/0b/4542dc5a2b8772dbf92cafb9194701230157e73c14b017b6961a23598b03/librt-0.9.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b0a2040f801406b93657a70b72fa12311063a319fee72ce98e1524da7200171f", size = 201925, upload-time = "2026-04-09T16:05:36.739Z" }, + { url = "https://files.pythonhosted.org/packages/31/d4/8ee7358b08fd0cfce051ef96695380f09b3c2c11b77c9bfbc367c921cce5/librt-0.9.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f38bc489037eca88d6ebefc9c4d41a4e07c8e8b4de5188a9e6d290273ad7ebb1", size = 212381, upload-time = "2026-04-09T16:05:38.043Z" }, + { url = "https://files.pythonhosted.org/packages/f2/94/a2025fe442abedf8b038038dab3dba942009ad42b38ea064a1a9e6094241/librt-0.9.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f3fd278f5e6bf7c75ccd6d12344eb686cc020712683363b66f46ac79d37c799f", size = 227065, upload-time = "2026-04-09T16:05:39.394Z" }, + { url = "https://files.pythonhosted.org/packages/7c/e9/b9fcf6afa909f957cfbbf918802f9dada1bd5d3c1da43d722fd6a310dc3f/librt-0.9.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fcbdf2a9ca24e87bbebb47f1fe34e531ef06f104f98c9ccfc953a3f3344c567a", size = 221333, upload-time = "2026-04-09T16:05:40.999Z" }, + { url = "https://files.pythonhosted.org/packages/ac/7c/ba54cd6aa6a3c8cd12757a6870e0c79a64b1e6327f5248dcff98423f4d43/librt-0.9.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e306d956cfa027fe041585f02a1602c32bfa6bb8ebea4899d373383295a6c62f", size = 229051, upload-time = "2026-04-09T16:05:42.605Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/4b/8cfdbad314c8677a0148bf0b70591d6d18587f9884d930276098a235461b/librt-0.9.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:465814ab157986acb9dfa5ccd7df944be5eefc0d08d31ec6e8d88bc71251d845", size = 222492, upload-time = "2026-04-09T16:05:43.842Z" }, + { url = "https://files.pythonhosted.org/packages/1f/d1/2eda69563a1a88706808decdce035e4b32755dbfbb0d05e1a65db9547ed1/librt-0.9.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:703f4ae36d6240bfe24f542bac784c7e4194ec49c3ba5a994d02891649e2d85b", size = 223849, upload-time = "2026-04-09T16:05:45.054Z" }, + { url = "https://files.pythonhosted.org/packages/04/44/b2ed37df6be5b3d42cfe36318e0598e80843d5c6308dd63d0bf4e0ce5028/librt-0.9.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3be322a15ee5e70b93b7a59cfd074614f22cc8c9ff18bd27f474e79137ea8d3b", size = 245001, upload-time = "2026-04-09T16:05:46.34Z" }, + { url = "https://files.pythonhosted.org/packages/47/e7/617e412426df89169dd2a9ed0cc8752d5763336252c65dbf945199915119/librt-0.9.0-cp314-cp314-win32.whl", hash = "sha256:b8da9f8035bb417770b1e1610526d87ad4fc58a2804dc4d79c53f6d2cf5a6eb9", size = 51799, upload-time = "2026-04-09T16:05:47.738Z" }, + { url = "https://files.pythonhosted.org/packages/24/ed/c22ca4db0ca3cbc285e4d9206108746beda561a9792289c3c31281d7e9df/librt-0.9.0-cp314-cp314-win_amd64.whl", hash = "sha256:b8bd70d5d816566a580d193326912f4a76ec2d28a97dc4cd4cc831c0af8e330e", size = 59165, upload-time = "2026-04-09T16:05:49.198Z" }, + { url = "https://files.pythonhosted.org/packages/24/56/875398fafa4cbc8f15b89366fc3287304ddd3314d861f182a4b87595ace0/librt-0.9.0-cp314-cp314-win_arm64.whl", hash = "sha256:fc5758e2b7a56532dc33e3c544d78cbaa9ecf0a0f2a2da2df882c1d6b99a317f", size = 49292, upload-time = "2026-04-09T16:05:50.362Z" }, + { url = "https://files.pythonhosted.org/packages/4c/61/bc448ecbf9b2d69c5cff88fe41496b19ab2a1cbda0065e47d4d0d51c0867/librt-0.9.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = 
"sha256:f24b90b0e0c8cc9491fb1693ae91fe17cb7963153a1946395acdbdd5818429a4", size = 70175, upload-time = "2026-04-09T16:05:51.564Z" }, + { url = "https://files.pythonhosted.org/packages/60/f2/c47bb71069a73e2f04e70acbd196c1e5cc411578ac99039a224b98920fd4/librt-0.9.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:3fe56e80badb66fdcde06bef81bbaa5bfcf6fbd7aefb86222d9e369c38c6b228", size = 72951, upload-time = "2026-04-09T16:05:52.699Z" }, + { url = "https://files.pythonhosted.org/packages/29/19/0549df59060631732df758e8886d92088da5fdbedb35b80e4643664e8412/librt-0.9.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:527b5b820b47a09e09829051452bb0d1dd2122261254e2a6f674d12f1d793d54", size = 225864, upload-time = "2026-04-09T16:05:53.895Z" }, + { url = "https://files.pythonhosted.org/packages/9d/f8/3b144396d302ac08e50f89e64452c38db84bc7b23f6c60479c5d3abd303c/librt-0.9.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7d429bdd4ac0ab17c8e4a8af0ed2a7440b16eba474909ab357131018fe8c7e71", size = 241155, upload-time = "2026-04-09T16:05:55.191Z" }, + { url = "https://files.pythonhosted.org/packages/7a/ce/ee67ec14581de4043e61d05786d2aed6c9b5338816b7859bcf07455c6a9f/librt-0.9.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7202bdcac47d3a708271c4304a474a8605a4a9a4a709e954bf2d3241140aa938", size = 252235, upload-time = "2026-04-09T16:05:56.549Z" }, + { url = "https://files.pythonhosted.org/packages/8a/fa/0ead15daa2b293a54101550b08d4bafe387b7d4a9fc6d2b985602bae69b6/librt-0.9.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0d620e74897f8c2613b3c4e2e9c1e422eb46d2ddd07df540784d44117836af3", size = 244963, upload-time = "2026-04-09T16:05:57.858Z" }, + { url = 
"https://files.pythonhosted.org/packages/29/68/9fbf9a9aa704ba87689e40017e720aced8d9a4d2b46b82451d8142f91ec9/librt-0.9.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d69fc39e627908f4c03297d5a88d9284b73f4d90b424461e32e8c2485e21c283", size = 257364, upload-time = "2026-04-09T16:05:59.686Z" }, + { url = "https://files.pythonhosted.org/packages/1a/8d/9d60869f1b6716c762e45f66ed945b1e5dd649f7377684c3b176ae424648/librt-0.9.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:c2640e23d2b7c98796f123ffd95cf2022c7777aa8a4a3b98b36c570d37e85eee", size = 247661, upload-time = "2026-04-09T16:06:00.938Z" }, + { url = "https://files.pythonhosted.org/packages/70/ff/a5c365093962310bfdb4f6af256f191085078ffb529b3f0cbebb5b33ebe2/librt-0.9.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:451daa98463b7695b0a30aa56bf637831ea559e7b8101ac2ef6382e8eb15e29c", size = 248238, upload-time = "2026-04-09T16:06:02.537Z" }, + { url = "https://files.pythonhosted.org/packages/a0/3c/2d34365177f412c9e19c0a29f969d70f5343f27634b76b765a54d8b27705/librt-0.9.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:928bd06eca2c2bbf4349e5b817f837509b0604342e65a502de1d50a7570afd15", size = 269457, upload-time = "2026-04-09T16:06:03.833Z" }, + { url = "https://files.pythonhosted.org/packages/bc/cd/de45b239ea3bdf626f982a00c14bfcf2e12d261c510ba7db62c5969a27cd/librt-0.9.0-cp314-cp314t-win32.whl", hash = "sha256:a9c63e04d003bc0fb6a03b348018b9a3002f98268200e22cc80f146beac5dc40", size = 52453, upload-time = "2026-04-09T16:06:05.229Z" }, + { url = "https://files.pythonhosted.org/packages/7f/f9/bfb32ae428aa75c0c533915622176f0a17d6da7b72b5a3c6363685914f70/librt-0.9.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f162af66a2ed3f7d1d161a82ca584efd15acd9c1cff190a373458c32f7d42118", size = 60044, upload-time = "2026-04-09T16:06:06.398Z" }, + { url = "https://files.pythonhosted.org/packages/aa/47/7d70414bcdbb3bc1f458a8d10558f00bbfdb24e5a11740fc8197e12c3255/librt-0.9.0-cp314-cp314t-win_arm64.whl", hash = 
"sha256:a4b25c6c25cac5d0d9d6d6da855195b254e0021e513e0249f0e3b444dc6e0e61", size = 50009, upload-time = "2026-04-09T16:06:07.995Z" }, ] [[package]] @@ -265,6 +358,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, ] +[[package]] +name = "markdown2" +version = "2.5.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e4/ae/07d4a5fcaa5509221287d289323d75ac8eda5a5a4ac9de2accf7bbcc2b88/markdown2-2.5.5.tar.gz", hash = "sha256:001547e68f6e7fcf0f1cb83f7e82f48aa7d48b2c6a321f0cd20a853a8a2d1664", size = 157249, upload-time = "2026-03-02T20:46:53.411Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/af/4b3891eb0a49d6cfd5cbf3e9bf514c943afc2b0f13e2c57cc57cd88ecc21/markdown2-2.5.5-py3-none-any.whl", hash = "sha256:be798587e09d1f52d2e4d96a649c4b82a778c75f9929aad52a2c95747fa26941", size = 56250, upload-time = "2026-03-02T20:46:52.032Z" }, +] + [[package]] name = "markupsafe" version = "3.0.3" @@ -361,47 +463,60 @@ wheels = [ [[package]] name = "mypy" -version = "1.18.2" +version = "1.20.2" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "librt", marker = "platform_python_implementation != 'PyPy'" }, { name = "mypy-extensions" }, { name = "pathspec" }, { name = "tomli", marker = "python_full_version < '3.11'" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c0/77/8f0d0001ffad290cef2f7f216f96c814866248a0b92a722365ed54648e7e/mypy-1.18.2.tar.gz", hash = "sha256:06a398102a5f203d7477b2923dda3634c36727fa5c237d8f859ef90c42a9924b", size = 3448846, upload-time = "2025-09-19T00:11:10.519Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/04/af/e3d4b3e9ec91a0ff9aabfdb38692952acf49bbb899c2e4c29acb3a6da3ae/mypy-1.20.2.tar.gz", hash = "sha256:e8222c26daaafd9e8626dec58ae36029f82585890589576f769a650dd20fd665", size = 3817349, upload-time = "2026-04-21T17:12:28.473Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/03/6f/657961a0743cff32e6c0611b63ff1c1970a0b482ace35b069203bf705187/mypy-1.18.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c1eab0cf6294dafe397c261a75f96dc2c31bffe3b944faa24db5def4e2b0f77c", size = 12807973, upload-time = "2025-09-19T00:10:35.282Z" }, - { url = "https://files.pythonhosted.org/packages/10/e9/420822d4f661f13ca8900f5fa239b40ee3be8b62b32f3357df9a3045a08b/mypy-1.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a780ca61fc239e4865968ebc5240bb3bf610ef59ac398de9a7421b54e4a207e", size = 11896527, upload-time = "2025-09-19T00:10:55.791Z" }, - { url = "https://files.pythonhosted.org/packages/aa/73/a05b2bbaa7005f4642fcfe40fb73f2b4fb6bb44229bd585b5878e9a87ef8/mypy-1.18.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:448acd386266989ef11662ce3c8011fd2a7b632e0ec7d61a98edd8e27472225b", size = 12507004, upload-time = "2025-09-19T00:11:05.411Z" }, - { url = "https://files.pythonhosted.org/packages/4f/01/f6e4b9f0d031c11ccbd6f17da26564f3a0f3c4155af344006434b0a05a9d/mypy-1.18.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f9e171c465ad3901dc652643ee4bffa8e9fef4d7d0eece23b428908c77a76a66", size = 13245947, upload-time = "2025-09-19T00:10:46.923Z" }, - { url = "https://files.pythonhosted.org/packages/d7/97/19727e7499bfa1ae0773d06afd30ac66a58ed7437d940c70548634b24185/mypy-1.18.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:592ec214750bc00741af1f80cbf96b5013d81486b7bb24cb052382c19e40b428", size = 13499217, upload-time = "2025-09-19T00:09:39.472Z" }, - { url = 
"https://files.pythonhosted.org/packages/9f/4f/90dc8c15c1441bf31cf0f9918bb077e452618708199e530f4cbd5cede6ff/mypy-1.18.2-cp310-cp310-win_amd64.whl", hash = "sha256:7fb95f97199ea11769ebe3638c29b550b5221e997c63b14ef93d2e971606ebed", size = 9766753, upload-time = "2025-09-19T00:10:49.161Z" }, - { url = "https://files.pythonhosted.org/packages/88/87/cafd3ae563f88f94eec33f35ff722d043e09832ea8530ef149ec1efbaf08/mypy-1.18.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:807d9315ab9d464125aa9fcf6d84fde6e1dc67da0b6f80e7405506b8ac72bc7f", size = 12731198, upload-time = "2025-09-19T00:09:44.857Z" }, - { url = "https://files.pythonhosted.org/packages/0f/e0/1e96c3d4266a06d4b0197ace5356d67d937d8358e2ee3ffac71faa843724/mypy-1.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:776bb00de1778caf4db739c6e83919c1d85a448f71979b6a0edd774ea8399341", size = 11817879, upload-time = "2025-09-19T00:09:47.131Z" }, - { url = "https://files.pythonhosted.org/packages/72/ef/0c9ba89eb03453e76bdac5a78b08260a848c7bfc5d6603634774d9cd9525/mypy-1.18.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1379451880512ffce14505493bd9fe469e0697543717298242574882cf8cdb8d", size = 12427292, upload-time = "2025-09-19T00:10:22.472Z" }, - { url = "https://files.pythonhosted.org/packages/1a/52/ec4a061dd599eb8179d5411d99775bec2a20542505988f40fc2fee781068/mypy-1.18.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1331eb7fd110d60c24999893320967594ff84c38ac6d19e0a76c5fd809a84c86", size = 13163750, upload-time = "2025-09-19T00:09:51.472Z" }, - { url = "https://files.pythonhosted.org/packages/c4/5f/2cf2ceb3b36372d51568f2208c021870fe7834cf3186b653ac6446511839/mypy-1.18.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ca30b50a51e7ba93b00422e486cbb124f1c56a535e20eff7b2d6ab72b3b2e37", size = 13351827, upload-time = "2025-09-19T00:09:58.311Z" }, - { url = 
"https://files.pythonhosted.org/packages/c8/7d/2697b930179e7277529eaaec1513f8de622818696857f689e4a5432e5e27/mypy-1.18.2-cp311-cp311-win_amd64.whl", hash = "sha256:664dc726e67fa54e14536f6e1224bcfce1d9e5ac02426d2326e2bb4e081d1ce8", size = 9757983, upload-time = "2025-09-19T00:10:09.071Z" }, - { url = "https://files.pythonhosted.org/packages/07/06/dfdd2bc60c66611dd8335f463818514733bc763e4760dee289dcc33df709/mypy-1.18.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:33eca32dd124b29400c31d7cf784e795b050ace0e1f91b8dc035672725617e34", size = 12908273, upload-time = "2025-09-19T00:10:58.321Z" }, - { url = "https://files.pythonhosted.org/packages/81/14/6a9de6d13a122d5608e1a04130724caf9170333ac5a924e10f670687d3eb/mypy-1.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3c47adf30d65e89b2dcd2fa32f3aeb5e94ca970d2c15fcb25e297871c8e4764", size = 11920910, upload-time = "2025-09-19T00:10:20.043Z" }, - { url = "https://files.pythonhosted.org/packages/5f/a9/b29de53e42f18e8cc547e38daa9dfa132ffdc64f7250e353f5c8cdd44bee/mypy-1.18.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d6c838e831a062f5f29d11c9057c6009f60cb294fea33a98422688181fe2893", size = 12465585, upload-time = "2025-09-19T00:10:33.005Z" }, - { url = "https://files.pythonhosted.org/packages/77/ae/6c3d2c7c61ff21f2bee938c917616c92ebf852f015fb55917fd6e2811db2/mypy-1.18.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01199871b6110a2ce984bde85acd481232d17413868c9807e95c1b0739a58914", size = 13348562, upload-time = "2025-09-19T00:10:11.51Z" }, - { url = "https://files.pythonhosted.org/packages/4d/31/aec68ab3b4aebdf8f36d191b0685d99faa899ab990753ca0fee60fb99511/mypy-1.18.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a2afc0fa0b0e91b4599ddfe0f91e2c26c2b5a5ab263737e998d6817874c5f7c8", size = 13533296, upload-time = "2025-09-19T00:10:06.568Z" }, - { url = 
"https://files.pythonhosted.org/packages/9f/83/abcb3ad9478fca3ebeb6a5358bb0b22c95ea42b43b7789c7fb1297ca44f4/mypy-1.18.2-cp312-cp312-win_amd64.whl", hash = "sha256:d8068d0afe682c7c4897c0f7ce84ea77f6de953262b12d07038f4d296d547074", size = 9828828, upload-time = "2025-09-19T00:10:28.203Z" }, - { url = "https://files.pythonhosted.org/packages/5f/04/7f462e6fbba87a72bc8097b93f6842499c428a6ff0c81dd46948d175afe8/mypy-1.18.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:07b8b0f580ca6d289e69209ec9d3911b4a26e5abfde32228a288eb79df129fcc", size = 12898728, upload-time = "2025-09-19T00:10:01.33Z" }, - { url = "https://files.pythonhosted.org/packages/99/5b/61ed4efb64f1871b41fd0b82d29a64640f3516078f6c7905b68ab1ad8b13/mypy-1.18.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ed4482847168439651d3feee5833ccedbf6657e964572706a2adb1f7fa4dfe2e", size = 11910758, upload-time = "2025-09-19T00:10:42.607Z" }, - { url = "https://files.pythonhosted.org/packages/3c/46/d297d4b683cc89a6e4108c4250a6a6b717f5fa96e1a30a7944a6da44da35/mypy-1.18.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3ad2afadd1e9fea5cf99a45a822346971ede8685cc581ed9cd4d42eaf940986", size = 12475342, upload-time = "2025-09-19T00:11:00.371Z" }, - { url = "https://files.pythonhosted.org/packages/83/45/4798f4d00df13eae3bfdf726c9244bcb495ab5bd588c0eed93a2f2dd67f3/mypy-1.18.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a431a6f1ef14cf8c144c6b14793a23ec4eae3db28277c358136e79d7d062f62d", size = 13338709, upload-time = "2025-09-19T00:11:03.358Z" }, - { url = "https://files.pythonhosted.org/packages/d7/09/479f7358d9625172521a87a9271ddd2441e1dab16a09708f056e97007207/mypy-1.18.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7ab28cc197f1dd77a67e1c6f35cd1f8e8b73ed2217e4fc005f9e6a504e46e7ba", size = 13529806, upload-time = "2025-09-19T00:10:26.073Z" }, - { url = 
"https://files.pythonhosted.org/packages/71/cf/ac0f2c7e9d0ea3c75cd99dff7aec1c9df4a1376537cb90e4c882267ee7e9/mypy-1.18.2-cp313-cp313-win_amd64.whl", hash = "sha256:0e2785a84b34a72ba55fb5daf079a1003a34c05b22238da94fcae2bbe46f3544", size = 9833262, upload-time = "2025-09-19T00:10:40.035Z" }, - { url = "https://files.pythonhosted.org/packages/5a/0c/7d5300883da16f0063ae53996358758b2a2df2a09c72a5061fa79a1f5006/mypy-1.18.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:62f0e1e988ad41c2a110edde6c398383a889d95b36b3e60bcf155f5164c4fdce", size = 12893775, upload-time = "2025-09-19T00:10:03.814Z" }, - { url = "https://files.pythonhosted.org/packages/50/df/2cffbf25737bdb236f60c973edf62e3e7b4ee1c25b6878629e88e2cde967/mypy-1.18.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8795a039bab805ff0c1dfdb8cd3344642c2b99b8e439d057aba30850b8d3423d", size = 11936852, upload-time = "2025-09-19T00:10:51.631Z" }, - { url = "https://files.pythonhosted.org/packages/be/50/34059de13dd269227fb4a03be1faee6e2a4b04a2051c82ac0a0b5a773c9a/mypy-1.18.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6ca1e64b24a700ab5ce10133f7ccd956a04715463d30498e64ea8715236f9c9c", size = 12480242, upload-time = "2025-09-19T00:11:07.955Z" }, - { url = "https://files.pythonhosted.org/packages/5b/11/040983fad5132d85914c874a2836252bbc57832065548885b5bb5b0d4359/mypy-1.18.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d924eef3795cc89fecf6bedc6ed32b33ac13e8321344f6ddbf8ee89f706c05cb", size = 13326683, upload-time = "2025-09-19T00:09:55.572Z" }, - { url = "https://files.pythonhosted.org/packages/e9/ba/89b2901dd77414dd7a8c8729985832a5735053be15b744c18e4586e506ef/mypy-1.18.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20c02215a080e3a2be3aa50506c67242df1c151eaba0dcbc1e4e557922a26075", size = 13514749, upload-time = "2025-09-19T00:10:44.827Z" }, - { url = 
"https://files.pythonhosted.org/packages/25/bc/cc98767cffd6b2928ba680f3e5bc969c4152bf7c2d83f92f5a504b92b0eb/mypy-1.18.2-cp314-cp314-win_amd64.whl", hash = "sha256:749b5f83198f1ca64345603118a6f01a4e99ad4bf9d103ddc5a3200cc4614adf", size = 9982959, upload-time = "2025-09-19T00:10:37.344Z" }, - { url = "https://files.pythonhosted.org/packages/87/e3/be76d87158ebafa0309946c4a73831974d4d6ab4f4ef40c3b53a385a66fd/mypy-1.18.2-py3-none-any.whl", hash = "sha256:22a1748707dd62b58d2ae53562ffc4d7f8bcc727e8ac7cbc69c053ddc874d47e", size = 2352367, upload-time = "2025-09-19T00:10:15.489Z" }, + { url = "https://files.pythonhosted.org/packages/76/97/ce2502df2cecf2ef997b6c6527c4a223b92feb9e7b790cdc8dcd683f3a8a/mypy-1.20.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cf5a4db6dca263010e2c7bff081c89383c72d187ba2cf4c44759aac970e2f0c4", size = 14457059, upload-time = "2026-04-21T17:06:14.935Z" }, + { url = "https://files.pythonhosted.org/packages/c9/34/417ee60b822cc80c0f3dc9f495ad7fd8dbb8d8b2cf4baf22d4046d25d01d/mypy-1.20.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7b0e817b518bff7facd7f85ea05b643ad8bdcce684cf29784987b0a7c8e1f997", size = 13346816, upload-time = "2026-04-21T17:10:41.433Z" }, + { url = "https://files.pythonhosted.org/packages/4a/85/e20951978702df58379d0bcc2e8f7ccdca4e78cd7dc66dd3ddbf9b29d517/mypy-1.20.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97d7b9a485b40f8ca425460e89bf1da2814625b2da627c0dcc6aa46c92631d14", size = 13772593, upload-time = "2026-04-21T17:08:11.24Z" }, + { url = "https://files.pythonhosted.org/packages/63/a5/5441a13259ec516c56fd5de0fd96a69a9590ae6c5e5d3e5174aa84b97973/mypy-1.20.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1e1c12f6d2db3d78b909b5f77513c11eb7f2dd2782b96a3ab6dffc7d44575c99", size = 14656635, upload-time = "2026-04-21T17:09:54.042Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/51/b89c69157c5e1f19fd125a65d991166a26906e7902f026f00feebbcfa2b9/mypy-1.20.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:89dce27e142d25ffbc154c1819383b69f2e9234dc4ed4766f42e0e8cb264ab5c", size = 14943278, upload-time = "2026-04-21T17:09:15.599Z" }, + { url = "https://files.pythonhosted.org/packages/e9/44/6b0eeecfe96d7cce1d71c66b8e03cb304aa70ec11f1955dc1d6b46aca3c3/mypy-1.20.2-cp310-cp310-win_amd64.whl", hash = "sha256:f376e37f9bf2a946872fc5fd1199c99310748e3c26c7a26683f13f8bdb756cbd", size = 10851915, upload-time = "2026-04-21T17:06:03.5Z" }, + { url = "https://files.pythonhosted.org/packages/3c/36/6593dc88545d75fb96416184be5392da5e2a8e8c2802a8597913e16ae25c/mypy-1.20.2-cp310-cp310-win_arm64.whl", hash = "sha256:6e2b469efd811707bc530fd1effef0f5d6eebcb7fe376affae69025da4b979a2", size = 9786676, upload-time = "2026-04-21T17:07:02.035Z" }, + { url = "https://files.pythonhosted.org/packages/1f/4d/9ebeae211caccbdaddde7ed5e31dfcf57faac66be9b11deb1dc6526c8078/mypy-1.20.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4077797a273e56e8843d001e9dfe4ba10e33323d6ade647ff260e5cd97d9758c", size = 14371307, upload-time = "2026-04-21T17:08:56.442Z" }, + { url = "https://files.pythonhosted.org/packages/95/d7/93473d34b61f04fac1aecc01368485c89c5c4af7a4b9a0cab5d77d04b63f/mypy-1.20.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cdecf62abcc4292500d7858aeae87a1f8f1150f4c4dd08fb0b336ee79b2a6df3", size = 13258917, upload-time = "2026-04-21T17:05:50.978Z" }, + { url = "https://files.pythonhosted.org/packages/e2/30/3dd903e8bafb7b5f7bf87fcd58f8382086dea2aa19f0a7b357f21f63071b/mypy-1.20.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c566c3a88b6ece59b3d70f65bedef17304f48eb52ff040a6a18214e1917b3254", size = 13700516, upload-time = "2026-04-21T17:11:33.161Z" }, + { url = 
"https://files.pythonhosted.org/packages/07/05/c61a140aba4c729ac7bc99ae26fc627c78a6e08f5b9dd319244ea71a3d7e/mypy-1.20.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0deb80d062b2479f2c87ae568f89845afc71d11bc41b04179e58165fd9f31e98", size = 14562889, upload-time = "2026-04-21T17:05:27.674Z" }, + { url = "https://files.pythonhosted.org/packages/fd/87/da78243742ffa8a36d98c3010f0d829f93d5da4e6786f1a1a6f2ad616502/mypy-1.20.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bba9ad231e92a3e424b3e56b65aa17704993425bba97e302c832f9466bb85bac", size = 14803844, upload-time = "2026-04-21T17:10:06.2Z" }, + { url = "https://files.pythonhosted.org/packages/37/52/10a1ddf91b40f843943a3c6db51e2df59c9e237f29d355e95eaab427461f/mypy-1.20.2-cp311-cp311-win_amd64.whl", hash = "sha256:baf593f2765fa3a6b1ef95807dbaa3d25b594f6a52adcc506a6b9cb115e1be67", size = 10846300, upload-time = "2026-04-21T17:12:23.886Z" }, + { url = "https://files.pythonhosted.org/packages/20/02/f9a4415b664c53bd34d6709be59da303abcae986dc4ac847b402edb6fa1e/mypy-1.20.2-cp311-cp311-win_arm64.whl", hash = "sha256:20175a1c0f49863946ec20b7f63255768058ac4f07d2b9ded6a6b46cfb5a9100", size = 9779498, upload-time = "2026-04-21T17:09:23.695Z" }, + { url = "https://files.pythonhosted.org/packages/71/4e/7560e4528db9e9b147e4c0f22660466bf30a0a1fe3d63d1b9d3b0fd354ee/mypy-1.20.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4dbfcf869f6b0517f70cf0030ba6ea1d6645e132337a7d5204a18d8d5636c02b", size = 14539393, upload-time = "2026-04-21T17:07:12.52Z" }, + { url = "https://files.pythonhosted.org/packages/32/d9/34a5efed8124f5a9234f55ac6a4ced4201e2c5b81e1109c49ad23190ec8c/mypy-1.20.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4b6481b228d072315b053210b01ac320e1be243dc17f9e5887ef167f23f5fae4", size = 13361642, upload-time = "2026-04-21T17:06:53.742Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/14/eb377acf78c03c92d566a1510cda8137348215b5335085ef662ab82ecd3a/mypy-1.20.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:34397cdced6b90b836e38182076049fdb41424322e0b0728c946b0939ebdf9f6", size = 13740347, upload-time = "2026-04-21T17:12:04.73Z" }, + { url = "https://files.pythonhosted.org/packages/b9/94/7e4634a32b641aa1c112422eed1bbece61ee16205f674190e8b536f884de/mypy-1.20.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a5da6976f20cae27059ea8d0c86e7cef3de720e04c4bb9ee18e3690fdb792066", size = 14734042, upload-time = "2026-04-21T17:07:43.16Z" }, + { url = "https://files.pythonhosted.org/packages/7a/f3/f7e62395cb7f434541b4491a01149a4439e28ace4c0c632bbf5431e92d1f/mypy-1.20.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:56908d7e08318d39f85b1f0c6cfd47b0cac1a130da677630dac0de3e0623e102", size = 14964958, upload-time = "2026-04-21T17:11:00.665Z" }, + { url = "https://files.pythonhosted.org/packages/3e/0d/47e3c3a0ec2a876e35aeac365df3cac7776c36bbd4ed18cc521e1b9d255b/mypy-1.20.2-cp312-cp312-win_amd64.whl", hash = "sha256:d52ad8d78522da1d308789df651ee5379088e77c76cb1994858d40a426b343b9", size = 10911340, upload-time = "2026-04-21T17:10:49.179Z" }, + { url = "https://files.pythonhosted.org/packages/d6/b2/6c852d72e0ea8b01f49da817fb52539993cde327e7d010e0103dc12d0dac/mypy-1.20.2-cp312-cp312-win_arm64.whl", hash = "sha256:785b08db19c9f214dc37d65f7c165d19a30fcecb48abfa30f31b01b5acaabb58", size = 9833947, upload-time = "2026-04-21T17:09:05.267Z" }, + { url = "https://files.pythonhosted.org/packages/5b/c4/b93812d3a192c9bcf5df405bd2f30277cd0e48106a14d1023c7f6ed6e39b/mypy-1.20.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:edfbfca868cdd6bd8d974a60f8a3682f5565d3f5c99b327640cedd24c4264026", size = 14524670, upload-time = "2026-04-21T17:10:30.737Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/47/42c122501bff18eaf1e8f457f5c017933452d8acdc52918a9f59f6812955/mypy-1.20.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e2877a02380adfcdbc69071a0f74d6e9dbbf593c0dc9d174e1f223ffd5281943", size = 13336218, upload-time = "2026-04-21T17:08:44.069Z" }, + { url = "https://files.pythonhosted.org/packages/92/8f/75bbc92f41725fbd585fb17b440b1119b576105df1013622983e18640a93/mypy-1.20.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7488448de6007cd5177c6cea0517ac33b4c0f5ee9b5e9f2be51ce75511a85517", size = 13724906, upload-time = "2026-04-21T17:08:01.02Z" }, + { url = "https://files.pythonhosted.org/packages/a1/32/4c49da27a606167391ff0c39aa955707a00edc500572e562f7c36c08a71f/mypy-1.20.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bb9c2fa06887e21d6a3a868762acb82aec34e2c6fd0174064f27c93ede68ad15", size = 14726046, upload-time = "2026-04-21T17:11:22.354Z" }, + { url = "https://files.pythonhosted.org/packages/7f/fc/4e354a1bd70216359deb0c9c54847ee6b32ef78dfb09f5131ff99b494078/mypy-1.20.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d56a78b646f2e3daa865bc70cd5ec5a46c50045801ca8ff17a0c43abc97e3ee", size = 14955587, upload-time = "2026-04-21T17:12:16.033Z" }, + { url = "https://files.pythonhosted.org/packages/62/b2/c0f2056e9eb8f08c62cafd9715e4584b89132bdc832fcf85d27d07b5f3e5/mypy-1.20.2-cp313-cp313-win_amd64.whl", hash = "sha256:2a4102b03bb7481d9a91a6da8d174740c9c8c4401024684b9ca3b7cc5e49852f", size = 10922681, upload-time = "2026-04-21T17:06:35.842Z" }, + { url = "https://files.pythonhosted.org/packages/e5/14/065e333721f05de8ef683d0aa804c23026bcc287446b61cac657b902ccac/mypy-1.20.2-cp313-cp313-win_arm64.whl", hash = "sha256:a95a9248b0c6fd933a442c03c3b113c3b61320086b88e2c444676d3fd1ca3330", size = 9830560, upload-time = "2026-04-21T17:07:51.023Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/d1/b4ec96b0ecc620a4443570c6e95c867903428cfcde4206518eafdd5880c3/mypy-1.20.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:419413398fe250aae057fd2fe50166b61077083c9b82754c341cf4fd73038f30", size = 14524561, upload-time = "2026-04-21T17:06:27.325Z" }, + { url = "https://files.pythonhosted.org/packages/3a/63/d2c2ff4fa66bc49477d32dfa26e8a167ba803ea6a69c5efb416036909d30/mypy-1.20.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:e73c07f23009962885c197ccb9b41356a30cc0e5a1d0c2ea8fd8fb1362d7f924", size = 13363883, upload-time = "2026-04-21T17:11:11.239Z" }, + { url = "https://files.pythonhosted.org/packages/2a/56/983916806bf4eddeaaa2c9230903c3669c6718552a921154e1c5182c701f/mypy-1.20.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0c64e5973df366b747646fc98da921f9d6eba9716d57d1db94a83c026a08e0fb", size = 13742945, upload-time = "2026-04-21T17:08:34.181Z" }, + { url = "https://files.pythonhosted.org/packages/19/65/0cd9285ab010ee8214c83d67c6b49417c40d86ce46f1aa109457b5a9b8d7/mypy-1.20.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a65aa591af023864fd08a97da9974e919452cfe19cb146c8a5dc692626445dc", size = 14706163, upload-time = "2026-04-21T17:05:15.51Z" }, + { url = "https://files.pythonhosted.org/packages/94/97/48ff3b297cafcc94d185243a9190836fb1b01c1b0918fff64e941e973cc9/mypy-1.20.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4fef51b01e638974a6e69885687e9bd40c8d1e09a6cd291cca0619625cf1f558", size = 14938677, upload-time = "2026-04-21T17:05:39.562Z" }, + { url = "https://files.pythonhosted.org/packages/fd/a1/1b4233d255bdd0b38a1f284feeb1c143ca508c19184964e22f8d837ec851/mypy-1.20.2-cp314-cp314-win_amd64.whl", hash = "sha256:913485a03f1bcf5d279409a9d2b9ed565c151f61c09f29991e5faa14033da4c8", size = 11089322, upload-time = "2026-04-21T17:06:44.29Z" }, + { url = 
"https://files.pythonhosted.org/packages/78/c2/ce7ee2ba36aeb954ba50f18fa25d9c1188578654b97d02a66a15b6f09531/mypy-1.20.2-cp314-cp314-win_arm64.whl", hash = "sha256:c3bae4f855d965b5453784300c12ffc63a548304ac7f99e55d4dc7c898673aa3", size = 10017775, upload-time = "2026-04-21T17:07:20.732Z" }, + { url = "https://files.pythonhosted.org/packages/4e/a1/9d93a7d0b5859af0ead82b4888b46df6c8797e1bc5e1e262a08518c6d48e/mypy-1.20.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:2de3dcea53babc1c3237a19002bc3d228ce1833278f093b8d619e06e7cc79609", size = 15549002, upload-time = "2026-04-21T17:08:23.107Z" }, + { url = "https://files.pythonhosted.org/packages/00/d2/09a6a10ee1bf0008f6c144d9676f2ca6a12512151b4e0ad0ff6c4fac5337/mypy-1.20.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:52b176444e2e5054dfcbcb8c75b0b719865c96247b37407184bbfca5c353f2c2", size = 14401942, upload-time = "2026-04-21T17:07:31.837Z" }, + { url = "https://files.pythonhosted.org/packages/57/da/9594b75c3c019e805250bed3583bdf4443ff9e6ef08f97e39ae308cb06f2/mypy-1.20.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:688c3312e5dadb573a2c69c82af3a298d43ecf9e6d264e0f95df960b5f6ac19c", size = 15041649, upload-time = "2026-04-21T17:09:34.653Z" }, + { url = "https://files.pythonhosted.org/packages/97/77/f75a65c278e6e8eba2071f7f5a90481891053ecc39878cc444634d892abe/mypy-1.20.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:29752dbbf8cc53f89f6ac096d363314333045c257c9c75cbd189ca2de0455744", size = 15864588, upload-time = "2026-04-21T17:11:44.936Z" }, + { url = "https://files.pythonhosted.org/packages/d7/46/1a4e1c66e96c1a3246ddf5403d122ac9b0a8d2b7e65730b9d6533ba7a6d3/mypy-1.20.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:803203d2b6ea644982c644895c2f78b28d0e208bba7b27d9b921e0ec5eb207c6", size = 16093956, upload-time = "2026-04-21T17:10:17.683Z" }, + { url = 
"https://files.pythonhosted.org/packages/5a/2c/78a8851264dec38cd736ca5b8bc9380674df0dd0be7792f538916157716c/mypy-1.20.2-cp314-cp314t-win_amd64.whl", hash = "sha256:9bcb8aa397ff0093c824182fd76a935a9ba7ad097fcbef80ae89bf6c1731d8ec", size = 12568661, upload-time = "2026-04-21T17:11:54.473Z" }, + { url = "https://files.pythonhosted.org/packages/83/01/cd7318aa03493322ce275a0e14f4f52b8896335e4e79d4fb8153a7ad2b77/mypy-1.20.2-cp314-cp314t-win_arm64.whl", hash = "sha256:e061b58443f1736f8a37c48978d7ab581636d6ab03e3d4f99e3fa90463bb9382", size = 10389240, upload-time = "2026-04-21T17:09:42.719Z" }, + { url = "https://files.pythonhosted.org/packages/28/9a/f23c163e25b11074188251b0b5a0342625fc1cdb6af604757174fa9acc9b/mypy-1.20.2-py3-none-any.whl", hash = "sha256:a94c5a76ab46c5e6257c7972b6c8cff0574201ca7dc05647e33e795d78680563", size = 2637314, upload-time = "2026-04-21T17:05:54.5Z" }, ] [[package]] @@ -480,86 +595,85 @@ wheels = [ [[package]] name = "numpy" -version = "2.3.4" +version = "2.4.4" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.11'", + "python_full_version >= '3.15'", + "python_full_version >= '3.11' and python_full_version < '3.15'", ] -sdist = { url = "https://files.pythonhosted.org/packages/b5/f4/098d2270d52b41f1bd7db9fc288aaa0400cb48c2a3e2af6fa365d9720947/numpy-2.3.4.tar.gz", hash = "sha256:a7d018bfedb375a8d979ac758b120ba846a7fe764911a64465fd87b8729f4a6a", size = 20582187, upload-time = "2025-10-15T16:18:11.77Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/9f/b8cef5bffa569759033adda9481211426f12f53299629b410340795c2514/numpy-2.4.4.tar.gz", hash = "sha256:2d390634c5182175533585cc89f3608a4682ccb173cc9bb940b2881c8d6f8fa0", size = 20731587, upload-time = "2026-03-29T13:22:01.298Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/60/e7/0e07379944aa8afb49a556a2b54587b828eb41dc9adc56fb7615b678ca53/numpy-2.3.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:e78aecd2800b32e8347ce49316d3eaf04aed849cd5b38e0af39f829a4e59f5eb", size = 21259519, upload-time = "2025-10-15T16:15:19.012Z" }, - { url = "https://files.pythonhosted.org/packages/d0/cb/5a69293561e8819b09e34ed9e873b9a82b5f2ade23dce4c51dc507f6cfe1/numpy-2.3.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7fd09cc5d65bda1e79432859c40978010622112e9194e581e3415a3eccc7f43f", size = 14452796, upload-time = "2025-10-15T16:15:23.094Z" }, - { url = "https://files.pythonhosted.org/packages/e4/04/ff11611200acd602a1e5129e36cfd25bf01ad8e5cf927baf2e90236eb02e/numpy-2.3.4-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1b219560ae2c1de48ead517d085bc2d05b9433f8e49d0955c82e8cd37bd7bf36", size = 5381639, upload-time = "2025-10-15T16:15:25.572Z" }, - { url = "https://files.pythonhosted.org/packages/ea/77/e95c757a6fe7a48d28a009267408e8aa382630cc1ad1db7451b3bc21dbb4/numpy-2.3.4-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:bafa7d87d4c99752d07815ed7a2c0964f8ab311eb8168f41b910bd01d15b6032", size = 6914296, upload-time = "2025-10-15T16:15:27.079Z" }, - { url = "https://files.pythonhosted.org/packages/a3/d2/137c7b6841c942124eae921279e5c41b1c34bab0e6fc60c7348e69afd165/numpy-2.3.4-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36dc13af226aeab72b7abad501d370d606326a0029b9f435eacb3b8c94b8a8b7", size = 14591904, upload-time = "2025-10-15T16:15:29.044Z" }, - { url = "https://files.pythonhosted.org/packages/bb/32/67e3b0f07b0aba57a078c4ab777a9e8e6bc62f24fb53a2337f75f9691699/numpy-2.3.4-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a7b2f9a18b5ff9824a6af80de4f37f4ec3c2aab05ef08f51c77a093f5b89adda", size = 16939602, upload-time = "2025-10-15T16:15:31.106Z" }, - { url = "https://files.pythonhosted.org/packages/95/22/9639c30e32c93c4cee3ccdb4b09c2d0fbff4dcd06d36b357da06146530fb/numpy-2.3.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9984bd645a8db6ca15d850ff996856d8762c51a2239225288f08f9050ca240a0", size = 
16372661, upload-time = "2025-10-15T16:15:33.546Z" }, - { url = "https://files.pythonhosted.org/packages/12/e9/a685079529be2b0156ae0c11b13d6be647743095bb51d46589e95be88086/numpy-2.3.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:64c5825affc76942973a70acf438a8ab618dbd692b84cd5ec40a0a0509edc09a", size = 18884682, upload-time = "2025-10-15T16:15:36.105Z" }, - { url = "https://files.pythonhosted.org/packages/cf/85/f6f00d019b0cc741e64b4e00ce865a57b6bed945d1bbeb1ccadbc647959b/numpy-2.3.4-cp311-cp311-win32.whl", hash = "sha256:ed759bf7a70342f7817d88376eb7142fab9fef8320d6019ef87fae05a99874e1", size = 6570076, upload-time = "2025-10-15T16:15:38.225Z" }, - { url = "https://files.pythonhosted.org/packages/7d/10/f8850982021cb90e2ec31990291f9e830ce7d94eef432b15066e7cbe0bec/numpy-2.3.4-cp311-cp311-win_amd64.whl", hash = "sha256:faba246fb30ea2a526c2e9645f61612341de1a83fb1e0c5edf4ddda5a9c10996", size = 13089358, upload-time = "2025-10-15T16:15:40.404Z" }, - { url = "https://files.pythonhosted.org/packages/d1/ad/afdd8351385edf0b3445f9e24210a9c3971ef4de8fd85155462fc4321d79/numpy-2.3.4-cp311-cp311-win_arm64.whl", hash = "sha256:4c01835e718bcebe80394fd0ac66c07cbb90147ebbdad3dcecd3f25de2ae7e2c", size = 10462292, upload-time = "2025-10-15T16:15:42.896Z" }, - { url = "https://files.pythonhosted.org/packages/96/7a/02420400b736f84317e759291b8edaeee9dc921f72b045475a9cbdb26b17/numpy-2.3.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ef1b5a3e808bc40827b5fa2c8196151a4c5abe110e1726949d7abddfe5c7ae11", size = 20957727, upload-time = "2025-10-15T16:15:44.9Z" }, - { url = "https://files.pythonhosted.org/packages/18/90/a014805d627aa5750f6f0e878172afb6454552da929144b3c07fcae1bb13/numpy-2.3.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c2f91f496a87235c6aaf6d3f3d89b17dba64996abadccb289f48456cff931ca9", size = 14187262, upload-time = "2025-10-15T16:15:47.761Z" }, - { url = 
"https://files.pythonhosted.org/packages/c7/e4/0a94b09abe89e500dc748e7515f21a13e30c5c3fe3396e6d4ac108c25fca/numpy-2.3.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f77e5b3d3da652b474cc80a14084927a5e86a5eccf54ca8ca5cbd697bf7f2667", size = 5115992, upload-time = "2025-10-15T16:15:50.144Z" }, - { url = "https://files.pythonhosted.org/packages/88/dd/db77c75b055c6157cbd4f9c92c4458daef0dd9cbe6d8d2fe7f803cb64c37/numpy-2.3.4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:8ab1c5f5ee40d6e01cbe96de5863e39b215a4d24e7d007cad56c7184fdf4aeef", size = 6648672, upload-time = "2025-10-15T16:15:52.442Z" }, - { url = "https://files.pythonhosted.org/packages/e1/e6/e31b0d713719610e406c0ea3ae0d90760465b086da8783e2fd835ad59027/numpy-2.3.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77b84453f3adcb994ddbd0d1c5d11db2d6bda1a2b7fd5ac5bd4649d6f5dc682e", size = 14284156, upload-time = "2025-10-15T16:15:54.351Z" }, - { url = "https://files.pythonhosted.org/packages/f9/58/30a85127bfee6f108282107caf8e06a1f0cc997cb6b52cdee699276fcce4/numpy-2.3.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4121c5beb58a7f9e6dfdee612cb24f4df5cd4db6e8261d7f4d7450a997a65d6a", size = 16641271, upload-time = "2025-10-15T16:15:56.67Z" }, - { url = "https://files.pythonhosted.org/packages/06/f2/2e06a0f2adf23e3ae29283ad96959267938d0efd20a2e25353b70065bfec/numpy-2.3.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:65611ecbb00ac9846efe04db15cbe6186f562f6bb7e5e05f077e53a599225d16", size = 16059531, upload-time = "2025-10-15T16:15:59.412Z" }, - { url = "https://files.pythonhosted.org/packages/b0/e7/b106253c7c0d5dc352b9c8fab91afd76a93950998167fa3e5afe4ef3a18f/numpy-2.3.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dabc42f9c6577bcc13001b8810d300fe814b4cfbe8a92c873f269484594f9786", size = 18578983, upload-time = "2025-10-15T16:16:01.804Z" }, - { url = 
"https://files.pythonhosted.org/packages/73/e3/04ecc41e71462276ee867ccbef26a4448638eadecf1bc56772c9ed6d0255/numpy-2.3.4-cp312-cp312-win32.whl", hash = "sha256:a49d797192a8d950ca59ee2d0337a4d804f713bb5c3c50e8db26d49666e351dc", size = 6291380, upload-time = "2025-10-15T16:16:03.938Z" }, - { url = "https://files.pythonhosted.org/packages/3d/a8/566578b10d8d0e9955b1b6cd5db4e9d4592dd0026a941ff7994cedda030a/numpy-2.3.4-cp312-cp312-win_amd64.whl", hash = "sha256:985f1e46358f06c2a09921e8921e2c98168ed4ae12ccd6e5e87a4f1857923f32", size = 12787999, upload-time = "2025-10-15T16:16:05.801Z" }, - { url = "https://files.pythonhosted.org/packages/58/22/9c903a957d0a8071b607f5b1bff0761d6e608b9a965945411f867d515db1/numpy-2.3.4-cp312-cp312-win_arm64.whl", hash = "sha256:4635239814149e06e2cb9db3dd584b2fa64316c96f10656983b8026a82e6e4db", size = 10197412, upload-time = "2025-10-15T16:16:07.854Z" }, - { url = "https://files.pythonhosted.org/packages/57/7e/b72610cc91edf138bc588df5150957a4937221ca6058b825b4725c27be62/numpy-2.3.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c090d4860032b857d94144d1a9976b8e36709e40386db289aaf6672de2a81966", size = 20950335, upload-time = "2025-10-15T16:16:10.304Z" }, - { url = "https://files.pythonhosted.org/packages/3e/46/bdd3370dcea2f95ef14af79dbf81e6927102ddf1cc54adc0024d61252fd9/numpy-2.3.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a13fc473b6db0be619e45f11f9e81260f7302f8d180c49a22b6e6120022596b3", size = 14179878, upload-time = "2025-10-15T16:16:12.595Z" }, - { url = "https://files.pythonhosted.org/packages/ac/01/5a67cb785bda60f45415d09c2bc245433f1c68dd82eef9c9002c508b5a65/numpy-2.3.4-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:3634093d0b428e6c32c3a69b78e554f0cd20ee420dcad5a9f3b2a63762ce4197", size = 5108673, upload-time = "2025-10-15T16:16:14.877Z" }, - { url = "https://files.pythonhosted.org/packages/c2/cd/8428e23a9fcebd33988f4cb61208fda832800ca03781f471f3727a820704/numpy-2.3.4-cp313-cp313-macosx_14_0_x86_64.whl", hash = 
"sha256:043885b4f7e6e232d7df4f51ffdef8c36320ee9d5f227b380ea636722c7ed12e", size = 6641438, upload-time = "2025-10-15T16:16:16.805Z" }, - { url = "https://files.pythonhosted.org/packages/3e/d1/913fe563820f3c6b079f992458f7331278dcd7ba8427e8e745af37ddb44f/numpy-2.3.4-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4ee6a571d1e4f0ea6d5f22d6e5fbd6ed1dc2b18542848e1e7301bd190500c9d7", size = 14281290, upload-time = "2025-10-15T16:16:18.764Z" }, - { url = "https://files.pythonhosted.org/packages/9e/7e/7d306ff7cb143e6d975cfa7eb98a93e73495c4deabb7d1b5ecf09ea0fd69/numpy-2.3.4-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fc8a63918b04b8571789688b2780ab2b4a33ab44bfe8ccea36d3eba51228c953", size = 16636543, upload-time = "2025-10-15T16:16:21.072Z" }, - { url = "https://files.pythonhosted.org/packages/47/6a/8cfc486237e56ccfb0db234945552a557ca266f022d281a2f577b98e955c/numpy-2.3.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:40cc556d5abbc54aabe2b1ae287042d7bdb80c08edede19f0c0afb36ae586f37", size = 16056117, upload-time = "2025-10-15T16:16:23.369Z" }, - { url = "https://files.pythonhosted.org/packages/b1/0e/42cb5e69ea901e06ce24bfcc4b5664a56f950a70efdcf221f30d9615f3f3/numpy-2.3.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ecb63014bb7f4ce653f8be7f1df8cbc6093a5a2811211770f6606cc92b5a78fd", size = 18577788, upload-time = "2025-10-15T16:16:27.496Z" }, - { url = "https://files.pythonhosted.org/packages/86/92/41c3d5157d3177559ef0a35da50f0cda7fa071f4ba2306dd36818591a5bc/numpy-2.3.4-cp313-cp313-win32.whl", hash = "sha256:e8370eb6925bb8c1c4264fec52b0384b44f675f191df91cbe0140ec9f0955646", size = 6282620, upload-time = "2025-10-15T16:16:29.811Z" }, - { url = "https://files.pythonhosted.org/packages/09/97/fd421e8bc50766665ad35536c2bb4ef916533ba1fdd053a62d96cc7c8b95/numpy-2.3.4-cp313-cp313-win_amd64.whl", hash = "sha256:56209416e81a7893036eea03abcb91c130643eb14233b2515c90dcac963fe99d", size = 12784672, upload-time = 
"2025-10-15T16:16:31.589Z" }, - { url = "https://files.pythonhosted.org/packages/ad/df/5474fb2f74970ca8eb978093969b125a84cc3d30e47f82191f981f13a8a0/numpy-2.3.4-cp313-cp313-win_arm64.whl", hash = "sha256:a700a4031bc0fd6936e78a752eefb79092cecad2599ea9c8039c548bc097f9bc", size = 10196702, upload-time = "2025-10-15T16:16:33.902Z" }, - { url = "https://files.pythonhosted.org/packages/11/83/66ac031464ec1767ea3ed48ce40f615eb441072945e98693bec0bcd056cc/numpy-2.3.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:86966db35c4040fdca64f0816a1c1dd8dbd027d90fca5a57e00e1ca4cd41b879", size = 21049003, upload-time = "2025-10-15T16:16:36.101Z" }, - { url = "https://files.pythonhosted.org/packages/5f/99/5b14e0e686e61371659a1d5bebd04596b1d72227ce36eed121bb0aeab798/numpy-2.3.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:838f045478638b26c375ee96ea89464d38428c69170360b23a1a50fa4baa3562", size = 14302980, upload-time = "2025-10-15T16:16:39.124Z" }, - { url = "https://files.pythonhosted.org/packages/2c/44/e9486649cd087d9fc6920e3fc3ac2aba10838d10804b1e179fb7cbc4e634/numpy-2.3.4-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d7315ed1dab0286adca467377c8381cd748f3dc92235f22a7dfc42745644a96a", size = 5231472, upload-time = "2025-10-15T16:16:41.168Z" }, - { url = "https://files.pythonhosted.org/packages/3e/51/902b24fa8887e5fe2063fd61b1895a476d0bbf46811ab0c7fdf4bd127345/numpy-2.3.4-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:84f01a4d18b2cc4ade1814a08e5f3c907b079c847051d720fad15ce37aa930b6", size = 6739342, upload-time = "2025-10-15T16:16:43.777Z" }, - { url = "https://files.pythonhosted.org/packages/34/f1/4de9586d05b1962acdcdb1dc4af6646361a643f8c864cef7c852bf509740/numpy-2.3.4-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:817e719a868f0dacde4abdfc5c1910b301877970195db9ab6a5e2c4bd5b121f7", size = 14354338, upload-time = "2025-10-15T16:16:46.081Z" }, - { url = 
"https://files.pythonhosted.org/packages/1f/06/1c16103b425de7969d5a76bdf5ada0804b476fed05d5f9e17b777f1cbefd/numpy-2.3.4-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85e071da78d92a214212cacea81c6da557cab307f2c34b5f85b628e94803f9c0", size = 16702392, upload-time = "2025-10-15T16:16:48.455Z" }, - { url = "https://files.pythonhosted.org/packages/34/b2/65f4dc1b89b5322093572b6e55161bb42e3e0487067af73627f795cc9d47/numpy-2.3.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2ec646892819370cf3558f518797f16597b4e4669894a2ba712caccc9da53f1f", size = 16134998, upload-time = "2025-10-15T16:16:51.114Z" }, - { url = "https://files.pythonhosted.org/packages/d4/11/94ec578896cdb973aaf56425d6c7f2aff4186a5c00fac15ff2ec46998b46/numpy-2.3.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:035796aaaddfe2f9664b9a9372f089cfc88bd795a67bd1bfe15e6e770934cf64", size = 18651574, upload-time = "2025-10-15T16:16:53.429Z" }, - { url = "https://files.pythonhosted.org/packages/62/b7/7efa763ab33dbccf56dade36938a77345ce8e8192d6b39e470ca25ff3cd0/numpy-2.3.4-cp313-cp313t-win32.whl", hash = "sha256:fea80f4f4cf83b54c3a051f2f727870ee51e22f0248d3114b8e755d160b38cfb", size = 6413135, upload-time = "2025-10-15T16:16:55.992Z" }, - { url = "https://files.pythonhosted.org/packages/43/70/aba4c38e8400abcc2f345e13d972fb36c26409b3e644366db7649015f291/numpy-2.3.4-cp313-cp313t-win_amd64.whl", hash = "sha256:15eea9f306b98e0be91eb344a94c0e630689ef302e10c2ce5f7e11905c704f9c", size = 12928582, upload-time = "2025-10-15T16:16:57.943Z" }, - { url = "https://files.pythonhosted.org/packages/67/63/871fad5f0073fc00fbbdd7232962ea1ac40eeaae2bba66c76214f7954236/numpy-2.3.4-cp313-cp313t-win_arm64.whl", hash = "sha256:b6c231c9c2fadbae4011ca5e7e83e12dc4a5072f1a1d85a0a7b3ed754d145a40", size = 10266691, upload-time = "2025-10-15T16:17:00.048Z" }, - { url = 
"https://files.pythonhosted.org/packages/72/71/ae6170143c115732470ae3a2d01512870dd16e0953f8a6dc89525696069b/numpy-2.3.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:81c3e6d8c97295a7360d367f9f8553973651b76907988bb6066376bc2252f24e", size = 20955580, upload-time = "2025-10-15T16:17:02.509Z" }, - { url = "https://files.pythonhosted.org/packages/af/39/4be9222ffd6ca8a30eda033d5f753276a9c3426c397bb137d8e19dedd200/numpy-2.3.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7c26b0b2bf58009ed1f38a641f3db4be8d960a417ca96d14e5b06df1506d41ff", size = 14188056, upload-time = "2025-10-15T16:17:04.873Z" }, - { url = "https://files.pythonhosted.org/packages/6c/3d/d85f6700d0a4aa4f9491030e1021c2b2b7421b2b38d01acd16734a2bfdc7/numpy-2.3.4-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:62b2198c438058a20b6704351b35a1d7db881812d8512d67a69c9de1f18ca05f", size = 5116555, upload-time = "2025-10-15T16:17:07.499Z" }, - { url = "https://files.pythonhosted.org/packages/bf/04/82c1467d86f47eee8a19a464c92f90a9bb68ccf14a54c5224d7031241ffb/numpy-2.3.4-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:9d729d60f8d53a7361707f4b68a9663c968882dd4f09e0d58c044c8bf5faee7b", size = 6643581, upload-time = "2025-10-15T16:17:09.774Z" }, - { url = "https://files.pythonhosted.org/packages/0c/d3/c79841741b837e293f48bd7db89d0ac7a4f2503b382b78a790ef1dc778a5/numpy-2.3.4-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bd0c630cf256b0a7fd9d0a11c9413b42fef5101219ce6ed5a09624f5a65392c7", size = 14299186, upload-time = "2025-10-15T16:17:11.937Z" }, - { url = "https://files.pythonhosted.org/packages/e8/7e/4a14a769741fbf237eec5a12a2cbc7a4c4e061852b6533bcb9e9a796c908/numpy-2.3.4-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d5e081bc082825f8b139f9e9fe42942cb4054524598aaeb177ff476cc76d09d2", size = 16638601, upload-time = "2025-10-15T16:17:14.391Z" }, - { url = 
"https://files.pythonhosted.org/packages/93/87/1c1de269f002ff0a41173fe01dcc925f4ecff59264cd8f96cf3b60d12c9b/numpy-2.3.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:15fb27364ed84114438fff8aaf998c9e19adbeba08c0b75409f8c452a8692c52", size = 16074219, upload-time = "2025-10-15T16:17:17.058Z" }, - { url = "https://files.pythonhosted.org/packages/cd/28/18f72ee77408e40a76d691001ae599e712ca2a47ddd2c4f695b16c65f077/numpy-2.3.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:85d9fb2d8cd998c84d13a79a09cc0c1091648e848e4e6249b0ccd7f6b487fa26", size = 18576702, upload-time = "2025-10-15T16:17:19.379Z" }, - { url = "https://files.pythonhosted.org/packages/c3/76/95650169b465ececa8cf4b2e8f6df255d4bf662775e797ade2025cc51ae6/numpy-2.3.4-cp314-cp314-win32.whl", hash = "sha256:e73d63fd04e3a9d6bc187f5455d81abfad05660b212c8804bf3b407e984cd2bc", size = 6337136, upload-time = "2025-10-15T16:17:22.886Z" }, - { url = "https://files.pythonhosted.org/packages/dc/89/a231a5c43ede5d6f77ba4a91e915a87dea4aeea76560ba4d2bf185c683f0/numpy-2.3.4-cp314-cp314-win_amd64.whl", hash = "sha256:3da3491cee49cf16157e70f607c03a217ea6647b1cea4819c4f48e53d49139b9", size = 12920542, upload-time = "2025-10-15T16:17:24.783Z" }, - { url = "https://files.pythonhosted.org/packages/0d/0c/ae9434a888f717c5ed2ff2393b3f344f0ff6f1c793519fa0c540461dc530/numpy-2.3.4-cp314-cp314-win_arm64.whl", hash = "sha256:6d9cd732068e8288dbe2717177320723ccec4fb064123f0caf9bbd90ab5be868", size = 10480213, upload-time = "2025-10-15T16:17:26.935Z" }, - { url = "https://files.pythonhosted.org/packages/83/4b/c4a5f0841f92536f6b9592694a5b5f68c9ab37b775ff342649eadf9055d3/numpy-2.3.4-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:22758999b256b595cf0b1d102b133bb61866ba5ceecf15f759623b64c020c9ec", size = 21052280, upload-time = "2025-10-15T16:17:29.638Z" }, - { url = "https://files.pythonhosted.org/packages/3e/80/90308845fc93b984d2cc96d83e2324ce8ad1fd6efea81b324cba4b673854/numpy-2.3.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = 
"sha256:9cb177bc55b010b19798dc5497d540dea67fd13a8d9e882b2dae71de0cf09eb3", size = 14302930, upload-time = "2025-10-15T16:17:32.384Z" }, - { url = "https://files.pythonhosted.org/packages/3d/4e/07439f22f2a3b247cec4d63a713faae55e1141a36e77fb212881f7cda3fb/numpy-2.3.4-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:0f2bcc76f1e05e5ab58893407c63d90b2029908fa41f9f1cc51eecce936c3365", size = 5231504, upload-time = "2025-10-15T16:17:34.515Z" }, - { url = "https://files.pythonhosted.org/packages/ab/de/1e11f2547e2fe3d00482b19721855348b94ada8359aef5d40dd57bfae9df/numpy-2.3.4-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:8dc20bde86802df2ed8397a08d793da0ad7a5fd4ea3ac85d757bf5dd4ad7c252", size = 6739405, upload-time = "2025-10-15T16:17:36.128Z" }, - { url = "https://files.pythonhosted.org/packages/3b/40/8cd57393a26cebe2e923005db5134a946c62fa56a1087dc7c478f3e30837/numpy-2.3.4-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5e199c087e2aa71c8f9ce1cb7a8e10677dc12457e7cc1be4798632da37c3e86e", size = 14354866, upload-time = "2025-10-15T16:17:38.884Z" }, - { url = "https://files.pythonhosted.org/packages/93/39/5b3510f023f96874ee6fea2e40dfa99313a00bf3ab779f3c92978f34aace/numpy-2.3.4-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85597b2d25ddf655495e2363fe044b0ae999b75bc4d630dc0d886484b03a5eb0", size = 16703296, upload-time = "2025-10-15T16:17:41.564Z" }, - { url = "https://files.pythonhosted.org/packages/41/0d/19bb163617c8045209c1996c4e427bccbc4bbff1e2c711f39203c8ddbb4a/numpy-2.3.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:04a69abe45b49c5955923cf2c407843d1c85013b424ae8a560bba16c92fe44a0", size = 16136046, upload-time = "2025-10-15T16:17:43.901Z" }, - { url = "https://files.pythonhosted.org/packages/e2/c1/6dba12fdf68b02a21ac411c9df19afa66bed2540f467150ca64d246b463d/numpy-2.3.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e1708fac43ef8b419c975926ce1eaf793b0c13b7356cfab6ab0dc34c0a02ac0f", size 
= 18652691, upload-time = "2025-10-15T16:17:46.247Z" }, - { url = "https://files.pythonhosted.org/packages/f8/73/f85056701dbbbb910c51d846c58d29fd46b30eecd2b6ba760fc8b8a1641b/numpy-2.3.4-cp314-cp314t-win32.whl", hash = "sha256:863e3b5f4d9915aaf1b8ec79ae560ad21f0b8d5e3adc31e73126491bb86dee1d", size = 6485782, upload-time = "2025-10-15T16:17:48.872Z" }, - { url = "https://files.pythonhosted.org/packages/17/90/28fa6f9865181cb817c2471ee65678afa8a7e2a1fb16141473d5fa6bacc3/numpy-2.3.4-cp314-cp314t-win_amd64.whl", hash = "sha256:962064de37b9aef801d33bc579690f8bfe6c5e70e29b61783f60bcba838a14d6", size = 13113301, upload-time = "2025-10-15T16:17:50.938Z" }, - { url = "https://files.pythonhosted.org/packages/54/23/08c002201a8e7e1f9afba93b97deceb813252d9cfd0d3351caed123dcf97/numpy-2.3.4-cp314-cp314t-win_arm64.whl", hash = "sha256:8b5a9a39c45d852b62693d9b3f3e0fe052541f804296ff401a72a1b60edafb29", size = 10547532, upload-time = "2025-10-15T16:17:53.48Z" }, - { url = "https://files.pythonhosted.org/packages/b1/b6/64898f51a86ec88ca1257a59c1d7fd077b60082a119affefcdf1dd0df8ca/numpy-2.3.4-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6e274603039f924c0fe5cb73438fa9246699c78a6df1bd3decef9ae592ae1c05", size = 21131552, upload-time = "2025-10-15T16:17:55.845Z" }, - { url = "https://files.pythonhosted.org/packages/ce/4c/f135dc6ebe2b6a3c77f4e4838fa63d350f85c99462012306ada1bd4bc460/numpy-2.3.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d149aee5c72176d9ddbc6803aef9c0f6d2ceeea7626574fc68518da5476fa346", size = 14377796, upload-time = "2025-10-15T16:17:58.308Z" }, - { url = "https://files.pythonhosted.org/packages/d0/a4/f33f9c23fcc13dd8412fc8614559b5b797e0aba9d8e01dfa8bae10c84004/numpy-2.3.4-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:6d34ed9db9e6395bb6cd33286035f73a59b058169733a9db9f85e650b88df37e", size = 5306904, upload-time = "2025-10-15T16:18:00.596Z" }, - { url = 
"https://files.pythonhosted.org/packages/28/af/c44097f25f834360f9fb960fa082863e0bad14a42f36527b2a121abdec56/numpy-2.3.4-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:fdebe771ca06bb8d6abce84e51dca9f7921fe6ad34a0c914541b063e9a68928b", size = 6819682, upload-time = "2025-10-15T16:18:02.32Z" }, - { url = "https://files.pythonhosted.org/packages/c5/8c/cd283b54c3c2b77e188f63e23039844f56b23bba1712318288c13fe86baf/numpy-2.3.4-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e92defe6c08211eb77902253b14fe5b480ebc5112bc741fd5e9cd0608f847", size = 14422300, upload-time = "2025-10-15T16:18:04.271Z" }, - { url = "https://files.pythonhosted.org/packages/b0/f0/8404db5098d92446b3e3695cf41c6f0ecb703d701cb0b7566ee2177f2eee/numpy-2.3.4-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13b9062e4f5c7ee5c7e5be96f29ba71bc5a37fed3d1d77c37390ae00724d296d", size = 16760806, upload-time = "2025-10-15T16:18:06.668Z" }, - { url = "https://files.pythonhosted.org/packages/95/8e/2844c3959ce9a63acc7c8e50881133d86666f0420bcde695e115ced0920f/numpy-2.3.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:81b3a59793523e552c4a96109dde028aa4448ae06ccac5a76ff6532a85558a7f", size = 12973130, upload-time = "2025-10-15T16:18:09.397Z" }, + { url = "https://files.pythonhosted.org/packages/ef/c6/4218570d8c8ecc9704b5157a3348e486e84ef4be0ed3e38218ab473c83d2/numpy-2.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f983334aea213c99992053ede6168500e5f086ce74fbc4acc3f2b00f5762e9db", size = 16976799, upload-time = "2026-03-29T13:18:15.438Z" }, + { url = "https://files.pythonhosted.org/packages/dd/92/b4d922c4a5f5dab9ed44e6153908a5c665b71acf183a83b93b690996e39b/numpy-2.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:72944b19f2324114e9dc86a159787333b77874143efcf89a5167ef83cfee8af0", size = 14971552, upload-time = "2026-03-29T13:18:18.606Z" }, + { url = 
"https://files.pythonhosted.org/packages/8a/dc/df98c095978fa6ee7b9a9387d1d58cbb3d232d0e69ad169a4ce784bde4fd/numpy-2.4.4-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:86b6f55f5a352b48d7fbfd2dbc3d5b780b2d79f4d3c121f33eb6efb22e9a2015", size = 5476566, upload-time = "2026-03-29T13:18:21.532Z" }, + { url = "https://files.pythonhosted.org/packages/28/34/b3fdcec6e725409223dd27356bdf5a3c2cc2282e428218ecc9cb7acc9763/numpy-2.4.4-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:ba1f4fc670ed79f876f70082eff4f9583c15fb9a4b89d6188412de4d18ae2f40", size = 6806482, upload-time = "2026-03-29T13:18:23.634Z" }, + { url = "https://files.pythonhosted.org/packages/68/62/63417c13aa35d57bee1337c67446761dc25ea6543130cf868eace6e8157b/numpy-2.4.4-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8a87ec22c87be071b6bdbd27920b129b94f2fc964358ce38f3822635a3e2e03d", size = 15973376, upload-time = "2026-03-29T13:18:26.677Z" }, + { url = "https://files.pythonhosted.org/packages/cf/c5/9fcb7e0e69cef59cf10c746b84f7d58b08bc66a6b7d459783c5a4f6101a6/numpy-2.4.4-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:df3775294accfdd75f32c74ae39fcba920c9a378a2fc18a12b6820aa8c1fb502", size = 16925137, upload-time = "2026-03-29T13:18:30.14Z" }, + { url = "https://files.pythonhosted.org/packages/7e/43/80020edacb3f84b9efdd1591120a4296462c23fd8db0dde1666f6ef66f13/numpy-2.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0d4e437e295f18ec29bc79daf55e8a47a9113df44d66f702f02a293d93a2d6dd", size = 17329414, upload-time = "2026-03-29T13:18:33.733Z" }, + { url = "https://files.pythonhosted.org/packages/fd/06/af0658593b18a5f73532d377188b964f239eb0894e664a6c12f484472f97/numpy-2.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6aa3236c78803afbcb255045fbef97a9e25a1f6c9888357d205ddc42f4d6eba5", size = 18658397, upload-time = "2026-03-29T13:18:37.511Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/ce/13a09ed65f5d0ce5c7dd0669250374c6e379910f97af2c08c57b0608eee4/numpy-2.4.4-cp311-cp311-win32.whl", hash = "sha256:30caa73029a225b2d40d9fae193e008e24b2026b7ee1a867b7ee8d96ca1a448e", size = 6239499, upload-time = "2026-03-29T13:18:40.372Z" }, + { url = "https://files.pythonhosted.org/packages/bd/63/05d193dbb4b5eec1eca73822d80da98b511f8328ad4ae3ca4caf0f4db91d/numpy-2.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:6bbe4eb67390b0a0265a2c25458f6b90a409d5d069f1041e6aff1e27e3d9a79e", size = 12614257, upload-time = "2026-03-29T13:18:42.95Z" }, + { url = "https://files.pythonhosted.org/packages/87/c5/8168052f080c26fa984c413305012be54741c9d0d74abd7fbeeccae3889f/numpy-2.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:fcfe2045fd2e8f3cb0ce9d4ba6dba6333b8fa05bb8a4939c908cd43322d14c7e", size = 10486775, upload-time = "2026-03-29T13:18:45.835Z" }, + { url = "https://files.pythonhosted.org/packages/28/05/32396bec30fb2263770ee910142f49c1476d08e8ad41abf8403806b520ce/numpy-2.4.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:15716cfef24d3a9762e3acdf87e27f58dc823d1348f765bbea6bef8c639bfa1b", size = 16689272, upload-time = "2026-03-29T13:18:49.223Z" }, + { url = "https://files.pythonhosted.org/packages/c5/f3/a983d28637bfcd763a9c7aafdb6d5c0ebf3d487d1e1459ffdb57e2f01117/numpy-2.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23cbfd4c17357c81021f21540da84ee282b9c8fba38a03b7b9d09ba6b951421e", size = 14699573, upload-time = "2026-03-29T13:18:52.629Z" }, + { url = "https://files.pythonhosted.org/packages/9b/fd/e5ecca1e78c05106d98028114f5c00d3eddb41207686b2b7de3e477b0e22/numpy-2.4.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:8b3b60bb7cba2c8c81837661c488637eee696f59a877788a396d33150c35d842", size = 5204782, upload-time = "2026-03-29T13:18:55.579Z" }, + { url = "https://files.pythonhosted.org/packages/de/2f/702a4594413c1a8632092beae8aba00f1d67947389369b3777aed783fdca/numpy-2.4.4-cp312-cp312-macosx_14_0_x86_64.whl", hash = 
"sha256:e4a010c27ff6f210ff4c6ef34394cd61470d01014439b192ec22552ee867f2a8", size = 6552038, upload-time = "2026-03-29T13:18:57.769Z" }, + { url = "https://files.pythonhosted.org/packages/7f/37/eed308a8f56cba4d1fdf467a4fc67ef4ff4bf1c888f5fc980481890104b1/numpy-2.4.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f9e75681b59ddaa5e659898085ae0eaea229d054f2ac0c7e563a62205a700121", size = 15670666, upload-time = "2026-03-29T13:19:00.341Z" }, + { url = "https://files.pythonhosted.org/packages/0a/0d/0e3ecece05b7a7e87ab9fb587855548da437a061326fff64a223b6dcb78a/numpy-2.4.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:81f4a14bee47aec54f883e0cad2d73986640c1590eb9bfaaba7ad17394481e6e", size = 16645480, upload-time = "2026-03-29T13:19:03.63Z" }, + { url = "https://files.pythonhosted.org/packages/34/49/f2312c154b82a286758ee2f1743336d50651f8b5195db18cdb63675ff649/numpy-2.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:62d6b0f03b694173f9fcb1fb317f7222fd0b0b103e784c6549f5e53a27718c44", size = 17020036, upload-time = "2026-03-29T13:19:07.428Z" }, + { url = "https://files.pythonhosted.org/packages/7b/e9/736d17bd77f1b0ec4f9901aaec129c00d59f5d84d5e79bba540ef12c2330/numpy-2.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fbc356aae7adf9e6336d336b9c8111d390a05df88f1805573ebb0807bd06fd1d", size = 18368643, upload-time = "2026-03-29T13:19:10.775Z" }, + { url = "https://files.pythonhosted.org/packages/63/f6/d417977c5f519b17c8a5c3bc9e8304b0908b0e21136fe43bf628a1343914/numpy-2.4.4-cp312-cp312-win32.whl", hash = "sha256:0d35aea54ad1d420c812bfa0385c71cd7cc5bcf7c65fed95fc2cd02fe8c79827", size = 5961117, upload-time = "2026-03-29T13:19:13.464Z" }, + { url = "https://files.pythonhosted.org/packages/2d/5b/e1deebf88ff431b01b7406ca3583ab2bbb90972bbe1c568732e49c844f7e/numpy-2.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:b5f0362dc928a6ecd9db58868fca5e48485205e3855957bdedea308f8672ea4a", size = 12320584, upload-time = 
"2026-03-29T13:19:16.155Z" }, + { url = "https://files.pythonhosted.org/packages/58/89/e4e856ac82a68c3ed64486a544977d0e7bdd18b8da75b78a577ca31c4395/numpy-2.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:846300f379b5b12cc769334464656bc882e0735d27d9726568bc932fdc49d5ec", size = 10221450, upload-time = "2026-03-29T13:19:18.994Z" }, + { url = "https://files.pythonhosted.org/packages/14/1d/d0a583ce4fefcc3308806a749a536c201ed6b5ad6e1322e227ee4848979d/numpy-2.4.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:08f2e31ed5e6f04b118e49821397f12767934cfdd12a1ce86a058f91e004ee50", size = 16684933, upload-time = "2026-03-29T13:19:22.47Z" }, + { url = "https://files.pythonhosted.org/packages/c1/62/2b7a48fbb745d344742c0277f01286dead15f3f68e4f359fbfcf7b48f70f/numpy-2.4.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e823b8b6edc81e747526f70f71a9c0a07ac4e7ad13020aa736bb7c9d67196115", size = 14694532, upload-time = "2026-03-29T13:19:25.581Z" }, + { url = "https://files.pythonhosted.org/packages/e5/87/499737bfba066b4a3bebff24a8f1c5b2dee410b209bc6668c9be692580f0/numpy-2.4.4-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4a19d9dba1a76618dd86b164d608566f393f8ec6ac7c44f0cc879011c45e65af", size = 5199661, upload-time = "2026-03-29T13:19:28.31Z" }, + { url = "https://files.pythonhosted.org/packages/cd/da/464d551604320d1491bc345efed99b4b7034143a85787aab78d5691d5a0e/numpy-2.4.4-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:d2a8490669bfe99a233298348acc2d824d496dee0e66e31b66a6022c2ad74a5c", size = 6547539, upload-time = "2026-03-29T13:19:30.97Z" }, + { url = "https://files.pythonhosted.org/packages/7d/90/8d23e3b0dafd024bf31bdec225b3bb5c2dbfa6912f8a53b8659f21216cbf/numpy-2.4.4-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:45dbed2ab436a9e826e302fcdcbe9133f9b0006e5af7168afb8963a6520da103", size = 15668806, upload-time = "2026-03-29T13:19:33.887Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/73/a9d864e42a01896bb5974475438f16086be9ba1f0d19d0bb7a07427c4a8b/numpy-2.4.4-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c901b15172510173f5cb310eae652908340f8dede90fff9e3bf6c0d8dfd92f83", size = 16632682, upload-time = "2026-03-29T13:19:37.336Z" }, + { url = "https://files.pythonhosted.org/packages/34/fb/14570d65c3bde4e202a031210475ae9cde9b7686a2e7dc97ee67d2833b35/numpy-2.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:99d838547ace2c4aace6c4f76e879ddfe02bb58a80c1549928477862b7a6d6ed", size = 17019810, upload-time = "2026-03-29T13:19:40.963Z" }, + { url = "https://files.pythonhosted.org/packages/8a/77/2ba9d87081fd41f6d640c83f26fb7351e536b7ce6dd9061b6af5904e8e46/numpy-2.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0aec54fd785890ecca25a6003fd9a5aed47ad607bbac5cd64f836ad8666f4959", size = 18357394, upload-time = "2026-03-29T13:19:44.859Z" }, + { url = "https://files.pythonhosted.org/packages/a2/23/52666c9a41708b0853fa3b1a12c90da38c507a3074883823126d4e9d5b30/numpy-2.4.4-cp313-cp313-win32.whl", hash = "sha256:07077278157d02f65c43b1b26a3886bce886f95d20aabd11f87932750dfb14ed", size = 5959556, upload-time = "2026-03-29T13:19:47.661Z" }, + { url = "https://files.pythonhosted.org/packages/57/fb/48649b4971cde70d817cf97a2a2fdc0b4d8308569f1dd2f2611959d2e0cf/numpy-2.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:5c70f1cc1c4efbe316a572e2d8b9b9cc44e89b95f79ca3331553fbb63716e2bf", size = 12317311, upload-time = "2026-03-29T13:19:50.67Z" }, + { url = "https://files.pythonhosted.org/packages/ba/d8/11490cddd564eb4de97b4579ef6bfe6a736cc07e94c1598590ae25415e01/numpy-2.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:ef4059d6e5152fa1a39f888e344c73fdc926e1b2dd58c771d67b0acfbf2aa67d", size = 10222060, upload-time = "2026-03-29T13:19:54.229Z" }, + { url = 
"https://files.pythonhosted.org/packages/99/5d/dab4339177a905aad3e2221c915b35202f1ec30d750dd2e5e9d9a72b804b/numpy-2.4.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4bbc7f303d125971f60ec0aaad5e12c62d0d2c925f0ab1273debd0e4ba37aba5", size = 14822302, upload-time = "2026-03-29T13:19:57.585Z" }, + { url = "https://files.pythonhosted.org/packages/eb/e4/0564a65e7d3d97562ed6f9b0fd0fb0a6f559ee444092f105938b50043876/numpy-2.4.4-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:4d6d57903571f86180eb98f8f0c839fa9ebbfb031356d87f1361be91e433f5b7", size = 5327407, upload-time = "2026-03-29T13:20:00.601Z" }, + { url = "https://files.pythonhosted.org/packages/29/8d/35a3a6ce5ad371afa58b4700f1c820f8f279948cca32524e0a695b0ded83/numpy-2.4.4-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:4636de7fd195197b7535f231b5de9e4b36d2c440b6e566d2e4e4746e6af0ca93", size = 6647631, upload-time = "2026-03-29T13:20:02.855Z" }, + { url = "https://files.pythonhosted.org/packages/f4/da/477731acbd5a58a946c736edfdabb2ac5b34c3d08d1ba1a7b437fa0884df/numpy-2.4.4-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ad2e2ef14e0b04e544ea2fa0a36463f847f113d314aa02e5b402fdf910ef309e", size = 15727691, upload-time = "2026-03-29T13:20:06.004Z" }, + { url = "https://files.pythonhosted.org/packages/e6/db/338535d9b152beabeb511579598418ba0212ce77cf9718edd70262cc4370/numpy-2.4.4-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a285b3b96f951841799528cd1f4f01cd70e7e0204b4abebac9463eecfcf2a40", size = 16681241, upload-time = "2026-03-29T13:20:09.417Z" }, + { url = "https://files.pythonhosted.org/packages/e2/a9/ad248e8f58beb7a0219b413c9c7d8151c5d285f7f946c3e26695bdbbe2df/numpy-2.4.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:f8474c4241bc18b750be2abea9d7a9ec84f46ef861dbacf86a4f6e043401f79e", size = 17085767, upload-time = "2026-03-29T13:20:13.126Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/1a/3b88ccd3694681356f70da841630e4725a7264d6a885c8d442a697e1146b/numpy-2.4.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4e874c976154687c1f71715b034739b45c7711bec81db01914770373d125e392", size = 18403169, upload-time = "2026-03-29T13:20:17.096Z" }, + { url = "https://files.pythonhosted.org/packages/c2/c9/fcfd5d0639222c6eac7f304829b04892ef51c96a75d479214d77e3ce6e33/numpy-2.4.4-cp313-cp313t-win32.whl", hash = "sha256:9c585a1790d5436a5374bac930dad6ed244c046ed91b2b2a3634eb2971d21008", size = 6083477, upload-time = "2026-03-29T13:20:20.195Z" }, + { url = "https://files.pythonhosted.org/packages/d5/e3/3938a61d1c538aaec8ed6fd6323f57b0c2d2d2219512434c5c878db76553/numpy-2.4.4-cp313-cp313t-win_amd64.whl", hash = "sha256:93e15038125dc1e5345d9b5b68aa7f996ec33b98118d18c6ca0d0b7d6198b7e8", size = 12457487, upload-time = "2026-03-29T13:20:22.946Z" }, + { url = "https://files.pythonhosted.org/packages/97/6a/7e345032cc60501721ef94e0e30b60f6b0bd601f9174ebd36389a2b86d40/numpy-2.4.4-cp313-cp313t-win_arm64.whl", hash = "sha256:0dfd3f9d3adbe2920b68b5cd3d51444e13a10792ec7154cd0a2f6e74d4ab3233", size = 10292002, upload-time = "2026-03-29T13:20:25.909Z" }, + { url = "https://files.pythonhosted.org/packages/6e/06/c54062f85f673dd5c04cbe2f14c3acb8c8b95e3384869bb8cc9bff8cb9df/numpy-2.4.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:f169b9a863d34f5d11b8698ead99febeaa17a13ca044961aa8e2662a6c7766a0", size = 16684353, upload-time = "2026-03-29T13:20:29.504Z" }, + { url = "https://files.pythonhosted.org/packages/4c/39/8a320264a84404c74cc7e79715de85d6130fa07a0898f67fb5cd5bd79908/numpy-2.4.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2483e4584a1cb3092da4470b38866634bafb223cbcd551ee047633fd2584599a", size = 14704914, upload-time = "2026-03-29T13:20:33.547Z" }, + { url = "https://files.pythonhosted.org/packages/91/fb/287076b2614e1d1044235f50f03748f31fa287e3dbe6abeb35cdfa351eca/numpy-2.4.4-cp314-cp314-macosx_14_0_arm64.whl", hash = 
"sha256:2d19e6e2095506d1736b7d80595e0f252d76b89f5e715c35e06e937679ea7d7a", size = 5210005, upload-time = "2026-03-29T13:20:36.45Z" }, + { url = "https://files.pythonhosted.org/packages/63/eb/fcc338595309910de6ecabfcef2419a9ce24399680bfb149421fa2df1280/numpy-2.4.4-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:6a246d5914aa1c820c9443ddcee9c02bec3e203b0c080349533fae17727dfd1b", size = 6544974, upload-time = "2026-03-29T13:20:39.014Z" }, + { url = "https://files.pythonhosted.org/packages/44/5d/e7e9044032a716cdfaa3fba27a8e874bf1c5f1912a1ddd4ed071bf8a14a6/numpy-2.4.4-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:989824e9faf85f96ec9c7761cd8d29c531ad857bfa1daa930cba85baaecf1a9a", size = 15684591, upload-time = "2026-03-29T13:20:42.146Z" }, + { url = "https://files.pythonhosted.org/packages/98/7c/21252050676612625449b4807d6b695b9ce8a7c9e1c197ee6216c8a65c7c/numpy-2.4.4-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:27a8d92cd10f1382a67d7cf4db7ce18341b66438bdd9f691d7b0e48d104c2a9d", size = 16637700, upload-time = "2026-03-29T13:20:46.204Z" }, + { url = "https://files.pythonhosted.org/packages/b1/29/56d2bbef9465db24ef25393383d761a1af4f446a1df9b8cded4fe3a5a5d7/numpy-2.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e44319a2953c738205bf3354537979eaa3998ed673395b964c1176083dd46252", size = 17035781, upload-time = "2026-03-29T13:20:50.242Z" }, + { url = "https://files.pythonhosted.org/packages/e3/2b/a35a6d7589d21f44cea7d0a98de5ddcbb3d421b2622a5c96b1edf18707c3/numpy-2.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e892aff75639bbef0d2a2cfd55535510df26ff92f63c92cd84ef8d4ba5a5557f", size = 18362959, upload-time = "2026-03-29T13:20:54.019Z" }, + { url = "https://files.pythonhosted.org/packages/64/c9/d52ec581f2390e0f5f85cbfd80fb83d965fc15e9f0e1aec2195faa142cde/numpy-2.4.4-cp314-cp314-win32.whl", hash = "sha256:1378871da56ca8943c2ba674530924bb8ca40cd228358a3b5f302ad60cf875fc", size = 6008768, 
upload-time = "2026-03-29T13:20:56.912Z" }, + { url = "https://files.pythonhosted.org/packages/fa/22/4cc31a62a6c7b74a8730e31a4274c5dc80e005751e277a2ce38e675e4923/numpy-2.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:715d1c092715954784bc79e1174fc2a90093dc4dc84ea15eb14dad8abdcdeb74", size = 12449181, upload-time = "2026-03-29T13:20:59.548Z" }, + { url = "https://files.pythonhosted.org/packages/70/2e/14cda6f4d8e396c612d1bf97f22958e92148801d7e4f110cabebdc0eef4b/numpy-2.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:2c194dd721e54ecad9ad387c1d35e63dce5c4450c6dc7dd5611283dda239aabb", size = 10496035, upload-time = "2026-03-29T13:21:02.524Z" }, + { url = "https://files.pythonhosted.org/packages/b1/e8/8fed8c8d848d7ecea092dc3469643f9d10bc3a134a815a3b033da1d2039b/numpy-2.4.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2aa0613a5177c264ff5921051a5719d20095ea586ca88cc802c5c218d1c67d3e", size = 14824958, upload-time = "2026-03-29T13:21:05.671Z" }, + { url = "https://files.pythonhosted.org/packages/05/1a/d8007a5138c179c2bf33ef44503e83d70434d2642877ee8fbb230e7c0548/numpy-2.4.4-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:42c16925aa5a02362f986765f9ebabf20de75cdefdca827d14315c568dcab113", size = 5330020, upload-time = "2026-03-29T13:21:08.635Z" }, + { url = "https://files.pythonhosted.org/packages/99/64/ffb99ac6ae93faf117bcbd5c7ba48a7f45364a33e8e458545d3633615dda/numpy-2.4.4-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:874f200b2a981c647340f841730fc3a2b54c9d940566a3c4149099591e2c4c3d", size = 6650758, upload-time = "2026-03-29T13:21:10.949Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6e/795cc078b78a384052e73b2f6281ff7a700e9bf53bcce2ee579d4f6dd879/numpy-2.4.4-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c9b39d38a9bd2ae1becd7eac1303d031c5c110ad31f2b319c6e7d98b135c934d", size = 15729948, upload-time = "2026-03-29T13:21:14.047Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/86/2acbda8cc2af5f3d7bfc791192863b9e3e19674da7b5e533fded124d1299/numpy-2.4.4-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b268594bccac7d7cf5844c7732e3f20c50921d94e36d7ec9b79e9857694b1b2f", size = 16679325, upload-time = "2026-03-29T13:21:17.561Z" }, + { url = "https://files.pythonhosted.org/packages/bc/59/cafd83018f4aa55e0ac6fa92aa066c0a1877b77a615ceff1711c260ffae8/numpy-2.4.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ac6b31e35612a26483e20750126d30d0941f949426974cace8e6b5c58a3657b0", size = 17084883, upload-time = "2026-03-29T13:21:21.106Z" }, + { url = "https://files.pythonhosted.org/packages/f0/85/a42548db84e65ece46ab2caea3d3f78b416a47af387fcbb47ec28e660dc2/numpy-2.4.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8e3ed142f2728df44263aaf5fb1f5b0b99f4070c553a0d7f033be65338329150", size = 18403474, upload-time = "2026-03-29T13:21:24.828Z" }, + { url = "https://files.pythonhosted.org/packages/ed/ad/483d9e262f4b831000062e5d8a45e342166ec8aaa1195264982bca267e62/numpy-2.4.4-cp314-cp314t-win32.whl", hash = "sha256:dddbbd259598d7240b18c9d87c56a9d2fb3b02fe266f49a7c101532e78c1d871", size = 6155500, upload-time = "2026-03-29T13:21:28.205Z" }, + { url = "https://files.pythonhosted.org/packages/c7/03/2fc4e14c7bd4ff2964b74ba90ecb8552540b6315f201df70f137faa5c589/numpy-2.4.4-cp314-cp314t-win_amd64.whl", hash = "sha256:a7164afb23be6e37ad90b2f10426149fd75aee07ca55653d2aa41e66c4ef697e", size = 12637755, upload-time = "2026-03-29T13:21:31.107Z" }, + { url = "https://files.pythonhosted.org/packages/58/78/548fb8e07b1a341746bfbecb32f2c268470f45fa028aacdbd10d9bc73aab/numpy-2.4.4-cp314-cp314t-win_arm64.whl", hash = "sha256:ba203255017337d39f89bdd58417f03c4426f12beed0440cfd933cb15f8669c7", size = 10566643, upload-time = "2026-03-29T13:21:34.339Z" }, + { url = 
"https://files.pythonhosted.org/packages/6b/33/8fae8f964a4f63ed528264ddf25d2b683d0b663e3cba26961eb838a7c1bd/numpy-2.4.4-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:58c8b5929fcb8287cbd6f0a3fae19c6e03a5c48402ae792962ac465224a629a4", size = 16854491, upload-time = "2026-03-29T13:21:38.03Z" }, + { url = "https://files.pythonhosted.org/packages/bc/d0/1aabee441380b981cf8cdda3ae7a46aa827d1b5a8cce84d14598bc94d6d9/numpy-2.4.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:eea7ac5d2dce4189771cedb559c738a71512768210dc4e4753b107a2048b3d0e", size = 14895830, upload-time = "2026-03-29T13:21:41.509Z" }, + { url = "https://files.pythonhosted.org/packages/a5/b8/aafb0d1065416894fccf4df6b49ef22b8db045187949545bced89c034b8e/numpy-2.4.4-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:51fc224f7ca4d92656d5a5eb315f12eb5fe2c97a66249aa7b5f562528a3be38c", size = 5400927, upload-time = "2026-03-29T13:21:44.747Z" }, + { url = "https://files.pythonhosted.org/packages/d6/77/063baa20b08b431038c7f9ff5435540c7b7265c78cf56012a483019ca72d/numpy-2.4.4-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:28a650663f7314afc3e6ec620f44f333c386aad9f6fc472030865dc0ebb26ee3", size = 6715557, upload-time = "2026-03-29T13:21:47.406Z" }, + { url = "https://files.pythonhosted.org/packages/c7/a8/379542d45a14f149444c5c4c4e7714707239ce9cc1de8c2803958889da14/numpy-2.4.4-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:19710a9ca9992d7174e9c52f643d4272dcd1558c5f7af7f6f8190f633bd651a7", size = 15804253, upload-time = "2026-03-29T13:21:50.753Z" }, + { url = "https://files.pythonhosted.org/packages/a2/c8/f0a45426d6d21e7ea3310a15cf90c43a14d9232c31a837702dba437f3373/numpy-2.4.4-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9b2aec6af35c113b05695ebb5749a787acd63cafc83086a05771d1e1cd1e555f", size = 16753552, upload-time = "2026-03-29T13:21:54.344Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/74/f4c001f4714c3ad9ce037e18cf2b9c64871a84951eaa0baf683a9ca9301c/numpy-2.4.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f2cf083b324a467e1ab358c105f6cad5ea950f50524668a80c486ff1db24e119", size = 12509075, upload-time = "2026-03-29T13:21:57.644Z" }, ] [[package]] @@ -604,14 +718,14 @@ requires-dist = [ { name = "overture-schema-divisions-theme", editable = "packages/overture-schema-divisions-theme" }, { name = "overture-schema-places-theme", editable = "packages/overture-schema-places-theme" }, { name = "overture-schema-transportation-theme", editable = "packages/overture-schema-transportation-theme" }, - { name = "pydantic", specifier = ">=2.0" }, + { name = "pydantic", specifier = ">=2.12.0" }, { name = "pyyaml", specifier = ">=6.0.2" }, ] [package.metadata.requires-dev] dev = [ - { name = "deepdiff" }, - { name = "pyyaml" }, + { name = "deepdiff", specifier = ">=8.6.0" }, + { name = "pyyaml", specifier = ">=6.0.2" }, { name = "yamlcore", specifier = ">=0.0.4" }, ] @@ -626,7 +740,7 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "overture-schema-core", editable = "packages/overture-schema-core" }, - { name = "pydantic", specifier = ">=2.0" }, + { name = "pydantic", specifier = ">=2.12.0" }, ] [[package]] @@ -640,7 +754,7 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "overture-schema-core", editable = "packages/overture-schema-core" }, - { name = "pydantic", specifier = ">=2.0" }, + { name = "pydantic", specifier = ">=2.12.0" }, ] [[package]] @@ -648,13 +762,15 @@ name = "overture-schema-base-theme" source = { editable = "packages/overture-schema-base-theme" } dependencies = [ { name = "overture-schema-core" }, + { name = "overture-schema-system" }, { name = "pydantic" }, ] [package.metadata] requires-dist = [ { name = "overture-schema-core", editable = "packages/overture-schema-core" }, - { name = "pydantic", specifier = ">=2.0" }, + { name = "overture-schema-system", editable = 
"packages/overture-schema-system" }, + { name = "pydantic", specifier = ">=2.12.0" }, ] [[package]] @@ -662,13 +778,15 @@ name = "overture-schema-buildings-theme" source = { editable = "packages/overture-schema-buildings-theme" } dependencies = [ { name = "overture-schema-core" }, + { name = "overture-schema-system" }, { name = "pydantic" }, ] [package.metadata] requires-dist = [ { name = "overture-schema-core", editable = "packages/overture-schema-core" }, - { name = "pydantic", specifier = ">=2.0" }, + { name = "overture-schema-system", editable = "packages/overture-schema-system" }, + { name = "pydantic", specifier = ">=2.12.0" }, ] [[package]] @@ -677,6 +795,7 @@ source = { editable = "packages/overture-schema-cli" } dependencies = [ { name = "click" }, { name = "overture-schema-core" }, + { name = "overture-schema-system" }, { name = "pydantic" }, { name = "pyyaml" }, { name = "rich" }, @@ -694,7 +813,8 @@ dev = [ requires-dist = [ { name = "click", specifier = ">=8.0" }, { name = "overture-schema-core", editable = "packages/overture-schema-core" }, - { name = "pydantic", specifier = ">=2.0" }, + { name = "overture-schema-system", editable = "packages/overture-schema-system" }, + { name = "pydantic", specifier = ">=2.12.0" }, { name = "pyyaml", specifier = ">=6.0.2" }, { name = "rich", specifier = ">=13.0" }, { name = "yamlcore", specifier = ">=0.0.4" }, @@ -702,15 +822,36 @@ requires-dist = [ [package.metadata.requires-dev] dev = [ - { name = "mypy" }, - { name = "pytest", specifier = ">=7.0" }, - { name = "ruff" }, + { name = "mypy", specifier = ">=1.17.0" }, + { name = "pytest", specifier = ">=9.0.0" }, + { name = "ruff", specifier = ">=0.13.0" }, +] + +[[package]] +name = "overture-schema-codegen" +source = { editable = "packages/overture-schema-codegen" } +dependencies = [ + { name = "click" }, + { name = "jinja2" }, + { name = "overture-schema-core" }, + { name = "overture-schema-system" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, 
+] + +[package.metadata] +requires-dist = [ + { name = "click", specifier = ">=8.0" }, + { name = "jinja2", specifier = ">=3.0" }, + { name = "overture-schema-core", editable = "packages/overture-schema-core" }, + { name = "overture-schema-system", editable = "packages/overture-schema-system" }, + { name = "tomli", marker = "python_full_version < '3.11'", specifier = ">=2.0" }, ] [[package]] name = "overture-schema-core" source = { editable = "packages/overture-schema-core" } dependencies = [ + { name = "overture-schema-system" }, { name = "pydantic" }, { name = "shapely" }, ] @@ -718,21 +859,20 @@ dependencies = [ [package.dev-dependencies] dev = [ { name = "jsonpath-ng" }, - { name = "pytest-subtests" }, { name = "types-pyyaml" }, { name = "types-shapely" }, ] [package.metadata] requires-dist = [ - { name = "pydantic", specifier = ">=2.0" }, + { name = "overture-schema-system", editable = "packages/overture-schema-system" }, + { name = "pydantic", specifier = ">=2.12.0" }, { name = "shapely", specifier = ">=2.1.1" }, ] [package.metadata.requires-dev] dev = [ { name = "jsonpath-ng", specifier = ">=1.7.0" }, - { name = "pytest-subtests", specifier = ">=0.14.2" }, { name = "types-pyyaml", specifier = ">=6.0.12.20250516" }, { name = "types-shapely", specifier = ">=2.1.0.20250710" }, ] @@ -750,7 +890,7 @@ dependencies = [ requires-dist = [ { name = "overture-schema-core", editable = "packages/overture-schema-core" }, { name = "overture-schema-system", editable = "packages/overture-schema-system" }, - { name = "pydantic", specifier = ">=2.0" }, + { name = "pydantic", specifier = ">=2.12.0" }, ] [[package]] @@ -766,8 +906,7 @@ dependencies = [ requires-dist = [ { name = "overture-schema-core", editable = "packages/overture-schema-core" }, { name = "overture-schema-system", editable = "packages/overture-schema-system" }, - { name = "pydantic", specifier = ">=2.0" }, - { name = "pydantic", extras = ["email"] }, + { name = "pydantic", extras = ["email"], specifier = 
">=2.12.0" }, ] [[package]] @@ -787,15 +926,15 @@ dev = [ [package.metadata] requires-dist = [ - { name = "pydantic", specifier = ">=2.0.0" }, + { name = "pydantic", specifier = ">=2.12.0" }, { name = "shapely", specifier = ">=2.0.0" }, ] [package.metadata.requires-dev] dev = [ - { name = "mypy" }, - { name = "pytest", specifier = ">=7.0" }, - { name = "ruff" }, + { name = "mypy", specifier = ">=1.17.0" }, + { name = "pytest", specifier = ">=9.0.0" }, + { name = "ruff", specifier = ">=0.13.0" }, ] [[package]] @@ -803,13 +942,15 @@ name = "overture-schema-transportation-theme" source = { editable = "packages/overture-schema-transportation-theme" } dependencies = [ { name = "overture-schema-core" }, + { name = "overture-schema-system" }, { name = "pydantic" }, ] [package.metadata] requires-dist = [ { name = "overture-schema-core", editable = "packages/overture-schema-core" }, - { name = "pydantic", specifier = ">=2.0" }, + { name = "overture-schema-system", editable = "packages/overture-schema-system" }, + { name = "pydantic", specifier = ">=2.12.0" }, ] [[package]] @@ -825,7 +966,6 @@ dev = [ { name = "pytest" }, { name = "pytest-cov" }, { name = "ruff" }, - { name = "semver" }, ] [package.metadata] @@ -835,42 +975,42 @@ dev = [ { name = "mypy", specifier = ">=1.17.0" }, { name = "pdoc", specifier = ">=15.0.4" }, { name = "pydocstyle", specifier = ">=6.3.0" }, - { name = "pytest", specifier = ">=8.4.1" }, + { name = "pytest", specifier = ">=9.0.0" }, { name = "pytest-cov", specifier = ">=7.0.0" }, - { name = "ruff", specifier = ">=0.12.4" }, - { name = "semver", specifier = ">=3.0.4" }, + { name = "ruff", specifier = ">=0.13.0" }, ] [[package]] name = "packaging" -version = "25.0" +version = "26.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = 
"sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/f1/e7a6dd94a8d4a5626c03e4e99c87f241ba9e350cd9e6d75123f992427270/packaging-26.2.tar.gz", hash = "sha256:ff452ff5a3e828ce110190feff1178bb1f2ea2281fa2075aadb987c2fb221661", size = 228134, upload-time = "2026-04-24T20:15:23.917Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, + { url = "https://files.pythonhosted.org/packages/df/b2/87e62e8c3e2f4b32e5fe99e0b86d576da1312593b39f47d8ceef365e95ed/packaging-26.2-py3-none-any.whl", hash = "sha256:5fc45236b9446107ff2415ce77c807cee2862cb6fac22b8a73826d0693b0980e", size = 100195, upload-time = "2026-04-24T20:15:22.081Z" }, ] [[package]] name = "pathspec" -version = "0.12.1" +version = "1.1.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/82/42f767fc1c1143d6fd36efb827202a2d997a375e160a71eb2888a925aac1/pathspec-1.1.1.tar.gz", hash = "sha256:17db5ecd524104a120e173814c90367a96a98d07c45b2e10c2f3919fff91bf5a", size = 135180, upload-time = "2026-04-27T01:46:08.907Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = 
"2023-12-10T22:30:43.14Z" }, + { url = "https://files.pythonhosted.org/packages/f1/d9/7fb5aa316bc299258e68c73ba3bddbc499654a07f151cba08f6153988714/pathspec-1.1.1-py3-none-any.whl", hash = "sha256:a00ce642f577bf7f473932318056212bc4f8bfdf53128c78bbd5af0b9b20b189", size = 57328, upload-time = "2026-04-27T01:46:07.06Z" }, ] [[package]] name = "pdoc" -version = "15.0.4" +version = "16.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jinja2" }, + { name = "markdown2" }, { name = "markupsafe" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/91/5c/e94c1ab4aa2f8a9cc29d81e1c513c6216946cb3a90957ef7115b12e9363d/pdoc-15.0.4.tar.gz", hash = "sha256:cf9680f10f5b4863381f44ef084b1903f8f356acb0d4cc6b64576ba9fb712c82", size = 155678, upload-time = "2025-06-04T17:05:49.639Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ac/fe/ab3f34a5fb08c6b698439a2c2643caf8fef0d61a86dd3fdcd5501c670ab8/pdoc-16.0.0.tar.gz", hash = "sha256:fdadc40cc717ec53919e3cd720390d4e3bcd40405cb51c4918c119447f913514", size = 111890, upload-time = "2025-10-27T16:02:16.345Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/2c/87250ac73ca8730b2c4e0185b573585f0b42e09562132e6c29d00b3a9bb9/pdoc-15.0.4-py3-none-any.whl", hash = "sha256:f9028e85e7bb8475b054e69bde1f6d26fc4693d25d9fa1b1ce9009bec7f7a5c4", size = 145978, upload-time = "2025-06-04T17:05:48.473Z" }, + { url = "https://files.pythonhosted.org/packages/16/a1/56a17b7f9e18c2bb8df73f3833345d97083b344708b97bab148fdd7e0b82/pdoc-16.0.0-py3-none-any.whl", hash = "sha256:070b51de2743b9b1a4e0ab193a06c9e6c12cf4151cf9137656eebb16e8556628", size = 100014, upload-time = "2025-10-27T16:02:15.007Z" }, ] [[package]] @@ -882,18 +1022,9 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 
20538, upload-time = "2025-05-15T12:30:06.134Z" }, ] -[[package]] -name = "ply" -version = "3.11" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e5/69/882ee5c9d017149285cab114ebeab373308ef0f874fcdac9beb90e0ac4da/ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3", size = 159130, upload-time = "2018-02-15T19:01:31.097Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/58/35da89ee790598a0700ea49b2a66594140f44dec458c07e8e3d4979137fc/ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce", size = 49567, upload-time = "2018-02-15T19:01:27.172Z" }, -] - [[package]] name = "pydantic" -version = "2.12.2" +version = "2.13.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -901,9 +1032,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8d/35/d319ed522433215526689bad428a94058b6dd12190ce7ddd78618ac14b28/pydantic-2.12.2.tar.gz", hash = "sha256:7b8fa15b831a4bbde9d5b84028641ac3080a4ca2cbd4a621a661687e741624fd", size = 816358, upload-time = "2025-10-14T15:02:21.842Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d9/e4/40d09941a2cebcb20609b86a559817d5b9291c49dd6f8c87e5feffbe703a/pydantic-2.13.3.tar.gz", hash = "sha256:af09e9d1d09f4e7fe37145c1f577e1d61ceb9a41924bf0094a36506285d0a84d", size = 844068, upload-time = "2026-04-20T14:46:43.632Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6c/98/468cb649f208a6f1279448e6e5247b37ae79cf5e4041186f1e2ef3d16345/pydantic-2.12.2-py3-none-any.whl", hash = "sha256:25ff718ee909acd82f1ff9b1a4acfd781bb23ab3739adaa7144f19a6a4e231ae", size = 460628, upload-time = "2025-10-14T15:02:19.623Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/0a/fd7d723f8f8153418fb40cf9c940e82004fce7e987026b08a68a36dd3fe7/pydantic-2.13.3-py3-none-any.whl", hash = "sha256:6db14ac8dfc9a1e57f87ea2c0de670c251240f43cb0c30a5130e9720dc612927", size = 471981, upload-time = "2026-04-20T14:46:41.402Z" }, ] [package.optional-dependencies] @@ -913,116 +1044,118 @@ email = [ [[package]] name = "pydantic-core" -version = "2.41.4" +version = "2.46.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/df/18/d0944e8eaaa3efd0a91b0f1fc537d3be55ad35091b6a87638211ba691964/pydantic_core-2.41.4.tar.gz", hash = "sha256:70e47929a9d4a1905a67e4b687d5946026390568a8e952b92824118063cee4d5", size = 457557, upload-time = "2025-10-14T10:23:47.909Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/ef/f7abb56c49382a246fd2ce9c799691e3c3e7175ec74b14d99e798bcddb1a/pydantic_core-2.46.3.tar.gz", hash = "sha256:41c178f65b8c29807239d47e6050262eb6bf84eb695e41101e62e38df4a5bc2c", size = 471412, upload-time = "2026-04-20T14:40:56.672Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/3d/9b8ca77b0f76fcdbf8bc6b72474e264283f461284ca84ac3fde570c6c49a/pydantic_core-2.41.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2442d9a4d38f3411f22eb9dd0912b7cbf4b7d5b6c92c4173b75d3e1ccd84e36e", size = 2111197, upload-time = "2025-10-14T10:19:43.303Z" }, - { url = "https://files.pythonhosted.org/packages/59/92/b7b0fe6ed4781642232755cb7e56a86e2041e1292f16d9ae410a0ccee5ac/pydantic_core-2.41.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:30a9876226dda131a741afeab2702e2d127209bde3c65a2b8133f428bc5d006b", size = 1917909, upload-time = "2025-10-14T10:19:45.194Z" }, - { url = "https://files.pythonhosted.org/packages/52/8c/3eb872009274ffa4fb6a9585114e161aa1a0915af2896e2d441642929fe4/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d55bbac04711e2980645af68b97d445cdbcce70e5216de444a6c4b6943ebcccd", size = 1969905, upload-time = "2025-10-14T10:19:46.567Z" }, - { url = "https://files.pythonhosted.org/packages/f4/21/35adf4a753bcfaea22d925214a0c5b880792e3244731b3f3e6fec0d124f7/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e1d778fb7849a42d0ee5927ab0f7453bf9f85eef8887a546ec87db5ddb178945", size = 2051938, upload-time = "2025-10-14T10:19:48.237Z" }, - { url = "https://files.pythonhosted.org/packages/7d/d0/cdf7d126825e36d6e3f1eccf257da8954452934ede275a8f390eac775e89/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b65077a4693a98b90ec5ad8f203ad65802a1b9b6d4a7e48066925a7e1606706", size = 2250710, upload-time = "2025-10-14T10:19:49.619Z" }, - { url = "https://files.pythonhosted.org/packages/2e/1c/af1e6fd5ea596327308f9c8d1654e1285cc3d8de0d584a3c9d7705bf8a7c/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62637c769dee16eddb7686bf421be48dfc2fae93832c25e25bc7242e698361ba", size = 2367445, upload-time = "2025-10-14T10:19:51.269Z" }, - { url = "https://files.pythonhosted.org/packages/d3/81/8cece29a6ef1b3a92f956ea6da6250d5b2d2e7e4d513dd3b4f0c7a83dfea/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dfe3aa529c8f501babf6e502936b9e8d4698502b2cfab41e17a028d91b1ac7b", size = 2072875, upload-time = "2025-10-14T10:19:52.671Z" }, - { url = "https://files.pythonhosted.org/packages/e3/37/a6a579f5fc2cd4d5521284a0ab6a426cc6463a7b3897aeb95b12f1ba607b/pydantic_core-2.41.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca2322da745bf2eeb581fc9ea3bbb31147702163ccbcbf12a3bb630e4bf05e1d", size = 2191329, upload-time = "2025-10-14T10:19:54.214Z" }, - { url = 
"https://files.pythonhosted.org/packages/ae/03/505020dc5c54ec75ecba9f41119fd1e48f9e41e4629942494c4a8734ded1/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e8cd3577c796be7231dcf80badcf2e0835a46665eaafd8ace124d886bab4d700", size = 2151658, upload-time = "2025-10-14T10:19:55.843Z" }, - { url = "https://files.pythonhosted.org/packages/cb/5d/2c0d09fb53aa03bbd2a214d89ebfa6304be7df9ed86ee3dc7770257f41ee/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:1cae8851e174c83633f0833e90636832857297900133705ee158cf79d40f03e6", size = 2316777, upload-time = "2025-10-14T10:19:57.607Z" }, - { url = "https://files.pythonhosted.org/packages/ea/4b/c2c9c8f5e1f9c864b57d08539d9d3db160e00491c9f5ee90e1bfd905e644/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a26d950449aae348afe1ac8be5525a00ae4235309b729ad4d3399623125b43c9", size = 2320705, upload-time = "2025-10-14T10:19:59.016Z" }, - { url = "https://files.pythonhosted.org/packages/28/c3/a74c1c37f49c0a02c89c7340fafc0ba816b29bd495d1a31ce1bdeacc6085/pydantic_core-2.41.4-cp310-cp310-win32.whl", hash = "sha256:0cf2a1f599efe57fa0051312774280ee0f650e11152325e41dfd3018ef2c1b57", size = 1975464, upload-time = "2025-10-14T10:20:00.581Z" }, - { url = "https://files.pythonhosted.org/packages/d6/23/5dd5c1324ba80303368f7569e2e2e1a721c7d9eb16acb7eb7b7f85cb1be2/pydantic_core-2.41.4-cp310-cp310-win_amd64.whl", hash = "sha256:a8c2e340d7e454dc3340d3d2e8f23558ebe78c98aa8f68851b04dcb7bc37abdc", size = 2024497, upload-time = "2025-10-14T10:20:03.018Z" }, - { url = "https://files.pythonhosted.org/packages/62/4c/f6cbfa1e8efacd00b846764e8484fe173d25b8dab881e277a619177f3384/pydantic_core-2.41.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:28ff11666443a1a8cf2a044d6a545ebffa8382b5f7973f22c36109205e65dc80", size = 2109062, upload-time = "2025-10-14T10:20:04.486Z" }, - { url = 
"https://files.pythonhosted.org/packages/21/f8/40b72d3868896bfcd410e1bd7e516e762d326201c48e5b4a06446f6cf9e8/pydantic_core-2.41.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:61760c3925d4633290292bad462e0f737b840508b4f722247d8729684f6539ae", size = 1916301, upload-time = "2025-10-14T10:20:06.857Z" }, - { url = "https://files.pythonhosted.org/packages/94/4d/d203dce8bee7faeca791671c88519969d98d3b4e8f225da5b96dad226fc8/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eae547b7315d055b0de2ec3965643b0ab82ad0106a7ffd29615ee9f266a02827", size = 1968728, upload-time = "2025-10-14T10:20:08.353Z" }, - { url = "https://files.pythonhosted.org/packages/65/f5/6a66187775df87c24d526985b3a5d78d861580ca466fbd9d4d0e792fcf6c/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef9ee5471edd58d1fcce1c80ffc8783a650e3e3a193fe90d52e43bb4d87bff1f", size = 2050238, upload-time = "2025-10-14T10:20:09.766Z" }, - { url = "https://files.pythonhosted.org/packages/5e/b9/78336345de97298cf53236b2f271912ce11f32c1e59de25a374ce12f9cce/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15dd504af121caaf2c95cb90c0ebf71603c53de98305621b94da0f967e572def", size = 2249424, upload-time = "2025-10-14T10:20:11.732Z" }, - { url = "https://files.pythonhosted.org/packages/99/bb/a4584888b70ee594c3d374a71af5075a68654d6c780369df269118af7402/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a926768ea49a8af4d36abd6a8968b8790f7f76dd7cbd5a4c180db2b4ac9a3a2", size = 2366047, upload-time = "2025-10-14T10:20:13.647Z" }, - { url = "https://files.pythonhosted.org/packages/5f/8d/17fc5de9d6418e4d2ae8c675f905cdafdc59d3bf3bf9c946b7ab796a992a/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6916b9b7d134bff5440098a4deb80e4cb623e68974a87883299de9124126c2a8", size = 2071163, upload-time = 
"2025-10-14T10:20:15.307Z" }, - { url = "https://files.pythonhosted.org/packages/54/e7/03d2c5c0b8ed37a4617430db68ec5e7dbba66358b629cd69e11b4d564367/pydantic_core-2.41.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cf90535979089df02e6f17ffd076f07237efa55b7343d98760bde8743c4b265", size = 2190585, upload-time = "2025-10-14T10:20:17.3Z" }, - { url = "https://files.pythonhosted.org/packages/be/fc/15d1c9fe5ad9266a5897d9b932b7f53d7e5cfc800573917a2c5d6eea56ec/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7533c76fa647fade2d7ec75ac5cc079ab3f34879626dae5689b27790a6cf5a5c", size = 2150109, upload-time = "2025-10-14T10:20:19.143Z" }, - { url = "https://files.pythonhosted.org/packages/26/ef/e735dd008808226c83ba56972566138665b71477ad580fa5a21f0851df48/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:37e516bca9264cbf29612539801ca3cd5d1be465f940417b002905e6ed79d38a", size = 2315078, upload-time = "2025-10-14T10:20:20.742Z" }, - { url = "https://files.pythonhosted.org/packages/90/00/806efdcf35ff2ac0f938362350cd9827b8afb116cc814b6b75cf23738c7c/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0c19cb355224037c83642429b8ce261ae108e1c5fbf5c028bac63c77b0f8646e", size = 2318737, upload-time = "2025-10-14T10:20:22.306Z" }, - { url = "https://files.pythonhosted.org/packages/41/7e/6ac90673fe6cb36621a2283552897838c020db343fa86e513d3f563b196f/pydantic_core-2.41.4-cp311-cp311-win32.whl", hash = "sha256:09c2a60e55b357284b5f31f5ab275ba9f7f70b7525e18a132ec1f9160b4f1f03", size = 1974160, upload-time = "2025-10-14T10:20:23.817Z" }, - { url = "https://files.pythonhosted.org/packages/e0/9d/7c5e24ee585c1f8b6356e1d11d40ab807ffde44d2db3b7dfd6d20b09720e/pydantic_core-2.41.4-cp311-cp311-win_amd64.whl", hash = "sha256:711156b6afb5cb1cb7c14a2cc2c4a8b4c717b69046f13c6b332d8a0a8f41ca3e", size = 2021883, upload-time = "2025-10-14T10:20:25.48Z" }, - { url = 
"https://files.pythonhosted.org/packages/33/90/5c172357460fc28b2871eb4a0fb3843b136b429c6fa827e4b588877bf115/pydantic_core-2.41.4-cp311-cp311-win_arm64.whl", hash = "sha256:6cb9cf7e761f4f8a8589a45e49ed3c0d92d1d696a45a6feaee8c904b26efc2db", size = 1968026, upload-time = "2025-10-14T10:20:27.039Z" }, - { url = "https://files.pythonhosted.org/packages/e9/81/d3b3e95929c4369d30b2a66a91db63c8ed0a98381ae55a45da2cd1cc1288/pydantic_core-2.41.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ab06d77e053d660a6faaf04894446df7b0a7e7aba70c2797465a0a1af00fc887", size = 2099043, upload-time = "2025-10-14T10:20:28.561Z" }, - { url = "https://files.pythonhosted.org/packages/58/da/46fdac49e6717e3a94fc9201403e08d9d61aa7a770fab6190b8740749047/pydantic_core-2.41.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c53ff33e603a9c1179a9364b0a24694f183717b2e0da2b5ad43c316c956901b2", size = 1910699, upload-time = "2025-10-14T10:20:30.217Z" }, - { url = "https://files.pythonhosted.org/packages/1e/63/4d948f1b9dd8e991a5a98b77dd66c74641f5f2e5225fee37994b2e07d391/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:304c54176af2c143bd181d82e77c15c41cbacea8872a2225dd37e6544dce9999", size = 1952121, upload-time = "2025-10-14T10:20:32.246Z" }, - { url = "https://files.pythonhosted.org/packages/b2/a7/e5fc60a6f781fc634ecaa9ecc3c20171d238794cef69ae0af79ac11b89d7/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025ba34a4cf4fb32f917d5d188ab5e702223d3ba603be4d8aca2f82bede432a4", size = 2041590, upload-time = "2025-10-14T10:20:34.332Z" }, - { url = "https://files.pythonhosted.org/packages/70/69/dce747b1d21d59e85af433428978a1893c6f8a7068fa2bb4a927fba7a5ff/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9f5f30c402ed58f90c70e12eff65547d3ab74685ffe8283c719e6bead8ef53f", size = 2219869, upload-time = "2025-10-14T10:20:35.965Z" }, - { url = 
"https://files.pythonhosted.org/packages/83/6a/c070e30e295403bf29c4df1cb781317b6a9bac7cd07b8d3acc94d501a63c/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd96e5d15385d301733113bcaa324c8bcf111275b7675a9c6e88bfb19fc05e3b", size = 2345169, upload-time = "2025-10-14T10:20:37.627Z" }, - { url = "https://files.pythonhosted.org/packages/f0/83/06d001f8043c336baea7fd202a9ac7ad71f87e1c55d8112c50b745c40324/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f348cbb44fae6e9653c1055db7e29de67ea6a9ca03a5fa2c2e11a47cff0e47", size = 2070165, upload-time = "2025-10-14T10:20:39.246Z" }, - { url = "https://files.pythonhosted.org/packages/14/0a/e567c2883588dd12bcbc110232d892cf385356f7c8a9910311ac997ab715/pydantic_core-2.41.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec22626a2d14620a83ca583c6f5a4080fa3155282718b6055c2ea48d3ef35970", size = 2189067, upload-time = "2025-10-14T10:20:41.015Z" }, - { url = "https://files.pythonhosted.org/packages/f4/1d/3d9fca34273ba03c9b1c5289f7618bc4bd09c3ad2289b5420481aa051a99/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a95d4590b1f1a43bf33ca6d647b990a88f4a3824a8c4572c708f0b45a5290ed", size = 2132997, upload-time = "2025-10-14T10:20:43.106Z" }, - { url = "https://files.pythonhosted.org/packages/52/70/d702ef7a6cd41a8afc61f3554922b3ed8d19dd54c3bd4bdbfe332e610827/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:f9672ab4d398e1b602feadcffcdd3af44d5f5e6ddc15bc7d15d376d47e8e19f8", size = 2307187, upload-time = "2025-10-14T10:20:44.849Z" }, - { url = "https://files.pythonhosted.org/packages/68/4c/c06be6e27545d08b802127914156f38d10ca287a9e8489342793de8aae3c/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:84d8854db5f55fead3b579f04bda9a36461dab0730c5d570e1526483e7bb8431", size = 2305204, upload-time = "2025-10-14T10:20:46.781Z" }, - { url = 
"https://files.pythonhosted.org/packages/b0/e5/35ae4919bcd9f18603419e23c5eaf32750224a89d41a8df1a3704b69f77e/pydantic_core-2.41.4-cp312-cp312-win32.whl", hash = "sha256:9be1c01adb2ecc4e464392c36d17f97e9110fbbc906bcbe1c943b5b87a74aabd", size = 1972536, upload-time = "2025-10-14T10:20:48.39Z" }, - { url = "https://files.pythonhosted.org/packages/1e/c2/49c5bb6d2a49eb2ee3647a93e3dae7080c6409a8a7558b075027644e879c/pydantic_core-2.41.4-cp312-cp312-win_amd64.whl", hash = "sha256:d682cf1d22bab22a5be08539dca3d1593488a99998f9f412137bc323179067ff", size = 2031132, upload-time = "2025-10-14T10:20:50.421Z" }, - { url = "https://files.pythonhosted.org/packages/06/23/936343dbcba6eec93f73e95eb346810fc732f71ba27967b287b66f7b7097/pydantic_core-2.41.4-cp312-cp312-win_arm64.whl", hash = "sha256:833eebfd75a26d17470b58768c1834dfc90141b7afc6eb0429c21fc5a21dcfb8", size = 1969483, upload-time = "2025-10-14T10:20:52.35Z" }, - { url = "https://files.pythonhosted.org/packages/13/d0/c20adabd181a029a970738dfe23710b52a31f1258f591874fcdec7359845/pydantic_core-2.41.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:85e050ad9e5f6fe1004eec65c914332e52f429bc0ae12d6fa2092407a462c746", size = 2105688, upload-time = "2025-10-14T10:20:54.448Z" }, - { url = "https://files.pythonhosted.org/packages/00/b6/0ce5c03cec5ae94cca220dfecddc453c077d71363b98a4bbdb3c0b22c783/pydantic_core-2.41.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7393f1d64792763a48924ba31d1e44c2cfbc05e3b1c2c9abb4ceeadd912cced", size = 1910807, upload-time = "2025-10-14T10:20:56.115Z" }, - { url = "https://files.pythonhosted.org/packages/68/3e/800d3d02c8beb0b5c069c870cbb83799d085debf43499c897bb4b4aaff0d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94dab0940b0d1fb28bcab847adf887c66a27a40291eedf0b473be58761c9799a", size = 1956669, upload-time = "2025-10-14T10:20:57.874Z" }, - { url = 
"https://files.pythonhosted.org/packages/60/a4/24271cc71a17f64589be49ab8bd0751f6a0a03046c690df60989f2f95c2c/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:de7c42f897e689ee6f9e93c4bec72b99ae3b32a2ade1c7e4798e690ff5246e02", size = 2051629, upload-time = "2025-10-14T10:21:00.006Z" }, - { url = "https://files.pythonhosted.org/packages/68/de/45af3ca2f175d91b96bfb62e1f2d2f1f9f3b14a734afe0bfeff079f78181/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:664b3199193262277b8b3cd1e754fb07f2c6023289c815a1e1e8fb415cb247b1", size = 2224049, upload-time = "2025-10-14T10:21:01.801Z" }, - { url = "https://files.pythonhosted.org/packages/af/8f/ae4e1ff84672bf869d0a77af24fd78387850e9497753c432875066b5d622/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95b253b88f7d308b1c0b417c4624f44553ba4762816f94e6986819b9c273fb2", size = 2342409, upload-time = "2025-10-14T10:21:03.556Z" }, - { url = "https://files.pythonhosted.org/packages/18/62/273dd70b0026a085c7b74b000394e1ef95719ea579c76ea2f0cc8893736d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1351f5bbdbbabc689727cb91649a00cb9ee7203e0a6e54e9f5ba9e22e384b84", size = 2069635, upload-time = "2025-10-14T10:21:05.385Z" }, - { url = "https://files.pythonhosted.org/packages/30/03/cf485fff699b4cdaea469bc481719d3e49f023241b4abb656f8d422189fc/pydantic_core-2.41.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1affa4798520b148d7182da0615d648e752de4ab1a9566b7471bc803d88a062d", size = 2194284, upload-time = "2025-10-14T10:21:07.122Z" }, - { url = "https://files.pythonhosted.org/packages/f9/7e/c8e713db32405dfd97211f2fc0a15d6bf8adb7640f3d18544c1f39526619/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7b74e18052fea4aa8dea2fb7dbc23d15439695da6cbe6cfc1b694af1115df09d", size = 2137566, upload-time = 
"2025-10-14T10:21:08.981Z" }, - { url = "https://files.pythonhosted.org/packages/04/f7/db71fd4cdccc8b75990f79ccafbbd66757e19f6d5ee724a6252414483fb4/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:285b643d75c0e30abda9dc1077395624f314a37e3c09ca402d4015ef5979f1a2", size = 2316809, upload-time = "2025-10-14T10:21:10.805Z" }, - { url = "https://files.pythonhosted.org/packages/76/63/a54973ddb945f1bca56742b48b144d85c9fc22f819ddeb9f861c249d5464/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f52679ff4218d713b3b33f88c89ccbf3a5c2c12ba665fb80ccc4192b4608dbab", size = 2311119, upload-time = "2025-10-14T10:21:12.583Z" }, - { url = "https://files.pythonhosted.org/packages/f8/03/5d12891e93c19218af74843a27e32b94922195ded2386f7b55382f904d2f/pydantic_core-2.41.4-cp313-cp313-win32.whl", hash = "sha256:ecde6dedd6fff127c273c76821bb754d793be1024bc33314a120f83a3c69460c", size = 1981398, upload-time = "2025-10-14T10:21:14.584Z" }, - { url = "https://files.pythonhosted.org/packages/be/d8/fd0de71f39db91135b7a26996160de71c073d8635edfce8b3c3681be0d6d/pydantic_core-2.41.4-cp313-cp313-win_amd64.whl", hash = "sha256:d081a1f3800f05409ed868ebb2d74ac39dd0c1ff6c035b5162356d76030736d4", size = 2030735, upload-time = "2025-10-14T10:21:16.432Z" }, - { url = "https://files.pythonhosted.org/packages/72/86/c99921c1cf6650023c08bfab6fe2d7057a5142628ef7ccfa9921f2dda1d5/pydantic_core-2.41.4-cp313-cp313-win_arm64.whl", hash = "sha256:f8e49c9c364a7edcbe2a310f12733aad95b022495ef2a8d653f645e5d20c1564", size = 1973209, upload-time = "2025-10-14T10:21:18.213Z" }, - { url = "https://files.pythonhosted.org/packages/36/0d/b5706cacb70a8414396efdda3d72ae0542e050b591119e458e2490baf035/pydantic_core-2.41.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ed97fd56a561f5eb5706cebe94f1ad7c13b84d98312a05546f2ad036bafe87f4", size = 1877324, upload-time = "2025-10-14T10:21:20.363Z" }, - { url = 
"https://files.pythonhosted.org/packages/de/2d/cba1fa02cfdea72dfb3a9babb067c83b9dff0bbcb198368e000a6b756ea7/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a870c307bf1ee91fc58a9a61338ff780d01bfae45922624816878dce784095d2", size = 1884515, upload-time = "2025-10-14T10:21:22.339Z" }, - { url = "https://files.pythonhosted.org/packages/07/ea/3df927c4384ed9b503c9cc2d076cf983b4f2adb0c754578dfb1245c51e46/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25e97bc1f5f8f7985bdc2335ef9e73843bb561eb1fa6831fdfc295c1c2061cf", size = 2042819, upload-time = "2025-10-14T10:21:26.683Z" }, - { url = "https://files.pythonhosted.org/packages/6a/ee/df8e871f07074250270a3b1b82aad4cd0026b588acd5d7d3eb2fcb1471a3/pydantic_core-2.41.4-cp313-cp313t-win_amd64.whl", hash = "sha256:d405d14bea042f166512add3091c1af40437c2e7f86988f3915fabd27b1e9cd2", size = 1995866, upload-time = "2025-10-14T10:21:28.951Z" }, - { url = "https://files.pythonhosted.org/packages/fc/de/b20f4ab954d6d399499c33ec4fafc46d9551e11dc1858fb7f5dca0748ceb/pydantic_core-2.41.4-cp313-cp313t-win_arm64.whl", hash = "sha256:19f3684868309db5263a11bace3c45d93f6f24afa2ffe75a647583df22a2ff89", size = 1970034, upload-time = "2025-10-14T10:21:30.869Z" }, - { url = "https://files.pythonhosted.org/packages/54/28/d3325da57d413b9819365546eb9a6e8b7cbd9373d9380efd5f74326143e6/pydantic_core-2.41.4-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:e9205d97ed08a82ebb9a307e92914bb30e18cdf6f6b12ca4bedadb1588a0bfe1", size = 2102022, upload-time = "2025-10-14T10:21:32.809Z" }, - { url = "https://files.pythonhosted.org/packages/9e/24/b58a1bc0d834bf1acc4361e61233ee217169a42efbdc15a60296e13ce438/pydantic_core-2.41.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:82df1f432b37d832709fbcc0e24394bba04a01b6ecf1ee87578145c19cde12ac", size = 1905495, upload-time = "2025-10-14T10:21:34.812Z" }, - { url = 
"https://files.pythonhosted.org/packages/fb/a4/71f759cc41b7043e8ecdaab81b985a9b6cad7cec077e0b92cff8b71ecf6b/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3b4cc4539e055cfa39a3763c939f9d409eb40e85813257dcd761985a108554", size = 1956131, upload-time = "2025-10-14T10:21:36.924Z" }, - { url = "https://files.pythonhosted.org/packages/b0/64/1e79ac7aa51f1eec7c4cda8cbe456d5d09f05fdd68b32776d72168d54275/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1eb1754fce47c63d2ff57fdb88c351a6c0150995890088b33767a10218eaa4e", size = 2052236, upload-time = "2025-10-14T10:21:38.927Z" }, - { url = "https://files.pythonhosted.org/packages/e9/e3/a3ffc363bd4287b80f1d43dc1c28ba64831f8dfc237d6fec8f2661138d48/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6ab5ab30ef325b443f379ddb575a34969c333004fca5a1daa0133a6ffaad616", size = 2223573, upload-time = "2025-10-14T10:21:41.574Z" }, - { url = "https://files.pythonhosted.org/packages/28/27/78814089b4d2e684a9088ede3790763c64693c3d1408ddc0a248bc789126/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31a41030b1d9ca497634092b46481b937ff9397a86f9f51bd41c4767b6fc04af", size = 2342467, upload-time = "2025-10-14T10:21:44.018Z" }, - { url = "https://files.pythonhosted.org/packages/92/97/4de0e2a1159cb85ad737e03306717637842c88c7fd6d97973172fb183149/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a44ac1738591472c3d020f61c6df1e4015180d6262ebd39bf2aeb52571b60f12", size = 2063754, upload-time = "2025-10-14T10:21:46.466Z" }, - { url = "https://files.pythonhosted.org/packages/0f/50/8cb90ce4b9efcf7ae78130afeb99fd1c86125ccdf9906ef64b9d42f37c25/pydantic_core-2.41.4-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d72f2b5e6e82ab8f94ea7d0d42f83c487dc159c5240d8f83beae684472864e2d", size = 2196754, 
upload-time = "2025-10-14T10:21:48.486Z" }, - { url = "https://files.pythonhosted.org/packages/34/3b/ccdc77af9cd5082723574a1cc1bcae7a6acacc829d7c0a06201f7886a109/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c4d1e854aaf044487d31143f541f7aafe7b482ae72a022c664b2de2e466ed0ad", size = 2137115, upload-time = "2025-10-14T10:21:50.63Z" }, - { url = "https://files.pythonhosted.org/packages/ca/ba/e7c7a02651a8f7c52dc2cff2b64a30c313e3b57c7d93703cecea76c09b71/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:b568af94267729d76e6ee5ececda4e283d07bbb28e8148bb17adad93d025d25a", size = 2317400, upload-time = "2025-10-14T10:21:52.959Z" }, - { url = "https://files.pythonhosted.org/packages/2c/ba/6c533a4ee8aec6b812c643c49bb3bd88d3f01e3cebe451bb85512d37f00f/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:6d55fb8b1e8929b341cc313a81a26e0d48aa3b519c1dbaadec3a6a2b4fcad025", size = 2312070, upload-time = "2025-10-14T10:21:55.419Z" }, - { url = "https://files.pythonhosted.org/packages/22/ae/f10524fcc0ab8d7f96cf9a74c880243576fd3e72bd8ce4f81e43d22bcab7/pydantic_core-2.41.4-cp314-cp314-win32.whl", hash = "sha256:5b66584e549e2e32a1398df11da2e0a7eff45d5c2d9db9d5667c5e6ac764d77e", size = 1982277, upload-time = "2025-10-14T10:21:57.474Z" }, - { url = "https://files.pythonhosted.org/packages/b4/dc/e5aa27aea1ad4638f0c3fb41132f7eb583bd7420ee63204e2d4333a3bbf9/pydantic_core-2.41.4-cp314-cp314-win_amd64.whl", hash = "sha256:557a0aab88664cc552285316809cab897716a372afaf8efdbef756f8b890e894", size = 2024608, upload-time = "2025-10-14T10:21:59.557Z" }, - { url = "https://files.pythonhosted.org/packages/3e/61/51d89cc2612bd147198e120a13f150afbf0bcb4615cddb049ab10b81b79e/pydantic_core-2.41.4-cp314-cp314-win_arm64.whl", hash = "sha256:3f1ea6f48a045745d0d9f325989d8abd3f1eaf47dd00485912d1a3a63c623a8d", size = 1967614, upload-time = "2025-10-14T10:22:01.847Z" }, - { url = 
"https://files.pythonhosted.org/packages/0d/c2/472f2e31b95eff099961fa050c376ab7156a81da194f9edb9f710f68787b/pydantic_core-2.41.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6c1fe4c5404c448b13188dd8bd2ebc2bdd7e6727fa61ff481bcc2cca894018da", size = 1876904, upload-time = "2025-10-14T10:22:04.062Z" }, - { url = "https://files.pythonhosted.org/packages/4a/07/ea8eeb91173807ecdae4f4a5f4b150a520085b35454350fc219ba79e66a3/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:523e7da4d43b113bf8e7b49fa4ec0c35bf4fe66b2230bfc5c13cc498f12c6c3e", size = 1882538, upload-time = "2025-10-14T10:22:06.39Z" }, - { url = "https://files.pythonhosted.org/packages/1e/29/b53a9ca6cd366bfc928823679c6a76c7a4c69f8201c0ba7903ad18ebae2f/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5729225de81fb65b70fdb1907fcf08c75d498f4a6f15af005aabb1fdadc19dfa", size = 2041183, upload-time = "2025-10-14T10:22:08.812Z" }, - { url = "https://files.pythonhosted.org/packages/c7/3d/f8c1a371ceebcaf94d6dd2d77c6cf4b1c078e13a5837aee83f760b4f7cfd/pydantic_core-2.41.4-cp314-cp314t-win_amd64.whl", hash = "sha256:de2cfbb09e88f0f795fd90cf955858fc2c691df65b1f21f0aa00b99f3fbc661d", size = 1993542, upload-time = "2025-10-14T10:22:11.332Z" }, - { url = "https://files.pythonhosted.org/packages/8a/ac/9fc61b4f9d079482a290afe8d206b8f490e9fd32d4fc03ed4fc698214e01/pydantic_core-2.41.4-cp314-cp314t-win_arm64.whl", hash = "sha256:d34f950ae05a83e0ede899c595f312ca976023ea1db100cd5aa188f7005e3ab0", size = 1973897, upload-time = "2025-10-14T10:22:13.444Z" }, - { url = "https://files.pythonhosted.org/packages/b0/12/5ba58daa7f453454464f92b3ca7b9d7c657d8641c48e370c3ebc9a82dd78/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:a1b2cfec3879afb742a7b0bcfa53e4f22ba96571c9e54d6a3afe1052d17d843b", size = 2122139, upload-time = "2025-10-14T10:22:47.288Z" }, - { url = 
"https://files.pythonhosted.org/packages/21/fb/6860126a77725c3108baecd10fd3d75fec25191d6381b6eb2ac660228eac/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:d175600d975b7c244af6eb9c9041f10059f20b8bbffec9e33fdd5ee3f67cdc42", size = 1936674, upload-time = "2025-10-14T10:22:49.555Z" }, - { url = "https://files.pythonhosted.org/packages/de/be/57dcaa3ed595d81f8757e2b44a38240ac5d37628bce25fb20d02c7018776/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f184d657fa4947ae5ec9c47bd7e917730fa1cbb78195037e32dcbab50aca5ee", size = 1956398, upload-time = "2025-10-14T10:22:52.19Z" }, - { url = "https://files.pythonhosted.org/packages/2f/1d/679a344fadb9695f1a6a294d739fbd21d71fa023286daeea8c0ed49e7c2b/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed810568aeffed3edc78910af32af911c835cc39ebbfacd1f0ab5dd53028e5c", size = 2138674, upload-time = "2025-10-14T10:22:54.499Z" }, - { url = "https://files.pythonhosted.org/packages/c4/48/ae937e5a831b7c0dc646b2ef788c27cd003894882415300ed21927c21efa/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:4f5d640aeebb438517150fdeec097739614421900e4a08db4a3ef38898798537", size = 2112087, upload-time = "2025-10-14T10:22:56.818Z" }, - { url = "https://files.pythonhosted.org/packages/5e/db/6db8073e3d32dae017da7e0d16a9ecb897d0a4d92e00634916e486097961/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:4a9ab037b71927babc6d9e7fc01aea9e66dc2a4a34dff06ef0724a4049629f94", size = 1920387, upload-time = "2025-10-14T10:22:59.342Z" }, - { url = "https://files.pythonhosted.org/packages/0d/c1/dd3542d072fcc336030d66834872f0328727e3b8de289c662faa04aa270e/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e4dab9484ec605c3016df9ad4fd4f9a390bc5d816a3b10c6550f8424bb80b18c", size = 1951495, upload-time = "2025-10-14T10:23:02.089Z" }, - { url = "https://files.pythonhosted.org/packages/2b/c6/db8d13a1f8ab3f1eb08c88bd00fd62d44311e3456d1e85c0e59e0a0376e7/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8a5028425820731d8c6c098ab642d7b8b999758e24acae03ed38a66eca8335", size = 2139008, upload-time = "2025-10-14T10:23:04.539Z" }, - { url = "https://files.pythonhosted.org/packages/5d/d4/912e976a2dd0b49f31c98a060ca90b353f3b73ee3ea2fd0030412f6ac5ec/pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e5ab4fc177dd41536b3c32b2ea11380dd3d4619a385860621478ac2d25ceb00", size = 2106739, upload-time = "2025-10-14T10:23:06.934Z" }, - { url = "https://files.pythonhosted.org/packages/71/f0/66ec5a626c81eba326072d6ee2b127f8c139543f1bf609b4842978d37833/pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3d88d0054d3fa11ce936184896bed3c1c5441d6fa483b498fac6a5d0dd6f64a9", size = 1932549, upload-time = "2025-10-14T10:23:09.24Z" }, - { url = "https://files.pythonhosted.org/packages/c4/af/625626278ca801ea0a658c2dcf290dc9f21bb383098e99e7c6a029fccfc0/pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b2a054a8725f05b4b6503357e0ac1c4e8234ad3b0c2ac130d6ffc66f0e170e2", size = 2135093, upload-time = "2025-10-14T10:23:11.626Z" }, - { url = "https://files.pythonhosted.org/packages/20/f6/2fba049f54e0f4975fef66be654c597a1d005320fa141863699180c7697d/pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0d9db5a161c99375a0c68c058e227bee1d89303300802601d76a3d01f74e258", size = 2187971, upload-time = "2025-10-14T10:23:14.437Z" }, - { url = 
"https://files.pythonhosted.org/packages/0e/80/65ab839a2dfcd3b949202f9d920c34f9de5a537c3646662bdf2f7d999680/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:6273ea2c8ffdac7b7fda2653c49682db815aebf4a89243a6feccf5e36c18c347", size = 2147939, upload-time = "2025-10-14T10:23:16.831Z" }, - { url = "https://files.pythonhosted.org/packages/44/58/627565d3d182ce6dfda18b8e1c841eede3629d59c9d7cbc1e12a03aeb328/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:4c973add636efc61de22530b2ef83a65f39b6d6f656df97f678720e20de26caa", size = 2311400, upload-time = "2025-10-14T10:23:19.234Z" }, - { url = "https://files.pythonhosted.org/packages/24/06/8a84711162ad5a5f19a88cead37cca81b4b1f294f46260ef7334ae4f24d3/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b69d1973354758007f46cf2d44a4f3d0933f10b6dc9bf15cf1356e037f6f731a", size = 2316840, upload-time = "2025-10-14T10:23:21.738Z" }, - { url = "https://files.pythonhosted.org/packages/aa/8b/b7bb512a4682a2f7fbfae152a755d37351743900226d29bd953aaf870eaa/pydantic_core-2.41.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3619320641fd212aaf5997b6ca505e97540b7e16418f4a241f44cdf108ffb50d", size = 2149135, upload-time = "2025-10-14T10:23:24.379Z" }, - { url = "https://files.pythonhosted.org/packages/7e/7d/138e902ed6399b866f7cfe4435d22445e16fff888a1c00560d9dc79a780f/pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:491535d45cd7ad7e4a2af4a5169b0d07bebf1adfd164b0368da8aa41e19907a5", size = 2104721, upload-time = "2025-10-14T10:23:26.906Z" }, - { url = "https://files.pythonhosted.org/packages/47/13/0525623cf94627f7b53b4c2034c81edc8491cbfc7c28d5447fa318791479/pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:54d86c0cada6aba4ec4c047d0e348cbad7063b87ae0f005d9f8c9ad04d4a92a2", size = 1931608, upload-time = "2025-10-14T10:23:29.306Z" }, - { url = 
"https://files.pythonhosted.org/packages/d6/f9/744bc98137d6ef0a233f808bfc9b18cf94624bf30836a18d3b05d08bf418/pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca1124aced216b2500dc2609eade086d718e8249cb9696660ab447d50a758bd", size = 2132986, upload-time = "2025-10-14T10:23:32.057Z" }, - { url = "https://files.pythonhosted.org/packages/17/c8/629e88920171173f6049386cc71f893dff03209a9ef32b4d2f7e7c264bcf/pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c9024169becccf0cb470ada03ee578d7348c119a0d42af3dcf9eda96e3a247c", size = 2187516, upload-time = "2025-10-14T10:23:34.871Z" }, - { url = "https://files.pythonhosted.org/packages/2e/0f/4f2734688d98488782218ca61bcc118329bf5de05bb7fe3adc7dd79b0b86/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:26895a4268ae5a2849269f4991cdc97236e4b9c010e51137becf25182daac405", size = 2146146, upload-time = "2025-10-14T10:23:37.342Z" }, - { url = "https://files.pythonhosted.org/packages/ed/f2/ab385dbd94a052c62224b99cf99002eee99dbec40e10006c78575aead256/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:ca4df25762cf71308c446e33c9b1fdca2923a3f13de616e2a949f38bf21ff5a8", size = 2311296, upload-time = "2025-10-14T10:23:40.145Z" }, - { url = "https://files.pythonhosted.org/packages/fc/8e/e4f12afe1beeb9823bba5375f8f258df0cc61b056b0195fb1cf9f62a1a58/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:5a28fcedd762349519276c36634e71853b4541079cab4acaaac60c4421827308", size = 2315386, upload-time = "2025-10-14T10:23:42.624Z" }, - { url = "https://files.pythonhosted.org/packages/48/f7/925f65d930802e3ea2eb4d5afa4cb8730c8dc0d2cb89a59dc4ed2fcb2d74/pydantic_core-2.41.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c173ddcd86afd2535e2b695217e82191580663a1d1928239f877f5a1649ef39f", size = 2147775, upload-time = "2025-10-14T10:23:45.406Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/98/b50eb9a411e87483b5c65dba4fa430a06bac4234d3403a40e5a9905ebcd0/pydantic_core-2.46.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:1da3786b8018e60349680720158cc19161cc3b4bdd815beb0a321cd5ce1ad5b1", size = 2108971, upload-time = "2026-04-20T14:43:51.945Z" }, + { url = "https://files.pythonhosted.org/packages/08/4b/f364b9d161718ff2217160a4b5d41ce38de60aed91c3689ebffa1c939d23/pydantic_core-2.46.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cc0988cb29d21bf4a9d5cf2ef970b5c0e38d8d8e107a493278c05dc6c1dda69f", size = 1949588, upload-time = "2026-04-20T14:44:10.386Z" }, + { url = "https://files.pythonhosted.org/packages/8f/8b/30bd03ee83b2f5e29f5ba8e647ab3c456bf56f2ec72fdbcc0215484a0854/pydantic_core-2.46.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27f9067c3bfadd04c55484b89c0d267981b2f3512850f6f66e1e74204a4e4ce3", size = 1975986, upload-time = "2026-04-20T14:43:57.106Z" }, + { url = "https://files.pythonhosted.org/packages/3c/54/13ccf954d84ec275d5d023d5786e4aa48840bc9f161f2838dc98e1153518/pydantic_core-2.46.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a642ac886ecf6402d9882d10c405dcf4b902abeb2972cd5fb4a48c83cd59279a", size = 2055830, upload-time = "2026-04-20T14:44:15.499Z" }, + { url = "https://files.pythonhosted.org/packages/be/0e/65f38125e660fdbd72aa858e7dfae893645cfa0e7b13d333e174a367cd23/pydantic_core-2.46.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79f561438481f28681584b89e2effb22855e2179880314bcddbf5968e935e807", size = 2222340, upload-time = "2026-04-20T14:41:51.353Z" }, + { url = "https://files.pythonhosted.org/packages/d1/88/f3ab7739efe0e7e80777dbb84c59eb98518e3f57ea433206194c2e425272/pydantic_core-2.46.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57a973eae4665352a47cf1a99b4ee864620f2fe663a217d7a8da68a1f3a5bfda", size = 2280727, upload-time = "2026-04-20T14:41:30.461Z" }, + { 
url = "https://files.pythonhosted.org/packages/2a/6d/c228219080817bec4982f9531cadb18da6aaa770fdeb114f49c237ac2c9f/pydantic_core-2.46.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83d002b97072a53ea150d63e0a3adfae5670cef5aa8a6e490240e482d3b22e57", size = 2092158, upload-time = "2026-04-20T14:44:07.305Z" }, + { url = "https://files.pythonhosted.org/packages/0f/b1/525a16711e7c6d61635fac3b0bd54600b5c5d9f60c6fc5aaab26b64a2297/pydantic_core-2.46.3-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:b40ddd51e7c44b28cfaef746c9d3c506d658885e0a46f9eeef2ee815cbf8e045", size = 2116626, upload-time = "2026-04-20T14:42:34.118Z" }, + { url = "https://files.pythonhosted.org/packages/ef/7c/17d30673351439a6951bf54f564cf2443ab00ae264ec9df00e2efd710eb5/pydantic_core-2.46.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac5ec7fb9b87f04ee839af2d53bcadea57ded7d229719f56c0ed895bff987943", size = 2160691, upload-time = "2026-04-20T14:41:14.023Z" }, + { url = "https://files.pythonhosted.org/packages/86/66/af8adbcbc0886ead7f1a116606a534d75a307e71e6e08226000d51b880d2/pydantic_core-2.46.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a3b11c812f61b3129c4905781a2601dfdfdea5fe1e6c1cfb696b55d14e9c054f", size = 2182543, upload-time = "2026-04-20T14:40:48.886Z" }, + { url = "https://files.pythonhosted.org/packages/b0/37/6de71e0f54c54a4190010f57deb749e1ddf75c568ada3b1320b70067f121/pydantic_core-2.46.3-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:1108da631e602e5b3c38d6d04fe5bb3bfa54349e6918e3ca6cf570b2e2b2f9d4", size = 2324513, upload-time = "2026-04-20T14:42:36.121Z" }, + { url = "https://files.pythonhosted.org/packages/51/b1/9fc74ce94f603d5ef59ff258ca9c2c8fb902fb548d340a96f77f4d1c3b7f/pydantic_core-2.46.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:de885175515bcfa98ae618c1df7a072f13d179f81376c8007112af20567fd08a", size = 2361853, upload-time = "2026-04-20T14:43:24.886Z" }, + { url = 
"https://files.pythonhosted.org/packages/40/d0/4c652fc592db35f100279ee751d5a145aca1b9a7984b9684ba7c1b5b0535/pydantic_core-2.46.3-cp310-cp310-win32.whl", hash = "sha256:d11058e3201527d41bc6b545c79187c9e4bf85e15a236a6007f0e991518882b7", size = 1980465, upload-time = "2026-04-20T14:44:46.239Z" }, + { url = "https://files.pythonhosted.org/packages/27/b8/a920453c38afbe1f355e1ea0b0d94a0a3e0b0879d32d793108755fa171d5/pydantic_core-2.46.3-cp310-cp310-win_amd64.whl", hash = "sha256:3612edf65c8ea67ac13616c4d23af12faef1ae435a8a93e5934c2a0cbbdd1fd6", size = 2073884, upload-time = "2026-04-20T14:43:01.201Z" }, + { url = "https://files.pythonhosted.org/packages/22/a2/1ba90a83e85a3f94c796b184f3efde9c72f2830dcda493eea8d59ba78e6d/pydantic_core-2.46.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ab124d49d0459b2373ecf54118a45c28a1e6d4192a533fbc915e70f556feb8e5", size = 2106740, upload-time = "2026-04-20T14:41:20.932Z" }, + { url = "https://files.pythonhosted.org/packages/b6/f6/99ae893c89a0b9d3daec9f95487aa676709aa83f67643b3f0abaf4ab628a/pydantic_core-2.46.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cca67d52a5c7a16aed2b3999e719c4bcf644074eac304a5d3d62dd70ae7d4b2c", size = 1948293, upload-time = "2026-04-20T14:43:42.115Z" }, + { url = "https://files.pythonhosted.org/packages/3e/b8/2e8e636dc9e3f16c2e16bf0849e24be82c5ee82c603c65fc0326666328fc/pydantic_core-2.46.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c024e08c0ba23e6fd68c771a521e9d6a792f2ebb0fa734296b36394dc30390e", size = 1973222, upload-time = "2026-04-20T14:41:57.841Z" }, + { url = "https://files.pythonhosted.org/packages/34/36/0e730beec4d83c5306f417afbd82ff237d9a21e83c5edf675f31ed84c1fe/pydantic_core-2.46.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6645ce7eec4928e29a1e3b3d5c946621d105d3e79f0c9cddf07c2a9770949287", size = 2053852, upload-time = "2026-04-20T14:40:43.077Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/f0/3071131f47e39136a17814576e0fada9168569f7f8c0e6ac4d1ede6a4958/pydantic_core-2.46.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a712c7118e6c5ea96562f7b488435172abb94a3c53c22c9efc1412264a45cbbe", size = 2221134, upload-time = "2026-04-20T14:43:03.349Z" }, + { url = "https://files.pythonhosted.org/packages/2f/a9/a2dc023eec5aa4b02a467874bad32e2446957d2adcab14e107eab502e978/pydantic_core-2.46.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:69a868ef3ff206343579021c40faf3b1edc64b1cc508ff243a28b0a514ccb050", size = 2279785, upload-time = "2026-04-20T14:41:19.285Z" }, + { url = "https://files.pythonhosted.org/packages/0a/44/93f489d16fb63fbd41c670441536541f6e8cfa1e5a69f40bc9c5d30d8c90/pydantic_core-2.46.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc7e8c32db809aa0f6ea1d6869ebc8518a65d5150fdfad8bcae6a49ae32a22e2", size = 2089404, upload-time = "2026-04-20T14:43:10.108Z" }, + { url = "https://files.pythonhosted.org/packages/2a/78/8692e3aa72b2d004f7a5d937f1dfdc8552ba26caf0bec75f342c40f00dec/pydantic_core-2.46.3-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:3481bd1341dc85779ee506bc8e1196a277ace359d89d28588a9468c3ecbe63fa", size = 2114898, upload-time = "2026-04-20T14:44:51.475Z" }, + { url = "https://files.pythonhosted.org/packages/6a/62/e83133f2e7832532060175cebf1f13748f4c7e7e7165cdd1f611f174494b/pydantic_core-2.46.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8690eba565c6d68ffd3a8655525cbdd5246510b44a637ee2c6c03a7ebfe64d3c", size = 2157856, upload-time = "2026-04-20T14:43:46.64Z" }, + { url = "https://files.pythonhosted.org/packages/6d/ec/6a500e3ad7718ee50583fae79c8651f5d37e3abce1fa9ae177ae65842c53/pydantic_core-2.46.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4de88889d7e88d50d40ee5b39d5dac0bcaef9ba91f7e536ac064e6b2834ecccf", size = 2180168, upload-time = "2026-04-20T14:42:00.302Z" }, + { url 
= "https://files.pythonhosted.org/packages/d8/53/8267811054b1aa7fc1dc7ded93812372ef79a839f5e23558136a6afbfde1/pydantic_core-2.46.3-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:e480080975c1ef7f780b8f99ed72337e7cc5efea2e518a20a692e8e7b278eb8b", size = 2322885, upload-time = "2026-04-20T14:41:05.253Z" }, + { url = "https://files.pythonhosted.org/packages/c8/c1/1c0acdb3aa0856ddc4ecc55214578f896f2de16f400cf51627eb3c26c1c4/pydantic_core-2.46.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:de3a5c376f8cd94da9a1b8fd3dd1c16c7a7b216ed31dc8ce9fd7a22bf13b836e", size = 2360328, upload-time = "2026-04-20T14:41:43.991Z" }, + { url = "https://files.pythonhosted.org/packages/f0/d0/ef39cd0f4a926814f360e71c1adeab48ad214d9727e4deb48eedfb5bce1a/pydantic_core-2.46.3-cp311-cp311-win32.whl", hash = "sha256:fc331a5314ffddd5385b9ee9d0d2fee0b13c27e0e02dad71b1ae5d6561f51eeb", size = 1979464, upload-time = "2026-04-20T14:43:12.215Z" }, + { url = "https://files.pythonhosted.org/packages/18/9c/f41951b0d858e343f1cf09398b2a7b3014013799744f2c4a8ad6a3eec4f2/pydantic_core-2.46.3-cp311-cp311-win_amd64.whl", hash = "sha256:b5b9c6cf08a8a5e502698f5e153056d12c34b8fb30317e0c5fd06f45162a6346", size = 2070837, upload-time = "2026-04-20T14:41:47.707Z" }, + { url = "https://files.pythonhosted.org/packages/9f/1e/264a17cd582f6ed50950d4d03dd5fefd84e570e238afe1cb3e25cf238769/pydantic_core-2.46.3-cp311-cp311-win_arm64.whl", hash = "sha256:5dfd51cf457482f04ec49491811a2b8fd5b843b64b11eecd2d7a1ee596ea78a6", size = 2053647, upload-time = "2026-04-20T14:42:27.535Z" }, + { url = "https://files.pythonhosted.org/packages/4b/cb/5b47425556ecc1f3fe18ed2a0083188aa46e1dd812b06e406475b3a5d536/pydantic_core-2.46.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b11b59b3eee90a80a36701ddb4576d9ae31f93f05cb9e277ceaa09e6bf074a67", size = 2101946, upload-time = "2026-04-20T14:40:52.581Z" }, + { url = 
"https://files.pythonhosted.org/packages/a1/4f/2fb62c2267cae99b815bbf4a7b9283812c88ca3153ef29f7707200f1d4e5/pydantic_core-2.46.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:af8653713055ea18a3abc1537fe2ebc42f5b0bbb768d1eb79fd74eb47c0ac089", size = 1951612, upload-time = "2026-04-20T14:42:42.996Z" }, + { url = "https://files.pythonhosted.org/packages/50/6e/b7348fd30d6556d132cddd5bd79f37f96f2601fe0608afac4f5fb01ec0b3/pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75a519dab6d63c514f3a81053e5266c549679e4aa88f6ec57f2b7b854aceb1b0", size = 1977027, upload-time = "2026-04-20T14:42:02.001Z" }, + { url = "https://files.pythonhosted.org/packages/82/11/31d60ee2b45540d3fb0b29302a393dbc01cd771c473f5b5147bcd353e593/pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a6cd87cb1575b1ad05ba98894c5b5c96411ef678fa2f6ed2576607095b8d9789", size = 2063008, upload-time = "2026-04-20T14:44:17.952Z" }, + { url = "https://files.pythonhosted.org/packages/8a/db/3a9d1957181b59258f44a2300ab0f0be9d1e12d662a4f57bb31250455c52/pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f80a55484b8d843c8ada81ebf70a682f3f00a3d40e378c06cf17ecb44d280d7d", size = 2233082, upload-time = "2026-04-20T14:40:57.934Z" }, + { url = "https://files.pythonhosted.org/packages/9c/e1/3277c38792aeb5cfb18c2f0c5785a221d9ff4e149abbe1184d53d5f72273/pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3861f1731b90c50a3266316b9044f5c9b405eecb8e299b0a7120596334e4fe9c", size = 2304615, upload-time = "2026-04-20T14:42:12.584Z" }, + { url = "https://files.pythonhosted.org/packages/5e/d5/e3d9717c9eba10855325650afd2a9cba8e607321697f18953af9d562da2f/pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb528e295ed31570ac3dcc9bfdd6e0150bc11ce6168ac87a8082055cf1a67395", size = 2094380, upload-time = 
"2026-04-20T14:43:05.522Z" }, + { url = "https://files.pythonhosted.org/packages/a1/20/abac35dedcbfd66c6f0b03e4e3564511771d6c9b7ede10a362d03e110d9b/pydantic_core-2.46.3-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:367508faa4973b992b271ba1494acaab36eb7e8739d1e47be5035fb1ea225396", size = 2135429, upload-time = "2026-04-20T14:41:55.549Z" }, + { url = "https://files.pythonhosted.org/packages/6c/a5/41bfd1df69afad71b5cf0535055bccc73022715ad362edbc124bc1e021d7/pydantic_core-2.46.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ad3c826fe523e4becf4fe39baa44286cff85ef137c729a2c5e269afbfd0905d", size = 2174582, upload-time = "2026-04-20T14:41:45.96Z" }, + { url = "https://files.pythonhosted.org/packages/79/65/38d86ea056b29b2b10734eb23329b7a7672ca604df4f2b6e9c02d4ee22fe/pydantic_core-2.46.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ec638c5d194ef8af27db69f16c954a09797c0dc25015ad6123eb2c73a4d271ca", size = 2187533, upload-time = "2026-04-20T14:40:55.367Z" }, + { url = "https://files.pythonhosted.org/packages/b6/55/a1129141678a2026badc539ad1dee0a71d06f54c2f06a4bd68c030ac781b/pydantic_core-2.46.3-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:28ed528c45446062ee66edb1d33df5d88828ae167de76e773a3c7f64bd14e976", size = 2332985, upload-time = "2026-04-20T14:44:13.05Z" }, + { url = "https://files.pythonhosted.org/packages/d7/60/cb26f4077719f709e54819f4e8e1d43f4091f94e285eb6bd21e1190a7b7c/pydantic_core-2.46.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aed19d0c783886d5bd86d80ae5030006b45e28464218747dcf83dabfdd092c7b", size = 2373670, upload-time = "2026-04-20T14:41:53.421Z" }, + { url = "https://files.pythonhosted.org/packages/6b/7e/c3f21882bdf1d8d086876f81b5e296206c69c6082551d776895de7801fa0/pydantic_core-2.46.3-cp312-cp312-win32.whl", hash = "sha256:06d5d8820cbbdb4147578c1fe7ffcd5b83f34508cb9f9ab76e807be7db6ff0a4", size = 1966722, upload-time = "2026-04-20T14:44:30.588Z" }, + { url = 
"https://files.pythonhosted.org/packages/57/be/6b5e757b859013ebfbd7adba02f23b428f37c86dcbf78b5bb0b4ffd36e99/pydantic_core-2.46.3-cp312-cp312-win_amd64.whl", hash = "sha256:c3212fda0ee959c1dd04c60b601ec31097aaa893573a3a1abd0a47bcac2968c1", size = 2072970, upload-time = "2026-04-20T14:42:54.248Z" }, + { url = "https://files.pythonhosted.org/packages/bf/f8/a989b21cc75e9a32d24192ef700eea606521221a89faa40c919ce884f2b1/pydantic_core-2.46.3-cp312-cp312-win_arm64.whl", hash = "sha256:f1f8338dd7a7f31761f1f1a3c47503a9a3b34eea3c8b01fa6ee96408affb5e72", size = 2035963, upload-time = "2026-04-20T14:44:20.4Z" }, + { url = "https://files.pythonhosted.org/packages/9b/3c/9b5e8eb9821936d065439c3b0fb1490ffa64163bfe7e1595985a47896073/pydantic_core-2.46.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:12bc98de041458b80c86c56b24df1d23832f3e166cbaff011f25d187f5c62c37", size = 2102109, upload-time = "2026-04-20T14:41:24.219Z" }, + { url = "https://files.pythonhosted.org/packages/91/97/1c41d1f5a19f241d8069f1e249853bcce378cdb76eec8ab636d7bc426280/pydantic_core-2.46.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:85348b8f89d2c3508b65b16c3c33a4da22b8215138d8b996912bb1532868885f", size = 1951820, upload-time = "2026-04-20T14:42:14.236Z" }, + { url = "https://files.pythonhosted.org/packages/30/b4/d03a7ae14571bc2b6b3c7b122441154720619afe9a336fa3a95434df5e2f/pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1105677a6df914b1fb71a81b96c8cce7726857e1717d86001f29be06a25ee6f8", size = 1977785, upload-time = "2026-04-20T14:42:31.648Z" }, + { url = "https://files.pythonhosted.org/packages/ae/0c/4086f808834b59e3c8f1aa26df8f4b6d998cdcf354a143d18ef41529d1fe/pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:87082cd65669a33adeba5470769e9704c7cf026cc30afb9cc77fd865578ebaad", size = 2062761, upload-time = "2026-04-20T14:40:37.093Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/71/a649be5a5064c2df0db06e0a512c2281134ed2fcc981f52a657936a7527c/pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60e5f66e12c4f5212d08522963380eaaeac5ebd795826cfd19b2dfb0c7a52b9c", size = 2232989, upload-time = "2026-04-20T14:42:59.254Z" }, + { url = "https://files.pythonhosted.org/packages/a2/84/7756e75763e810b3a710f4724441d1ecc5883b94aacb07ca71c5fb5cfb69/pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6cdf19bf84128d5e7c37e8a73a0c5c10d51103a650ac585d42dd6ae233f2b7f", size = 2303975, upload-time = "2026-04-20T14:41:32.287Z" }, + { url = "https://files.pythonhosted.org/packages/6c/35/68a762e0c1e31f35fa0dac733cbd9f5b118042853698de9509c8e5bf128b/pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:031bb17f4885a43773c8c763089499f242aee2ea85cf17154168775dccdecf35", size = 2095325, upload-time = "2026-04-20T14:42:47.685Z" }, + { url = "https://files.pythonhosted.org/packages/77/bf/1bf8c9a8e91836c926eae5e3e51dce009bf495a60ca56060689d3df3f340/pydantic_core-2.46.3-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:bcf2a8b2982a6673693eae7348ef3d8cf3979c1d63b54fca7c397a635cc68687", size = 2133368, upload-time = "2026-04-20T14:41:22.766Z" }, + { url = "https://files.pythonhosted.org/packages/e5/50/87d818d6bab915984995157ceb2380f5aac4e563dddbed6b56f0ed057aba/pydantic_core-2.46.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28e8cf2f52d72ced402a137145923a762cbb5081e48b34312f7a0c8f55928ec3", size = 2173908, upload-time = "2026-04-20T14:42:52.044Z" }, + { url = "https://files.pythonhosted.org/packages/91/88/a311fb306d0bd6185db41fa14ae888fb81d0baf648a761ae760d30819d33/pydantic_core-2.46.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:17eaface65d9fc5abb940003020309c1bf7a211f5f608d7870297c367e6f9022", size = 2186422, upload-time = "2026-04-20T14:43:29.55Z" }, + { url 
= "https://files.pythonhosted.org/packages/8f/79/28fd0d81508525ab2054fef7c77a638c8b5b0afcbbaeee493cf7c3fef7e1/pydantic_core-2.46.3-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:93fd339f23408a07e98950a89644f92c54d8729719a40b30c0a30bb9ebc55d23", size = 2332709, upload-time = "2026-04-20T14:42:16.134Z" }, + { url = "https://files.pythonhosted.org/packages/b3/21/795bf5fe5c0f379308b8ef19c50dedab2e7711dbc8d0c2acf08f1c7daa05/pydantic_core-2.46.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:23cbdb3aaa74dfe0837975dbf69b469753bbde8eacace524519ffdb6b6e89eb7", size = 2372428, upload-time = "2026-04-20T14:41:10.974Z" }, + { url = "https://files.pythonhosted.org/packages/45/b3/ed14c659cbe7605e3ef063077680a64680aec81eb1a04763a05190d49b7f/pydantic_core-2.46.3-cp313-cp313-win32.whl", hash = "sha256:610eda2e3838f401105e6326ca304f5da1e15393ae25dacae5c5c63f2c275b13", size = 1965601, upload-time = "2026-04-20T14:41:42.128Z" }, + { url = "https://files.pythonhosted.org/packages/ef/bb/adb70d9a762ddd002d723fbf1bd492244d37da41e3af7b74ad212609027e/pydantic_core-2.46.3-cp313-cp313-win_amd64.whl", hash = "sha256:68cc7866ed863db34351294187f9b729964c371ba33e31c26f478471c52e1ed0", size = 2071517, upload-time = "2026-04-20T14:43:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/52/eb/66faefabebfe68bd7788339c9c9127231e680b11906368c67ce112fdb47f/pydantic_core-2.46.3-cp313-cp313-win_arm64.whl", hash = "sha256:f64b5537ac62b231572879cd08ec05600308636a5d63bcbdb15063a466977bec", size = 2035802, upload-time = "2026-04-20T14:43:38.507Z" }, + { url = "https://files.pythonhosted.org/packages/7f/db/a7bcb4940183fda36022cd18ba8dd12f2dff40740ec7b58ce7457befa416/pydantic_core-2.46.3-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:afa3aa644f74e290cdede48a7b0bee37d1c35e71b05105f6b340d484af536d9b", size = 2097614, upload-time = "2026-04-20T14:44:38.374Z" }, + { url = 
"https://files.pythonhosted.org/packages/24/35/e4066358a22e3e99519db370494c7528f5a2aa1367370e80e27e20283543/pydantic_core-2.46.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ced3310e51aa425f7f77da8bbbb5212616655bedbe82c70944320bc1dbe5e018", size = 1951896, upload-time = "2026-04-20T14:40:53.996Z" }, + { url = "https://files.pythonhosted.org/packages/87/92/37cf4049d1636996e4b888c05a501f40a43ff218983a551d57f9d5e14f0d/pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e29908922ce9da1a30b4da490bd1d3d82c01dcfdf864d2a74aacee674d0bfa34", size = 1979314, upload-time = "2026-04-20T14:41:49.446Z" }, + { url = "https://files.pythonhosted.org/packages/d8/36/9ff4d676dfbdfb2d591cf43f3d90ded01e15b1404fd101180ed2d62a2fd3/pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0c9ff69140423eea8ed2d5477df3ba037f671f5e897d206d921bc9fdc39613e7", size = 2056133, upload-time = "2026-04-20T14:42:23.574Z" }, + { url = "https://files.pythonhosted.org/packages/bc/f0/405b442a4d7ba855b06eec8b2bf9c617d43b8432d099dfdc7bf999293495/pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b675ab0a0d5b1c8fdb81195dc5bcefea3f3c240871cdd7ff9a2de8aa50772eb2", size = 2228726, upload-time = "2026-04-20T14:44:22.816Z" }, + { url = "https://files.pythonhosted.org/packages/e7/f8/65cd92dd5a0bd89ba277a98ecbfaf6fc36bbd3300973c7a4b826d6ab1391/pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0087084960f209a9a4af50ecd1fb063d9ad3658c07bb81a7a53f452dacbfb2ba", size = 2301214, upload-time = "2026-04-20T14:44:48.792Z" }, + { url = "https://files.pythonhosted.org/packages/fd/86/ef96a4c6e79e7a2d0410826a68fbc0eccc0fd44aa733be199d5fcac3bb87/pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed42e6cc8e1b0e2b9b96e2276bad70ae625d10d6d524aed0c93de974ae029f9f", size = 2099927, upload-time = 
"2026-04-20T14:41:40.196Z" }, + { url = "https://files.pythonhosted.org/packages/6d/53/269caf30e0096e0a8a8f929d1982a27b3879872cca2d917d17c2f9fdf4fe/pydantic_core-2.46.3-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:f1771ce258afb3e4201e67d154edbbae712a76a6081079fe247c2f53c6322c22", size = 2128789, upload-time = "2026-04-20T14:41:15.868Z" }, + { url = "https://files.pythonhosted.org/packages/00/b0/1a6d9b6a587e118482910c244a1c5acf4d192604174132efd12bf0ac486f/pydantic_core-2.46.3-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a7610b6a5242a6c736d8ad47fd5fff87fcfe8f833b281b1c409c3d6835d9227f", size = 2173815, upload-time = "2026-04-20T14:44:25.152Z" }, + { url = "https://files.pythonhosted.org/packages/87/56/e7e00d4041a7e62b5a40815590114db3b535bf3ca0bf4dca9f16cef25246/pydantic_core-2.46.3-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:ff5e7783bcc5476e1db448bf268f11cb257b1c276d3e89f00b5727be86dd0127", size = 2181608, upload-time = "2026-04-20T14:41:28.933Z" }, + { url = "https://files.pythonhosted.org/packages/e8/22/4bd23c3d41f7c185d60808a1de83c76cf5aeabf792f6c636a55c3b1ec7f9/pydantic_core-2.46.3-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:9d2e32edcc143bc01e95300671915d9ca052d4f745aa0a49c48d4803f8a85f2c", size = 2326968, upload-time = "2026-04-20T14:42:03.962Z" }, + { url = "https://files.pythonhosted.org/packages/24/ac/66cd45129e3915e5ade3b292cb3bc7fd537f58f8f8dbdaba6170f7cabb74/pydantic_core-2.46.3-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:6e42d83d1c6b87fa56b521479cff237e626a292f3b31b6345c15a99121b454c1", size = 2369842, upload-time = "2026-04-20T14:41:35.52Z" }, + { url = "https://files.pythonhosted.org/packages/a2/51/dd4248abb84113615473aa20d5545b7c4cd73c8644003b5259686f93996c/pydantic_core-2.46.3-cp314-cp314-win32.whl", hash = "sha256:07bc6d2a28c3adb4f7c6ae46aa4f2d2929af127f587ed44057af50bf1ce0f505", size = 1959661, upload-time = "2026-04-20T14:41:00.042Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/eb/59980e5f1ae54a3b86372bd9f0fa373ea2d402e8cdcd3459334430f91e91/pydantic_core-2.46.3-cp314-cp314-win_amd64.whl", hash = "sha256:8940562319bc621da30714617e6a7eaa6b98c84e8c685bcdc02d7ed5e7c7c44e", size = 2071686, upload-time = "2026-04-20T14:43:16.471Z" }, + { url = "https://files.pythonhosted.org/packages/8c/db/1cf77e5247047dfee34bc01fa9bca134854f528c8eb053e144298893d370/pydantic_core-2.46.3-cp314-cp314-win_arm64.whl", hash = "sha256:5dcbbcf4d22210ced8f837c96db941bdb078f419543472aca5d9a0bb7cddc7df", size = 2026907, upload-time = "2026-04-20T14:43:31.732Z" }, + { url = "https://files.pythonhosted.org/packages/57/c0/b3df9f6a543276eadba0a48487b082ca1f201745329d97dbfa287034a230/pydantic_core-2.46.3-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:d0fe3dce1e836e418f912c1ad91c73357d03e556a4d286f441bf34fed2dbeecf", size = 2095047, upload-time = "2026-04-20T14:42:37.982Z" }, + { url = "https://files.pythonhosted.org/packages/66/57/886a938073b97556c168fd99e1a7305bb363cd30a6d2c76086bf0587b32a/pydantic_core-2.46.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9ce92e58abc722dac1bf835a6798a60b294e48eb0e625ec9fd994b932ac5feee", size = 1934329, upload-time = "2026-04-20T14:43:49.655Z" }, + { url = "https://files.pythonhosted.org/packages/0b/7c/b42eaa5c34b13b07ecb51da21761297a9b8eb43044c864a035999998f328/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a03e6467f0f5ab796a486146d1b887b2dc5e5f9b3288898c1b1c3ad974e53e4a", size = 1974847, upload-time = "2026-04-20T14:42:10.737Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9b/92b42db6543e7de4f99ae977101a2967b63122d4b6cf7773812da2d7d5b5/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2798b6ba041b9d70acfb9071a2ea13c8456dd1e6a5555798e41ba7b0790e329c", size = 2041742, upload-time = "2026-04-20T14:40:44.262Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/19/46fbe1efabb5aa2834b43b9454e70f9a83ad9c338c1291e48bdc4fecf167/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9be3e221bdc6d69abf294dcf7aff6af19c31a5cdcc8f0aa3b14be29df4bd03b1", size = 2236235, upload-time = "2026-04-20T14:41:27.307Z" }, + { url = "https://files.pythonhosted.org/packages/77/da/b3f95bc009ad60ec53120f5d16c6faa8cabdbe8a20d83849a1f2b8728148/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f13936129ce841f2a5ddf6f126fea3c43cd128807b5a59588c37cf10178c2e64", size = 2282633, upload-time = "2026-04-20T14:44:33.271Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6e/401336117722e28f32fb8220df676769d28ebdf08f2f4469646d404c43a3/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28b5f2ef03416facccb1c6ef744c69793175fd27e44ef15669201601cf423acb", size = 2109679, upload-time = "2026-04-20T14:44:41.065Z" }, + { url = "https://files.pythonhosted.org/packages/fc/53/b289f9bc8756a32fe718c46f55afaeaf8d489ee18d1a1e7be1db73f42cc4/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:830d1247d77ad23852314f069e9d7ddafeec5f684baf9d7e7065ed46a049c4e6", size = 2108342, upload-time = "2026-04-20T14:42:50.144Z" }, + { url = "https://files.pythonhosted.org/packages/10/5b/8292fc7c1f9111f1b2b7c1b0dcf1179edcd014fc3ea4517499f50b829d71/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0793c90c1a3c74966e7975eaef3ed30ebdff3260a0f815a62a22adc17e4c01c", size = 2157208, upload-time = "2026-04-20T14:42:08.133Z" }, + { url = "https://files.pythonhosted.org/packages/2b/9e/f80044e9ec07580f057a89fc131f78dda7a58751ddf52bbe05eaf31db50f/pydantic_core-2.46.3-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:d2d0aead851b66f5245ec0c4fb2612ef457f8bbafefdf65a2bf9d6bac6140f47", size = 2167237, upload-time = "2026-04-20T14:42:25.412Z" }, 
+ { url = "https://files.pythonhosted.org/packages/f8/84/6781a1b037f3b96be9227edbd1101f6d3946746056231bf4ac48cdff1a8d/pydantic_core-2.46.3-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:2f40e4246676beb31c5ce77c38a55ca4e465c6b38d11ea1bd935420568e0b1ab", size = 2312540, upload-time = "2026-04-20T14:40:40.313Z" }, + { url = "https://files.pythonhosted.org/packages/3e/db/19c0839feeb728e7df03255581f198dfdf1c2aeb1e174a8420b63c5252e5/pydantic_core-2.46.3-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:cf489cf8986c543939aeee17a09c04d6ffb43bfef8ca16fcbcc5cfdcbed24dba", size = 2369556, upload-time = "2026-04-20T14:41:09.427Z" }, + { url = "https://files.pythonhosted.org/packages/e0/15/3228774cb7cd45f5f721ddf1b2242747f4eb834d0c491f0c02d606f09fed/pydantic_core-2.46.3-cp314-cp314t-win32.whl", hash = "sha256:ffe0883b56cfc05798bf994164d2b2ff03efe2d22022a2bb080f3b626176dd56", size = 1949756, upload-time = "2026-04-20T14:41:25.717Z" }, + { url = "https://files.pythonhosted.org/packages/b8/2a/c79cf53fd91e5a87e30d481809f52f9a60dd221e39de66455cf04deaad37/pydantic_core-2.46.3-cp314-cp314t-win_amd64.whl", hash = "sha256:706d9d0ce9cf4593d07270d8e9f53b161f90c57d315aeec4fb4fd7a8b10240d8", size = 2051305, upload-time = "2026-04-20T14:43:18.627Z" }, + { url = "https://files.pythonhosted.org/packages/0b/db/d8182a7f1d9343a032265aae186eb063fe26ca4c40f256b21e8da4498e89/pydantic_core-2.46.3-cp314-cp314t-win_arm64.whl", hash = "sha256:77706aeb41df6a76568434701e0917da10692da28cb69d5fb6919ce5fdb07374", size = 2026310, upload-time = "2026-04-20T14:41:01.778Z" }, + { url = "https://files.pythonhosted.org/packages/66/7f/03dbad45cd3aa9083fbc93c210ae8b005af67e4136a14186950a747c6874/pydantic_core-2.46.3-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:9715525891ed524a0a1eb6d053c74d4d4ad5017677fb00af0b7c2644a31bae46", size = 2105683, upload-time = "2026-04-20T14:42:19.779Z" }, + { url = 
"https://files.pythonhosted.org/packages/26/22/4dc186ac8ea6b257e9855031f51b62a9637beac4d68ac06bee02f046f836/pydantic_core-2.46.3-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:9d2f400712a99a013aff420ef1eb9be077f8189a36c1e3ef87660b4e1088a874", size = 1940052, upload-time = "2026-04-20T14:43:59.274Z" }, + { url = "https://files.pythonhosted.org/packages/0d/ca/d376391a5aff1f2e8188960d7873543608130a870961c2b6b5236627c116/pydantic_core-2.46.3-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd2aab0e2e9dc2daf36bd2686c982535d5e7b1d930a1344a7bb6e82baab42a76", size = 1988172, upload-time = "2026-04-20T14:41:17.469Z" }, + { url = "https://files.pythonhosted.org/packages/0e/6b/523b9f85c23788755d6ab949329de692a2e3a584bc6beb67fef5e035aa9d/pydantic_core-2.46.3-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e9d76736da5f362fabfeea6a69b13b7f2be405c6d6966f06b2f6bfff7e64531", size = 2128596, upload-time = "2026-04-20T14:40:41.707Z" }, + { url = "https://files.pythonhosted.org/packages/34/42/f426db557e8ab2791bc7562052299944a118655496fbff99914e564c0a94/pydantic_core-2.46.3-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:b12dd51f1187c2eb489af8e20f880362db98e954b54ab792fa5d92e8bcc6b803", size = 2091877, upload-time = "2026-04-20T14:43:27.091Z" }, + { url = "https://files.pythonhosted.org/packages/5c/4f/86a832a9d14df58e663bfdf4627dc00d3317c2bd583c4fb23390b0f04b8e/pydantic_core-2.46.3-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:f00a0961b125f1a47af7bcc17f00782e12f4cd056f83416006b30111d941dfa3", size = 1932428, upload-time = "2026-04-20T14:40:45.781Z" }, + { url = "https://files.pythonhosted.org/packages/11/1a/fe857968954d93fb78e0d4b6df5c988c74c4aaa67181c60be7cfe327c0ca/pydantic_core-2.46.3-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:57697d7c056aca4bbb680200f96563e841a6386ac1129370a0102592f4dddff5", size = 1997550, upload-time = "2026-04-20T14:44:02.425Z" }, + { url = "https://files.pythonhosted.org/packages/17/eb/9d89ad2d9b0ba8cd65393d434471621b98912abb10fbe1df08e480ba57b5/pydantic_core-2.46.3-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd35aa21299def8db7ef4fe5c4ff862941a9a158ca7b63d61e66fe67d30416b4", size = 2137657, upload-time = "2026-04-20T14:42:45.149Z" }, + { url = "https://files.pythonhosted.org/packages/1f/da/99d40830684f81dec901cac521b5b91c095394cc1084b9433393cde1c2df/pydantic_core-2.46.3-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:13afdd885f3d71280cf286b13b310ee0f7ccfefd1dbbb661514a474b726e2f25", size = 2107973, upload-time = "2026-04-20T14:42:06.175Z" }, + { url = "https://files.pythonhosted.org/packages/99/a5/87024121818d75bbb2a98ddbaf638e40e7a18b5e0f5492c9ca4b1b316107/pydantic_core-2.46.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:f91c0aff3e3ee0928edd1232c57f643a7a003e6edf1860bc3afcdc749cb513f3", size = 1947191, upload-time = "2026-04-20T14:43:14.319Z" }, + { url = "https://files.pythonhosted.org/packages/60/62/0c1acfe10945b83a6a59d19fbaa92f48825381509e5701b855c08f13db76/pydantic_core-2.46.3-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6529d1d128321a58d30afcc97b49e98836542f68dd41b33c2e972bb9e5290536", size = 2123791, upload-time = "2026-04-20T14:43:22.766Z" }, + { url = "https://files.pythonhosted.org/packages/75/3e/3b2393b4c8f44285561dc30b00cf307a56a2eff7c483a824db3b8221ca51/pydantic_core-2.46.3-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:975c267cff4f7e7272eacbe50f6cc03ca9a3da4c4fbd66fffd89c94c1e311aa1", size = 2153197, upload-time = "2026-04-20T14:44:27.932Z" }, + { url = 
"https://files.pythonhosted.org/packages/ba/75/5af02fb35505051eee727c061f2881c555ab4f8ddb2d42da715a42c9731b/pydantic_core-2.46.3-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2b8e4f2bbdf71415c544b4b1138b8060db7b6611bc927e8064c769f64bed651c", size = 2181073, upload-time = "2026-04-20T14:43:20.729Z" }, + { url = "https://files.pythonhosted.org/packages/10/92/7e0e1bd9ca3c68305db037560ca2876f89b2647deb2f8b6319005de37505/pydantic_core-2.46.3-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e61ea8e9fff9606d09178f577ff8ccdd7206ff73d6552bcec18e1033c4254b85", size = 2315886, upload-time = "2026-04-20T14:44:04.826Z" }, + { url = "https://files.pythonhosted.org/packages/b8/d8/101655f27eaf3e44558ead736b2795d12500598beed4683f279396fa186e/pydantic_core-2.46.3-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b504bda01bafc69b6d3c7a0c7f039dcf60f47fab70e06fe23f57b5c75bdc82b8", size = 2360528, upload-time = "2026-04-20T14:40:47.431Z" }, + { url = "https://files.pythonhosted.org/packages/07/0f/1c34a74c8d07136f0d729ffe5e1fdab04fbdaa7684f61a92f92511a84a15/pydantic_core-2.46.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:b00b76f7142fc60c762ce579bd29c8fa44aaa56592dd3c54fab3928d0d4ca6ff", size = 2184144, upload-time = "2026-04-20T14:42:57Z" }, ] [[package]] @@ -1039,16 +1172,16 @@ wheels = [ [[package]] name = "pygments" -version = "2.19.2" +version = "2.20.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/b2/bc9c9196916376152d655522fdcebac55e66de6603a76a02bca1b6414f6c/pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f", size = 4955991, 
upload-time = "2026-03-29T13:29:33.898Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, + { url = "https://files.pythonhosted.org/packages/f4/7e/a72dd26f3b0f4f2bf1dd8923c85f7ceb43172af56d63c7383eb62b332364/pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176", size = 1231151, upload-time = "2026-03-29T13:29:30.038Z" }, ] [[package]] name = "pytest" -version = "8.4.2" +version = "9.0.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, @@ -1059,36 +1192,23 @@ dependencies = [ { name = "pygments" }, { name = "tomli", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/0d/549bd94f1a0a402dc8cf64563a117c0f3765662e2e668477624baeec44d5/pytest-9.0.3.tar.gz", hash = "sha256:b86ada508af81d19edeb213c681b1d48246c1a91d304c6c81a427674c17eb91c", size = 1572165, upload-time = "2026-04-07T17:16:18.027Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, + { url = "https://files.pythonhosted.org/packages/d4/24/a372aaf5c9b7208e7112038812994107bc65a84cd00e0354a88c2c77a617/pytest-9.0.3-py3-none-any.whl", hash = 
"sha256:2c5efc453d45394fdd706ade797c0a81091eccd1d6e4bccfcd476e2b8e0ab5d9", size = 375249, upload-time = "2026-04-07T17:16:16.13Z" }, ] [[package]] name = "pytest-cov" -version = "7.0.0" +version = "7.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "coverage", extra = ["toml"] }, { name = "pluggy" }, { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b1/51/a849f96e117386044471c8ec2bd6cfebacda285da9525c9106aeb28da671/pytest_cov-7.1.0.tar.gz", hash = "sha256:30674f2b5f6351aa09702a9c8c364f6a01c27aae0c1366ae8016160d1efc56b2", size = 55592, upload-time = "2026-03-21T20:11:16.284Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, -] - -[[package]] -name = "pytest-subtests" -version = "0.14.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "attrs" }, - { name = "pytest" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/59/30/6ec8dfc678ddfd1c294212bbd7088c52d3f7fbf3f05e6d8a440c13b9741a/pytest_subtests-0.14.2.tar.gz", hash = "sha256:7154a8665fd528ee70a76d00216a44d139dc3c9c83521a0f779f7b0ad4f800de", size = 18083, upload-time = "2025-06-13T10:50:01.636Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/47/d4/9bf12e59fb882b0cf4f993871e1adbee094802224c429b00861acee1a169/pytest_subtests-0.14.2-py3-none-any.whl", hash = "sha256:8da0787c994ab372a13a0ad7d390533ad2e4385cac167b3ac501258c885d0b66", size = 9115, upload-time = 
"2025-06-13T10:50:00.543Z" }, + { url = "https://files.pythonhosted.org/packages/9d/7a/d968e294073affff457b041c2be9868a40c1c71f4a35fcc1e45e5493067b/pytest_cov-7.1.0-py3-none-any.whl", hash = "sha256:a0461110b7865f9a271aa1b51e516c9a95de9d696734a2f71e3e78f46e1d4678", size = 22876, upload-time = "2026-03-21T20:11:14.438Z" }, ] [[package]] @@ -1157,50 +1277,40 @@ wheels = [ [[package]] name = "rich" -version = "14.1.0" +version = "15.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fe/75/af448d8e52bf1d8fa6a9d089ca6c07ff4453d86c65c145d0a300bb073b9b/rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8", size = 224441, upload-time = "2025-07-25T07:32:58.125Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c0/8f/0722ca900cc807c13a6a0c696dacf35430f72e0ec571c4275d2371fca3e9/rich-15.0.0.tar.gz", hash = "sha256:edd07a4824c6b40189fb7ac9bc4c52536e9780fbbfbddf6f1e2502c31b068c36", size = 230680, upload-time = "2026-04-12T08:24:00.75Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/30/3c4d035596d3cf444529e0b2953ad0466f6049528a879d27534700580395/rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f", size = 243368, upload-time = "2025-07-25T07:32:56.73Z" }, + { url = "https://files.pythonhosted.org/packages/82/3b/64d4899d73f91ba49a8c18a8ff3f0ea8f1c1d75481760df8c68ef5235bf5/rich-15.0.0-py3-none-any.whl", hash = "sha256:33bd4ef74232fb73fe9279a257718407f169c09b78a87ad3d296f548e27de0bb", size = 310654, upload-time = "2026-04-12T08:24:02.83Z" }, ] [[package]] name = "ruff" -version = "0.14.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/41/b9/9bd84453ed6dd04688de9b3f3a4146a1698e8faae2ceeccce4e14c67ae17/ruff-0.14.0.tar.gz", hash = 
"sha256:62ec8969b7510f77945df916de15da55311fade8d6050995ff7f680afe582c57", size = 5452071, upload-time = "2025-10-07T18:21:55.763Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/4e/79d463a5f80654e93fa653ebfb98e0becc3f0e7cf6219c9ddedf1e197072/ruff-0.14.0-py3-none-linux_armv6l.whl", hash = "sha256:58e15bffa7054299becf4bab8a1187062c6f8cafbe9f6e39e0d5aface455d6b3", size = 12494532, upload-time = "2025-10-07T18:21:00.373Z" }, - { url = "https://files.pythonhosted.org/packages/ee/40/e2392f445ed8e02aa6105d49db4bfff01957379064c30f4811c3bf38aece/ruff-0.14.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:838d1b065f4df676b7c9957992f2304e41ead7a50a568185efd404297d5701e8", size = 13160768, upload-time = "2025-10-07T18:21:04.73Z" }, - { url = "https://files.pythonhosted.org/packages/75/da/2a656ea7c6b9bd14c7209918268dd40e1e6cea65f4bb9880eaaa43b055cd/ruff-0.14.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:703799d059ba50f745605b04638fa7e9682cc3da084b2092feee63500ff3d9b8", size = 12363376, upload-time = "2025-10-07T18:21:07.833Z" }, - { url = "https://files.pythonhosted.org/packages/42/e2/1ffef5a1875add82416ff388fcb7ea8b22a53be67a638487937aea81af27/ruff-0.14.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ba9a8925e90f861502f7d974cc60e18ca29c72bb0ee8bfeabb6ade35a3abde7", size = 12608055, upload-time = "2025-10-07T18:21:10.72Z" }, - { url = "https://files.pythonhosted.org/packages/4a/32/986725199d7cee510d9f1dfdf95bf1efc5fa9dd714d0d85c1fb1f6be3bc3/ruff-0.14.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e41f785498bd200ffc276eb9e1570c019c1d907b07cfb081092c8ad51975bbe7", size = 12318544, upload-time = "2025-10-07T18:21:13.741Z" }, - { url = "https://files.pythonhosted.org/packages/9a/ed/4969cefd53315164c94eaf4da7cfba1f267dc275b0abdd593d11c90829a3/ruff-0.14.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30a58c087aef4584c193aebf2700f0fbcfc1e77b89c7385e3139956fa90434e2", size = 
14001280, upload-time = "2025-10-07T18:21:16.411Z" }, - { url = "https://files.pythonhosted.org/packages/ab/ad/96c1fc9f8854c37681c9613d825925c7f24ca1acfc62a4eb3896b50bacd2/ruff-0.14.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f8d07350bc7af0a5ce8812b7d5c1a7293cf02476752f23fdfc500d24b79b783c", size = 15027286, upload-time = "2025-10-07T18:21:19.577Z" }, - { url = "https://files.pythonhosted.org/packages/b3/00/1426978f97df4fe331074baf69615f579dc4e7c37bb4c6f57c2aad80c87f/ruff-0.14.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eec3bbbf3a7d5482b5c1f42d5fc972774d71d107d447919fca620b0be3e3b75e", size = 14451506, upload-time = "2025-10-07T18:21:22.779Z" }, - { url = "https://files.pythonhosted.org/packages/58/d5/9c1cea6e493c0cf0647674cca26b579ea9d2a213b74b5c195fbeb9678e15/ruff-0.14.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16b68e183a0e28e5c176d51004aaa40559e8f90065a10a559176713fcf435206", size = 13437384, upload-time = "2025-10-07T18:21:25.758Z" }, - { url = "https://files.pythonhosted.org/packages/29/b4/4cd6a4331e999fc05d9d77729c95503f99eae3ba1160469f2b64866964e3/ruff-0.14.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb732d17db2e945cfcbbc52af0143eda1da36ca8ae25083dd4f66f1542fdf82e", size = 13447976, upload-time = "2025-10-07T18:21:28.83Z" }, - { url = "https://files.pythonhosted.org/packages/3b/c0/ac42f546d07e4f49f62332576cb845d45c67cf5610d1851254e341d563b6/ruff-0.14.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:c958f66ab884b7873e72df38dcabee03d556a8f2ee1b8538ee1c2bbd619883dd", size = 13682850, upload-time = "2025-10-07T18:21:31.842Z" }, - { url = "https://files.pythonhosted.org/packages/5f/c4/4b0c9bcadd45b4c29fe1af9c5d1dc0ca87b4021665dfbe1c4688d407aa20/ruff-0.14.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7eb0499a2e01f6e0c285afc5bac43ab380cbfc17cd43a2e1dd10ec97d6f2c42d", size = 12449825, upload-time = "2025-10-07T18:21:35.074Z" }, - { 
url = "https://files.pythonhosted.org/packages/4b/a8/e2e76288e6c16540fa820d148d83e55f15e994d852485f221b9524514730/ruff-0.14.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4c63b2d99fafa05efca0ab198fd48fa6030d57e4423df3f18e03aa62518c565f", size = 12272599, upload-time = "2025-10-07T18:21:38.08Z" }, - { url = "https://files.pythonhosted.org/packages/18/14/e2815d8eff847391af632b22422b8207704222ff575dec8d044f9ab779b2/ruff-0.14.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:668fce701b7a222f3f5327f86909db2bbe99c30877c8001ff934c5413812ac02", size = 13193828, upload-time = "2025-10-07T18:21:41.216Z" }, - { url = "https://files.pythonhosted.org/packages/44/c6/61ccc2987cf0aecc588ff8f3212dea64840770e60d78f5606cd7dc34de32/ruff-0.14.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a86bf575e05cb68dcb34e4c7dfe1064d44d3f0c04bbc0491949092192b515296", size = 13628617, upload-time = "2025-10-07T18:21:44.04Z" }, - { url = "https://files.pythonhosted.org/packages/73/e6/03b882225a1b0627e75339b420883dc3c90707a8917d2284abef7a58d317/ruff-0.14.0-py3-none-win32.whl", hash = "sha256:7450a243d7125d1c032cb4b93d9625dea46c8c42b4f06c6b709baac168e10543", size = 12367872, upload-time = "2025-10-07T18:21:46.67Z" }, - { url = "https://files.pythonhosted.org/packages/41/77/56cf9cf01ea0bfcc662de72540812e5ba8e9563f33ef3d37ab2174892c47/ruff-0.14.0-py3-none-win_amd64.whl", hash = "sha256:ea95da28cd874c4d9c922b39381cbd69cb7e7b49c21b8152b014bd4f52acddc2", size = 13464628, upload-time = "2025-10-07T18:21:50.318Z" }, - { url = "https://files.pythonhosted.org/packages/c6/2a/65880dfd0e13f7f13a775998f34703674a4554906167dce02daf7865b954/ruff-0.14.0-py3-none-win_arm64.whl", hash = "sha256:f42c9495f5c13ff841b1da4cb3c2a42075409592825dada7c5885c2c844ac730", size = 12565142, upload-time = "2025-10-07T18:21:53.577Z" }, -] - -[[package]] -name = "semver" -version = "3.0.4" +version = "0.15.12" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/72/d1/d3159231aec234a59dd7d601e9dd9fe96f3afff15efd33c1070019b26132/semver-3.0.4.tar.gz", hash = "sha256:afc7d8c584a5ed0a11033af086e8af226a9c0b206f313e0301f8dd7b6b589602", size = 269730, upload-time = "2025-01-24T13:19:27.617Z" } +sdist = { url = "https://files.pythonhosted.org/packages/99/43/3291f1cc9106f4c63bdce7a8d0df5047fe8422a75b091c16b5e9355e0b11/ruff-0.15.12.tar.gz", hash = "sha256:ecea26adb26b4232c0c2ca19ccbc0083a68344180bba2a600605538ce51a40a6", size = 4643852, upload-time = "2026-04-24T18:17:14.305Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a6/24/4d91e05817e92e3a61c8a21e08fd0f390f5301f1c448b137c57c4bc6e543/semver-3.0.4-py3-none-any.whl", hash = "sha256:9c824d87ba7f7ab4a1890799cec8596f15c1241cb473404ea1cb0c55e4b04746", size = 17912, upload-time = "2025-01-24T13:19:24.949Z" }, + { url = "https://files.pythonhosted.org/packages/c3/6e/e78ffb61d4686f3d96ba3df2c801161843746dcbcbb17a1e927d4829312b/ruff-0.15.12-py3-none-linux_armv6l.whl", hash = "sha256:f86f176e188e94d6bdbc09f09bfd9dc729059ad93d0e7390b5a73efe19f8861c", size = 10640713, upload-time = "2026-04-24T18:17:22.841Z" }, + { url = "https://files.pythonhosted.org/packages/ae/08/a317bc231fb9e7b93e4ef3089501e51922ff88d6936ce5cf870c4fe55419/ruff-0.15.12-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e3bcd123364c3770b8e1b7baaf343cc99a35f197c5c6e8af79015c666c423a6c", size = 11069267, upload-time = "2026-04-24T18:17:30.105Z" }, + { url = "https://files.pythonhosted.org/packages/aa/a4/f828e9718d3dce1f5f11c39c4f65afd32783c8b2aebb2e3d259e492c47bd/ruff-0.15.12-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fe87510d000220aa1ed530d4448a7c696a0cae1213e5ec30e5874287b66557b5", size = 10397182, upload-time = "2026-04-24T18:17:07.177Z" }, + { url = "https://files.pythonhosted.org/packages/71/e0/3310fc6d1b5e1fdea22bf3b1b807c7e187b581021b0d7d4514cccdb5fb71/ruff-0.15.12-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:84a1630093121375a3e2a95b4a6dc7b59e2b4ee76216e32d81aae550a832d002", size = 10758012, upload-time = "2026-04-24T18:16:55.759Z" }, + { url = "https://files.pythonhosted.org/packages/11/c1/a606911aee04c324ddaa883ae418f3569792fd3c4a10c50e0dd0a2311e1e/ruff-0.15.12-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fb129f40f114f089ebe0ca56c0d251cf2061b17651d464bb6478dc01e69f11f5", size = 10447479, upload-time = "2026-04-24T18:16:51.677Z" }, + { url = "https://files.pythonhosted.org/packages/9d/68/4201e8444f0894f21ab4aeeaee68aa4f10b51613514a20d80bd628d57e88/ruff-0.15.12-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0c862b172d695db7598426b8af465e7e9ac00a3ea2a3630ee67eb82e366aaa6", size = 11234040, upload-time = "2026-04-24T18:17:16.529Z" }, + { url = "https://files.pythonhosted.org/packages/34/ff/8a6d6cf4ccc23fd67060874e832c18919d1557a0611ebef03fdb01fff11e/ruff-0.15.12-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2849ea9f3484c3aca43a82f484210370319e7170df4dfe4843395ddf6c57bc33", size = 12087377, upload-time = "2026-04-24T18:17:04.944Z" }, + { url = "https://files.pythonhosted.org/packages/85/f6/c669cf73f5152f623d34e69866a46d5e6185816b19fcd5b6dd8a2d299922/ruff-0.15.12-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e77c7e51c07fe396826d5969a5b846d9cd4c402535835fb6e21ce8b28fef847", size = 11367784, upload-time = "2026-04-24T18:17:25.409Z" }, + { url = "https://files.pythonhosted.org/packages/e8/39/c61d193b8a1daaa8977f7dea9e8d8ba866e02ea7b65d32f6861693aa4c12/ruff-0.15.12-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83b2f4f2f3b1026b5fb449b467d9264bf22067b600f7b6f41fc5958909f449d0", size = 11344088, upload-time = "2026-04-24T18:17:12.258Z" }, + { url = "https://files.pythonhosted.org/packages/c2/8d/49afab3645e31e12c590acb6d3b5b69d7aab5b81926dbaf7461f9441f37a/ruff-0.15.12-py3-none-manylinux_2_31_riscv64.whl", hash = 
"sha256:9ba3b8f1afd7e2e43d8943e55f249e13f9682fde09711644a6e7290eb4f3e339", size = 11271770, upload-time = "2026-04-24T18:17:02.457Z" }, + { url = "https://files.pythonhosted.org/packages/46/06/33f41fe94403e2b755481cdfb9b7ef3e4e0ed031c4581124658d935d52b4/ruff-0.15.12-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e852ba9fdc890655e1d78f2df1499efbe0e54126bd405362154a75e2bde159c5", size = 10719355, upload-time = "2026-04-24T18:17:27.648Z" }, + { url = "https://files.pythonhosted.org/packages/0d/59/18aa4e014debbf559670e4048e39260a85c7fcee84acfd761ac01e7b8d35/ruff-0.15.12-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:dd8aed930da53780d22fc70bdf84452c843cf64f8cb4eb38984319c24c5cd5fd", size = 10462758, upload-time = "2026-04-24T18:17:32.347Z" }, + { url = "https://files.pythonhosted.org/packages/25/e7/cc9f16fd0f3b5fddcbd7ec3d6ae30c8f3fde1047f32a4093a98d633c6570/ruff-0.15.12-py3-none-musllinux_1_2_i686.whl", hash = "sha256:01da3988d225628b709493d7dc67c3b9b12c0210016b08690ef9bd27970b262b", size = 10953498, upload-time = "2026-04-24T18:17:20.674Z" }, + { url = "https://files.pythonhosted.org/packages/72/7a/a9ba7f98c7a575978698f4230c5e8cc54bbc761af34f560818f933dafa0c/ruff-0.15.12-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:9cae0f92bd5700d1213188b31cd3bdd2b315361296d10b96b8e2337d3d11f53e", size = 11447765, upload-time = "2026-04-24T18:17:09.755Z" }, + { url = "https://files.pythonhosted.org/packages/ea/f9/0ae446942c846b8266059ad8a30702a35afae55f5cdc54c5adf8d7afdc27/ruff-0.15.12-py3-none-win32.whl", hash = "sha256:d0185894e038d7043ba8fd6aee7499ece6462dc0ea9f1e260c7451807c714c20", size = 10657277, upload-time = "2026-04-24T18:17:18.591Z" }, + { url = "https://files.pythonhosted.org/packages/33/f1/9614e03e1cdcbf9437570b5400ced8a720b5db22b28d8e0f1bda429f660d/ruff-0.15.12-py3-none-win_amd64.whl", hash = "sha256:c87a162d61ab3adca47c03f7f717c68672edec7d1b5499e652331780fe74950d", size = 11837758, upload-time = "2026-04-24T18:17:00.113Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/98/6beb4b351e472e5f4c4613f7c35a5290b8be2497e183825310c4c3a3984b/ruff-0.15.12-py3-none-win_arm64.whl", hash = "sha256:a538f7a82d061cee7be55542aca1d86d1393d55d81d4fcc314370f4340930d4f", size = 11120821, upload-time = "2026-04-24T18:16:57.979Z" }, ] [[package]] @@ -1209,7 +1319,7 @@ version = "2.1.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "numpy", version = "2.3.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/4d/bc/0989043118a27cccb4e906a46b7565ce36ca7b57f5a18b78f4f1b0f72d9d/shapely-2.1.2.tar.gz", hash = "sha256:2ed4ecb28320a433db18a5bf029986aa8afcfd740745e78847e330d5d94922a9", size = 315489, upload-time = "2025-09-24T13:51:41.432Z" } wheels = [ @@ -1282,73 +1392,78 @@ wheels = [ [[package]] name = "tomli" -version = "2.3.0" +version = "2.4.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" } +sdist = { url = "https://files.pythonhosted.org/packages/22/de/48c59722572767841493b26183a0d1cc411d54fd759c5607c4590b6563a6/tomli-2.4.1.tar.gz", hash = "sha256:7c7e1a961a0b2f2472c1ac5b69affa0ae1132c39adcb67aba98568702b9cc23f", size = 17543, upload-time = "2026-03-25T20:22:03.828Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" }, - { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" }, - { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" }, - { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = "2025-10-08T22:01:03.836Z" }, - { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" }, - { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" }, - { url = 
"https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" }, - { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" }, - { url = "https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" }, - { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" }, - { url = "https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" }, - { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" }, - { url = 
"https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" }, - { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" }, - { url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" }, - { url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" }, - { url = "https://files.pythonhosted.org/packages/89/48/06ee6eabe4fdd9ecd48bf488f4ac783844fd777f547b8d1b61c11939974e/tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b", size = 154819, upload-time = "2025-10-08T22:01:17.964Z" }, - { url = "https://files.pythonhosted.org/packages/f1/01/88793757d54d8937015c75dcdfb673c65471945f6be98e6a0410fba167ed/tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae", size = 148766, upload-time = "2025-10-08T22:01:18.959Z" }, - { url = 
"https://files.pythonhosted.org/packages/42/17/5e2c956f0144b812e7e107f94f1cc54af734eb17b5191c0bbfb72de5e93e/tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b", size = 240771, upload-time = "2025-10-08T22:01:20.106Z" }, - { url = "https://files.pythonhosted.org/packages/d5/f4/0fbd014909748706c01d16824eadb0307115f9562a15cbb012cd9b3512c5/tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf", size = 248586, upload-time = "2025-10-08T22:01:21.164Z" }, - { url = "https://files.pythonhosted.org/packages/30/77/fed85e114bde5e81ecf9bc5da0cc69f2914b38f4708c80ae67d0c10180c5/tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f", size = 244792, upload-time = "2025-10-08T22:01:22.417Z" }, - { url = "https://files.pythonhosted.org/packages/55/92/afed3d497f7c186dc71e6ee6d4fcb0acfa5f7d0a1a2878f8beae379ae0cc/tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05", size = 248909, upload-time = "2025-10-08T22:01:23.859Z" }, - { url = "https://files.pythonhosted.org/packages/f8/84/ef50c51b5a9472e7265ce1ffc7f24cd4023d289e109f669bdb1553f6a7c2/tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606", size = 96946, upload-time = "2025-10-08T22:01:24.893Z" }, - { url = "https://files.pythonhosted.org/packages/b2/b7/718cd1da0884f281f95ccfa3a6cc572d30053cba64603f79d431d3c9b61b/tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999", size = 107705, upload-time = "2025-10-08T22:01:26.153Z" }, - { url = 
"https://files.pythonhosted.org/packages/19/94/aeafa14a52e16163008060506fcb6aa1949d13548d13752171a755c65611/tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e", size = 154244, upload-time = "2025-10-08T22:01:27.06Z" }, - { url = "https://files.pythonhosted.org/packages/db/e4/1e58409aa78eefa47ccd19779fc6f36787edbe7d4cd330eeeedb33a4515b/tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3", size = 148637, upload-time = "2025-10-08T22:01:28.059Z" }, - { url = "https://files.pythonhosted.org/packages/26/b6/d1eccb62f665e44359226811064596dd6a366ea1f985839c566cd61525ae/tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc", size = 241925, upload-time = "2025-10-08T22:01:29.066Z" }, - { url = "https://files.pythonhosted.org/packages/70/91/7cdab9a03e6d3d2bb11beae108da5bdc1c34bdeb06e21163482544ddcc90/tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0", size = 249045, upload-time = "2025-10-08T22:01:31.98Z" }, - { url = "https://files.pythonhosted.org/packages/15/1b/8c26874ed1f6e4f1fcfeb868db8a794cbe9f227299402db58cfcc858766c/tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879", size = 245835, upload-time = "2025-10-08T22:01:32.989Z" }, - { url = "https://files.pythonhosted.org/packages/fd/42/8e3c6a9a4b1a1360c1a2a39f0b972cef2cc9ebd56025168c4137192a9321/tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005", size = 253109, upload-time = "2025-10-08T22:01:34.052Z" }, - { url = 
"https://files.pythonhosted.org/packages/22/0c/b4da635000a71b5f80130937eeac12e686eefb376b8dee113b4a582bba42/tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463", size = 97930, upload-time = "2025-10-08T22:01:35.082Z" }, - { url = "https://files.pythonhosted.org/packages/b9/74/cb1abc870a418ae99cd5c9547d6bce30701a954e0e721821df483ef7223c/tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8", size = 107964, upload-time = "2025-10-08T22:01:36.057Z" }, - { url = "https://files.pythonhosted.org/packages/54/78/5c46fff6432a712af9f792944f4fcd7067d8823157949f4e40c56b8b3c83/tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77", size = 163065, upload-time = "2025-10-08T22:01:37.27Z" }, - { url = "https://files.pythonhosted.org/packages/39/67/f85d9bd23182f45eca8939cd2bc7050e1f90c41f4a2ecbbd5963a1d1c486/tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf", size = 159088, upload-time = "2025-10-08T22:01:38.235Z" }, - { url = "https://files.pythonhosted.org/packages/26/5a/4b546a0405b9cc0659b399f12b6adb750757baf04250b148d3c5059fc4eb/tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530", size = 268193, upload-time = "2025-10-08T22:01:39.712Z" }, - { url = "https://files.pythonhosted.org/packages/42/4f/2c12a72ae22cf7b59a7fe75b3465b7aba40ea9145d026ba41cb382075b0e/tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b", size = 275488, upload-time = "2025-10-08T22:01:40.773Z" }, - { url = 
"https://files.pythonhosted.org/packages/92/04/a038d65dbe160c3aa5a624e93ad98111090f6804027d474ba9c37c8ae186/tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67", size = 272669, upload-time = "2025-10-08T22:01:41.824Z" }, - { url = "https://files.pythonhosted.org/packages/be/2f/8b7c60a9d1612a7cbc39ffcca4f21a73bf368a80fc25bccf8253e2563267/tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f", size = 279709, upload-time = "2025-10-08T22:01:43.177Z" }, - { url = "https://files.pythonhosted.org/packages/7e/46/cc36c679f09f27ded940281c38607716c86cf8ba4a518d524e349c8b4874/tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0", size = 107563, upload-time = "2025-10-08T22:01:44.233Z" }, - { url = "https://files.pythonhosted.org/packages/84/ff/426ca8683cf7b753614480484f6437f568fd2fda2edbdf57a2d3d8b27a0b/tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba", size = 119756, upload-time = "2025-10-08T22:01:45.234Z" }, - { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, + { url = "https://files.pythonhosted.org/packages/f4/11/db3d5885d8528263d8adc260bb2d28ebf1270b96e98f0e0268d32b8d9900/tomli-2.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f8f0fc26ec2cc2b965b7a3b87cd19c5c6b8c5e5f436b984e85f486d652285c30", size = 154704, upload-time = "2026-03-25T20:21:10.473Z" }, + { url = "https://files.pythonhosted.org/packages/6d/f7/675db52c7e46064a9aa928885a9b20f4124ecb9bc2e1ce74c9106648d202/tomli-2.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:4ab97e64ccda8756376892c53a72bd1f964e519c77236368527f758fbc36a53a", size = 149454, upload-time = "2026-03-25T20:21:12.036Z" }, + { url = "https://files.pythonhosted.org/packages/61/71/81c50943cf953efa35bce7646caab3cf457a7d8c030b27cfb40d7235f9ee/tomli-2.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96481a5786729fd470164b47cdb3e0e58062a496f455ee41b4403be77cb5a076", size = 237561, upload-time = "2026-03-25T20:21:13.098Z" }, + { url = "https://files.pythonhosted.org/packages/48/c1/f41d9cb618acccca7df82aaf682f9b49013c9397212cb9f53219e3abac37/tomli-2.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a881ab208c0baf688221f8cecc5401bd291d67e38a1ac884d6736cbcd8247e9", size = 243824, upload-time = "2026-03-25T20:21:14.569Z" }, + { url = "https://files.pythonhosted.org/packages/22/e4/5a816ecdd1f8ca51fb756ef684b90f2780afc52fc67f987e3c61d800a46d/tomli-2.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47149d5bd38761ac8be13a84864bf0b7b70bc051806bc3669ab1cbc56216b23c", size = 242227, upload-time = "2026-03-25T20:21:15.712Z" }, + { url = "https://files.pythonhosted.org/packages/6b/49/2b2a0ef529aa6eec245d25f0c703e020a73955ad7edf73e7f54ddc608aa5/tomli-2.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ec9bfaf3ad2df51ace80688143a6a4ebc09a248f6ff781a9945e51937008fcbc", size = 247859, upload-time = "2026-03-25T20:21:17.001Z" }, + { url = "https://files.pythonhosted.org/packages/83/bd/6c1a630eaca337e1e78c5903104f831bda934c426f9231429396ce3c3467/tomli-2.4.1-cp311-cp311-win32.whl", hash = "sha256:ff2983983d34813c1aeb0fa89091e76c3a22889ee83ab27c5eeb45100560c049", size = 97204, upload-time = "2026-03-25T20:21:18.079Z" }, + { url = "https://files.pythonhosted.org/packages/42/59/71461df1a885647e10b6bb7802d0b8e66480c61f3f43079e0dcd315b3954/tomli-2.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:5ee18d9ebdb417e384b58fe414e8d6af9f4e7a0ae761519fb50f721de398dd4e", 
size = 108084, upload-time = "2026-03-25T20:21:18.978Z" }, + { url = "https://files.pythonhosted.org/packages/b8/83/dceca96142499c069475b790e7913b1044c1a4337e700751f48ed723f883/tomli-2.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:c2541745709bad0264b7d4705ad453b76ccd191e64aa6f0fc66b69a293a45ece", size = 95285, upload-time = "2026-03-25T20:21:20.309Z" }, + { url = "https://files.pythonhosted.org/packages/c1/ba/42f134a3fe2b370f555f44b1d72feebb94debcab01676bf918d0cb70e9aa/tomli-2.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c742f741d58a28940ce01d58f0ab2ea3ced8b12402f162f4d534dfe18ba1cd6a", size = 155924, upload-time = "2026-03-25T20:21:21.626Z" }, + { url = "https://files.pythonhosted.org/packages/dc/c7/62d7a17c26487ade21c5422b646110f2162f1fcc95980ef7f63e73c68f14/tomli-2.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7f86fd587c4ed9dd76f318225e7d9b29cfc5a9d43de44e5754db8d1128487085", size = 150018, upload-time = "2026-03-25T20:21:23.002Z" }, + { url = "https://files.pythonhosted.org/packages/5c/05/79d13d7c15f13bdef410bdd49a6485b1c37d28968314eabee452c22a7fda/tomli-2.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ff18e6a727ee0ab0388507b89d1bc6a22b138d1e2fa56d1ad494586d61d2eae9", size = 244948, upload-time = "2026-03-25T20:21:24.04Z" }, + { url = "https://files.pythonhosted.org/packages/10/90/d62ce007a1c80d0b2c93e02cab211224756240884751b94ca72df8a875ca/tomli-2.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:136443dbd7e1dee43c68ac2694fde36b2849865fa258d39bf822c10e8068eac5", size = 253341, upload-time = "2026-03-25T20:21:25.177Z" }, + { url = "https://files.pythonhosted.org/packages/1a/7e/caf6496d60152ad4ed09282c1885cca4eea150bfd007da84aea07bcc0a3e/tomli-2.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5e262d41726bc187e69af7825504c933b6794dc3fbd5945e41a79bb14c31f585", size = 248159, upload-time = "2026-03-25T20:21:26.364Z" }, + { 
url = "https://files.pythonhosted.org/packages/99/e7/c6f69c3120de34bbd882c6fba7975f3d7a746e9218e56ab46a1bc4b42552/tomli-2.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5cb41aa38891e073ee49d55fbc7839cfdb2bc0e600add13874d048c94aadddd1", size = 253290, upload-time = "2026-03-25T20:21:27.46Z" }, + { url = "https://files.pythonhosted.org/packages/d6/2f/4a3c322f22c5c66c4b836ec58211641a4067364f5dcdd7b974b4c5da300c/tomli-2.4.1-cp312-cp312-win32.whl", hash = "sha256:da25dc3563bff5965356133435b757a795a17b17d01dbc0f42fb32447ddfd917", size = 98141, upload-time = "2026-03-25T20:21:28.492Z" }, + { url = "https://files.pythonhosted.org/packages/24/22/4daacd05391b92c55759d55eaee21e1dfaea86ce5c571f10083360adf534/tomli-2.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:52c8ef851d9a240f11a88c003eacb03c31fc1c9c4ec64a99a0f922b93874fda9", size = 108847, upload-time = "2026-03-25T20:21:29.386Z" }, + { url = "https://files.pythonhosted.org/packages/68/fd/70e768887666ddd9e9f5d85129e84910f2db2796f9096aa02b721a53098d/tomli-2.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:f758f1b9299d059cc3f6546ae2af89670cb1c4d48ea29c3cacc4fe7de3058257", size = 95088, upload-time = "2026-03-25T20:21:30.677Z" }, + { url = "https://files.pythonhosted.org/packages/07/06/b823a7e818c756d9a7123ba2cda7d07bc2dd32835648d1a7b7b7a05d848d/tomli-2.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:36d2bd2ad5fb9eaddba5226aa02c8ec3fa4f192631e347b3ed28186d43be6b54", size = 155866, upload-time = "2026-03-25T20:21:31.65Z" }, + { url = "https://files.pythonhosted.org/packages/14/6f/12645cf7f08e1a20c7eb8c297c6f11d31c1b50f316a7e7e1e1de6e2e7b7e/tomli-2.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:eb0dc4e38e6a1fd579e5d50369aa2e10acfc9cace504579b2faabb478e76941a", size = 149887, upload-time = "2026-03-25T20:21:33.028Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/e0/90637574e5e7212c09099c67ad349b04ec4d6020324539297b634a0192b0/tomli-2.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c7f2c7f2b9ca6bdeef8f0fa897f8e05085923eb091721675170254cbc5b02897", size = 243704, upload-time = "2026-03-25T20:21:34.51Z" }, + { url = "https://files.pythonhosted.org/packages/10/8f/d3ddb16c5a4befdf31a23307f72828686ab2096f068eaf56631e136c1fdd/tomli-2.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f3c6818a1a86dd6dca7ddcaaf76947d5ba31aecc28cb1b67009a5877c9a64f3f", size = 251628, upload-time = "2026-03-25T20:21:36.012Z" }, + { url = "https://files.pythonhosted.org/packages/e3/f1/dbeeb9116715abee2485bf0a12d07a8f31af94d71608c171c45f64c0469d/tomli-2.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d312ef37c91508b0ab2cee7da26ec0b3ed2f03ce12bd87a588d771ae15dcf82d", size = 247180, upload-time = "2026-03-25T20:21:37.136Z" }, + { url = "https://files.pythonhosted.org/packages/d3/74/16336ffd19ed4da28a70959f92f506233bd7cfc2332b20bdb01591e8b1d1/tomli-2.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:51529d40e3ca50046d7606fa99ce3956a617f9b36380da3b7f0dd3dd28e68cb5", size = 251674, upload-time = "2026-03-25T20:21:38.298Z" }, + { url = "https://files.pythonhosted.org/packages/16/f9/229fa3434c590ddf6c0aa9af64d3af4b752540686cace29e6281e3458469/tomli-2.4.1-cp313-cp313-win32.whl", hash = "sha256:2190f2e9dd7508d2a90ded5ed369255980a1bcdd58e52f7fe24b8162bf9fedbd", size = 97976, upload-time = "2026-03-25T20:21:39.316Z" }, + { url = "https://files.pythonhosted.org/packages/6a/1e/71dfd96bcc1c775420cb8befe7a9d35f2e5b1309798f009dca17b7708c1e/tomli-2.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:8d65a2fbf9d2f8352685bc1364177ee3923d6baf5e7f43ea4959d7d8bc326a36", size = 108755, upload-time = "2026-03-25T20:21:40.248Z" }, + { url = 
"https://files.pythonhosted.org/packages/83/7a/d34f422a021d62420b78f5c538e5b102f62bea616d1d75a13f0a88acb04a/tomli-2.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:4b605484e43cdc43f0954ddae319fb75f04cc10dd80d830540060ee7cd0243cd", size = 95265, upload-time = "2026-03-25T20:21:41.219Z" }, + { url = "https://files.pythonhosted.org/packages/3c/fb/9a5c8d27dbab540869f7c1f8eb0abb3244189ce780ba9cd73f3770662072/tomli-2.4.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:fd0409a3653af6c147209d267a0e4243f0ae46b011aa978b1080359fddc9b6cf", size = 155726, upload-time = "2026-03-25T20:21:42.23Z" }, + { url = "https://files.pythonhosted.org/packages/62/05/d2f816630cc771ad836af54f5001f47a6f611d2d39535364f148b6a92d6b/tomli-2.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a120733b01c45e9a0c34aeef92bf0cf1d56cfe81ed9d47d562f9ed591a9828ac", size = 149859, upload-time = "2026-03-25T20:21:43.386Z" }, + { url = "https://files.pythonhosted.org/packages/ce/48/66341bdb858ad9bd0ceab5a86f90eddab127cf8b046418009f2125630ecb/tomli-2.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:559db847dc486944896521f68d8190be1c9e719fced785720d2216fe7022b662", size = 244713, upload-time = "2026-03-25T20:21:44.474Z" }, + { url = "https://files.pythonhosted.org/packages/df/6d/c5fad00d82b3c7a3ab6189bd4b10e60466f22cfe8a08a9394185c8a8111c/tomli-2.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01f520d4f53ef97964a240a035ec2a869fe1a37dde002b57ebc4417a27ccd853", size = 252084, upload-time = "2026-03-25T20:21:45.62Z" }, + { url = "https://files.pythonhosted.org/packages/00/71/3a69e86f3eafe8c7a59d008d245888051005bd657760e96d5fbfb0b740c2/tomli-2.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7f94b27a62cfad8496c8d2513e1a222dd446f095fca8987fceef261225538a15", size = 247973, upload-time = "2026-03-25T20:21:46.937Z" }, + { url = 
"https://files.pythonhosted.org/packages/67/50/361e986652847fec4bd5e4a0208752fbe64689c603c7ae5ea7cb16b1c0ca/tomli-2.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ede3e6487c5ef5d28634ba3f31f989030ad6af71edfb0055cbbd14189ff240ba", size = 256223, upload-time = "2026-03-25T20:21:48.467Z" }, + { url = "https://files.pythonhosted.org/packages/8c/9a/b4173689a9203472e5467217e0154b00e260621caa227b6fa01feab16998/tomli-2.4.1-cp314-cp314-win32.whl", hash = "sha256:3d48a93ee1c9b79c04bb38772ee1b64dcf18ff43085896ea460ca8dec96f35f6", size = 98973, upload-time = "2026-03-25T20:21:49.526Z" }, + { url = "https://files.pythonhosted.org/packages/14/58/640ac93bf230cd27d002462c9af0d837779f8773bc03dee06b5835208214/tomli-2.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:88dceee75c2c63af144e456745e10101eb67361050196b0b6af5d717254dddf7", size = 109082, upload-time = "2026-03-25T20:21:50.506Z" }, + { url = "https://files.pythonhosted.org/packages/d5/2f/702d5e05b227401c1068f0d386d79a589bb12bf64c3d2c72ce0631e3bc49/tomli-2.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:b8c198f8c1805dc42708689ed6864951fd2494f924149d3e4bce7710f8eb5232", size = 96490, upload-time = "2026-03-25T20:21:51.474Z" }, + { url = "https://files.pythonhosted.org/packages/45/4b/b877b05c8ba62927d9865dd980e34a755de541eb65fffba52b4cc495d4d2/tomli-2.4.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:d4d8fe59808a54658fcc0160ecfb1b30f9089906c50b23bcb4c69eddc19ec2b4", size = 164263, upload-time = "2026-03-25T20:21:52.543Z" }, + { url = "https://files.pythonhosted.org/packages/24/79/6ab420d37a270b89f7195dec5448f79400d9e9c1826df982f3f8e97b24fd/tomli-2.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7008df2e7655c495dd12d2a4ad038ff878d4ca4b81fccaf82b714e07eae4402c", size = 160736, upload-time = "2026-03-25T20:21:53.674Z" }, + { url = 
"https://files.pythonhosted.org/packages/02/e0/3630057d8eb170310785723ed5adcdfb7d50cb7e6455f85ba8a3deed642b/tomli-2.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1d8591993e228b0c930c4bb0db464bdad97b3289fb981255d6c9a41aedc84b2d", size = 270717, upload-time = "2026-03-25T20:21:55.129Z" }, + { url = "https://files.pythonhosted.org/packages/7a/b4/1613716072e544d1a7891f548d8f9ec6ce2faf42ca65acae01d76ea06bb0/tomli-2.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:734e20b57ba95624ecf1841e72b53f6e186355e216e5412de414e3c51e5e3c41", size = 278461, upload-time = "2026-03-25T20:21:56.228Z" }, + { url = "https://files.pythonhosted.org/packages/05/38/30f541baf6a3f6df77b3df16b01ba319221389e2da59427e221ef417ac0c/tomli-2.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8a650c2dbafa08d42e51ba0b62740dae4ecb9338eefa093aa5c78ceb546fcd5c", size = 274855, upload-time = "2026-03-25T20:21:57.653Z" }, + { url = "https://files.pythonhosted.org/packages/77/a3/ec9dd4fd2c38e98de34223b995a3b34813e6bdadf86c75314c928350ed14/tomli-2.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:504aa796fe0569bb43171066009ead363de03675276d2d121ac1a4572397870f", size = 283144, upload-time = "2026-03-25T20:21:59.089Z" }, + { url = "https://files.pythonhosted.org/packages/ef/be/605a6261cac79fba2ec0c9827e986e00323a1945700969b8ee0b30d85453/tomli-2.4.1-cp314-cp314t-win32.whl", hash = "sha256:b1d22e6e9387bf4739fbe23bfa80e93f6b0373a7f1b96c6227c32bef95a4d7a8", size = 108683, upload-time = "2026-03-25T20:22:00.214Z" }, + { url = "https://files.pythonhosted.org/packages/12/64/da524626d3b9cc40c168a13da8335fe1c51be12c0a63685cc6db7308daae/tomli-2.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:2c1c351919aca02858f740c6d33adea0c5deea37f9ecca1cc1ef9e884a619d26", size = 121196, upload-time = "2026-03-25T20:22:01.169Z" }, + { url = 
"https://files.pythonhosted.org/packages/5a/cd/e80b62269fc78fc36c9af5a6b89c835baa8af28ff5ad28c7028d60860320/tomli-2.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:eab21f45c7f66c13f2a9e0e1535309cee140182a9cdae1e041d02e47291e8396", size = 100393, upload-time = "2026-03-25T20:22:02.137Z" }, + { url = "https://files.pythonhosted.org/packages/7b/61/cceae43728b7de99d9b847560c262873a1f6c98202171fd5ed62640b494b/tomli-2.4.1-py3-none-any.whl", hash = "sha256:0d85819802132122da43cb86656f8d1f8c6587d54ae7dcaf30e90533028b49fe", size = 14583, upload-time = "2026-03-25T20:22:03.012Z" }, ] [[package]] name = "types-pyyaml" -version = "6.0.12.20250915" +version = "6.0.12.20260408" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7e/69/3c51b36d04da19b92f9e815be12753125bd8bc247ba0470a982e6979e71c/types_pyyaml-6.0.12.20250915.tar.gz", hash = "sha256:0f8b54a528c303f0e6f7165687dd33fafa81c807fcac23f632b63aa624ced1d3", size = 17522, upload-time = "2025-09-15T03:01:00.728Z" } +sdist = { url = "https://files.pythonhosted.org/packages/74/73/b759b1e413c31034cc01ecdfb96b38115d0ab4db55a752a3929f0cd449fd/types_pyyaml-6.0.12.20260408.tar.gz", hash = "sha256:92a73f2b8d7f39ef392a38131f76b970f8c66e4c42b3125ae872b7c93b556307", size = 17735, upload-time = "2026-04-08T04:30:50.974Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bd/e0/1eed384f02555dde685fff1a1ac805c1c7dcb6dd019c916fe659b1c1f9ec/types_pyyaml-6.0.12.20250915-py3-none-any.whl", hash = "sha256:e7d4d9e064e89a3b3cae120b4990cd370874d2bf12fa5f46c97018dd5d3c9ab6", size = 20338, upload-time = "2025-09-15T03:00:59.218Z" }, + { url = "https://files.pythonhosted.org/packages/1c/f0/c391068b86abb708882c6d75a08cd7d25b2c7227dab527b3a3685a3c635b/types_pyyaml-6.0.12.20260408-py3-none-any.whl", hash = "sha256:fbc42037d12159d9c801ebfcc79ebd28335a7c13b08a4cfbc6916df78fee9384", size = 20339, upload-time = "2026-04-08T04:30:50.113Z" }, ] [[package]] name = "types-shapely" -version = 
"2.1.0.20250917" +version = "2.1.0.20260408" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "numpy", version = "2.3.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fa/19/7f28b10994433d43b9caa66f3b9bd6a0a9192b7ce8b5a7fc41534e54b821/types_shapely-2.1.0.20250917.tar.gz", hash = "sha256:5c56670742105aebe40c16414390d35fcaa55d6f774d328c1a18273ab0e2134a", size = 26363, upload-time = "2025-09-17T02:47:44.604Z" } +sdist = { url = "https://files.pythonhosted.org/packages/10/8d/bf9e3eb51249601e22d797481999a06fb34998c4db5c76804394f8a3fa28/types_shapely-2.1.0.20260408.tar.gz", hash = "sha256:8552549d9429baa52ec4331e43b5db3b334fc3a7f30da48663010b7454b1451c", size = 26529, upload-time = "2026-04-08T04:34:42.111Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/a9/554ac40810e530263b6163b30a2b623bc16aae3fb64416f5d2b3657d0729/types_shapely-2.1.0.20250917-py3-none-any.whl", hash = "sha256:9334a79339504d39b040426be4938d422cec419168414dc74972aa746a8bf3a1", size = 37813, upload-time = "2025-09-17T02:47:43.788Z" }, + { url = "https://files.pythonhosted.org/packages/8e/3d/cbec691f56e71636192a07bf6809f598bed06d869b03b4e2b1ad2f7df032/types_shapely-2.1.0.20260408-py3-none-any.whl", hash = "sha256:8a31e2b074342a363f0c9d0c7d6e1e6c0dcce302a92ef94d64d0ca2a2b94a1d1", size = 37818, upload-time = "2026-04-08T04:34:41.243Z" }, ] [[package]]