diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 966087019..b92694c15 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -2,46 +2,15 @@ name: CI
 on: [push, pull_request]
 jobs:
   build:
-    if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          submodules: true
-      - uses: actions/setup-python@v5
-        with:
-          python-version: '3.10'
-          cache: pip
-          cache-dependency-path: '**/requirements*.txt'
-      # Don't install editable projects in the current working directory.
-      # https://pip.pypa.io/en/latest/reference/pip_install/#install-src
-      - run: pip install --src $GITHUB_WORKSPACE/../src -r requirements.txt
-      - if: github.repository == 'open-contracting/standard_profile_template'
-        run: make update extract
-      - run: make
-      # Disable linkcheck as it is too slow for development.
-      # - run: |
-      #     make linkcheck
-      #     rm -f output.json output.txt
+    uses: open-contracting/.github/.github/workflows/ci-profile.yml@main
+    secrets:
+      private-key: ${{ secrets.PRIVATE_KEY }}
+      elasticsearch-password: ${{ secrets.ELASTICSEARCH_PASSWORD }}
+    with:
+      path-prefix: ''
+      version: '1.1'
+      production-refs: '["refs/heads/1.0", "refs/heads/1.1"]'
+      # Linkcheck is slow for development.
+      linkcheck: false
       # "ResourceWarning: unclosed file <_io.BufferedWriter name='/dev/null'>"
-      - run: pytest -W error -W ignore::ResourceWarning
-      # Deploy the built documentation to the staging directory.
-      - if: github.event_name == 'push'
-        uses: shimataro/ssh-key-action@v2
-        with:
-          key: ${{ secrets.PRIVATE_KEY }}
-          known_hosts: standard.open-contracting.org ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIGveFGTJ9yyObNGDUCUyzyFkm6Kzh3YqIt1qB7B/KU6E
-      - if: github.event_name == 'push'
-        uses: bcomnes/netrc-creds@v3
-        with:
-          machine: standard.open-contracting.org
-          login: manage
-          password: ${{ secrets.ELASTICSEARCH_PASSWORD }}
-      - if: github.event_name == 'push' && success() && github.repository != 'open-contracting/standard_profile_template'
-        env:
-          PATH_PREFIX: ""
-          PRODUCTION: ${{ startsWith(github.ref, 'refs/tags') || github.ref == 'refs/heads/latest' || github.ref == 'refs/heads/1.0' || github.ref == 'refs/heads/1.1' }}
-          RELEASE: ${{ startsWith(github.ref, 'refs/tags') }}
-          VERSION: "1.1"
-        shell: bash
-        run: curl -sS https://raw.githubusercontent.com/open-contracting/deploy/main/deploy-docs.sh | bash -
+      pytest-options: -W ignore::ResourceWarning
diff --git a/.github/workflows/js.yml b/.github/workflows/js.yml
index 4c8147e26..f0d0e38b5 100644
--- a/.github/workflows/js.yml
+++ b/.github/workflows/js.yml
@@ -1,10 +1,7 @@
 name: Lint JavaScript
 on: [push, pull_request]
 jobs:
-  build:
-    if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - uses: biomejs/setup-biome@v2
-      - run: biome ci --indent-style=space --line-width=119 docs/_static/script.js
+  lint:
+    uses: open-contracting/.github/.github/workflows/js.yml@main
+    with:
+      filenames: docs/_static/script.js
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 621563ce9..600ad9d5a 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -1,33 +1,11 @@
 name: Lint
 on: [push, pull_request]
-env:
-  BASEDIR: https://raw.githubusercontent.com/open-contracting/standard-maintenance-scripts/main
 jobs:
-  build:
-    if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
-    runs-on: ubuntu-latest
-    env:
-      PAT: ${{ secrets.PAT }}
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          token: ${{ secrets.PAT || github.token }}
-      - uses: actions/setup-python@v5
-        with:
-          python-version: '3.10'
-          cache: pip
-          cache-dependency-path: '**/requirements*.txt'
-      - id: changed-files
-        uses: tj-actions/changed-files@v45
-      - uses: pre-commit/action@v3.0.1
-        continue-on-error: true
-        with:
-          extra_args: pip-compile --files ${{ steps.changed-files.outputs.all_changed_files }}
-      - if: ${{ env.PAT }}
-        uses: stefanzweifel/git-auto-commit-action@v5
-        with:
-          commit_message: '[github-actions] pre-commit autoupdate'
-      - shell: bash
-        run: curl -s -S --retry 3 $BASEDIR/tests/install.sh | bash -
-      - shell: bash
-        run: curl -s -S --retry 3 $BASEDIR/tests/script.sh | bash -
+  lint:
+    uses: open-contracting/.github/.github/workflows/lint.yml@main
+    permissions:
+      contents: write
+    secrets:
+      personal-access-token: ${{ secrets.PAT }}
+    with:
+      python-version: '3.10'
diff --git a/.github/workflows/shell.yml b/.github/workflows/shell.yml
index fe96765b8..dfdb0acfb 100644
--- a/.github/workflows/shell.yml
+++ b/.github/workflows/shell.yml
@@ -1,14 +1,5 @@
 name: Lint Shell
 on: [push, pull_request]
 jobs:
-  build:
-    if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - run: |
-          sudo apt update
-          sudo apt install devscripts shellcheck shfmt
-      - run: checkbashisms $(shfmt -f .)
-      - run: shellcheck $(shfmt -f .)
-      - run: shfmt -d -i 4 -sr $(shfmt -f .)
+  lint:
+    uses: open-contracting/.github/.github/workflows/shell.yml@main
diff --git a/.github/workflows/spellcheck.yml b/.github/workflows/spellcheck.yml
index b60b28a2b..d67a6d01e 100644
--- a/.github/workflows/spellcheck.yml
+++ b/.github/workflows/spellcheck.yml
@@ -1,13 +1,8 @@
 name: Spell-check
 on: [push, pull_request]
 jobs:
-  build:
-    if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-python@v5
-        with:
-          python-version: '3.10'
-      - run: pip install codespell
-      - run: codespell -S .git,docson,locale,examples,country.csv,currency.csv,language.csv,mediaType.csv -L fo,sme,zar,SME .
+  lint:
+    uses: open-contracting/.github/.github/workflows/spellcheck.yml@main
+    with:
+      ignore: fo,sme,zar
+      skip: docson,locale,examples,country.csv,currency.csv,language.csv,mediaType.csv
diff --git a/common-requirements.txt b/common-requirements.txt
index 9dd5cffd2..3ca01e41a 100644
--- a/common-requirements.txt
+++ b/common-requirements.txt
@@ -90,7 +90,7 @@ myst-parser==0.18.1
     # via -r common-requirements.in
 ocds-babel==0.3.6
     # via -r common-requirements.in
-ocdsextensionregistry==0.5.0
+ocdsextensionregistry==0.6.5
     # via -r common-requirements.in
 ocdsindex==0.2.0
     # via -r common-requirements.in
@@ -158,7 +158,7 @@ sphinxcontrib-qthelp==1.0.3
     # via sphinx
 sphinxcontrib-serializinghtml==1.1.5
     # via sphinx
-standard-theme @ git+https://github.com/open-contracting/standard_theme.git@07ca0e39979a244656dd6df0658f2ead428184b9#egg=standard_theme
+standard-theme @ git+https://github.com/open-contracting/standard_theme.git@5de343d1d8e342b5f2a42c6132db37aebe382e36#egg=standard_theme
     # via -r common-requirements.in
 starlette==0.40.0
     # via sphinx-autobuild
diff --git a/docs/conf.py b/docs/conf.py
index 5f8d438c2..98d1c159b 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -6,10 +6,6 @@
 
 # -- Path setup --------------------------------------------------------------
 
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-
 import csv
 import json
 import os
@@ -24,12 +20,12 @@
 
 # -- Project information -----------------------------------------------------
 
-project = 'Open Contracting Data Standard'
-copyright = 'Open Contracting Partnership'
-author = 'Open Contracting Partnership'
+project = "Open Contracting Data Standard"
+copyright = "Open Contracting Partnership"
+author = "Open Contracting Partnership"
 
-version = '1.1'
-release = '1.1.5'
+version = "1.1"
+release = "1.1.5"
 
 
 # -- General configuration ---------------------------------------------------
@@ -38,21 +34,21 @@
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
 # ones.
 extensions = [
-    'myst_parser',
-    'sphinx.ext.ifconfig',
-    'sphinxcontrib.jsonschema',
-    'sphinxcontrib.opencontracting',
-    'sphinxcontrib.opendataservices',
-    'sphinx_design',
+    "myst_parser",
+    "sphinx.ext.ifconfig",
+    "sphinxcontrib.jsonschema",
+    "sphinxcontrib.opencontracting",
+    "sphinxcontrib.opendataservices",
+    "sphinx_design",
 ]
 
 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]
 
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
 # This pattern also affects html_static_path and html_extra_path.
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', '**/docson/[!p]**', '**/docson/package*.json']
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "**/docson/[!p]**", "**/docson/package*.json"]
 
 
 # -- Options for HTML output -------------------------------------------------
@@ -60,82 +56,81 @@
 # The theme to use for HTML and HTML Help pages. See the documentation for
 # a list of builtin themes.
 #
-html_theme = 'standard_theme'  # 'pydata_sphinx_theme'
+html_theme = "standard_theme"  # 'pydata_sphinx_theme'
 html_theme_path = [standard_theme.get_html_theme_path()]
-html_favicon = '_static/favicon-16x16.ico'
+html_favicon = "_static/favicon-16x16.ico"
 
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+html_static_path = ["_static"]
 
 
 # -- Local configuration -----------------------------------------------------
 
-_ = get_translation('theme')
+_ = get_translation("theme")
 
-profile_identifier = ''
-repository_url = 'https://github.com/open-contracting/standard'
+profile_identifier = ""
+repository_url = "https://github.com/open-contracting/standard"
 
 # Internationalization.
 gettext_compact = False
 # `DOMAIN_PREFIX` from `config.mk`.
-gettext_domain_prefix = f'{profile_identifier}-' if profile_identifier else ''
-locale_dirs = ['locale/', os.path.join(standard_theme.get_html_theme_path(), 'locale')]
+gettext_domain_prefix = f"{profile_identifier}-" if profile_identifier else ""
+locale_dirs = ["locale/", os.path.join(standard_theme.get_html_theme_path(), "locale")]
 # We use single quotes for codes, which docutils will change to double quotes.
 # https://sourceforge.net/p/docutils/code/HEAD/tree/trunk/docutils/docutils/utils/smartquotes.py
 smartquotes = False
 
 # MyST configuration.
-myst_enable_extensions = ['linkify']
+myst_enable_extensions = ["linkify"]
 myst_heading_anchors = 6
 myst_heading_slug_func = make_id
 # https://github.com/executablebooks/MyST-Parser/issues/357
-suppress_warnings = ['myst.anchor']
+suppress_warnings = ["myst.anchor"]
 
 # Theme customization.
 navigation_with_keys = False  # restore the Sphinx default
 html_context = {
-    'analytics_id': 'HTWZHRIZ',
+    "analytics_id": "HTWZHRIZ",
 }
 html_theme_options = {
-    'analytics_id': 'HTWZHRIZ',
-    'display_version': True,
-    'root_url': f'/profiles/{profile_identifier}' if profile_identifier else '',
-    'short_project': project.replace('Open Contracting Data Standard', 'OCDS'),
-    'copyright': copyright,
-    'license_name': 'Apache License 2.0',
-    'license_url': f'{repository_url}/blob/HEAD/LICENSE',
-    'repository_url': repository_url,
+    "analytics_id": "HTWZHRIZ",
+    "display_version": True,
+    "root_url": f"/profiles/{profile_identifier}" if profile_identifier else "",
+    "short_project": project.replace("Open Contracting Data Standard", "OCDS"),
+    "copyright": copyright,
+    "license_name": "Apache License 2.0",
+    "license_url": f"{repository_url}/blob/HEAD/LICENSE",
+    "repository_url": repository_url,
 }
-html_short_title = f'{html_theme_options["short_project"]} v{release}'
+html_short_title = f"{html_theme_options['short_project']} v{release}"
 
 # List the extension identifiers and versions that should be part of this specification. The extensions must be in
 # the extension registry: https://github.com/open-contracting/extension_registry/blob/main/extension_versions.csv
-default_extension_version = f'v{release}'
+default_extension_version = f"v{release}"
 extension_versions = {
-    'bids': default_extension_version,
-    'enquiries': default_extension_version,
-    'location': default_extension_version,
-    'lots': default_extension_version,
-    'milestone_documents': default_extension_version,
-    'participation_fee': default_extension_version,
-    'process_title': default_extension_version,
+    "bids": default_extension_version,
+    "enquiries": default_extension_version,
+    "location": default_extension_version,
+    "lots": default_extension_version,
+    "participation_fee": default_extension_version,
+    "process_title": default_extension_version,
 }
 
 # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-the-linkcheck-builder
 # Ignore Google Sheets.
-linkcheck_anchors_ignore = [r'^gid=']
+linkcheck_anchors_ignore = [r"^gid="]
 linkcheck_ignore = [
     # Avoid GitHub.com rate limiting.
-    r'^https://github.com/open-contracting/standard/(?:issues|pull)/\d+$',
+    r"^https://github.com/open-contracting/standard/(?:issues|pull)/\d+$",
     # Ignore irreproducible false positives.
-    r'^https://www.fcny.org/fcny/$',
-    r'^http://www.eprocurementtoolkit.org/sites/default/files/2016-11/OCDS_Implemetation_Methodology_0.pdf#page=27$',
+    r"^https://www.fcny.org/fcny/$",
+    r"^http://www.eprocurementtoolkit.org/sites/default/files/2016-11/OCDS_Implemetation_Methodology_0.pdf#page=27$",
     # Ignore unwanted links created by linkify.
-    r'^http://vnd\.',
+    r"^http://vnd\.",
     # Ignore expected redirects.
-    r'^https://docs.google.com/spreadsheets/d/[^/]+/pub?gid=\d+&single=true&output=csv$',
+    r"^https://docs.google.com/spreadsheets/d/[^/]+/pub?gid=\d+&single=true&output=csv$",
 ]
 
 
@@ -143,32 +138,38 @@ def setup(app):
     # The root of the repository.
     basedir = Path(__file__).resolve().parents[1]
     # `LOCALE_DIR` from `config.mk`.
-    localedir = basedir / 'docs' / 'locale'
+    localedir = basedir / "docs" / "locale"
 
-    language = app.config.overrides.get('language', 'en')
+    language = app.config.overrides.get("language", "en")
 
-    headers = ['Title', 'Description', 'Extension']
+    headers = ["Title", "Description", "Extension"]
 
     # The gettext domain for schema translations. Should match the domain in the `pybabel compile` command.
-    schema_domain = f'{gettext_domain_prefix}schema'
+    schema_domain = f"{gettext_domain_prefix}schema"
     # The gettext domain for codelist translations. Should match the domain in the `pybabel compile` command.
-    codelists_domain = f'{gettext_domain_prefix}codelists'
-
-    standard_dir = basedir / 'schema'
-    standard_build_dir = basedir / 'build' / language
-
-    branch = os.getenv('GITHUB_REF_NAME', 'latest')
-
-    translate([
-        # The glob patterns in `babel_ocds_schema.cfg` should match these filenames.
-        (glob(str(standard_dir / '*-schema.json')), standard_build_dir, schema_domain),
-        # The glob patterns in `babel_ocds_codelist.cfg` should match these.
-        (glob(str(standard_dir / 'codelists' / '*.csv')), standard_build_dir / 'codelists', codelists_domain),
-    ], localedir, language, headers, version=branch)
-
-    with (standard_build_dir / 'release-schema.json').open() as f:
+    codelists_domain = f"{gettext_domain_prefix}codelists"
+
+    standard_dir = basedir / "schema"
+    standard_build_dir = basedir / "build" / language
+
+    branch = os.getenv("GITHUB_REF_NAME", "latest")
+
+    translate(
+        [
+            # The glob patterns in `babel_ocds_schema.cfg` should match these filenames.
+            (glob(str(standard_dir / "*-schema.json")), standard_build_dir, schema_domain),
+            # The glob patterns in `babel_ocds_codelist.cfg` should match these.
+            (glob(str(standard_dir / "codelists" / "*.csv")), standard_build_dir / "codelists", codelists_domain),
+        ],
+        localedir,
+        language,
+        headers,
+        version=branch,
+    )
+
+    with (standard_build_dir / "release-schema.json").open() as f:
         fieldnames, rows = mapping_sheet(json.load(f), infer_required=True)
-    with (standard_build_dir / 'release-schema.csv').open('w') as f:
+    with (standard_build_dir / "release-schema.csv").open("w") as f:
         writer = csv.DictWriter(f, fieldnames)
         writer.writeheader()
         writer.writerows(rows)
diff --git a/docs/history/changelog.md b/docs/history/changelog.md
index a717f9dc2..d97ea9573 100644
--- a/docs/history/changelog.md
+++ b/docs/history/changelog.md
@@ -402,7 +402,7 @@ See the changelogs for:
 * [Enquiries](https://extensions.open-contracting.org/en/extensions/enquiries/master/#changelog)
 * [Location](https://extensions.open-contracting.org/en/extensions/location/master/#changelog)
 * [Lots](https://extensions.open-contracting.org/en/extensions/lots/master/#changelog)
-* [Milestone documents](https://extensions.open-contracting.org/en/extensions/milestone_documents/master/#changelog)
+* [Milestone documents](https://github.com/open-contracting-extensions/ocds_milestone_documents_extension#changelog)
 * [Participation fees](https://extensions.open-contracting.org/en/extensions/participation_fee/master/#changelog)
 * [Process level title and description](https://extensions.open-contracting.org/en/extensions/process_title/master/#changelog)
 
diff --git a/docs/locale/es/LC_MESSAGES/history/changelog.po b/docs/locale/es/LC_MESSAGES/history/changelog.po
index e99fd7d4b..7a2399215 100644
--- a/docs/locale/es/LC_MESSAGES/history/changelog.po
+++ b/docs/locale/es/LC_MESSAGES/history/changelog.po
@@ -792,11 +792,9 @@ msgstr ""
 
 #: ../../docs/history/changelog.md:111
 msgid ""
-"[Milestone documents](https://extensions.open-"
-"contracting.org/en/extensions/milestone_documents/master/#changelog)"
+"[Milestone documents](https://github.com/open-contracting-extensions/ocds_milestone_documents_extension#changelog)"
 msgstr ""
-"[Documentos de hito](https://extensions.open-"
-"contracting.org/en/extensions/milestone_documents/master/#changelog)"
+"[Documentos de hito](https://github.com/open-contracting-extensions/ocds_milestone_documents_extension#changelog)"
 
 #: ../../docs/history/changelog.md:112 ../../docs/history/changelog.md:190
 msgid ""
diff --git a/docs/locale/fr/LC_MESSAGES/history/changelog.po b/docs/locale/fr/LC_MESSAGES/history/changelog.po
index 9863d3c23..2eb30a8d7 100644
--- a/docs/locale/fr/LC_MESSAGES/history/changelog.po
+++ b/docs/locale/fr/LC_MESSAGES/history/changelog.po
@@ -601,8 +601,7 @@ msgstr ""
 
 #: ../../docs/history/changelog.md:110
 msgid ""
-"[Milestone documents](https://extensions.open-"
-"contracting.org/en/extensions/milestone_documents/master/#changelog)"
+"[Milestone documents](https://github.com/open-contracting-extensions/ocds_milestone_documents_extension#changelog)"
 msgstr ""
 
 #: ../../docs/history/changelog.md:111 ../../docs/history/changelog.md:189
diff --git a/docs/schema/reference.md b/docs/schema/reference.md
index 698b4754c..e53f6b44a 100644
--- a/docs/schema/reference.md
+++ b/docs/schema/reference.md
@@ -629,10 +629,6 @@ For delivery milestones, if there is a time frame for delivery, use `.dueAfterDa
 :collapse: documents
 ```
 
-```{extensionlist} The following extensions to milestone are available
-:list: milestones
-```
-
 ```{workedexamplelist} The following worked examples are available for milestones
 :tag: milestone
 ```
diff --git a/manage.py b/manage.py
index 9969859b9..dc2ffe475 100755
--- a/manage.py
+++ b/manage.py
@@ -9,20 +9,20 @@
 import warnings
 from collections import defaultdict
 from contextlib import contextmanager
-from copy import deepcopy
 from glob import glob
 from io import StringIO
 from pathlib import Path
 
 import click
 import json_merge_patch
-import jsonref
 import lxml.etree
 import lxml.html
 import requests
 from babel.messages.pofile import read_po
 from docutils.utils import relative_path
 from lxml import etree
+from ocdsextensionregistry import get_versioned_release_schema
+from ocdsextensionregistry.util import replace_refs
 from ocdskit.schema import get_schema_fields
 
 basedir = Path(__file__).resolve().parent
@@ -40,82 +40,11 @@ def custom_warning_formatter(message, category, filename, lineno, line=None):
 
 warnings.formatwarning = custom_warning_formatter
 
-versioned_template = json.loads("""
-{
-    "type": "array",
-    "items": {
-        "type": "object",
-        "properties": {
-            "releaseDate": {
-                "format": "date-time",
-                "type": "string"
-            },
-            "releaseID": {
-                "type": "string"
-            },
-            "value": {},
-            "releaseTag": {
-                "type": "array",
-                "items": {
-                    "type": "string"
-                }
-            }
-        }
-    }
-}
-""")
-
-common_versioned_definitions = {
-    "StringNullUriVersioned": {
-        "type": ["string", "null"],
-        "format": "uri",
-    },
-    "StringNullDateTimeVersioned": {
-        "type": ["string", "null"],
-        "format": "date-time",
-    },
-    "StringNullVersioned": {
-        "type": ["string", "null"],
-        "format": None,
-    },
-}
-
-recognized_types = (
-    # Array
-    ["array"],
-    ["array", "null"],  # optional string arrays
-    # Object
-    ["object"],
-    ["object", "null"],  # /Organization/details
-    # String
-    ["string"],
-    ["string", "null"],
-    # Literal
-    ["boolean", "null"],
-    ["integer", "null"],
-    ["number", "null"],
-    # Mixed
-    ["string", "integer"],
-    ["string", "integer", "null"],
-)
-
-keywords_to_remove = (
-    # Metadata keywords
-    # https://tools.ietf.org/html/draft-fge-json-schema-validation-00#section-6
-    "title",
-    "description",
-    "default",
-    # Extended keywords
-    # http://os4d.opendataservices.coop/development/schema/#extended-json-schema
-    "omitWhenMerged",
-    "wholeListMerge",
-)
-
-
-def json_load(filename, library=json, **kwargs):
+
+def json_load(filename):
     """Load JSON data from the given filename."""
     with (schemadir / filename).open() as f:
-        return library.load(f, **kwargs)
+        return json.load(f)
 
 
 def json_dump(filename, data):
@@ -149,14 +78,6 @@ def get(url):
     return response
 
 
-def coerce_to_list(data, key):
-    """Return the value of the ``key`` key in the ``data`` mapping. If the value is a string, wrap it in an array."""
-    item = data.get(key, [])
-    if isinstance(item, str):
-        return [item]
-    return item
-
-
 def get_metaschema():
     """Patches and returns the JSON Schema Draft 4 metaschema."""
     return json_merge_patch.merge(
@@ -164,234 +85,6 @@ def get_metaschema():
     )
 
 
-def get_common_definition_ref(item):
-    """
-    Return a schema that references the common definition that the ``item`` matches: "StringNullUriVersioned",
-    "StringNullDateTimeVersioned" or "StringNullVersioned".
-    """
-    for name, keywords in common_versioned_definitions.items():
-        # If the item matches the definition.
-        if any(item.get(keyword) != value for keyword, value in keywords.items()):
-            continue
-        # And adds no keywords to the definition.
-        if any(keyword not in {*keywords, *keywords_to_remove} for keyword in item):
-            continue
-        return {"$ref": f"#/definitions/{name}"}
-    return None
-
-
-def add_versioned(schema, unversioned_pointers, pointer=""):
-    """Call ``_add_versioned`` on each field."""
-    for key, value in schema["properties"].items():
-        new_pointer = f"{pointer}/properties/{key}"
-        _add_versioned(schema, unversioned_pointers, new_pointer, key, value)
-
-    for key, value in schema.get("definitions", {}).items():
-        new_pointer = f"{pointer}/definitions/{key}"
-        add_versioned(value, unversioned_pointers, pointer=new_pointer)
-
-
-def _add_versioned(schema, unversioned_pointers, pointer, key, value):
-    """
-    Perform the changes to the schema to refer to versioned/unversioned definitions.
-
-    :param schema dict: the schema of the object on which the field is defined
-    :param unversioned_pointers set: JSON Pointers to ``id`` fields to leave unversioned if the object is in an array
-    :param pointer str: the field's pointer
-    :param key str: the field's name
-    :param value str: the field's schema
-    """
-    # Skip unversioned fields.
-    if pointer in unversioned_pointers:
-        return
-
-    types = coerce_to_list(value, "type")
-
-    # If a type is unrecognized, we might need to update this script.
-    if (
-        "$ref" not in value
-        and types not in recognized_types
-        and not (pointer == "/definitions/Quantity/properties/value" and types == ["string", "number", "null"])
-    ):
-        warnings.warn(f"{pointer} has unrecognized type {types}")
-
-    # For example, if $ref is used.
-    if not types:
-        # Ignore the `amendment` field, which had no `id` field in OCDS 1.0.
-        if "deprecated" not in value:
-            versioned_pointer = f"{value['$ref'][1:]}/properties/id"
-            # If the `id` field is on an object not in an array, it needs to be versioned (e.g. buyer/properties/id).
-            if versioned_pointer in unversioned_pointers:
-                value["$ref"] = value["$ref"] + "VersionedId"
-        return
-
-    # Reference a common versioned definition if possible, to limit the size of the schema.
-    ref = get_common_definition_ref(value)
-    if ref:
-        schema["properties"][key] = ref
-
-    # Iterate into objects with properties like `Item.unit`. Otherwise, version objects with no properties as a
-    # whole, like `Organization.details`.
-    elif types == ["object"] and "properties" in value:
-        add_versioned(value, unversioned_pointers, pointer=pointer)
-
-    else:
-        new_value = deepcopy(value)
-
-        if types == ["array"]:
-            item_types = coerce_to_list(value["items"], "type")
-
-            # See https://standard.open-contracting.org/latest/en/schema/merging/#whole-list-merge
-            if value.get("wholeListMerge"):
-                # Update `$ref` to the unversioned definition.
- if "$ref" in value["items"]: - new_value["items"]["$ref"] = value["items"]["$ref"] + "Unversioned" - # Otherwise, similarly, don't iterate over item properties. - # See https://standard.open-contracting.org/latest/en/schema/merging/#lists - elif "$ref" in value["items"]: - # Leave `$ref` to the versioned definition. - return - # Exceptional case for deprecated `Amendment.changes`. - elif item_types == ["object"] and pointer == "/definitions/Amendment/properties/changes": - return - # Warn in case new combinations are added to the release schema. - elif item_types != ["string"]: - # Note: Versioning the properties of un-$ref'erenced objects in arrays isn't implemented. However, - # this combination hasn't occurred, with the exception of `Amendment/changes`. - warnings.warn(f"{pointer}/items has unexpected type {item_types}") - - versioned = deepcopy(versioned_template) - versioned["items"]["properties"]["value"] = new_value - schema["properties"][key] = versioned - - -def update_refs_to_unversioned_definitions(schema): - """Replace ``$ref`` values with unversioned definitions.""" - for key, value in schema.items(): - if key == "$ref": - schema[key] = value + "Unversioned" - elif isinstance(value, dict): - update_refs_to_unversioned_definitions(value) - - -def get_unversioned_pointers(schema, fields, pointer=""): - """Return the JSON Pointers to ``id`` fields that must not be versioned if the object is in an array.""" - if isinstance(schema, list): - for index, item in enumerate(schema): - get_unversioned_pointers(item, fields, pointer=f"{pointer}/{index}") - elif isinstance(schema, dict): - # Follows the logic of _get_merge_rules in merge.py from ocds-merge. - types = coerce_to_list(schema, "type") - - # If an array is whole list merge, its items are unversioned. - if "array" in types and schema.get("wholeListMerge"): - return - if "array" in types and "items" in schema: - item_types = coerce_to_list(schema["items"], "type") - # If an array mixes objects and non-objects, it is whole list merge. - if any(item_type != "object" for item_type in item_types): - return - # If it is an array of objects, any `id` fields are unversioned. 
- if "id" in schema["items"]["properties"]: - if hasattr(schema["items"], "__reference__"): - reference = schema["items"].__reference__["$ref"][1:] - else: - reference = pointer - fields.add(f"{reference}/properties/id") - - for key, value in schema.items(): - get_unversioned_pointers(value, fields, pointer=f"{pointer}/{key}") - - -def remove_omit_when_merged(schema): - """Remove properties that set ``omitWhenMerged``.""" - if isinstance(schema, list): - for item in schema: - remove_omit_when_merged(item) - elif isinstance(schema, dict): - for key, value in schema.items(): - if key == "properties": - for prop in list(value): - if value[prop].get("omitWhenMerged"): - del value[prop] - if prop in schema["required"]: - schema["required"].remove(prop) - remove_omit_when_merged(value) - - -def remove_metadata_and_extended_keywords(schema): - """Remove metadata and extended keywords from properties and definitions.""" - if isinstance(schema, list): - for item in schema: - remove_metadata_and_extended_keywords(item) - elif isinstance(schema, dict): - for key, value in schema.items(): - if key in {"definitions", "properties"}: - for subschema in value.values(): - for keyword in keywords_to_remove: - subschema.pop(keyword, None) - remove_metadata_and_extended_keywords(value) - - -def get_versioned_release_schema(schema): - """Return the versioned release schema.""" - # Update schema metadata. - release_with_underscores = release.replace(".", "__") - schema["id"] = ( - f"https://standard.open-contracting.org/schema/{release_with_underscores}/versioned-release-validation-schema.json" - ) - schema["title"] = "Schema for a compiled, versioned Open Contracting Release." - - # Release IDs, dates and tags appear alongside values in the versioned release schema. - remove_omit_when_merged(schema) - - # Create unversioned copies of all definitions. - unversioned_definitions = {k + "Unversioned": deepcopy(v) for k, v in schema["definitions"].items()} - update_refs_to_unversioned_definitions(unversioned_definitions) - - # Determine which `id` fields occur on objects in arrays. - unversioned_pointers = set() - get_unversioned_pointers(jsonref.replace_refs(schema), unversioned_pointers) - - # Omit `ocid` from versioning. - ocid = schema["properties"].pop("ocid") - add_versioned(schema, unversioned_pointers) - schema["properties"]["ocid"] = ocid - - # Add the common versioned definitions. - for name, keywords in common_versioned_definitions.items(): - versioned = deepcopy(versioned_template) - for keyword, value in keywords.items(): - if value: - versioned["items"]["properties"]["value"][keyword] = value - schema["definitions"][name] = versioned - - # Add missing definitions. - while True: - try: - jsonref.replace_refs(schema, lazy_load=False) - break - except jsonref.JsonRefError as e: - name = e.cause.args[0] - - if name.endswith("VersionedId"): - # Add a copy of an definition with a versioned `id` field, using the same logic as before. - definition = deepcopy(schema["definitions"][name[:-11]]) - pointer = f"/definitions/{name[:-11]}/properties/id" - pointers = unversioned_pointers - {pointer} - _add_versioned(definition, pointers, pointer, "id", definition["properties"]["id"]) - else: - # Add a copy of an definition with no versioned fields. - definition = unversioned_definitions[name] - - schema["definitions"][name] = definition - - # Remove all metadata and extended keywords. 
-    remove_metadata_and_extended_keywords(schema)
-
-    return schema
-
-
 @click.group()
 def cli():
     pass
@@ -516,14 +209,14 @@ def pre_commit():
     }
 
     release_schema = json_load("release-schema.json")
-    jsonref_release_schema = json_load("release-schema.json", jsonref, merge_props=True)
+    jsonref_release_schema = replace_refs(json_load("release-schema.json"), keep_defs=True)
 
     counts = defaultdict(list)
     nonstring = ("boolean", "integer", "number", "object")
     for field in get_schema_fields(jsonref_release_schema):
         name = field.path_components[-1]
         # Skip definitions (output dereferenced properties only). Skip deprecated fields.
-        if field.definition_pointer_components or field.deprecated:
+        if field.definition or field.deprecated:
             continue
         multilingual = (
             # If a field can be a non-string, it is not multilingual.
@@ -566,7 +259,10 @@ def pre_commit():
 
     json_dump("meta-schema.json", get_metaschema())
     json_dump("dereferenced-release-schema.json", jsonref_release_schema)
-    json_dump("versioned-release-validation-schema.json", get_versioned_release_schema(release_schema))
+    json_dump(
+        "versioned-release-validation-schema.json",
+        get_versioned_release_schema(release_schema, release.replace(".", "__")),
+    )
 
 
 @cli.command()
diff --git a/requirements.txt b/requirements.txt
index 9c2ea5690..33d3d98dd 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,8 +2,8 @@
 
 # Add your own requirements below.
 
-ocdskit==1.1.3
+ocdskit==1.3.0
 sphinx-design==0.4.1
 sphinxcontrib-opencontracting==0.0.8
-sphinxcontrib-opendataservices-jsonschema==0.6.1
+sphinxcontrib-opendataservices-jsonschema==0.7.1
 sphinxcontrib-opendataservices==0.5.0
diff --git a/schema/dereferenced-release-schema.json b/schema/dereferenced-release-schema.json
index f71c5592f..d3df697bd 100644
--- a/schema/dereferenced-release-schema.json
+++ b/schema/dereferenced-release-schema.json
@@ -4128,7 +4128,7 @@
             "number",
             "null"
           ],
-          "pattern": "^[([\\]][0-9]+,([0-9]+|INF)[)[\\]]$"
+          "pattern": "^[(\\[\\]][0-9]+,([0-9]+|INF)[)\\[\\]]$"
         }
       },
      "minProperties": 1
@@ -10087,7 +10087,7 @@
             "number",
             "null"
           ],
-          "pattern": "^[([\\]][0-9]+,([0-9]+|INF)[)[\\]]$"
+          "pattern": "^[(\\[\\]][0-9]+,([0-9]+|INF)[)\\[\\]]$"
         }
       },
      "minProperties": 1
@@ -12641,7 +12641,7 @@
             "number",
             "null"
           ],
-          "pattern": "^[([\\]][0-9]+,([0-9]+|INF)[)[\\]]$"
+          "pattern": "^[(\\[\\]][0-9]+,([0-9]+|INF)[)\\[\\]]$"
         }
      },
      "minProperties": 1
@@ -19734,7 +19734,7 @@
             "number",
             "null"
           ],
-          "pattern": "^[([\\]][0-9]+,([0-9]+|INF)[)[\\]]$"
+          "pattern": "^[(\\[\\]][0-9]+,([0-9]+|INF)[)\\[\\]]$"
         }
      },
      "minProperties": 1
@@ -25689,7 +25689,7 @@
             "number",
             "null"
           ],
-          "pattern": "^[([\\]][0-9]+,([0-9]+|INF)[)[\\]]$"
+          "pattern": "^[(\\[\\]][0-9]+,([0-9]+|INF)[)\\[\\]]$"
         }
      },
      "minProperties": 1
@@ -28237,7 +28237,7 @@
             "number",
             "null"
           ],
-          "pattern": "^[([\\]][0-9]+,([0-9]+|INF)[)[\\]]$"
+          "pattern": "^[(\\[\\]][0-9]+,([0-9]+|INF)[)\\[\\]]$"
         }
      },
      "minProperties": 1
@@ -39413,7 +39413,7 @@
             "number",
             "null"
           ],
-          "pattern": "^[([\\]][0-9]+,([0-9]+|INF)[)[\\]]$"
+          "pattern": "^[(\\[\\]][0-9]+,([0-9]+|INF)[)\\[\\]]$"
         }
      },
      "minProperties": 1
@@ -41378,7 +41378,7 @@
             "number",
             "null"
           ],
-          "pattern": "^[([\\]][0-9]+,([0-9]+|INF)[)[\\]]$"
+          "pattern": "^[(\\[\\]][0-9]+,([0-9]+|INF)[)\\[\\]]$"
         }
      },
      "minProperties": 1
@@ -41428,7 +41428,7 @@
             "number",
             "null"
           ],
-          "pattern": "^[([\\]][0-9]+,([0-9]+|INF)[)[\\]]$"
+          "pattern": "^[(\\[\\]][0-9]+,([0-9]+|INF)[)\\[\\]]$"
         }
      },
      "minProperties": 1
diff --git a/schema/release-schema.json b/schema/release-schema.json
index 46f3ea6d8..5260ed095 100644
--- a/schema/release-schema.json
+++ b/schema/release-schema.json
@@ -2828,7 +2828,7 @@
             "number",
             "null"
           ],
-          "pattern": "^[([\\]][0-9]+,([0-9]+|INF)[)[\\]]$"
+          "pattern": "^[(\\[\\]][0-9]+,([0-9]+|INF)[)\\[\\]]$"
         }
      },
      "minProperties": 1
diff --git a/schema/versioned-release-validation-schema.json b/schema/versioned-release-validation-schema.json
index 55b8de369..6f30442d7 100644
--- a/schema/versioned-release-validation-schema.json
+++ b/schema/versioned-release-validation-schema.json
@@ -3308,7 +3308,7 @@
             "number",
             "null"
           ],
-          "pattern": "^[([\\]][0-9]+,([0-9]+|INF)[)[\\]]$"
+          "pattern": "^[(\\[\\]][0-9]+,([0-9]+|INF)[)\\[\\]]$"
         },
         "releaseTag": {
           "type": "array",
diff --git a/script/diff b/script/diff
index cf7e85060..85fb10c99 100755
--- a/script/diff
+++ b/script/diff
@@ -5,7 +5,7 @@ set -u
 
 curl -sS https://raw.githubusercontent.com/open-contracting/standard_profile_template/latest/docs/conf.py | diff -u - docs/conf.py
 
-for f in .github/workflows/ci.yml .github/workflows/js.yml .github/workflows/spellcheck.yml include/config.mk; do
+for f in .github/workflows/ci.yml .github/workflows/spellcheck.yml include/config.mk; do
     curl -sS https://raw.githubusercontent.com/open-contracting/standard_profile_template/latest/$f | diff -u - $f
 done
 
diff --git a/tests/__init__.py b/tests/__init__.py
index fc301d0a9..29bd6731d 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -1,17 +1,17 @@
 languages = {
-    'en': 'English',
-    'es': 'Español',
-    'fr': 'Français',
+    "en": "English",
+    "es": "Español",
+    "fr": "Français",
 }
 
 test_basic_params = {
-    'en': 'Open Contracting Data Standard',
-    'es': 'Estándar de Datos para las Contrataciones Abiertas',
-    'fr': 'Standard de Données sur la Commande Publique Ouverte',
+    "en": "Open Contracting Data Standard",
+    "es": "Estándar de Datos para las Contrataciones Abiertas",
+    "fr": "Standard de Données sur la Commande Publique Ouverte",
 }
 
 test_search_params = [
-    ('en', r'found \d+ page\(s\) matching'),
+    ("en", r"found \d+ page\(s\) matching"),
     # See https://github.com/sphinx-doc/sphinx/issues/11008
     # ('es', r'encontraron \d+ páginas que coinciden'),  # noqa: ERA001
     # ('fr', r'\d+ page\(s\) correspondant'),  # noqa: ERA001
diff --git a/tests/test_schema_integrity.py b/tests/test_schema_integrity.py
index 39ed3bd6e..fdbab6b76 100644
--- a/tests/test_schema_integrity.py
+++ b/tests/test_schema_integrity.py
@@ -3,38 +3,43 @@
 """
 
 import json
-import os.path
 import sys
+from pathlib import Path
 
 import jsonref
+from ocdsextensionregistry import get_versioned_release_schema
 
-sys.path.append(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
+basedir = Path(__file__).resolve().parent.parent
 
-from manage import get_metaschema, get_versioned_release_schema
+sys.path.extend([str(basedir), str(basedir / "docs")])
+
+from conf import release  # noqa: E402
+
+from manage import get_metaschema  # noqa: E402
 
 
 def test_versioned_release_schema_is_in_sync():
-    with open('schema/versioned-release-validation-schema.json') as f:
+    with open("schema/versioned-release-validation-schema.json") as f:
         actual = json.load(f)
 
-    with open('schema/release-schema.json') as f:
-        expected = get_versioned_release_schema(json.load(f))
+    with open("schema/release-schema.json") as f:
+        expected = get_versioned_release_schema(json.load(f), release.replace(".", "__"))
 
-    assert actual == expected, 'Run: python manage.py pre-commit'
+    assert actual == expected, "Run: python manage.py pre-commit"
 
 
 def test_dereferenced_release_schema_is_in_sync():
-    with open('schema/dereferenced-release-schema.json') as f:
+    with open("schema/dereferenced-release-schema.json") as f:
         actual = json.load(f)
 
-    with open('schema/release-schema.json') as f:
+    with open("schema/release-schema.json") as f:
         expected = jsonref.load(f, merge_props=True)
 
-    assert actual == expected, 'Run: python manage.py pre-commit'
+    assert actual == expected, "Run: python manage.py pre-commit"
 
 
 def test_meta_schema_is_in_sync():
-    with open('schema/meta-schema.json') as f:
+    with open("schema/meta-schema.json") as f:
         actual = json.load(f)
 
-    assert actual == get_metaschema(), 'Run: python manage.py pre-commit'
+    assert actual == get_metaschema(), "Run: python manage.py pre-commit"