diff --git a/.github/workflows/coverage.yaml b/.github/workflows/coverage.yaml
new file mode 100644
index 0000000..08ee46f
--- /dev/null
+++ b/.github/workflows/coverage.yaml
@@ -0,0 +1,43 @@
+name: "coverage"
+
+on:
+  push:
+    branches: [ master ]
+    # '**' globs are required: a bare 'src' only matches a file literally named "src"
+    paths: [ 'src/**', 'tests/**' ]
+  pull_request:
+    branches: [ master ]
+
+jobs:
+
+  test:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Python install
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.11"
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip setuptools wheel
+          pip install -e .[test]
+          pip install coverage
+
+      - name: Tests
+        run: |
+          coverage run -m pytest
+          coverage xml
+
+      - name: Upload coverage reports to Codecov
+        uses: codecov/codecov-action@v4.0.1
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          slug: violet-black/jsonschema-gen
+
+      - name: Upload coverage locally
+        uses: actions/upload-artifact@v4
+        with:
+          name: jsonschema-gen
+          path: coverage.xml
diff --git a/.github/workflows/pre-commit.yaml b/.github/workflows/pre-commit.yaml
new file mode 100644
index 0000000..290d290
--- /dev/null
+++ b/.github/workflows/pre-commit.yaml
@@ -0,0 +1,15 @@
+name: pre-commit
+
+on:
+  pull_request:
+    branches: [ master ]
+  push:
+    branches: [ master ]
+
+jobs:
+  pre-commit:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
+      - uses: pre-commit/action@v3.0.1
diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml
new file mode 100644
index 0000000..464bb55
--- /dev/null
+++ b/.github/workflows/publish.yaml
@@ -0,0 +1,43 @@
+name: "publish"
+
+on:
+  push:
+    tags:
+      - "*"
+
+jobs:
+
+  pypi-publish:
+    name: Upload release to PyPI
+    runs-on: ubuntu-latest
+    environment:
+      name: pypi
+      url: https://pypi.org/project/jsonschema-gen/
+    permissions:
+      id-token: write  # IMPORTANT: this permission is mandatory for trusted publishing
+    steps:
+      - uses: actions/checkout@v4
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: '3.11'
+          cache: 'pip'
+      - name: Test
+        run: |
+          pip install -U pip setuptools wheel
+          pip install -e .[test]
+          pytest
+      - name: Build
+        run: |
+          pip install build twine
+          python -m build --wheel --outdir dist/
+          twine check dist/*.whl
+      - name: Publish
+        uses: pypa/gh-action-pypi-publish@release/v1
+        # Trusted publishing via the OIDC id-token above; passing an explicit
+        # `password:` API token would bypass it, so none is supplied here.
+      - name: Upload package
+        uses: actions/upload-artifact@v4
+        with:
+          name: jsonschema-gen
+          path: dist/*.whl
diff --git a/.github/workflows/py310.yaml b/.github/workflows/py310.yaml
new file mode 100644
index 0000000..fe6e268
--- /dev/null
+++ b/.github/workflows/py310.yaml
@@ -0,0 +1,29 @@
+name: "3.10"
+
+on:
+  push:
+    branches: [ master ]
+    # '**' globs are required: a bare 'src' only matches a file literally named "src"
+    paths: [ 'src/**', 'tests/**' ]
+  pull_request:
+    branches: [ master ]
+
+jobs:
+
+  test:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Python install
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.10"
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip setuptools wheel
+          pip install -e .[test]
+
+      - name: Tests
+        run: |
+          pytest
diff --git a/.github/workflows/py311.yaml b/.github/workflows/py311.yaml
new file mode 100644
index 0000000..7e16813
--- /dev/null
+++ b/.github/workflows/py311.yaml
@@ -0,0 +1,29 @@
+name: "3.11"
+
+on:
+  push:
+    branches: [ master ]
+    # '**' globs are required: a bare 'src' only matches a file literally named "src"
+    paths: [ 'src/**', 'tests/**' ]
+  pull_request:
+    branches: [ master ]
+
+jobs:
+
+  test:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Python install
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.11"
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip setuptools wheel
+          pip install -e .[test]
+
+      - name: Tests
+        run: |
+          pytest
diff --git a/.github/workflows/py312.yaml b/.github/workflows/py312.yaml
new file mode 100644
index 0000000..42b45f7
--- /dev/null
+++ b/.github/workflows/py312.yaml
@@ -0,0 +1,29 @@
+name: "3.12"
+
+on:
+  push:
+    branches: [ master ]
+    # '**' globs are required: a bare 'src' only matches a file literally named "src"
+    paths: [ 'src/**', 'tests/**' ]
+  pull_request:
+    branches: [ master ]
+
+jobs:
+
+  test:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Python install
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.12"
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip setuptools wheel
+          pip install -e .[test]
+
+      - name: Tests
+        run: |
+          pytest
diff --git a/.github/workflows/py38.yaml b/.github/workflows/py38.yaml
new file mode 100644
index 0000000..347f1db
--- /dev/null
+++ b/.github/workflows/py38.yaml
@@ -0,0 +1,29 @@
+name: "3.8"
+
+on:
+  push:
+    branches: [ master ]
+    # '**' globs are required: a bare 'src' only matches a file literally named "src"
+    paths: [ 'src/**', 'tests/**' ]
+  pull_request:
+    branches: [ master ]
+
+jobs:
+
+  test:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Python install
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.8"
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip setuptools wheel
+          pip install -e .[test]
+
+      - name: Tests
+        run: |
+          pytest
diff --git a/.github/workflows/py39.yaml b/.github/workflows/py39.yaml
new file mode 100644
index 0000000..fc148ad
--- /dev/null
+++ b/.github/workflows/py39.yaml
@@ -0,0 +1,29 @@
+name: "3.9"
+
+on:
+  push:
+    branches: [ master ]
+    # '**' globs are required: a bare 'src' only matches a file literally named "src"
+    paths: [ 'src/**', 'tests/**' ]
+  pull_request:
+    branches: [ master ]
+
+jobs:
+
+  test:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Python install
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.9"
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip setuptools wheel
+          pip install -e .[test]
+
+      - name: Tests
+        run: |
+          pytest
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..8de67be
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,167 @@
+.dist/
+venv*
+docs/build
+jsonschema_gen.egg-info
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/#use-with-ide
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+
+.idea/*
+!.idea/runConfigurations
diff --git a/.idea/runConfigurations/Build.xml b/.idea/runConfigurations/Build.xml
new file mode 100644
index 0000000..41f285f
--- /dev/null
+++ b/.idea/runConfigurations/Build.xml
@@ -0,0 +1,17 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/runConfigurations/Docs.xml b/.idea/runConfigurations/Docs.xml
new file mode 100644
index 0000000..8d8cf6e
--- /dev/null
+++ b/.idea/runConfigurations/Docs.xml
@@ -0,0 +1,17 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/runConfigurations/Pylint.xml b/.idea/runConfigurations/Pylint.xml
new file mode 100644
index 0000000..40f6db9
--- /dev/null
+++ b/.idea/runConfigurations/Pylint.xml
@@ -0,0 +1,17 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/runConfigurations/Tox.xml b/.idea/runConfigurations/Tox.xml
new file mode 100644
index 0000000..ee017c4
--- /dev/null
+++ b/.idea/runConfigurations/Tox.xml
@@ -0,0 +1,22 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/runConfigurations/pytest.xml b/.idea/runConfigurations/pytest.xml
new file mode 100644
index 0000000..f8f37b0
--- /dev/null
+++ b/.idea/runConfigurations/pytest.xml
@@ -0,0 +1,21 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100755
index 0000000..fec24d7
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,32 @@
+fail_fast: true
+repos:
+ - repo: https://github.com/pycqa/flake8
+ rev: 7.0.0
+ hooks:
+ - id: flake8
+ language: python
+ types: [ python ]
+ pass_filenames: true
+ files: ^src/.+
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.6.0
+ hooks:
+ - id: check-case-conflict
+ - id: check-json
+ - id: check-toml
+ - id: check-yaml
+ - id: check-merge-conflict
+ - id: end-of-file-fixer
+ - id: fix-byte-order-marker
+ - id: mixed-line-ending
+ - id: debug-statements
+ - id: check-added-large-files
+ - id: check-symlinks
+ - id: trailing-whitespace
+ - repo: https://github.com/psf/black-pre-commit-mirror
+ rev: 24.3.0
+ hooks:
+ - id: black
+ types: [ python ]
+ pass_filenames: true
+ files: ^src/.+
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..51a1009
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2024 violet-black
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..0b1d3c1
--- /dev/null
+++ b/README.md
@@ -0,0 +1,113 @@
+[![PyPi Version](https://img.shields.io/pypi/v/jsonschema-gen.svg)](https://pypi.python.org/pypi/jsonschema-gen/)
+[![Docs](https://readthedocs.org/projects/jsonschema-gen/badge/?version=latest&style=flat)](https://jsonschema-gen.readthedocs.io)
+[![codecov](https://codecov.io/gh/violet-black/jsonschema-gen/graph/badge.svg?token=FEUUMQELFX)](https://codecov.io/gh/violet-black/jsonschema-gen)
+[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
+
+[![3.8](https://github.com/violet-black/jsonschema-gen/actions/workflows/py38.yaml/badge.svg)](https://github.com/violet-black/jsonschema-gen/actions/workflows/py38.yaml)
+[![3.9](https://github.com/violet-black/jsonschema-gen/actions/workflows/py39.yaml/badge.svg)](https://github.com/violet-black/jsonschema-gen/actions/workflows/py39.yaml)
+[![3.10](https://github.com/violet-black/jsonschema-gen/actions/workflows/py310.yaml/badge.svg)](https://github.com/violet-black/jsonschema-gen/actions/workflows/py310.yaml)
+[![3.11](https://github.com/violet-black/jsonschema-gen/actions/workflows/py311.yaml/badge.svg)](https://github.com/violet-black/jsonschema-gen/actions/workflows/py311.yaml)
+[![3.12](https://github.com/violet-black/jsonschema-gen/actions/workflows/py312.yaml/badge.svg)](https://github.com/violet-black/jsonschema-gen/actions/workflows/py312.yaml)
+
+**jsonschema-gen** is Python type hints parser which can convert function and method annotations
+into [JSONSchema](https://json-schema.org) objects.
+
+- Pythonic [classes](https://jsonschema-gen.readthedocs.io/reference.html) for JSONSchema types
+- Extensive type coverage: TypedDict, Generic, NewType, etc.
+- No external dependencies
+
+# Installation
+
+With pip and python 3.8+:
+
+```bash
+pip3 install jsonschema-gen
+```
+
+# How to use
+
+See the [user guide](https://jsonschema-gen.readthedocs.io/guide.html) for more info.
+
+Create a parser:
+
+```python
+from jsonschema_gen import Parser
+
+parser = Parser(strict=True)
+```
+
+Generate schema for your function or method from Python type hints
+(see the list of [supported types](https://jsonschema-gen.readthedocs.io/type_map.html)):
+
+```python
+from typing import NewType
+
+UserName = NewType('UserName', str)
+
+class UserData:
+ def get_user(self, name: UserName, active: bool = True) -> dict:
+ """Get user by username."""
+
+annotation = parser.parse_function(UserData.get_user, UserData)
+```
+
+The result is an annotation object with input `.kwargs` and output `.returns`. You can get a JSONSchema compatible dict
+using `json_repr()` on `.kwargs`:
+
+```python
+schema = annotation.kwargs.json_repr()
+```
+
+The result would look like this (if converted to JSON with `dumps`):
+
+```json
+{
+ "type": "object",
+ "title": "Get user by username.",
+ "properties": {
+ "name": {
+ "title": "Username",
+ "type": "string"
+ },
+ "active": {
+ "type": "boolean",
+ "default": true
+ }
+ },
+ "required": [
+ "name"
+ ],
+ "additionalProperties": false
+}
+```
+
+Use [fastjsonschema](https://github.com/horejsek/python-fastjsonschema) or other JSONSchema validation library to
+create a validator for the schema:
+
+```python
+from fastjsonschema import compile
+
+validator = compile(schema)
+validator({'name': 'John', 'email': 'john@dowe'})
+```
+
+Alternatively you can pass the whole class to the parser to get the annotation mapping:
+
+```python
+annotations = parser.parse_class(UserData)
+annotations['get_user'].kwargs.json_repr()
+```
+
+# Compatibility
+
+The Python type hints are vast and yet not well organized, so there could always be some data type I forgot to add
+here. Read the customization guide to extend the standard list of type parsers.
+
+Some annotations cannot be converted to JSONSchema objects, for example: positional-only arguments, variable
+positionals, etc. There are [different strategies](https://jsonschema-gen.readthedocs.io/guide.html#variable-args)
+considering these types of parameters.
+
+Python 3.8 compatibility is so-so due to lots of features and changes made in 3.9. However, it still should support
+most of the functionality.
+
+Also read about the [strict mode](https://jsonschema-gen.readthedocs.io/guide.html#strict-mode).
diff --git a/docs/Makefile b/docs/Makefile
new file mode 100755
index 0000000..d0c3cbf
--- /dev/null
+++ b/docs/Makefile
@@ -0,0 +1,20 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line, and also
+# from the environment for the first two.
+SPHINXOPTS ?=
+SPHINXBUILD ?= sphinx-build
+SOURCEDIR = source
+BUILDDIR = build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+ @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+ @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
diff --git a/docs/requirements.txt b/docs/requirements.txt
new file mode 100644
index 0000000..dfdf28b
--- /dev/null
+++ b/docs/requirements.txt
@@ -0,0 +1,56 @@
+#
+# This file is autogenerated by pip-compile with Python 3.13
+# by the following command:
+#
+# pip-compile --extra=docs --output-file=docs/requirements.txt setup.cfg
+#
+alabaster==0.7.16
+ # via sphinx
+babel==2.14.0
+ # via sphinx
+certifi==2024.2.2
+ # via requests
+charset-normalizer==3.3.2
+ # via requests
+docutils==0.20.1
+ # via
+ # m2r2
+ # sphinx
+idna==3.6
+ # via requests
+imagesize==1.4.1
+ # via sphinx
+jinja2==3.1.3
+ # via sphinx
+m2r2==0.3.3.post2
+ # via jsonschema-gen (setup.cfg)
+markupsafe==2.1.5
+ # via jinja2
+mistune==0.8.4
+ # via m2r2
+packaging==24.0
+ # via sphinx
+pygments==2.17.2
+ # via sphinx
+python-docs-theme==2024.3
+ # via jsonschema-gen (setup.cfg)
+requests==2.31.0
+ # via sphinx
+snowballstemmer==2.2.0
+ # via sphinx
+sphinx==7.2.6
+ # via jsonschema-gen (setup.cfg)
+sphinxcontrib-applehelp==1.0.8
+ # via sphinx
+sphinxcontrib-devhelp==1.0.6
+ # via sphinx
+sphinxcontrib-htmlhelp==2.0.5
+ # via sphinx
+sphinxcontrib-jsmath==1.0.1
+ # via sphinx
+sphinxcontrib-qthelp==1.0.7
+ # via sphinx
+sphinxcontrib-serializinghtml==1.1.10
+ # via sphinx
+urllib3==2.2.1
+ # via requests
diff --git a/docs/source/.gitignore b/docs/source/.gitignore
new file mode 100755
index 0000000..f59ec20
--- /dev/null
+++ b/docs/source/.gitignore
@@ -0,0 +1 @@
+*
\ No newline at end of file
diff --git a/docs/source/conf.py b/docs/source/conf.py
new file mode 100755
index 0000000..1ac34ad
--- /dev/null
+++ b/docs/source/conf.py
@@ -0,0 +1,83 @@
+# Configuration file for the Sphinx documentation builder.
+#
+# This file only contains a selection of the most common options. For a full
+# list see the documentation:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html
+
+# -- Path setup --------------------------------------------------------------
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#
+import os
+import sys
+
+sys.path.insert(0, os.path.abspath('..'))
+sys.path.insert(0, os.path.abspath('../../src'))
+
+# -- Project information -----------------------------------------------------
+
+project = 'jsonschema-gen'
+copyright = '2024, Elemento Systems (violetblackdev@gmail.com)'
+author = 'violetblackdev@gmail.com'
+
+# The full version, including alpha/beta/rc tags
+release = '1'
+
+
+# -- General configuration ---------------------------------------------------
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+ 'sphinx.ext.autodoc',
+ 'sphinx.ext.viewcode',
+ 'm2r2'
+]
+
+source_suffix = ['.rst', '.md']
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+# This pattern also affects html_static_path and html_extra_path.
+exclude_patterns = [
+ '**/tests'
+]
+
+# -- Options for HTML output -------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+#
+# html_theme = 'alabaster'
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+html_theme = 'python_docs_theme'
+html_sidebars = {
+ 'index': ['localtoc.html', 'relations.html'],
+ 'uvlog.*': ['globaltoc.html', 'relations.html']
+}
+
+# -- Run apidoc --------------------------------------------------------------
+#
+# from sphinx.ext.apidoc import main
+
+add_module_names = False
+autoclass_content = 'class'
+autodoc_class_signature = 'separated'
+autodoc_typehints = 'signature'
+autodoc_typehints_format = 'short'
+autodoc_typehints_description_target = 'documented'
+autodoc_preserve_defaults = True
+autodoc_member_order = 'bysource'
+autodoc_default_options = {
+ 'exclude-members': '__new__'
+}
\ No newline at end of file
diff --git a/docs/source/customization.rst b/docs/source/customization.rst
new file mode 100644
index 0000000..07f6e01
--- /dev/null
+++ b/docs/source/customization.rst
@@ -0,0 +1,66 @@
+.. _customization:
+
+Customization
+=============
+
+Use :py:class:`~jsonschema_gen.parsers.TypeParser` as your base class to create custom type classes.
+
+Let's try to create a custom type parser for a special user-defined `Email` type.
+
+.. code-block:: python
+
+ from typing import NewType
+
+ Email = NewType('Email', str)
+
+ def set_user_email(user_id: str, email: Email) -> bool: ...
+
+We have to create a special `EmailParser` for this type. However the `NewType` is not a real type and cannot be used
+in type checks. We have to customize `can_parse` method then to tell the parser when to parse an annotation.
+
+.. code-block:: python
+
+ from typing import NewType
+
+ from jsonschema_gen.parsers import TypeParser, TYPES
+ from jsonschema_gen.schema import Email
+
+ class EmailParser(TypeParser):
+ # JSONSchema type to map to
+ annotation = Email
+
+ # you may pass a dictionary of default attributes for the JSONSchema object here
+ attrs = {'title': 'user email'}
+
+ # allow this type in the Parser strict mode
+ strict = True
+
+ def can_parse(self, annotation, /) -> bool:
+ return type(annotation) is NewType and annotation.__name__ == 'Email'
+
+Then you need to add it to the list of standard types. Note that the resolution order of types is from 0 to the last
+element, so you must insert your parser at the beginning for it to take effect.
+
+.. code-block:: python
+
+ TYPES.insert(0, EmailParser)
+
+Now you can parse the email type and create a JSONSchema annotation with it.
+
+.. code-block:: python
+
+ annotation = parser.parse_function(set_user_email)
+
+The annotation `kwargs` would look like this in this case.
+
+.. code-block:: python
+
+ {
+ 'properties': {
+ 'user_id': {'type': 'string'},
+ 'email': {'format': 'email', 'type': 'string', 'title': 'user email'}
+ },
+ 'additionalProperties': False,
+ 'required': ['user_id', 'email'],
+ 'type': 'object'
+ }
diff --git a/docs/source/guide.rst b/docs/source/guide.rst
new file mode 100644
index 0000000..68a3c97
--- /dev/null
+++ b/docs/source/guide.rst
@@ -0,0 +1,154 @@
+.. _guide:
+
+User guide
+==========
+
+Using the generator is quite simple. You need to create a parser and pass a class and a method of this class
+there to generate an annotation namedtuple with `kwargs` containing the input schema and `returns` containing
+the return value schema (:py:meth:`~jsonschema_gen.Parser.parse_function`).
+
+.. code-block:: python
+
+ from typing import NewType, TypedDict
+ from jsonschema_gen import Parser
+
+ Username = NewType('Username', str)
+
+
+ class User(TypedDict):
+ name: str
+ blocked: bool
+
+
+ class UserData:
+
+ def get_user(self, name: Username) -> User:
+ """Get an API user."""
+
+ parser = Parser()
+ annotations = parser.parse_function(UserData.get_user, UserData)
+
+The schema consists of a schema object (see :ref:`schema`) which can be converted to a JSON-compatible dict using
+its :py:meth:`~jsonschema_gen.schema.JSONSchemaType.json_repr` method (keep in mind that you should check for `None`
+there since for a method with no input args the `kwargs` may be `None`).
+
+.. code-block:: python
+
+ annotations.kwargs.json_repr()
+
+The resulting object for this particular example would look like this.
+
+.. code-block:: python
+
+ {
+ "type": "object",
+ "title": "Get an API user.",
+ "properties": {
+ "name": {
+ "title": "Username",
+ "type": "string"
+ }
+ },
+ "required": [
+ "name"
+ ],
+ "additionalProperties": False
+ }
+
+There's also a way to parse all public methods of the class using :py:meth:`~jsonschema_gen.Parser.parse_class`.
+
+.. code-block:: python
+
+ annotations = parser.parse_class(UserData)
+
+The result is a dictionary with name: annotations data.
+
+You can use a JSONSchema validation library, such as `fastjsonschema <https://github.com/horejsek/python-fastjsonschema>`_,
+to validate input arguments for your API methods. Something like this:
+
+.. code-block:: python
+
+ from fastjsonschema import compile
+
+ users_validators = {method_name: compile(annotation.kwargs.json_repr())}
+
+ @route('/users/{method}')
+ def handle_request(request):
+ method = request.match_args['method']
+ args = request.json()
+ users_validators[method](args)
+ return getattr(users, method)(**args)
+
+Private args
+------------
+
+You can specify 'private' arguments for your input by prefixing them with underscore. They will be ignored in the
+annotation output. However, the parser does no default value check - that is your own responsibility.
+
+You can use this pattern if you, for example, have a public API where the session is passed automatically by some
+middleware. You can then declare it as a 'private' input argument.
+
+.. code-block:: python
+
+ def get_user(self, name: Username, _session=None) -> User:
+ """Get an API user."""
+
+`_session` will not be present in the resulting schema, so if someone will try to pass it explicitly from the API,
+the validator would raise a validation error.
+
+Variable args
+-------------
+
+Since the resulting schema must translate to a JSONSchema object, currently the positional variable arguments are
+ignored. In this case the two method definitions are equivalent for the parser:
+
+.. code-block:: python
+
+ def get_user(self, name: Username, *args) -> User: ...
+
+ def get_user(self, name: Username) -> User: ...
+
+Variable keyword arguments are accepted and would change `additionalProperties` of the input schema object to `true`.
+However, I would not recommend using variable keyword arguments in a public API.
+
+.. code-block:: python
+
+ def get_user(self, name: Username, **kwargs) -> User: ...
+ # "additionalProperties" will be 'true'
+
+Strict mode
+-----------
+
+By default the parser is initialized in *strict mode*. This means that it won't be able to parse types that cannot be
+mapped to JSON types explicitly.
+
+For example, the python `UUID` type, although the JSONSchema has 'uuid' string format, has no equivalent in JSON, which
+means that it may create confusion and errors when the method expecting a `UUID` object will receive a UUID-formatted
+`string` instead. The same can be said for `datetime`, `date` and other special types.
+
+See :ref:`type-map` for the full list of types supported in the strict mode.
+
+Some JSON parsers like `orjson <https://github.com/ijl/orjson>`_ can in fact convert date-time strings to Python `datetime`
+objects. In this case you may either switch to the non-strict mode or modify a particular type parser to
+allow it in the strict mode.
+
+.. code-block:: python
+
+ from jsonschema_gen.parsers import DateTimeParser, DateParser
+
+ DateTimeParser.strict = True
+ DateParser.strict = True
+
+It depends on the situation whether you want to use the strict or non-strict mode in your code.
+
+Limitations
+-----------
+
+Positional-only arguments are not supported at the moment (and I honestly don't know how to support them properly).
+
+.. code-block:: python
+
+ # would raise a `IncompatibleTypesError`
+ def get_user(self, name: Username, /) -> User: ...
+
+To resolve string annotations (references) you must pass a dictionary of your globals to the parser's `__init__`.
diff --git a/docs/source/index.rst b/docs/source/index.rst
new file mode 100755
index 0000000..1e8cf1a
--- /dev/null
+++ b/docs/source/index.rst
@@ -0,0 +1,15 @@
+Jsonschema-gen
+--------------
+
+.. toctree::
+ :maxdepth: 1
+
+ guide
+ type_map
+ schema
+ parser
+ customization
+
+.. include:: quickstart.rst
+
+.. include:: license.rst
diff --git a/docs/source/license.rst b/docs/source/license.rst
new file mode 100755
index 0000000..10ad7c9
--- /dev/null
+++ b/docs/source/license.rst
@@ -0,0 +1,4 @@
+License
+-------
+
+.. include:: ../../LICENSE
diff --git a/docs/source/parser.rst b/docs/source/parser.rst
new file mode 100644
index 0000000..e04e47d
--- /dev/null
+++ b/docs/source/parser.rst
@@ -0,0 +1,116 @@
+.. _parser:
+
+:tocdepth: 2
+
+Parsers
+=======
+
+This module contains the main parser class as well as a list of type annotation parsers. To add your custom parser to
+the default list of parsers you must add it to the :py:obj:`jsonschema_gen.parsers.TYPES` list.
+
+.. autodata:: jsonschema_gen.parsers.TYPES
+ :annotation: : List[TypeParser] - default list of type parsers
+
+.. autoclass:: jsonschema_gen.parsers.FunctionAnnotation
+ :members:
+ :undoc-members:
+ :exclude-members: __init__, __new__
+
+.. autoclass:: jsonschema_gen.parsers.Parser
+ :members:
+ :undoc-members:
+ :exclude-members:
+
+Base type parser class.
+
+.. autoclass:: jsonschema_gen.parsers.TypeParser
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+Type-specific parsers.
+
+.. autoclass:: jsonschema_gen.parsers.AnyParser
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.parsers.BooleanParser
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.parsers.ConstantParser
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.parsers.DictParser
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.parsers.EnumTypeParser
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.parsers.EnumValueParser
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.parsers.IntegerParser
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.parsers.ListParser
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.parsers.NamedTupleParser
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.parsers.NewTypeParser
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.parsers.NullParser
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.parsers.NumberParser
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.parsers.SetParser
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.parsers.StringParser
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.parsers.TupleParser
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.parsers.TypedDictParser
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.parsers.UnionParser
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
diff --git a/docs/source/quickstart.rst b/docs/source/quickstart.rst
new file mode 100644
index 0000000..2eaeea9
--- /dev/null
+++ b/docs/source/quickstart.rst
@@ -0,0 +1,8 @@
+Quickstart
+----------
+
+.. raw:: html
+
+
+
+.. mdinclude:: ../../README.md
diff --git a/docs/source/schema.rst b/docs/source/schema.rst
new file mode 100644
index 0000000..ac3fdcf
--- /dev/null
+++ b/docs/source/schema.rst
@@ -0,0 +1,138 @@
+.. _schema:
+
+:tocdepth: 2
+
+JSONSchema types
+================
+
+This module contains Python classes for `JSONSchema types <https://json-schema.org/understanding-json-schema/reference/type>`_
+which can be helpful when manipulating such data in Python. Each class has a number of attributes and
+`json_repr()` method which returns a JSONSchema compatible dictionary.
+
+.. code-block:: python
+
+ import json
+ import jsonschema_gen.schema as js
+
+ user = js.Object({
+ 'name': js.String(title='full name', minLength=1),
+ 'email': js.String(pattern='/^[^\.\s][\w\-]+(\.[\w\-]+)*@([\w-]+\.)+[\w-]{2,}$/gm')
+ }, required=['name', 'email'], additionalProperties=False)
+
+ user_or_guest = js.Nullable(user)
+
+ user_or_guest.json_repr() # dumps the resulting schema into the python dict
+ json.dumps(user_or_guest.json_repr()) # dumps the schema to the JSON string
+
+To create your own type you need to implement :py:class:`~jsonschema_gen.schema.JSONSchemaType` interface, i.e.
+the `json_repr()` method itself (subclassing is not required).
+
+.. note::
+
+ To make it easier to use with the original JSONSchema documentation the type attribute names have been left in
+ camel-case.
+
+.. autoclass:: jsonschema_gen.schema.JSONSchemaType
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.schema.JSONSchemaObject
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.schema.AllOf
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.schema.AnyOf
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.schema.Array
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.schema.Boolean
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.schema.Const
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.schema.Date
+ :members:
+ :inherited-members:
+ :undoc-members:
+ :exclude-members: __init__, format, __post_init__
+
+.. autoclass:: jsonschema_gen.schema.DateTime
+ :members:
+ :inherited-members:
+ :undoc-members:
+ :exclude-members: __init__, format, __post_init__
+
+.. autoclass:: jsonschema_gen.schema.Email
+ :members:
+ :inherited-members:
+ :undoc-members:
+ :exclude-members: __init__, format, __post_init__
+
+.. autoclass:: jsonschema_gen.schema.Enum
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.schema.GUID
+ :members:
+ :inherited-members:
+ :undoc-members:
+ :exclude-members: __init__, format, __post_init__
+
+.. autoclass:: jsonschema_gen.schema.Integer
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.schema.Not
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.schema.Null
+ :members:
+ :inherited-members:
+ :undoc-members:
+ :exclude-members: __init__, enum, __post_init__
+
+.. autoclass:: jsonschema_gen.schema.Nullable
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.schema.Number
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.schema.Object
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.schema.OneOf
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
+
+.. autoclass:: jsonschema_gen.schema.String
+ :members:
+ :undoc-members:
+ :exclude-members: __init__
diff --git a/docs/source/type_map.rst b/docs/source/type_map.rst
new file mode 100644
index 0000000..53dea3b
--- /dev/null
+++ b/docs/source/type_map.rst
@@ -0,0 +1,79 @@
+.. _type-map:
+
+Type reference
+==============
+
+These tables contain the full list of supported type hints and their normalized JSONSchema types.
+The `strict` attribute means that the conversion is available only in the parser's *strict* mode.
+
+The mapping between the standard Python types and JSONSchema types.
+
+.. csv-table::
+ :header: "Annotation", "JSONSchema", "strict", "comment"
+ :align: left
+
+ "bool", "boolean", "YES", ""
+ "str", "string", "YES", ""
+ "bytes", "string", "YES", ""
+ "date", "string", "no", "format=date"
+ "datetime", "string", "no", "format=date-time"
+ "UUID", "string", "no", "format=uuid"
+ "SafeUUID", "string", "no", "format=uuid"
+ "float", "number", "YES", ""
+ "int", "integer", "YES", ""
+ "Decimal", "number", "YES", ""
+ "Number", "number", "YES", ""
+ "None", "null", "YES", ""
+ "Any", "unspecified", "YES", ""
+ "list", "array", "YES", ""
+ "tuple", "array", "YES", "with prefixItems"
+    "NamedTuple", "array", "no", "with prefixItems"
+ "set", "array", "YES", "with uniqueItems=true"
+ "frozenset", "array", "YES", "with uniqueItems=true"
+ "dict", "object", "YES", ""
+ "TypedDict", "object", "YES", "additionalProperties=false"
+ "dataclass", "object", "no", "additionalProperties=false"
+ "Enum", "enum", "no", "non-strict because in Python enum type != its value"
+ "*args", "", "YES", "ignored"
+ "**kwargs", "", "YES", "sets additionalProperties=true"
+
+The mapping between Python base and abstract types and JSONSchema types.
+
+.. csv-table::
+ :header: "Annotation", "JSONSchema", "strict", "comment"
+ :align: left
+
+ "typing.List", "array", "YES", ""
+ "typing.Collection", "array", "YES", ""
+ "collections.abc.Collection", "array", "YES", ""
+ "typing.Iterable", "array", "YES", ""
+ "collections.abc.Iterable", "array", "YES", ""
+    "typing.Tuple", "array", "YES", "with prefixItems"
+ "typing.Set", "array", "YES", "with uniqueItems=true"
+ "collections.abc.Set", "array", "YES", "with uniqueItems=true"
+ "typing.MutableSet", "array", "YES", "with uniqueItems=true"
+ "collections.abc.MutableSet", "array", "YES", "with uniqueItems=true"
+ "typing.FrozenSet", "array", "YES", "with uniqueItems=true"
+ "typing.Dict", "object", "YES", ""
+ "collections.abc.Collection", "object", "YES", ""
+ "typing.Mapping", "object", "YES", ""
+ "collections.abc.Mapping", "object", "YES", ""
+ "typing.MutableMapping", "object", "YES", ""
+ "collections.abc.MutableMapping", "object", "YES", ""
+
+The mapping between Python special type hint types and JSONSchema types.
+
+.. csv-table::
+ :header: "Annotation", "JSONSchema", "strict", "comment"
+ :align: left
+
+ "typing.Literal", "enum", "YES", "constant value or values"
+ "typing.Union", "anyOf", "YES", ""
+ "union operator |", "anyOf", "YES", ""
+ "typing.Optional", "anyOf", "YES", "value or null"
+ "typing.TypeVar", "", "YES", "converts to the bound type"
+ "typing.NewType", "", "YES", "converts to the bound type"
+ "typing.Generic", "", "YES", "resolves type vars to the bound type"
+    "typing.ForwardRef", "", "YES", "plain string reference resolved via the parser's 'locals' map"
+ "typing.Required", "", "YES", "required key added to the 'required' array"
+ "typing.NotRequired", "", "YES", "not required key removed from the 'required' array"
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..6d8b66b
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,30 @@
+[tool.black]
+line-length = 120
+target-version = ['py38']
+
+[tool.coverage.report]
+omit = ["setup.py", "tests/*", "docs/*", "src/jsonschema_gen/utils.py"]
+
+[tool.pytest.ini_options]
+minversion = "8.0"
+addopts = "-s --doctest-modules"
+log_level = "DEBUG"
+log_cli = true
+testpaths = ["tests", "src/jsonschema_gen"]
+pythonpath = [
+ "src"
+]
+
+[tool.pylint]
+load-plugins = [
+ 'pylint.extensions.check_elif',
+ 'pylint.extensions.docstyle',
+ 'pylint.extensions.dunder',
+ 'pylint.extensions.eq_without_hash',
+ 'pylint.extensions.mccabe',
+ 'pylint.extensions.overlapping_exceptions',
+ 'pylint.extensions.private_import',
+    'pylint.extensions.typing'
+]
+disable = ['C0103', 'C0115', 'C0116', 'R0902', 'R0903', 'R0913', 'R1705', 'W0106', 'W0603', 'W0212']
+max-line-length = 135
diff --git a/readthedocs.yaml b/readthedocs.yaml
new file mode 100644
index 0000000..fb38344
--- /dev/null
+++ b/readthedocs.yaml
@@ -0,0 +1,24 @@
+# .readthedocs.yaml
+# Read the Docs configuration file
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
+
+# Required
+version: 2
+
+# Set the version of Python and other tools you might need
+build:
+ os: ubuntu-22.04
+ tools:
+ python: "3.11"
+
+# Build documentation in the docs/ directory with Sphinx
+sphinx:
+ builder: html
+ configuration: docs/source/conf.py
+
+formats:
+ - htmlzip
+
+python:
+ install:
+ - requirements: docs/requirements.txt
\ No newline at end of file
diff --git a/setup.cfg b/setup.cfg
new file mode 100755
index 0000000..3df6d26
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,72 @@
+[metadata]
+name = jsonschema-gen
+version = attr: jsonschema_gen.__version__
+description = JSONSchema generation from Python type hints
+long_description = file: README.md
+long_description_content_type = text/markdown
+url = https://github.com/violet-black/jsonschema-gen
+author = violetblackdev@gmail.com
+license = MIT
+license_files = LICENSE
+classifiers =
+ Development Status :: 3 - Alpha
+ License :: OSI Approved :: MIT License
+ Operating System :: MacOS
+ Operating System :: Microsoft
+ Operating System :: POSIX
+ Programming Language :: Python
+ Programming Language :: Python :: 3
+ Programming Language :: Python :: 3 :: Only
+ Programming Language :: Python :: 3.8
+ Programming Language :: Python :: 3.9
+ Programming Language :: Python :: 3.10
+ Programming Language :: Python :: 3.11
+ Programming Language :: Python :: 3.12
+ Programming Language :: Python :: 3.13
+ Programming Language :: Python :: Implementation :: CPython
+keywords =
+ jsonschema
+ validation
+ typing
+
+[options]
+package_dir=
+ =src
+packages = find:
+python_requires = >=3.8
+install_requires =
+
+[options.entry_points]
+console_scripts =
+
+[options.packages.find]
+where=src
+
+[options.extras_require]
+test=
+ pytest == 8.1.1
+dev =
+ pip-tools
+ tox
+ coverage
+ mypy
+ isort
+ black
+ bump2version
+ bandit
+ xenon
+docs=
+ sphinx
+ python_docs_theme
+ m2r2
+
+# NOTE: [build-system] is not a valid setup.cfg section and is ignored by
+# setuptools here. Declare the build backend in pyproject.toml instead:
+#   [build-system]
+#   requires = ["setuptools", "wheel"]
+#   build-backend = "setuptools.build_meta"
+
+[flake8]
+max-line-length = 125
+max-doc-length = 135
+select = E
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..6068493
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,3 @@
+from setuptools import setup
+
+setup()
diff --git a/src/jsonschema_gen/__init__.py b/src/jsonschema_gen/__init__.py
new file mode 100644
index 0000000..e9371c0
--- /dev/null
+++ b/src/jsonschema_gen/__init__.py
@@ -0,0 +1,9 @@
+"""JSONSchema generation from Python type hints."""
+
+from jsonschema_gen import schema
+from jsonschema_gen.parsers import *
+
+__python_version__ = "3.8"
+__author__ = "violetblackdev@gmail.com"
+__license__ = "MIT"
+__version__ = "0.1.1"
diff --git a/src/jsonschema_gen/parsers.py b/src/jsonschema_gen/parsers.py
new file mode 100644
index 0000000..e5e487c
--- /dev/null
+++ b/src/jsonschema_gen/parsers.py
@@ -0,0 +1,533 @@
+"""Collection of type parsers."""
+
+import collections.abc as c
+import inspect
+import typing as t
+from abc import ABC
+from contextlib import suppress
+from dataclasses import MISSING, fields, is_dataclass
+from datetime import date, datetime
+from decimal import Decimal
+from enum import Enum
+from inspect import _ParameterKind # noqa: magic is required
+from numbers import Number
+from uuid import UUID, SafeUUID
+from weakref import proxy
+
+import jsonschema_gen.schema as j
+from jsonschema_gen.utils import (NoneType, compatible_py39, compatible_py310,
+ compatible_py311, get_args,
+ get_function_summary, get_generic_alias,
+ get_origin, is_namedtuple, is_typeddict,
+ is_union)
+
+__all__ = [
+ "TYPES",
+ "IncompatibleTypesError",
+ "Parser",
+ "TypeParser",
+ "ListParser",
+ "TupleParser",
+ "DictParser",
+ "SetParser",
+ "StringParser",
+ "IntegerParser",
+ "NumberParser",
+ "BooleanParser",
+ "ConstantParser",
+ "EnumTypeParser",
+ "EnumValueParser",
+ "TypedDictParser",
+ "NewTypeParser",
+ "NamedTupleParser",
+ "AnyParser",
+ "NullParser",
+ "UnionParser",
+]
+
+TYPES: t.List[t.Type["TypeParser"]] = [] #: default collection of type parsers
+
+
+class IncompatibleTypesError(ValueError):
+ """Annotation type is incompatible with JSONSchema."""
+
+
+class FunctionAnnotation(t.NamedTuple):
+ """Function annotation with input kwargs and return values schemas."""
+
+ kwargs: t.Optional[j.Object]
+ returns: t.Optional[j.JSONSchemaType]
+
+
+class Parser:
+ """Python annotations parser.
+
+ Parse an annotation:
+
+ >>> Parser().parse_annotation(t.List[str], default=[]).json_repr()
+ {'items': {'type': 'string'}, 'default': [], 'type': 'array'}
+
+ Parse a function (method):
+
+ >>> def test(value: str) -> int: ...
+ >>> annotations = Parser().parse_function(test)
+ >>> annotations.kwargs.json_repr()
+ {'properties': {'value': {'type': 'string'}}, 'additionalProperties': False, 'required': ['value'], 'type': 'object'}
+
+ Parse a class:
+
+ >>> class C:
+ ... def test(self, value: str) -> int: ...
+ >>> annotations_map = Parser().parse_class(C)
+ >>> annotations_map['test'].kwargs.json_repr()
+ {'properties': {'value': {'type': 'string'}}, 'additionalProperties': False, 'required': ['value'], 'type': 'object'}
+ """
+
+ def __init__(
+ self,
+ *,
+ strict: bool = True,
+ private_arg_prefix: str = "_",
+ types: t.Optional[t.List[t.Type["TypeParser"]]] = None,
+ locals: t.Optional[dict] = None,
+ ):
+ """Initialize
+
+ :param strict: strict parsing - allow only JSONSchema compatible types
+ for example: UUID type is not allowed in `strict` because it's not an actual data type in JSON
+ :param private_arg_prefix: ignore args starting with such prefix
+ :param types: list of type parsers, by default :py:obj:`~jsonschema_gen.parsers.TYPES` is used
+ :param locals: a map of local variables to resolve plain string references in type hints
+ """
+ self.strict = strict
+ self.private_arg_prefix = private_arg_prefix
+ self.types = types or TYPES
+ self.locals = locals or {}
+ self._types = [t(self) for t in self.types]
+
+ def parse_class(self, cls: t.Type, /) -> t.Dict[str, FunctionAnnotation]:
+ """Parse class methods and create an annotation map for the whole class."""
+ _method_map = {}
+ for name, value in vars(cls).items():
+ if self.private_arg_prefix and name.startswith(self.private_arg_prefix):
+ continue
+ if inspect.isfunction(value):
+ _method_map[name] = self.parse_function(value, cls)
+ return _method_map
+
+ def parse_function(
+ self, f: t.Callable, /, cls: t.Optional[t.Type] = None
+ ) -> FunctionAnnotation:
+ """Parse method or function arguments and return type into jsonschema style annotations."""
+ sign = inspect.signature(f)
+ params, required = {}, []
+ additional_properties = False
+ is_staticmethod = isinstance(f, staticmethod)
+
+ for n, (name, arg) in enumerate(sign.parameters.items()):
+
+ if self.private_arg_prefix and name.startswith(self.private_arg_prefix):
+ continue
+
+ # ignoring the first argument for class and instance methods
+ if cls and n == 0 and not is_staticmethod:
+ continue
+
+ if arg.kind == _ParameterKind.VAR_POSITIONAL:
+ continue
+
+ if arg.kind == _ParameterKind.VAR_KEYWORD:
+ additional_properties = True
+ continue
+
+ if arg.kind == _ParameterKind.POSITIONAL_ONLY:
+ raise IncompatibleTypesError(
+ "Positional only arguments cannot be converted to a JSONSchema object."
+ )
+
+ if type(arg.annotation) is t.TypeVar:
+ if cls:
+ annotation = _parse_generic_class(cls, arg.annotation)
+ else:
+ annotation = TypeVarParser(self).parse_annotation(arg.annotation)
+ else:
+ annotation = arg.annotation
+
+ if arg.default == arg.empty:
+ default = ...
+ required.append(name)
+ else:
+ default = arg.default
+
+ params[name] = self.parse_annotation(annotation, default=default)
+
+ if params:
+ params = j.Object(
+ properties=params,
+ required=required,
+ additionalProperties=additional_properties,
+ )
+ else:
+ params = None
+
+ if sign.return_annotation is inspect._empty: # noqa: magic
+ returns = None
+ else:
+ returns = self.parse_annotation(sign.return_annotation, default=...)
+ return FunctionAnnotation(params, returns)
+
+ def parse_annotation(self, annotation, /, default=...) -> j.JSONSchemaType:
+ """Convert python annotation into a jsonschema object."""
+ if type(annotation) is t.ForwardRef:
+ annotation = self.locals.get(annotation.__forward_arg__, t.Any)
+
+ for parser in self._types:
+ if not parser.can_parse(annotation):
+ continue
+ if self.strict and not parser.strict:
+ continue
+
+ annotation = parser.parse_annotation(annotation)
+ if default is not ...:
+ annotation.default = default
+ return annotation
+
+ if self.strict:
+ raise IncompatibleTypesError(
+ f"Unable to parse annotation of type {annotation} as jsonschema type in strict mode"
+ )
+
+ title = None if annotation == inspect._empty else str(annotation) # noqa: magic
+ return j.JSONSchemaObject(title=title, default=default)
+
+
+class TypeParser(ABC):
+ """Type parser"""
+
+ types: t.Tuple[t.Type]
+ annotation: t.Type[j.JSONSchemaObject]
+ attrs: dict = None
+ strict: bool = True
+
+ def __init__(self, _parser: "Parser"):
+ self._parser = proxy(_parser)
+
+ def can_parse(self, annotation, /) -> bool:
+ origin = get_origin(annotation)
+ return origin in self.types if origin else annotation in self.types
+
+ def parse_annotation(self, annotation, /) -> j.JSONSchemaType:
+ _attrs = {} if self.attrs is None else self.attrs
+ return self.annotation(**_attrs) # noqa
+
+ def parse_args(self, args, /):
+ if args:
+ for arg in args:
+ if arg is not ...:
+ yield self._parser.parse_annotation(arg)
+
+
+def _parse_generic_class(cls: t.Type, annotation, /) -> j.JSONSchemaObject:
+ """Parse a class containing Generic hints in itself."""
+ alias = base_alias = get_generic_alias(cls)
+ args = base_args = alias.__args__
+ while base_alias:
+ base_args = base_alias.__args__
+ base_cls = base_alias.__origin__
+ base_alias = get_generic_alias(base_cls)
+ with suppress(ValueError, IndexError):
+ annotation = args[base_args.index(annotation)]
+ return annotation
+
+
+class AnyParser(TypeParser):
+ types = (t.Any,)
+ annotation = j.JSONSchemaObject
+
+
+class StringParser(TypeParser):
+ types = (str, bytes, t.AnyStr)
+ annotation = j.String
+
+
+class UUIDParser(TypeParser):
+ types = (UUID, SafeUUID)
+ annotation = j.GUID
+ strict = False
+
+
+class DateParser(TypeParser):
+ types = (date,)
+ annotation = j.Date
+ strict = False
+
+
+class DateTimeParser(TypeParser):
+ types = (datetime,)
+ annotation = j.DateTime
+ strict = False
+
+
+class IntegerParser(TypeParser):
+ types = (int,)
+ annotation = j.Integer
+
+
+class NumberParser(TypeParser):
+ types = (float, Decimal, Number)
+ annotation = j.Number
+
+
+class BooleanParser(TypeParser):
+ types = (bool,)
+ annotation = j.Boolean
+
+
+class NullParser(TypeParser):
+ types = (None, NoneType)
+ annotation = j.Null
+
+
+class TypeVarParser(TypeParser):
+
+ def can_parse(self, annotation, /) -> bool:
+ return isinstance(annotation, t.TypeVar)
+
+ def parse_annotation(self, annotation: t.TypeVar, /) -> j.JSONSchemaType:
+ title = annotation.__name__
+ annotation = annotation.__bound__
+ if annotation:
+ arg = self._parser.parse_annotation(annotation)
+ else:
+ arg = j.JSONSchemaObject()
+ arg.title = title
+ return arg
+
+
+class NewTypeParser(TypeParser):
+
+ def can_parse(self, annotation, /) -> bool:
+ if compatible_py310():
+ return isinstance(annotation, t.NewType) # noqa: magic
+ else:
+ return getattr(annotation, "__qualname__", "").split(".")[0] == "NewType"
+
+ def parse_annotation(self, annotation, /) -> j.JSONSchemaType:
+ title = annotation.__name__
+ annotation = annotation.__supertype__
+ arg = self._parser.parse_annotation(annotation)
+ arg.title = title
+ return arg
+
+
+class ConstantParser(TypeParser):
+
+ def can_parse(self, annotation, /) -> bool:
+ return getattr(annotation, "__origin__", None) is t.Literal
+
+ def parse_annotation(self, annotation, /) -> j.JSONSchemaType:
+ return j.Enum(enum=list(annotation.__args__))
+
+
+class UnionParser(TypeParser):
+
+ def can_parse(self, annotation, /) -> bool:
+ return is_union(annotation)
+
+ def parse_annotation(self, annotation, /) -> j.JSONSchemaType:
+ args = get_args(annotation)
+ return j.AnyOf(list(self.parse_args(args)))
+
+
+class ListParser(TypeParser):
+ types = (list, t.List, t.Collection, c.Collection, c.Iterable, t.Iterable)
+ annotation = j.Array
+
+ def parse_annotation(self, annotation, /) -> j.JSONSchemaType:
+ args = get_args(annotation)
+ if args is None:
+ return self.annotation()
+ _args = []
+ for arg in self.parse_args(args):
+ if type(arg) is j.AnyOf:
+ arg = t.cast(j.AnyOf, arg)
+ _args.extend(arg.items) # noqa (ported)
+ else:
+ _args.append(arg)
+        return self.annotation(items=_args[0] if len(_args) == 1 else j.AnyOf(_args))
+
+
+class SetParser(ListParser):
+ types = (set, frozenset, t.Set, c.Set, t.FrozenSet, t.MutableSet, c.MutableSet)
+ annotation = j.Array
+
+ def parse_annotation(self, annotation, /) -> j.JSONSchemaType:
+ annotation = super().parse_annotation(annotation)
+ annotation.uniqueItems = True
+ return annotation
+
+
+class TupleParser(ListParser):
+ types = (tuple, t.Tuple)
+ annotation = j.Array
+
+ def parse_annotation(self, annotation, /) -> j.JSONSchemaType:
+ args = get_args(annotation)
+ if not args or (len(args) > 1 and args[1] is ...):
+ return super().parse_annotation(annotation)
+ return self.annotation(prefixItems=list(self.parse_args(args)))
+
+
+class DictParser(TypeParser):
+ types = (dict, c.Mapping, t.Mapping, t.MutableMapping, c.MutableMapping)
+ annotation = j.Object
+
+ def parse_annotation(self, annotation, /) -> j.JSONSchemaType:
+ args = get_args(annotation)
+ properties = None
+ if args and len(annotation.__args__) > 1:
+ if self._parser.strict and annotation.__args__[0] not in (
+ t.AnyStr,
+ str,
+ bytes,
+ ):
+ raise IncompatibleTypesError(
+ f"Dictionary keys must be strings, got {annotation.__args__[0]}"
+ )
+ if annotation.__args__[1] not in (t.Any, ...):
+ properties = {
+ "^.+$": self._parser.parse_annotation(annotation.__args__[1])
+ }
+ return self.annotation(patternProperties=properties)
+
+
+class TypedDictParser(TypeParser):
+ annotation = j.Object
+
+ def can_parse(self, annotation, /) -> bool:
+ return is_typeddict(annotation)
+
+ def parse_annotation(self, annotation: t.TypedDict, /) -> j.JSONSchemaType:
+ title = annotation.__name__
+ total = getattr(annotation, "__total__", True)
+ properties, required = {}, []
+ for key, arg in annotation.__annotations__.items():
+ origin = get_origin(arg)
+ if compatible_py311():
+ if origin is t.Required: # noqa: magic
+ arg = get_args(arg)[0]
+ required.append(key)
+ elif origin is t.NotRequired: # noqa: magic
+ arg = get_args(arg)[0]
+ elif total or key in annotation.__required_keys__: # noqa: magic
+ required.append(key)
+ elif compatible_py39():
+ if total or key in annotation.__required_keys__: # noqa: magic
+ required.append(key)
+ elif total:
+ required.append(key)
+ arg = self._parser.parse_annotation(arg)
+ properties[key] = arg
+ return self.annotation(
+ properties=properties,
+ required=required,
+ description=annotation.__doc__,
+ additionalProperties=False,
+ title=title,
+ )
+
+
+class NamedTupleParser(TypeParser):
+ annotation = j.Array
+ strict = False
+
+ def can_parse(self, annotation, /) -> bool:
+ return is_namedtuple(annotation)
+
+ def parse_annotation(self, annotation: t.NamedTuple, /) -> j.JSONSchemaType:
+ title = annotation.__name__
+ defaults = annotation._field_defaults # noqa: no public attr
+ annotations = getattr(annotation, "__annotations__", {})
+ items = []
+ for key in annotation._fields: # noqa: no public attr
+ arg = (
+ self._parser.parse_annotation(annotations[key])
+ if key in annotations
+ else j.JSONSchemaObject()
+ )
+ arg.title = key
+ if key in defaults:
+ arg.default = defaults[key]
+ items.append(arg)
+ return self.annotation(
+ prefixItems=items, description=annotation.__doc__, title=title
+ )
+
+
+class EnumValueParser(TypeParser):
+ types = (Enum,)
+ annotation = j.Const
+ strict = False
+
+ def can_parse(self, annotation, /) -> bool:
+ return isinstance(annotation, self.types)
+
+ def parse_annotation(self, annotation: Enum, /) -> j.JSONSchemaType:
+ return j.Const(
+ const=annotation.value,
+ title=f"{annotation.__class__.__name__}.{annotation.name}",
+ )
+
+
+class EnumTypeParser(TypeParser):
+ types = (Enum,)
+ annotation = j.Enum
+ strict = False
+
+ def can_parse(self, annotation, /) -> bool:
+ return inspect.isclass(annotation) and issubclass(annotation, Enum)
+
+ def parse_annotation(self, annotation: t.Type[Enum], /) -> j.JSONSchemaType:
+ title = annotation.__name__
+ return self.annotation(
+ description=annotation.__doc__,
+ enum=[
+ v.value for k, v in annotation._member_map_.items()
+ ], # noqa: no public attr
+ title=title,
+ )
+
+
+class DataclassParser(TypeParser):
+ annotation = j.Object
+ strict = False
+
+ def can_parse(self, annotation, /) -> bool:
+ return is_dataclass(annotation)
+
+ def parse_annotation(self, annotation, /) -> j.JSONSchemaType:
+ title = annotation.__name__
+ properties, required = {}, []
+ for field in fields(annotation):
+ if not field.name.startswith("_"):
+ properties[field.name] = arg = self._parser.parse_annotation(field.type)
+ if field.default is MISSING:
+ required.append(field.name)
+ else:
+ arg.default = field.default
+ return self.annotation(
+ properties=properties,
+ required=required,
+ additionalProperties=False,
+ title=title,
+ description=get_function_summary(annotation.__doc__),
+ )
+
+
+for value in tuple(locals().values()):
+ if (
+ inspect.isclass(value)
+ and issubclass(value, TypeParser)
+ and value is not TypeParser
+ ):
+ TYPES.append(value)
diff --git a/src/jsonschema_gen/schema.py b/src/jsonschema_gen/schema.py
new file mode 100644
index 0000000..2dca9f4
--- /dev/null
+++ b/src/jsonschema_gen/schema.py
@@ -0,0 +1,423 @@
+"""Jsonschema types."""
+
+# pylint: disable=C0103
+
+from dataclasses import dataclass, field
+from typing import (Any, Collection, Dict, List, Literal, Mapping, Protocol,
+ TypeVar)
+
+__all__ = (
+ "JSONSchemaObject",
+ "JSONSchemaType",
+ "Boolean",
+ "String",
+ "Number",
+ "Integer",
+ "Array",
+ "Object",
+ "AnyOf",
+ "OneOf",
+ "AllOf",
+ "Not",
+ "GUID",
+ "Date",
+ "DateTime",
+ "Null",
+ "Const",
+ "Nullable",
+ "Enum",
+ "Email",
+)
+
+_T = TypeVar("_T")
+_StringFormat = Literal[
+ "JSONSchemaType",
+ "date-time",
+ "time",
+ "date",
+ "email",
+ "idn-email",
+ "hostname",
+ "idn-hostname",
+ "ipv4",
+ "ipv6",
+ "uri",
+ "uri-reference",
+ "iri",
+ "iri-reference",
+ "regex",
+]
+
+
+class JSONSchemaType(Protocol):
+ """Json schema object interface."""
+
+ def json_repr(self) -> Dict[str, Any]:
+ """Produce a JSON-compatible representation of the object."""
+ return _serialize_schema_keys(vars(self))
+
+
+def _serialize_schema_value(value: Any, /) -> Any:
+ if isinstance(value, Mapping):
+ return _serialize_schema_keys(value)
+ if isinstance(value, (list, tuple)):
+ return [_serialize_schema_value(sub_value) for sub_value in value]
+ if hasattr(value, "json_repr"):
+ return value.json_repr()
+ return value
+
+
+def _serialize_schema_keys(obj: Mapping) -> Dict[str, Any]:
+ return {
+ key: _serialize_schema_value(value)
+ for key, value in obj.items()
+ if not key.startswith("_") and value is not None and value is not ...
+ }
+
+
+@dataclass
+class JSONSchemaObject(JSONSchemaType):
+ """Generic JSONSchema object.
+
+ >>> JSONSchemaObject().json_repr()
+ {}
+ """
+
+ title: str = None
+ description: str = None
+ examples: List = None
+ default: Any = ...
+
+
+@dataclass
+class Enum(JSONSchemaType):
+ """Enum value.
+
+ >>> Enum([1, 2, 3]).json_repr()
+ {'enum': [1, 2, 3]}
+ """
+
+ enum: List
+ title: str = None
+ description: str = None
+ examples: List = None
+ default: Any = ...
+
+
+@dataclass
+class Const(JSONSchemaType):
+ """Constant value.
+
+    See `constants <https://json-schema.org/understanding-json-schema/reference/const>`_
+
+ >>> Const('1').json_repr()
+ {'const': '1'}
+ """
+
+ const: Any
+ title: str = None
+ description: str = None
+
+
+@dataclass
+class Boolean(JSONSchemaType):
+ """Boolean type.
+
+    See `boolean type <https://json-schema.org/understanding-json-schema/reference/boolean>`_
+
+ >>> Boolean().json_repr()
+ {'type': 'boolean'}
+ """
+
+ title: str = None
+ description: str = None
+ default: bool = ...
+
+ def json_repr(self) -> Dict[str, Any]:
+ data = _serialize_schema_keys(vars(self))
+ data["type"] = "boolean"
+ return data
+
+
+@dataclass
+class String(JSONSchemaType):
+ """String type.
+
+    See `string type <https://json-schema.org/understanding-json-schema/reference/string>`_
+
+ >>> String().json_repr()
+ {'type': 'string'}
+ """
+
+ minLength: int = None
+ maxLength: int = None
+ pattern: str = None #: regex validation pattern
+ format: _StringFormat = None #: string format
+ title: str = None
+ description: str = None
+ examples: List = None
+ enum: List[str] = None
+ default: str = ...
+
+ def json_repr(self) -> Dict[str, Any]:
+ data = _serialize_schema_keys(vars(self))
+ data["type"] = "string"
+ return data
+
+
+@dataclass
+class DateTime(String):
+ """Datetime type alias.
+
+ >>> DateTime().json_repr()
+ {'format': 'date-time', 'type': 'string'}
+ """
+
+ format: str = "date-time"
+
+
+@dataclass
+class Date(String):
+ """Date type alias.
+
+ >>> Date().json_repr()
+ {'format': 'date', 'type': 'string'}
+ """
+
+ format: str = field(init=False)
+
+ def __post_init__(self):
+ self.format = "date"
+
+
+@dataclass
+class GUID(String):
+ """UUID type alias.
+
+ >>> GUID().json_repr()
+ {'format': 'uuid', 'type': 'string'}
+ """
+
+ format: str = field(init=False)
+
+ def __post_init__(self):
+ self.format = "uuid"
+
+
+@dataclass
+class Email(String):
+    """Email type alias.
+
+ >>> Email().json_repr()
+ {'format': 'email', 'type': 'string'}
+ """
+
+ format: str = field(init=False)
+
+ def __post_init__(self):
+ self.format = "email"
+
+
+@dataclass
+class Null(Enum):
+ """Null value alias.
+
+ >>> Null().json_repr()
+ {'enum': [None]}
+ """
+
+ enum: list = field(init=False)
+
+ def __post_init__(self):
+ self.enum = [None]
+
+
+@dataclass
+class Number(JSONSchemaType):
+ """Numeric data type.
+
+    See `numeric type <https://json-schema.org/understanding-json-schema/reference/numeric>`_
+
+ >>> Number().json_repr()
+ {'type': 'number'}
+ """
+
+ multipleOf: float = None
+ minimum: float = None
+ maximum: float = None
+ exclusiveMinimum: float = None
+ exclusiveMaximum: float = None
+ title: str = None
+ description: str = None
+ examples: List = None
+ enum: List[float] = None
+ default: float = ...
+
+ def json_repr(self) -> Dict[str, Any]:
+ data = _serialize_schema_keys(vars(self))
+ data["type"] = "number"
+ return data
+
+
+@dataclass
+class Integer(JSONSchemaType):
+ """Integer type.
+
+    See `integer type <https://json-schema.org/understanding-json-schema/reference/numeric>`_
+
+ >>> Integer().json_repr()
+ {'type': 'integer'}
+ """
+
+ multipleOf: int = None
+ minimum: int = None
+ maximum: int = None
+ exclusiveMinimum: int = None
+ exclusiveMaximum: int = None
+ title: str = None
+ description: str = None
+ examples: List = None
+ enum: List[int] = None
+ default: int = ...
+
+ def json_repr(self) -> Dict[str, Any]:
+ data = _serialize_schema_keys(vars(self))
+ data["type"] = "integer"
+ return data
+
+
+@dataclass
+class Array(JSONSchemaType):
+ """Array type.
+
+    See `array type <https://json-schema.org/understanding-json-schema/reference/array>`_
+
+ >>> Array(String()).json_repr()
+ {'items': {'type': 'string'}, 'type': 'array'}
+ """
+
+ items: JSONSchemaType = None #: item type for a strict typed array
+ prefixItems: Collection[JSONSchemaType] = (
+ None #: a List of fixed object positions for a tuple type
+ )
+ contains: JSONSchemaType = None #: must contain this type of object
+ additionalItems: bool = None #: allow additional items
+ uniqueItems: bool = None #: specify an array as a set type
+ minItems: int = None
+ maxItems: int = None
+ title: str = None
+ description: str = None
+ examples: List = None
+ enum: List[List] = None
+ default: List = ...
+
+ def json_repr(self) -> Dict[str, Any]:
+ data = _serialize_schema_keys(vars(self))
+ data["type"] = "array"
+ return data
+
+
+@dataclass
+class Object(JSONSchemaType):
+ """JSON object type (dictionary-like).
+
+    See `object type <https://json-schema.org/understanding-json-schema/reference/object>`_
+
+ >>> Object({'name': String()}).json_repr()
+ {'properties': {'name': {'type': 'string'}}, 'type': 'object'}
+ """
+
+ properties: Dict[str, JSONSchemaType] = None
+ patternProperties: Dict[str, JSONSchemaType] = None
+ additionalProperties: bool = None
+ minProperties: int = None
+ maxProperties: int = None
+ required: List[str] = None
+ title: str = None
+ description: str = None
+ examples: List = None
+ enum: List[Dict] = None
+ default: Dict = ...
+
+ def json_repr(self) -> Dict[str, Any]:
+ data = _serialize_schema_keys(vars(self))
+ data["type"] = "object"
+ return data
+
+
+@dataclass
+class AnyOf(JSONSchemaType):
+ """Any of the included schemas must be valid.
+
+    See `anyOf keyword <https://json-schema.org/understanding-json-schema/reference/combining#anyOf>`_
+
+ >>> AnyOf([String(), Integer()]).json_repr()
+ {'anyOf': [{'type': 'string'}, {'type': 'integer'}]}
+ """
+
+ items: Collection[JSONSchemaType]
+
+ def json_repr(self) -> Dict[str, Any]:
+ return {"anyOf": [item.json_repr() for item in self.items]}
+
+
+@dataclass
+class OneOf(JSONSchemaType):
+ """Only one of the included schemas must be valid.
+
+    See `oneOf keyword <https://json-schema.org/understanding-json-schema/reference/combining#oneOf>`_
+
+ >>> OneOf([String(), Integer()]).json_repr()
+ {'oneOf': [{'type': 'string'}, {'type': 'integer'}]}
+ """
+
+ items: Collection[JSONSchemaType]
+
+ def json_repr(self) -> Dict[str, Any]:
+ return {"oneOf": [item.json_repr() for item in self.items]}
+
+
+@dataclass
+class AllOf(JSONSchemaType):
+ """All the included schemas must be valid.
+
+    See `allOf keyword <https://json-schema.org/understanding-json-schema/reference/combining#allOf>`_
+
+ >>> AllOf([String(), Integer()]).json_repr()
+ {'allOf': [{'type': 'string'}, {'type': 'integer'}]}
+ """
+
+ items: Collection[JSONSchemaType]
+
+ def json_repr(self) -> Dict[str, Any]:
+ return {"allOf": [item.json_repr() for item in self.items]}
+
+
+@dataclass
+class Not(JSONSchemaType):
+ """Revert the condition of the schema.
+
+    See `Not keyword <https://json-schema.org/understanding-json-schema/reference/combining#not>`_
+
+ >>> Not(Boolean()).json_repr()
+ {'not': {'type': 'boolean'}}
+ """
+
+ item: JSONSchemaType
+
+ def json_repr(self) -> Dict[str, Any]:
+ return {"not": self.item.json_repr()}
+
+
+@dataclass
+class Nullable(JSONSchemaType):
+ """Nullable value alias.
+
+ >>> Nullable(String()).json_repr()
+ {'oneOf': [{'type': 'string'}, {'enum': [None]}]}
+ """
+
+ item: JSONSchemaType
+
+ def json_repr(self) -> Dict[str, Any]:
+ return {"oneOf": [self.item.json_repr(), Null().json_repr()]}
diff --git a/src/jsonschema_gen/utils.py b/src/jsonschema_gen/utils.py
new file mode 100644
index 0000000..e9bafc2
--- /dev/null
+++ b/src/jsonschema_gen/utils.py
@@ -0,0 +1,84 @@
+"""Type hints manipulation utilities."""
+
+import inspect
+import sys
+from textwrap import dedent
+from typing import (Any, Type, Union, _GenericAlias, # noqa: magic
+ _TypedDictMeta)
+
+__all__ = [
+ "NoneType",
+ "is_generic",
+ "is_typeddict",
+ "is_namedtuple",
+ "is_union",
+ "get_origin",
+ "get_args",
+ "get_generic_alias",
+ "get_function_summary",
+ "compatible_py39",
+ "compatible_py310",
+ "compatible_py311",
+]
+
+NoneType = type(None)
+
+
+def compatible_py39() -> bool:
+    return sys.version_info >= (3, 9)  # 3.9+: builtin generics such as list[str]
+
+
+def compatible_py310() -> bool:
+    return sys.version_info >= (3, 10)  # 3.10+: PEP 604 unions (int | str)
+
+
+def compatible_py311() -> bool:
+    return sys.version_info >= (3, 11)  # 3.11+: typing.Required / typing.NotRequired
+
+
+def get_function_summary(doc: Union[str, None], /) -> Union[str, None]:
+ """Extract and normalize function description (first row)."""
+ if doc:
+ doc = dedent(doc)
+ doc = doc.split("\n")[0]
+ doc.capitalize()
+ return doc
+ return None
+
+
+def get_origin(value, /):
+    return getattr(value, "__origin__", None)  # e.g. list for List[str]; None for plain classes
+
+
+def get_args(value, /):
+    return getattr(value, "__args__", None)  # e.g. (str,) for List[str]; None when unparametrized
+
+
+def is_generic(value: Any, /) -> bool:
+    return isinstance(value, _GenericAlias)  # True for subscripted typing aliases like Dict[str, int]
+
+
+def get_generic_alias(obj: Type, /) -> Union[_GenericAlias, None]:
+    return next((n for n in getattr(obj, "__orig_bases__", []) if is_generic(n)), None)  # first generic base class, if any
+
+
+def is_typeddict(value, /) -> bool:
+    return isinstance(value, _TypedDictMeta)  # TypedDict classes share this private metaclass
+
+
+def is_namedtuple(value, /) -> bool:
+    return (
+        inspect.isclass(value)
+        and issubclass(value, tuple)
+        and hasattr(value, "_fields")  # _fields distinguishes namedtuples from plain tuples
+    )
+
+
+def is_union(value, /) -> bool:
+    if getattr(value, "__origin__", None) is Union:
+        return True
+    if compatible_py310():
+        from types import UnionType  # local import: types.UnionType exists only on 3.10+
+
+        return type(value) is UnionType  # PEP 604 unions, e.g. int | str
+    return False
diff --git a/tests/test_annotations.py b/tests/test_annotations.py
new file mode 100644
index 0000000..0d6b4b9
--- /dev/null
+++ b/tests/test_annotations.py
@@ -0,0 +1,355 @@
+import collections.abc as c
+import typing as t
+from dataclasses import dataclass
+from datetime import date, datetime
+from decimal import Decimal
+from enum import Enum
+from numbers import Number
+from uuid import UUID, SafeUUID
+
+import pytest
+
+from jsonschema_gen.parsers import *
+
+
+class _InnerSchema(t.TypedDict):
+    id: str
+    value: int
+
+
+class _CompoundSchema(t.TypedDict):
+    """Compound schema"""
+    id: str
+    value: _InnerSchema
+
+
+class _CompoundSchemaIndirectRef(t.TypedDict):
+    id: str
+    value: '_InnerSchema'  # forward reference as a string; must be resolved via the parser's `locals`
+
+
+class _Schema(t.TypedDict):
+    """Schema"""
+    id: str
+    value: int
+
+
+class _VariableSchema(t.TypedDict, total=False):
+    """Variable schema"""
+    id: str
+    value: int
+
+
+class _NamedTuple(t.NamedTuple):
+    id: int
+    data: dict
+    name: str = 'test'
+
+
+@dataclass
+class _DataClass:
+    """Data class"""
+    id: int
+    data: dict
+    flag: bool = True
+
+
+_NewTypeStr = t.NewType("_NewTypeStr", str)
+_TypeVar = t.TypeVar("_TypeVar")  # unbound: no type constraint
+_TypeVarStr = t.TypeVar("_TypeVarStr", bound=str)
+_T = t.TypeVar("_T", bound=dict)
+
+
+class _Enum(Enum):
+    """Enum value"""
+    value_1 = 'text'
+    value_2 = 1
+
+
+class _GenericClass(t.Generic[_T]):
+
+    def f_type_var(self, value: _T): ...
+
+    def f_default(self, value: int = 42): ...
+
+    def f_var_kws(self, value: int, **kws): ...
+
+    def f_pos_args(self, value: int, *args): ...
+
+
+class _GenericSubclass(_GenericClass[_Schema]):  # binds _T to _Schema for the inherited methods
+    ...
+
+
+class _TestClass:
+
+    value = {  # expected kwargs schema shared by all three method kinds below
+        'properties': {'name': {'type': 'string'}},
+        'type': 'object',
+        "required": ["name"],
+        "additionalProperties": False
+    }
+
+    def f(self, name: str, _private: str = None): ...
+
+    @classmethod
+    def f_cls(cls, name: str, _private: str = None): ...
+
+    @staticmethod
+    def f_static(name: str, _private: str = None): ...  # `_private` is absent from `value` — presumably skipped by the parser; confirm against parser rules
+
+
+BASIC_TYPES = [
+ (int, {'type': 'integer'}),
+ (float, {'type': 'number'}),
+ (str, {'type': 'string'}),
+ (bytes, {'type': 'string'}),
+ (bool, {'type': 'boolean'}),
+ (None, {'enum': [None]}),
+ (t.Any, {}),
+ (Decimal, {'type': 'number'}),
+ (Number, {'type': 'number'}),
+]
+
+SPECIAL_TYPES = [
+ (datetime, {'type': 'string', 'format': 'date-time'}),
+ (date, {'type': 'string', 'format': 'date'}),
+ (UUID, {'type': 'string', 'format': 'uuid'}),
+ (SafeUUID, {'type': 'string', 'format': 'uuid'}),
+ (_NamedTuple, {
+ 'title': '_NamedTuple',
+ 'description': '_NamedTuple(id, data, name)',
+ 'type': 'array',
+ 'prefixItems': [
+ {'type': 'integer', 'title': 'id'},
+ {'type': 'object', 'title': 'data'},
+ {'type': 'string', 'title': 'name', 'default': 'test'}
+ ],
+ }),
+ (_DataClass, {
+ 'title': '_DataClass',
+ 'description': 'Data class',
+ 'type': 'object',
+ 'properties': {
+ 'id': {'type': 'integer'},
+ 'data': {'type': 'object'},
+ 'flag': {'type': 'boolean', 'default': True},
+ },
+ 'required': ['id', 'data'],
+ 'additionalProperties': False
+ }),
+ (
+ _Enum, {
+ 'title': '_Enum',
+ 'description': 'Enum value',
+ 'enum': ['text', 1],
+ }
+ ),
+ (
+ _Enum.value_1, {
+ 'title': '_Enum.value_1',
+ 'const': 'text',
+ }
+ ),
+ (t.Hashable, {'title': 'typing.Hashable'}), # 'unsupported' types
+]
+
+COLLECTIONS = [
+ (list, {'type': 'array'}),
+ (t.List[str], {'type': 'array', 'items': {'type': 'string'}}),
+ (t.List[t.List[str]], {'type': 'array', 'items': {'type': 'array', 'items': {'type': 'string'}}}),
+ (t.Collection[str], {'type': 'array', 'items': {'type': 'string'}}),
+ (c.Collection, {'type': 'array'}),
+ (t.Iterable[str], {'type': 'array', 'items': {'type': 'string'}}),
+ (c.Iterable, {'type': 'array'}),
+ (tuple, {'type': 'array'}),
+ (t.Tuple[str], {'type': 'array', 'prefixItems': [{'type': 'string'}]}),
+ (t.Tuple[str, int], {'type': 'array', 'prefixItems': [{'type': 'string'}, {'type': 'integer'}]}),
+ (t.Tuple[str, ...], {'type': 'array', 'items': {'type': 'string'}}),
+ (t.Dict[str, t.Any], {'type': 'object'}),
+ (t.Dict[str, dict], {'type': 'object', 'patternProperties': {'^.+$': {'type': 'object'}}}),
+ (t.Dict[str, int], {'type': 'object', 'patternProperties': {'^.+$': {'type': 'integer'}}}),
+ (t.Mapping[str, int], {'type': 'object', 'patternProperties': {'^.+$': {'type': 'integer'}}}),
+ (t.MutableMapping[str, int], {'type': 'object', 'patternProperties': {'^.+$': {'type': 'integer'}}}),
+ (c.MutableMapping, {'type': 'object'}),
+ (set, {'type': 'array', 'uniqueItems': True}),
+ (frozenset, {'type': 'array', 'uniqueItems': True}),
+ (t.Set[int], {'type': 'array', 'items': {'type': 'integer'}, 'uniqueItems': True}),
+ (c.Set, {'type': 'array', 'uniqueItems': True}),
+ (t.MutableSet[int], {'type': 'array', 'items': {'type': 'integer'}, 'uniqueItems': True}),
+ (c.MutableSet, {'type': 'array', 'uniqueItems': True}),
+ (t.FrozenSet[int], {'type': 'array', 'items': {'type': 'integer'}, 'uniqueItems': True}),
+]
+
+TYPED_DICTS = [
+ (
+ _Schema, {
+ 'type': 'object',
+ 'title': '_Schema',
+ 'description': 'Schema',
+ 'properties': {'id': {'type': 'string'}, 'value': {'type': 'integer'}},
+ 'additionalProperties': False,
+ 'required': ['id', 'value']
+ }
+ ),
+ (
+ _VariableSchema, {
+ 'type': 'object',
+ 'title': '_VariableSchema',
+ 'description': 'Variable schema',
+ 'properties': {'id': {'type': 'string'}, 'value': {'type': 'integer'}},
+ 'additionalProperties': False,
+ 'required': []
+ }
+ ),
+ (
+ _CompoundSchema, {
+ 'type': 'object',
+ 'title': '_CompoundSchema',
+ 'description': 'Compound schema',
+ 'properties': {
+ 'id': {'type': 'string'},
+ 'value': {
+ 'type': 'object',
+ 'title': '_InnerSchema',
+ 'properties': {'id': {'type': 'string'}, 'value': {'type': 'integer'}},
+ 'additionalProperties': False,
+ 'required': ['id', 'value']
+ }
+ },
+ 'additionalProperties': False,
+ 'required': ['id', 'value']
+ }
+ ),
+ (
+ _CompoundSchemaIndirectRef, {
+ 'type': 'object',
+ 'title': '_CompoundSchemaIndirectRef',
+ 'properties': {
+ 'id': {'type': 'string'},
+ 'value': {
+ 'type': 'object',
+ 'title': '_InnerSchema',
+ 'properties': {'id': {'type': 'string'}, 'value': {'type': 'integer'}},
+ 'additionalProperties': False,
+ 'required': ['id', 'value']
+ }
+ },
+ 'additionalProperties': False,
+ 'required': ['id', 'value']
+ }
+ )
+]
+
+OPERATORS = [
+ (t.Union[str, int], {'anyOf': [{'type': 'string'}, {'type': 'integer'}]}),
+ (t.Optional[str], {'anyOf': [{'type': 'string'}, {'enum': [None]}]}),
+ (t.Union[str, None], {'anyOf': [{'type': 'string'}, {'enum': [None]}]}),
+ (_TypeVar, {'title': '_TypeVar'}),
+ (_TypeVarStr, {'title': '_TypeVarStr', 'type': 'string'}),
+ (_NewTypeStr, {'title': '_NewTypeStr', 'type': 'string'}),
+]
+
+
+@pytest.mark.parametrize(['annotation', 'result'], [
+    *BASIC_TYPES,
+    *COLLECTIONS,
+    *TYPED_DICTS,
+    *OPERATORS
+])
+def test_types(annotation, result):  # default (strict) parser; SPECIAL_TYPES are tested separately
+    assert (Parser(locals=globals())
+            .parse_annotation(annotation)
+            .json_repr() == result)
+
+
+@pytest.mark.parametrize(['annotation', 'result'], [
+    *SPECIAL_TYPES,
+])
+def test_special_types(annotation, result):
+    assert (Parser(strict=False, locals=globals())  # non-strict: permits date-time / uuid style types
+            .parse_annotation(annotation)
+            .json_repr() == result)
+
+
+@pytest.mark.parametrize(['annotation', 'result'], [
+    *SPECIAL_TYPES,
+    (t.Dict[int, str], ())  # non-string keys are not representable in JSON Schema objects
+])
+def test_strict_errors(annotation, result):  # `result` is unused: only the raised error matters in strict mode
+    with pytest.raises(IncompatibleTypesError):
+        Parser(locals=globals()).parse_annotation(annotation)
+
+
+@pytest.mark.parametrize(['cls', 'method', 'result'], [
+    (
+        _GenericClass, _GenericClass.f_pos_args, {
+            'additionalProperties': False,
+            'properties': {
+                'value': {'type': 'integer'}
+            },
+            'required': ['value'],
+            'type': 'object'
+        }
+    ),
+    (
+        _GenericClass, _GenericClass.f_default, {
+            'additionalProperties': False,
+            'properties': {
+                'value': {'type': 'integer', 'default': 42}
+            },
+            'required': [],
+            'type': 'object'
+        }
+    ),
+    (
+        _GenericClass, _GenericClass.f_var_kws, {
+            'additionalProperties': True,
+            'properties': {
+                'value': {'type': 'integer'}
+            },
+            'required': ['value'],
+            'type': 'object'
+        }
+    ),
+    (
+        _GenericClass, _GenericClass.f_type_var, {
+            'additionalProperties': False,
+            'properties': {
+                'value': {'title': '_T', 'type': 'object'}
+            },
+            'required': ['value'],
+            'type': 'object'
+        }
+    ), (
+        _GenericSubclass, _GenericSubclass.f_type_var, {
+            'additionalProperties': False,
+            'properties': {
+                'value': {
+                    'additionalProperties': False,
+                    'description': 'Schema',
+                    'properties': {
+                        'id': {'type': 'string'},
+                        'value': {'type': 'integer'}
+                    },
+                    'required': ['id', 'value'],
+                    'title': '_Schema',
+                    'type': 'object'
+                }
+            },
+            'required': ['value'],
+            'type': 'object'
+        }
+    )
+])
+def test_method_args_parser(cls, method, result):  # `kwargs` holds the schema for the method's keyword arguments
+    assert (Parser(locals=globals())
+            .parse_function(method, cls)
+            .kwargs
+            .json_repr() == result)
+
+
+def test_class_parser():
+    annotations = Parser(locals=globals()).parse_class(_TestClass)
+    for key, value in annotations.items():
+        assert value.kwargs.json_repr() == _TestClass.value  # f, f_cls and f_static all share the same expected schema
diff --git a/tests/test_annotations_310.py b/tests/test_annotations_310.py
new file mode 100644
index 0000000..bfd57aa
--- /dev/null
+++ b/tests/test_annotations_310.py
@@ -0,0 +1,19 @@
+import pytest
+
+from jsonschema_gen.parsers import *
+from jsonschema_gen.utils import compatible_py311
+
+TYPES_310 = []
+
+
+if compatible_py311():
+ TYPES_310 = [
+ (int | str, {'anyOf': [{'type': 'integer'}, {'type': 'string'}]}),
+ ]
+
+
+@pytest.mark.parametrize(['annotation', 'result'], [
+ *TYPES_310
+])
+def test_types(annotation, result):
+ assert Parser(locals=globals()).parse_annotation(annotation).json_repr() == result
diff --git a/tests/test_annotations_311.py b/tests/test_annotations_311.py
new file mode 100644
index 0000000..1768e62
--- /dev/null
+++ b/tests/test_annotations_311.py
@@ -0,0 +1,64 @@
+import typing as t
+
+import pytest
+
+from jsonschema_gen.parsers import *
+from jsonschema_gen.utils import compatible_py311
+
+TYPES_311 = []
+
+
+if compatible_py311():
+
+ class _SchemaNotRequired(t.TypedDict):
+ """Schema not required"""
+ id: str
+ value: t.NotRequired[int]
+
+
+ class _SchemaRequired(t.TypedDict, total=False):
+ """Schema required"""
+ id: t.Required[str]
+ value: int
+
+
+ TYPES_311 = [
+ (t.Dict[str, ...], {'type': 'object'}),
+ (t.Mapping[str, ...], {'type': 'object'}),
+ (t.MutableMapping[str, ...], {'type': 'object'}),
+ (
+ _SchemaNotRequired, {
+ 'title': '_SchemaNotRequired',
+ 'description': 'Schema not required',
+ "type": "object",
+ "properties": {
+ 'id': {'type': 'string'},
+ 'value': {'type': 'integer'}
+ },
+ "additionalProperties": False,
+ "required": ["id"]
+ }
+ ),
+ (
+ _SchemaRequired, {
+ 'title': '_SchemaRequired',
+ 'description': "Schema required",
+ "type": "object",
+ "properties": {
+ 'id': {'type': 'string'},
+ 'value': {'type': 'integer'}
+ },
+ "additionalProperties": False,
+ "required": ["id"]
+ }
+ ),
+ ]
+
+
+@pytest.mark.parametrize(['annotation', 'result'], [
+    *TYPES_311,
+])
+def test_types(annotation, result):  # TYPES_311 is empty below 3.11, so no cases are collected there
+    assert (Parser(locals=globals())
+            .parse_annotation(annotation)
+            .json_repr() == result)
diff --git a/tests/test_annotations_39.py b/tests/test_annotations_39.py
new file mode 100644
index 0000000..3a32a14
--- /dev/null
+++ b/tests/test_annotations_39.py
@@ -0,0 +1,44 @@
+import collections.abc as c
+import typing as t
+
+import pytest
+
+from jsonschema_gen.parsers import *
+from jsonschema_gen.utils import compatible_py39
+
+TYPES_39 = []
+
+if compatible_py39():
+ TYPES_39 = [
+ (dict, {'type': 'object'}),
+ (list[str], {'type': 'array', 'items': {'type': 'string'}}),
+ (tuple[str], {'type': 'array', 'prefixItems': [{'type': 'string'}]}),
+ (tuple[str, int], {'type': 'array', 'prefixItems': [{'type': 'string'}, {'type': 'integer'}]}),
+ (tuple[str, ...], {'type': 'array', 'items': {'type': 'string'}}),
+ (set[int], {'type': 'array', 'items': {'type': 'integer'}, 'uniqueItems': True}),
+ (c.Collection[str], {'type': 'array', 'items': {'type': 'string'}}),
+ (c.Iterable[str], {'type': 'array', 'items': {'type': 'string'}}),
+ (c.Mapping[str, ...], {'type': 'object'}),
+ (c.Mapping[str, int], {'type': 'object', 'patternProperties': {'^.+$': {'type': 'integer'}}}),
+ (c.MutableMapping[str, ...], {'type': 'object'}),
+ (c.MutableMapping[str, int], {'type': 'object', 'patternProperties': {'^.+$': {'type': 'integer'}}}),
+ (c.Set[int], {'type': 'array', 'items': {'type': 'integer'}, 'uniqueItems': True}),
+ (c.MutableSet[int], {'type': 'array', 'items': {'type': 'integer'}, 'uniqueItems': True}),
+ (t.List, {'type': 'array'}),
+ (t.Collection, {'type': 'array'}),
+ (t.Iterable, {'type': 'array'}),
+ (t.Tuple, {'type': 'array'}),
+ (t.Dict, {'type': 'object'}),
+ (t.Set, {'type': 'array', 'uniqueItems': True}),
+ (t.MutableSet, {'type': 'array', 'uniqueItems': True}),
+ (t.FrozenSet, {'type': 'array', 'uniqueItems': True}),
+ (t.Mapping, {'type': 'object'}),
+ (t.MutableMapping, {'type': 'object'}),
+ ]
+
+
+@pytest.mark.parametrize(['annotation', 'result'], [
+    *TYPES_39
+])
+def test_types(annotation, result):  # TYPES_39 is empty below 3.9, so no cases are collected there
+    assert Parser(locals=globals()).parse_annotation(annotation).json_repr() == result
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000..986eb4a
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,8 @@
+[tox]
+min_version = 4.0
+envlist = py{38,39,310,311,312,313}
+
+[testenv]
+# setenv belongs to [testenv]; tox silently ignores it in the [tox] section
+setenv = VIRTUALENV_DISCOVERY=pyenv
+deps = .[test]