diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 76b217213..c77ce0c50 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -12,7 +12,7 @@ jobs: strategy: fail-fast: true matrix: - python: ["3.9"] + python: ["3.7", "3.10"] os: [ubuntu-latest] # TODO: macos-latest, windows-latest runs-on: ${{ matrix.os }} steps: diff --git a/.gitignore b/.gitignore index 69e1e7aee..26d3d4997 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,9 @@ .venv +__pycache__ +/docs/_build temporalio/api/* !temporalio/api/__init__.py temporalio/bridge/proto/* !temporalio/bridge/proto/__init__.py -__pycache__ +temporalio/bridge/target/ + diff --git a/docs/api.rst b/docs/api.rst new file mode 100644 index 000000000..6d6322537 --- /dev/null +++ b/docs/api.rst @@ -0,0 +1,18 @@ +.. _api: + +API +=== + +.. module:: temporalio + +Client +------ + +.. automodule:: temporalio.client + :members: + +Converters +---------- + +.. automodule:: temporalio.converter + :members: diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 000000000..cf3bf98e2 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,69 @@ +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. 
+# +import os +import sys + +sys.path.insert(0, os.path.abspath("../")) + + +# -- Project information ----------------------------------------------------- + +project = "Temporal Python SDK" +copyright = "2022, Temporal Technologies Inc" +author = "Temporal Technologies Inc" + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.intersphinx", + "sphinx.ext.napoleon", +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = "sphinx_rtd_theme" + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +intersphinx_mapping = { + "python": ("https://docs.python.org/3/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + +autodoc_docstring_signature = True + +autodoc_typehints = "description" + +autodoc_typehints_description_target = "documented" + +autodoc_preserve_defaults = True diff --git a/docs/direct_api.rst b/docs/direct_api.rst new file mode 100644 index 000000000..73d605647 --- /dev/null +++ b/docs/direct_api.rst @@ -0,0 +1,69 @@ +.. 
_direct_api: + +Direct gRPC API +=============== + +Raw gRPC Client +--------------- + +.. automodule:: temporalio.workflow_service + :members: + +Temporal API Objects +-------------------- + +.. automodule:: temporalio.api.common.v1 + :members: + :imported-members: + +.. automodule:: temporalio.api.command.v1 + :members: + :imported-members: + +.. automodule:: temporalio.api.enums.v1 + :members: + :imported-members: + +.. automodule:: temporalio.api.errordetails.v1 + :members: + :imported-members: + +.. automodule:: temporalio.api.failure.v1 + :members: + :imported-members: + +.. automodule:: temporalio.api.filter.v1 + :members: + :imported-members: + +.. automodule:: temporalio.api.history.v1 + :members: + :imported-members: + +.. automodule:: temporalio.api.namespace.v1 + :members: + :imported-members: + +.. automodule:: temporalio.api.query.v1 + :members: + :imported-members: + +.. automodule:: temporalio.api.replication.v1 + :members: + :imported-members: + +.. automodule:: temporalio.api.taskqueue.v1 + :members: + :imported-members: + +.. automodule:: temporalio.api.version.v1 + :members: + :imported-members: + +.. automodule:: temporalio.api.workflow.v1 + :members: + :imported-members: + +.. automodule:: temporalio.api.workflowservice.v1 + :members: + :imported-members: \ No newline at end of file diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 000000000..7aed7c49c --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,23 @@ +.. Temporal Python SDK documentation master file, created by + sphinx-quickstart on Fri Feb 4 11:52:42 2022. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Temporal Python SDK +=================== + +.. 
toctree:: + :maxdepth: 2 + :caption: Contents: + + api + direct_api + + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/poetry.lock b/poetry.lock index 3f9179203..83fa43598 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,3 +1,11 @@ +[[package]] +name = "alabaster" +version = "0.7.12" +description = "A configurable sidebar-enabled Sphinx theme" +category = "dev" +optional = false +python-versions = "*" + [[package]] name = "atomicwrites" version = "1.4.0" @@ -20,6 +28,17 @@ docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +[[package]] +name = "babel" +version = "2.9.1" +description = "Internationalization utilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.dependencies] +pytz = ">=2015.7" + [[package]] name = "black" version = "21.12b0" @@ -47,6 +66,25 @@ jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] python2 = ["typed-ast (>=1.4.3)"] uvloop = ["uvloop (>=0.15.2)"] +[[package]] +name = "certifi" +version = "2021.10.8" +description = "Python package for providing Mozilla's CA Bundle." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "charset-normalizer" +version = "2.0.11" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+category = "dev" +optional = false +python-versions = ">=3.5.0" + +[package.extras] +unicode_backport = ["unicodedata2"] + [[package]] name = "click" version = "8.0.3" @@ -67,6 +105,14 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "docutils" +version = "0.17.1" +description = "Docutils -- Python Documentation Utilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + [[package]] name = "grpcio" version = "1.43.0" @@ -93,6 +139,22 @@ python-versions = ">=3.6" grpcio = ">=1.43.0" protobuf = ">=3.5.0.post1,<4.0dev" +[[package]] +name = "idna" +version = "3.3" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "imagesize" +version = "1.3.0" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + [[package]] name = "importlib-metadata" version = "4.10.1" @@ -132,6 +194,39 @@ requirements_deprecated_finder = ["pipreqs", "pip-api"] colors = ["colorama (>=0.4.3,<0.5.0)"] plugins = ["setuptools"] +[[package]] +name = "jinja2" +version = "3.0.3" +description = "A very fast and expressive template engine." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "markupsafe" +version = "2.0.1" +description = "Safely add untrusted strings to HTML/XML markup." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "maturin" +version = "0.12.6" +description = "Build and publish crates with pyo3, rust-cpython and cffi bindings as well as rust binaries as python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +toml = ">=0.10.2,<0.11.0" + [[package]] name = "mypy" version = "0.931" @@ -216,9 +311,20 @@ importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "pockets" +version = "0.9.1" +description = "A collection of helpful Python tools!" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +six = ">=1.5.2" + [[package]] name = "protobuf" -version = "3.19.3" +version = "3.19.4" description = "Protocol Buffers" category = "dev" optional = false @@ -232,6 +338,28 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "pydocstyle" +version = "6.1.1" +description = "Python docstring style checker" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +snowballstemmer = "*" + +[package.extras] +toml = ["toml"] + +[[package]] +name = "pygments" +version = "2.11.2" +description = "Pygments is a syntax highlighting package written in Python." 
+category = "dev" +optional = false +python-versions = ">=3.5" + [[package]] name = "pyparsing" version = "3.0.7" @@ -280,6 +408,32 @@ typing-extensions = {version = ">=4.0", markers = "python_version < \"3.8\""} [package.extras] testing = ["coverage (==6.2)", "hypothesis (>=5.7.1)", "flaky (>=3.5.0)", "mypy (==0.931)"] +[[package]] +name = "pytz" +version = "2021.3" +description = "World timezone definitions, modern and historical" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "requests" +version = "2.27.1" +description = "Python HTTP for Humans." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} +idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] + [[package]] name = "six" version = "1.16.0" @@ -288,6 +442,144 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
+category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "sphinx" +version = "4.4.0" +description = "Python documentation generator" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=1.3" +colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.14,<0.18" +imagesize = "*" +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} +Jinja2 = ">=2.3" +packaging = "*" +Pygments = ">=2.0" +requests = ">=2.5.0" +snowballstemmer = ">=1.1" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.931)", "docutils-stubs", "types-typed-ast", "types-requests"] +test = ["pytest", "pytest-cov", "html5lib", "cython", "typed-ast"] + +[[package]] +name = "sphinx-rtd-theme" +version = "1.0.0" +description = "Read the Docs theme for Sphinx" +category = "dev" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" + +[package.dependencies] +docutils = "<0.18" +sphinx = ">=1.6" + +[package.extras] +dev = ["transifex-client", "sphinxcontrib-httpdomain", "bump2version"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.2" +description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.2" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
+category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.0" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest", "html5lib"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +test = ["pytest", "flake8", "mypy"] + +[[package]] +name = "sphinxcontrib-napoleon" +version = "0.7" +description = "Sphinx \"napoleon\" extension." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +pockets = ">=0.3" +six = ">=1.5.2" + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.3" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.5" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
+category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + [[package]] name = "toml" version = "0.10.2" @@ -314,7 +606,7 @@ python-versions = ">=3.6" [[package]] name = "types-futures" -version = "3.3.7" +version = "3.3.8" description = "Typing stubs for futures" category = "main" optional = false @@ -322,7 +614,7 @@ python-versions = "*" [[package]] name = "types-protobuf" -version = "3.19.6" +version = "3.19.8" description = "Typing stubs for protobuf" category = "main" optional = false @@ -339,6 +631,19 @@ category = "main" optional = false python-versions = ">=3.6" +[[package]] +name = "urllib3" +version = "1.26.8" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" + +[package.extras] +brotli = ["brotlipy (>=0.6.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + [[package]] name = "zipp" version = "3.7.0" @@ -354,9 +659,13 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = "^3.7" -content-hash = "2c3b7797e9c425dff9cb79f175ff7d4f1a1e2097f9c7a6e6728cd26f4e6bf4e0" +content-hash = "088af139ce74f15f34950abcf0f6d5a1333572d062bbcb23004580808ec18de0" [metadata.files] +alabaster = [ + {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, + {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, +] atomicwrites = [ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, {file = "atomicwrites-1.4.0.tar.gz", hash = 
"sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, @@ -365,10 +674,22 @@ attrs = [ {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, ] +babel = [ + {file = "Babel-2.9.1-py2.py3-none-any.whl", hash = "sha256:ab49e12b91d937cd11f0b67cb259a57ab4ad2b59ac7a3b41d6c06c0ac5b0def9"}, + {file = "Babel-2.9.1.tar.gz", hash = "sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0"}, +] black = [ {file = "black-21.12b0-py3-none-any.whl", hash = "sha256:a615e69ae185e08fdd73e4715e260e2479c861b5740057fde6e8b4e3b7dd589f"}, {file = "black-21.12b0.tar.gz", hash = "sha256:77b80f693a569e2e527958459634f18df9b0ba2625ba4e0c2d5da5be42e6f2b3"}, ] +certifi = [ + {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, + {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, +] +charset-normalizer = [ + {file = "charset-normalizer-2.0.11.tar.gz", hash = "sha256:98398a9d69ee80548c762ba991a4728bfc3836768ed226b3945908d1a688371c"}, + {file = "charset_normalizer-2.0.11-py3-none-any.whl", hash = "sha256:2842d8f5e82a1f6aa437380934d5e1cd4fcf2003b06fed6940769c164a480a45"}, +] click = [ {file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"}, {file = "click-8.0.3.tar.gz", hash = "sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"}, @@ -377,6 +698,10 @@ colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] +docutils = [ + {file = 
"docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, + {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, +] grpcio = [ {file = "grpcio-1.43.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:a4e786a8ee8b30b25d70ee52cda6d1dbba2a8ca2f1208d8e20ed8280774f15c8"}, {file = "grpcio-1.43.0-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:af9c3742f6c13575c0d4147a8454da0ff5308c4d9469462ff18402c6416942fe"}, @@ -469,6 +794,14 @@ grpcio-tools = [ {file = "grpcio_tools-1.43.0-cp39-cp39-win32.whl", hash = "sha256:ebfb94ddb454a6dc3a505d9531dc81c948e6364e181b8795bfad3f3f479974dc"}, {file = "grpcio_tools-1.43.0-cp39-cp39-win_amd64.whl", hash = "sha256:d21928b680e6e29538688cffbf53f3d5a53cff0ec8f0c33139641700045bdf1a"}, ] +idna = [ + {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, +] +imagesize = [ + {file = "imagesize-1.3.0-py2.py3-none-any.whl", hash = "sha256:1db2f82529e53c3e929e8926a1fa9235aa82d0bd0c580359c67ec31b2fddaa8c"}, + {file = "imagesize-1.3.0.tar.gz", hash = "sha256:cd1750d452385ca327479d45b64d9c7729ecf0b3969a58148298c77092261f9d"}, +] importlib-metadata = [ {file = "importlib_metadata-4.10.1-py3-none-any.whl", hash = "sha256:899e2a40a8c4a1aec681feef45733de8a6c58f3f6a0dbed2eb6574b4387a77b6"}, {file = "importlib_metadata-4.10.1.tar.gz", hash = "sha256:951f0d8a5b7260e9db5e41d429285b5f451e928479f19d80818878527d36e95e"}, @@ -481,6 +814,100 @@ isort = [ {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, ] +jinja2 = [ + {file = "Jinja2-3.0.3-py3-none-any.whl", hash = 
"sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"}, + {file = "Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"}, +] +markupsafe = [ + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, + {file = 
"MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = 
"sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, + {file = 
"MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, + {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, +] +maturin = [ + {file = "maturin-0.12.6-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:7c36e8ee53fb6f544d8f1b1b480035bf204806494be5aa44394a278b5cf9b522"}, + {file = "maturin-0.12.6-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:90d6ed47fa51d902e0afba0b469652f76e1b0e799af2910712c5dec3d1343003"}, + {file = "maturin-0.12.6-py3-none-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8f703e15fefc1424d3d4e65db62364bf930cbc46cf0fdaf44b788f6d895dc8b7"}, + {file = "maturin-0.12.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d502c69f55e21de00645005aa1e4562979c07e26264e5ca00c0369fac1419b96"}, + {file = "maturin-0.12.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:55b9e04eaa6809ed2b6b62bf56e0cfc9cc700b0824ffa565ff3b6cbcff519a46"}, + {file = 
"maturin-0.12.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:da86507d756c478b0847ae673f49073643d48633d0297cfdf8a1af4205c8cdce"}, + {file = "maturin-0.12.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ff18b33ca7453c38868d3a8a74540052f12b5a67de87c8ed409c19ba6aad48f2"}, + {file = "maturin-0.12.6-py3-none-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a67e20421d5f9f82f9c22b2fe5aa787b21f2dbbb8456d939b57d6999e02c4ccf"}, + {file = "maturin-0.12.6-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:52a278cae8905b52be760a01186647c94bb87bd7e5788ff1fa6e8fd53e075f7b"}, + {file = "maturin-0.12.6-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:ef5fe408f5f1966d80bc78a65288501d721aed5a33a6bfe41c801504f266197d"}, + {file = "maturin-0.12.6-py3-none-musllinux_1_1_i686.whl", hash = "sha256:f0f76f607652be769f2c3f3c41eb82b450901b6e17208c1086fd2e896056f5de"}, + {file = "maturin-0.12.6-py3-none-musllinux_1_1_ppc64le.whl", hash = "sha256:9829673dc54b306dd92ff3e5230a9e6e9704573ff2a4d1289728c277686113be"}, + {file = "maturin-0.12.6-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:1d77c2ed1ec225a5d013195605f32d8206230da441e1c95eb2627692acf68b44"}, + {file = "maturin-0.12.6-py3-none-win32.whl", hash = "sha256:1ecafea37426a49e2f5369b350c62a76a87ed6fb97d627e08d7349df99099dc8"}, + {file = "maturin-0.12.6-py3-none-win_amd64.whl", hash = "sha256:9c5476aae0d60215039d76d64175791c20741e7b6ab0174b6bb356289029890e"}, + {file = "maturin-0.12.6-py3-none-win_arm64.whl", hash = "sha256:3c28d685e0449f0110b1f5810643eca348a382adffef9cdd1a76cb181e3ddede"}, + {file = "maturin-0.12.6.tar.gz", hash = "sha256:2b14cfae808b45a130e19b2999acea423d2e10e7a29ae2336996ba72ba442ff6"}, +] mypy = [ {file = "mypy-0.931-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3c5b42d0815e15518b1f0990cff7a705805961613e701db60387e6fb663fe78a"}, {file = "mypy-0.931-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:c89702cac5b302f0c5d33b172d2b55b5df2bede3344a2fbed99ff96bddb2cf00"}, @@ -527,38 +954,50 @@ pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] +pockets = [ + {file = "pockets-0.9.1-py2.py3-none-any.whl", hash = "sha256:68597934193c08a08eb2bf6a1d85593f627c22f9b065cc727a4f03f669d96d86"}, + {file = "pockets-0.9.1.tar.gz", hash = "sha256:9320f1a3c6f7a9133fe3b571f283bcf3353cd70249025ae8d618e40e9f7e92b3"}, +] protobuf = [ - {file = "protobuf-3.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1cb2ed66aac593adbf6dca4f07cd7ee7e2958b17bbc85b2cc8bc564ebeb258ec"}, - {file = "protobuf-3.19.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:898bda9cd37ec0c781b598891e86435de80c3bfa53eb483a9dac5a11ec93e942"}, - {file = "protobuf-3.19.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ad761ef3be34c8bdc7285bec4b40372a8dad9e70cfbdc1793cd3cf4c1a4ce74"}, - {file = "protobuf-3.19.3-cp310-cp310-win32.whl", hash = "sha256:2cddcbcc222f3144765ccccdb35d3621dc1544da57a9aca7e1944c1a4fe3db11"}, - {file = "protobuf-3.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:6202df8ee8457cb00810c6e76ced480f22a1e4e02c899a14e7b6e6e1de09f938"}, - {file = "protobuf-3.19.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:397d82f1c58b76445469c8c06b8dee1ff67b3053639d054f52599a458fac9bc6"}, - {file = "protobuf-3.19.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e54b8650e849ee8e95e481024bff92cf98f5ec61c7650cb838d928a140adcb63"}, - {file = "protobuf-3.19.3-cp36-cp36m-win32.whl", hash = "sha256:3bf3a07d17ba3511fe5fa916afb7351f482ab5dbab5afe71a7a384274a2cd550"}, - {file = "protobuf-3.19.3-cp36-cp36m-win_amd64.whl", hash = "sha256:afa8122de8064fd577f49ae9eef433561c8ace97a0a7b969d56e8b1d39b5d177"}, - {file = 
"protobuf-3.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18c40a1b8721026a85187640f1786d52407dc9c1ba8ec38accb57a46e84015f6"}, - {file = "protobuf-3.19.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:af7238849fa79285d448a24db686517570099739527a03c9c2971cce99cc5ae2"}, - {file = "protobuf-3.19.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e765e6dfbbb02c55e4d6d1145743401a84fc0b508f5a81b2c5a738cf86353139"}, - {file = "protobuf-3.19.3-cp37-cp37m-win32.whl", hash = "sha256:c781402ed5396ab56358d7b866d78c03a77cbc26ba0598d8bb0ac32084b1a257"}, - {file = "protobuf-3.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:544fe9705189b249380fae07952d220c97f5c6c9372a6f936cc83a79601dcb70"}, - {file = "protobuf-3.19.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:84bf3aa3efb00dbe1c7ed55da0f20800b0662541e582d7e62b3e1464d61ed365"}, - {file = "protobuf-3.19.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:3f80a3491eaca767cdd86cb8660dc778f634b44abdb0dffc9b2a8e8d0cd617d0"}, - {file = "protobuf-3.19.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9401d96552befcc7311f5ef8f0fa7dba0ef5fd805466b158b141606cd0ab6a8"}, - {file = "protobuf-3.19.3-cp38-cp38-win32.whl", hash = "sha256:ef02d112c025e83db5d1188a847e358beab3e4bbfbbaf10eaf69e67359af51b2"}, - {file = "protobuf-3.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:1291a0a7db7d792745c99d4657b4c5c4942695c8b1ac1bfb993a34035ec123f7"}, - {file = "protobuf-3.19.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:49677e5e9c7ea1245a90c2e8a00d304598f22ea3aa0628f0e0a530a9e70665fa"}, - {file = "protobuf-3.19.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:df2ba379ee42427e8fcc6a0a76843bff6efb34ef5266b17f95043939b5e25b69"}, - {file = "protobuf-3.19.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2acd7ca329be544d1a603d5f13a4e34a3791c90d651ebaf130ba2e43ae5397c6"}, - {file = "protobuf-3.19.3-cp39-cp39-win32.whl", hash = 
"sha256:b53519b2ebec70cfe24b4ddda21e9843f0918d7c3627a785393fb35d402ab8ad"}, - {file = "protobuf-3.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:8ceaf5fdb72c8e1fcb7be9f2b3b07482ce058a3548180c0bdd5c7e4ac5e14165"}, - {file = "protobuf-3.19.3-py2.py3-none-any.whl", hash = "sha256:f6d4b5b7595a57e69eb7314c67bef4a3c745b4caf91accaf72913d8e0635111b"}, - {file = "protobuf-3.19.3.tar.gz", hash = "sha256:d975a6314fbf5c524d4981e24294739216b5fb81ef3c14b86fb4b045d6690907"}, + {file = "protobuf-3.19.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f51d5a9f137f7a2cec2d326a74b6e3fc79d635d69ffe1b036d39fc7d75430d37"}, + {file = "protobuf-3.19.4-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:09297b7972da685ce269ec52af761743714996b4381c085205914c41fcab59fb"}, + {file = "protobuf-3.19.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:072fbc78d705d3edc7ccac58a62c4c8e0cec856987da7df8aca86e647be4e35c"}, + {file = "protobuf-3.19.4-cp310-cp310-win32.whl", hash = "sha256:7bb03bc2873a2842e5ebb4801f5c7ff1bfbdf426f85d0172f7644fcda0671ae0"}, + {file = "protobuf-3.19.4-cp310-cp310-win_amd64.whl", hash = "sha256:f358aa33e03b7a84e0d91270a4d4d8f5df6921abe99a377828839e8ed0c04e07"}, + {file = "protobuf-3.19.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1c91ef4110fdd2c590effb5dca8fdbdcb3bf563eece99287019c4204f53d81a4"}, + {file = "protobuf-3.19.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c438268eebb8cf039552897d78f402d734a404f1360592fef55297285f7f953f"}, + {file = "protobuf-3.19.4-cp36-cp36m-win32.whl", hash = "sha256:835a9c949dc193953c319603b2961c5c8f4327957fe23d914ca80d982665e8ee"}, + {file = "protobuf-3.19.4-cp36-cp36m-win_amd64.whl", hash = "sha256:4276cdec4447bd5015453e41bdc0c0c1234eda08420b7c9a18b8d647add51e4b"}, + {file = "protobuf-3.19.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6cbc312be5e71869d9d5ea25147cdf652a6781cf4d906497ca7690b7b9b5df13"}, + {file = 
"protobuf-3.19.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:54a1473077f3b616779ce31f477351a45b4fef8c9fd7892d6d87e287a38df368"}, + {file = "protobuf-3.19.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:435bb78b37fc386f9275a7035fe4fb1364484e38980d0dd91bc834a02c5ec909"}, + {file = "protobuf-3.19.4-cp37-cp37m-win32.whl", hash = "sha256:16f519de1313f1b7139ad70772e7db515b1420d208cb16c6d7858ea989fc64a9"}, + {file = "protobuf-3.19.4-cp37-cp37m-win_amd64.whl", hash = "sha256:cdc076c03381f5c1d9bb1abdcc5503d9ca8b53cf0a9d31a9f6754ec9e6c8af0f"}, + {file = "protobuf-3.19.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:69da7d39e39942bd52848438462674c463e23963a1fdaa84d88df7fbd7e749b2"}, + {file = "protobuf-3.19.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:48ed3877fa43e22bcacc852ca76d4775741f9709dd9575881a373bd3e85e54b2"}, + {file = "protobuf-3.19.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd95d1dfb9c4f4563e6093a9aa19d9c186bf98fa54da5252531cc0d3a07977e7"}, + {file = "protobuf-3.19.4-cp38-cp38-win32.whl", hash = "sha256:b38057450a0c566cbd04890a40edf916db890f2818e8682221611d78dc32ae26"}, + {file = "protobuf-3.19.4-cp38-cp38-win_amd64.whl", hash = "sha256:7ca7da9c339ca8890d66958f5462beabd611eca6c958691a8fe6eccbd1eb0c6e"}, + {file = "protobuf-3.19.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:36cecbabbda242915529b8ff364f2263cd4de7c46bbe361418b5ed859677ba58"}, + {file = "protobuf-3.19.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:c1068287025f8ea025103e37d62ffd63fec8e9e636246b89c341aeda8a67c934"}, + {file = "protobuf-3.19.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96bd766831596d6014ca88d86dc8fe0fb2e428c0b02432fd9db3943202bf8c5e"}, + {file = "protobuf-3.19.4-cp39-cp39-win32.whl", hash = "sha256:84123274d982b9e248a143dadd1b9815049f4477dc783bf84efe6250eb4b836a"}, + {file = "protobuf-3.19.4-cp39-cp39-win_amd64.whl", hash = 
"sha256:3112b58aac3bac9c8be2b60a9daf6b558ca3f7681c130dcdd788ade7c9ffbdca"}, + {file = "protobuf-3.19.4-py2.py3-none-any.whl", hash = "sha256:8961c3a78ebfcd000920c9060a262f082f29838682b1f7201889300c1fbe0616"}, + {file = "protobuf-3.19.4.tar.gz", hash = "sha256:9df0c10adf3e83015ced42a9a7bd64e13d06c4cf45c340d2c63020ea04499d0a"}, ] py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] +pydocstyle = [ + {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, + {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, +] +pygments = [ + {file = "Pygments-2.11.2-py3-none-any.whl", hash = "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65"}, + {file = "Pygments-2.11.2.tar.gz", hash = "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"}, +] pyparsing = [ {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"}, @@ -571,10 +1010,58 @@ pytest-asyncio = [ {file = "pytest-asyncio-0.17.2.tar.gz", hash = "sha256:6d895b02432c028e6957d25fc936494e78c6305736e785d9fee408b1efbc7ff4"}, {file = "pytest_asyncio-0.17.2-py3-none-any.whl", hash = "sha256:e0fe5dbea40516b661ef1bcfe0bd9461c2847c4ef4bb40012324f2454fb7d56d"}, ] +pytz = [ + {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, + {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, +] +requests = [ + {file = "requests-2.27.1-py2.py3-none-any.whl", hash = 
"sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, + {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, +] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +snowballstemmer = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] +sphinx = [ + {file = "Sphinx-4.4.0-py3-none-any.whl", hash = "sha256:5da895959511473857b6d0200f56865ed62c31e8f82dd338063b84ec022701fe"}, + {file = "Sphinx-4.4.0.tar.gz", hash = "sha256:6caad9786055cb1fa22b4a365c1775816b876f91966481765d7d50e9f0dd35cc"}, +] +sphinx-rtd-theme = [ + {file = "sphinx_rtd_theme-1.0.0-py2.py3-none-any.whl", hash = "sha256:4d35a56f4508cfee4c4fb604373ede6feae2a306731d533f409ef5c3496fdbd8"}, + {file = "sphinx_rtd_theme-1.0.0.tar.gz", hash = "sha256:eec6d497e4c2195fa0e8b2016b337532b8a699a68bcb22a512870e16925c6a5c"}, +] +sphinxcontrib-applehelp = [ + {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, + {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, +] +sphinxcontrib-devhelp = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] +sphinxcontrib-htmlhelp = [ + {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = 
"sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, + {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, +] +sphinxcontrib-jsmath = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] +sphinxcontrib-napoleon = [ + {file = "sphinxcontrib-napoleon-0.7.tar.gz", hash = "sha256:407382beed396e9f2d7f3043fad6afda95719204a1e1a231ac865f40abcbfcf8"}, + {file = "sphinxcontrib_napoleon-0.7-py2.py3-none-any.whl", hash = "sha256:711e41a3974bdf110a484aec4c1a556799eb0b3f3b897521a018ad7e2db13fef"}, +] +sphinxcontrib-qthelp = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] +sphinxcontrib-serializinghtml = [ + {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, + {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, +] toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, @@ -610,17 +1097,21 @@ typed-ast = [ {file = "typed_ast-1.5.2.tar.gz", hash = "sha256:525a2d4088e70a9f75b08b3f87a51acc9cde640e19cc523c7e41aa355564ae27"}, ] types-futures = [ - {file = "types-futures-3.3.7.tar.gz", hash = 
"sha256:d286db818fb67e3ce5c28acd9058c067329b91865acc443ac3cf91497fa36f05"}, - {file = "types_futures-3.3.7-py3-none-any.whl", hash = "sha256:67fcd373796c8b4fb94fdb9cb006718c34be306f292b1e1f1e01f980d3630be5"}, + {file = "types-futures-3.3.8.tar.gz", hash = "sha256:6fe8ccc2c2af7ef2fdd9bf73eab6d617074f09f30ad7d373510b4043d39c42de"}, + {file = "types_futures-3.3.8-py3-none-any.whl", hash = "sha256:d6e97ec51d56b96debfbf1dea32ebec22c1687f16d2547ea0a34b48db45df205"}, ] types-protobuf = [ - {file = "types-protobuf-3.19.6.tar.gz", hash = "sha256:ba586359dc80f09abbff7d143cedd5c86d2c6e376b46647c7aa2262741accba1"}, - {file = "types_protobuf-3.19.6-py3-none-any.whl", hash = "sha256:0d704eca8c7cf2cc9d8b52f63a5d03d7c39dff8f0915c23b3e90cb1563617e18"}, + {file = "types-protobuf-3.19.8.tar.gz", hash = "sha256:5ff1a5b7d0f36e3600ad1a3d4b55ba6c446cef2ef82d25f06a0aa43912345fb4"}, + {file = "types_protobuf-3.19.8-py3-none-any.whl", hash = "sha256:1364327ebfb4360b36bd62b55fb32f704a516c8c26d82bad566938a23e644eca"}, ] typing-extensions = [ {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, ] +urllib3 = [ + {file = "urllib3-1.26.8-py2.py3-none-any.whl", hash = "sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed"}, + {file = "urllib3-1.26.8.tar.gz", hash = "sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"}, +] zipp = [ {file = "zipp-3.7.0-py3-none-any.whl", hash = "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375"}, {file = "zipp-3.7.0.tar.gz", hash = "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d"}, diff --git a/pyproject.toml b/pyproject.toml index 122543b43..269539543 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,16 +21,24 @@ typing-extensions = "^4.0.1" black = "^21.12b0" 
grpcio-tools = "^1.43.0" isort = "^5.10.1" +maturin = "^0.12.6" mypy = "^0.931" mypy-protobuf = "^3.2.0" +pydocstyle = "^6.1.1" pytest = "^6.2.5" pytest-asyncio = "^0.17.2" +Sphinx = "^4.4.0" +sphinx-rtd-theme = "^1.0.0" +sphinxcontrib-napoleon = "^0.7" [tool.poe.tasks] -build = ["gen-protos", "test"] +build = ["gen-protos", "build-bridge"] +build-bridge = "python scripts/build-bridge.py" format = [{cmd = "black ."}, {cmd = "isort ."}] +gen-docs = "sphinx-build docs docs/_build" gen-protos = "python scripts/gen-protos.py" -lint = [{cmd = "black --check ."}, {cmd = "isort --check-only ."}, "lint-types"] +lint = [{cmd = "black --check ."}, {cmd = "isort --check-only ."}, "lint-types"] # TODO(cretz): Add lint-docs +lint-docs = "pydocstyle" lint-types = "mypy ." test = "pytest" @@ -49,6 +57,11 @@ exclude = [ 'temporalio/bridge/proto', ] +[tool.pydocstyle] +convention = "google" +# https://github.com/PyCQA/pydocstyle/issues/363#issuecomment-625563088 +match_dir = "^(?!(docs|scripts|tests|api|proto|\\.)).*" + [build-system] build-backend = "poetry.core.masonry.api" requires = ["poetry-core>=1.0.0"] diff --git a/scripts/build-bridge.py b/scripts/build-bridge.py new file mode 100644 index 000000000..8ece044e9 --- /dev/null +++ b/scripts/build-bridge.py @@ -0,0 +1,12 @@ +import subprocess +import sys +from pathlib import Path + +base_dir = Path(__file__).parent.parent + +if __name__ == "__main__": + print("Building Core bridge", file=sys.stderr) + subprocess.check_call( + ["maturin", "develop"], cwd=str(base_dir / "temporalio" / "bridge") + ) + print("Done", file=sys.stderr) diff --git a/scripts/gen-protos.py b/scripts/gen-protos.py index 4a6129961..d01d2b9ea 100644 --- a/scripts/gen-protos.py +++ b/scripts/gen-protos.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 import collections -import os import re import shutil import subprocess @@ -8,7 +7,7 @@ import tempfile from functools import partial from pathlib import Path -from typing import Mapping +from typing import 
List, Mapping base_dir = Path(__file__).parent.parent proto_dir = base_dir / "temporalio" / "bridge" / "sdk-core" / "protos" @@ -19,15 +18,19 @@ api_out_dir = base_dir / "temporalio" / "api" sdk_out_dir = base_dir / "temporalio" / "bridge" / "proto" -fix_api_import = partial( - re.compile(r"from temporal\.api\.").sub, r"from temporalio.api." -) -fix_dependency_import = partial( - re.compile(r"from dependencies\.").sub, r"from temporalio.api.dependencies." -) -fix_sdk_import = partial( - re.compile(r"from temporal\.sdk\.core\.").sub, r"from temporalio.bridge.proto." -) +py_fixes = [ + partial(re.compile(r"from temporal\.api\.").sub, r"from temporalio.api."), + partial( + re.compile(r"from dependencies\.").sub, r"from temporalio.api.dependencies." + ), + partial( + re.compile(r"from temporal\.sdk\.core\.").sub, r"from temporalio.bridge.proto." + ), +] + +pyi_fixes = [ + partial(re.compile(r"temporal\.api\.").sub, r"temporalio.api."), +] find_message_re = re.compile(r"_sym_db\.RegisterMessage\(([^\)\.]+)\)") find_enum_re = re.compile(r"DESCRIPTOR\.enum_types_by_name\['([^']+)'\] =") @@ -43,22 +46,24 @@ def fix_generated_output(base_path: Path): (https://github.com/protocolbuffers/protobuf/issues/1491) """ - imports: Mapping[str, list[str]] = collections.defaultdict(list) + imports: Mapping[str, List[str]] = collections.defaultdict(list) for p in base_path.iterdir(): if p.is_dir(): fix_generated_output(p) - else: + elif p.suffix == ".py" or p.suffix == ".pyi": with p.open(encoding="utf8") as f: content = f.read() - content = fix_api_import(content) - content = fix_dependency_import(content) - content = fix_sdk_import(content) - # Only use .py files to determine imports, not pyi ones if p.suffix == ".py": + for fix in py_fixes: + content = fix(content) + # Only use .py files to determine imports, not pyi ones imports[p.stem] += find_message_re.findall(content) imports[p.stem] += find_enum_re.findall(content) imports[p.stem] += find_class_re.findall(content) 
imports[p.stem] += find_def_re.findall(content) + else: + for fix in pyi_fixes: + content = fix(content) with p.open("w") as f: f.write(content) # Write init @@ -85,7 +90,7 @@ def fix_generated_output(base_path: Path): *map(str, proto_paths), ] ) - # Apply import fixes before moving code + # Apply fixes before moving code fix_generated_output(temp_dir) # Move protos for p in (temp_dir / "temporal" / "api").iterdir(): diff --git a/temporalio/__init__.py b/temporalio/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/temporalio/bridge/Cargo.lock b/temporalio/bridge/Cargo.lock new file mode 100644 index 000000000..352e39c13 --- /dev/null +++ b/temporalio/bridge/Cargo.lock @@ -0,0 +1,2330 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "ahash" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +dependencies = [ + "getrandom", + "once_cell", + "version_check", +] + +[[package]] +name = "aho-corasick" +version = "0.7.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" +dependencies = [ + "memchr", +] + +[[package]] +name = "ansi_term" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" +dependencies = [ + "winapi", +] + +[[package]] +name = "anyhow" +version = "1.0.53" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94a45b455c14666b85fc40a019e8ab9eb75e3a124e05494f5397122bc9eb06e0" + +[[package]] +name = "arc-swap" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c5d78ce20460b82d3fa150275ed9d55e21064fc7951177baacf86a145c4a4b1f" + +[[package]] +name = "async-stream" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "171374e7e3b2504e0e5236e3b59260560f9fe94bfe9ac39ba5e4e929c5590625" +dependencies = [ + "async-stream-impl", + "futures-core", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "648ed8c8d2ce5409ccd57453d9d1b214b342a0d69376a6feda1fd6cae3299308" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "async-trait" +version = "0.1.52" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "061a7acccaa286c011ddc30970520b98fa40e00c9d644633fb26b5fc63a265e3" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "autocfg" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" + +[[package]] +name = "backoff" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fe17f59a06fe8b87a6fc8bf53bb70b3aba76d7685f432487a68cd5552853625" +dependencies = [ + "getrandom", + "instant", + "rand", +] + +[[package]] +name = "base64" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bumpalo" +version = "3.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899" + +[[package]] +name = 
"bytes" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8" + +[[package]] +name = "cache-padded" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1db59621ec70f09c5e9b597b220c7a2b43611f4710dc03ceb8748637775692c" + +[[package]] +name = "cc" +version = "1.0.72" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22a9137b95ea06864e018375b72adfb7db6e6f68cfc8df5a04d00288050485ee" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + +[[package]] +name = "core-foundation" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6888e10551bb93e424d8df1d07f1a8b4fceb0001a3a4b048bfc47554946f47b3" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" + +[[package]] +name = "crossbeam" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ae5588f6b3c3cb05239e90bd110f257254aecd01e4635400391aeae07497845" +dependencies = [ + "cfg-if", + "crossbeam-channel", + "crossbeam-deque", + "crossbeam-epoch", + "crossbeam-queue", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e54ea8bc3fb1ee042f5aace6e3c6e025d3874866da222930f70ce62aceba0bfa" +dependencies = [ + "cfg-if", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e" +dependencies = [ + "cfg-if", + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97242a70df9b89a65d0b6df3c4bf5b9ce03c5b7309019777fbde37e7537f8762" +dependencies = [ + "cfg-if", + "crossbeam-utils", + "lazy_static", + "memoffset", + "scopeguard", +] + +[[package]] +name = "crossbeam-queue" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b979d76c9fcb84dffc80a73f7290da0f83e4c95773494674cb44b76d13a7a110" +dependencies = [ + "cfg-if", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcae03edb34f947e64acdb1c33ec169824e20657e9ecb61cef6c8c74dcb8120" +dependencies = [ + "cfg-if", + "lazy_static", +] + +[[package]] +name = "ctor" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccc0a48a9b826acdf4028595adc9db92caea352f7af011a3034acd172a52a0aa" +dependencies = [ + "quote", + "syn", +] + +[[package]] +name = "darling" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f2c43f534ea4b0b049015d00269734195e6d3f0f6635cb692251aca6f9f8b3c" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e91455b86830a1c21799d94524df0845183fa55bafd9aa137b01c7d1065fa36" +dependencies = [ + 
"fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn", +] + +[[package]] +name = "darling_macro" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29b5acf0dea37a7f66f7b25d2c5e93fd46f8f6968b1a5d7a3e02e97768afc95a" +dependencies = [ + "darling_core", + "quote", + "syn", +] + +[[package]] +name = "dashmap" +version = "4.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e77a43b28d0668df09411cb0bc9a8c2adc40f9a048afe863e05fd43251e8e39c" +dependencies = [ + "cfg-if", + "num_cpus", +] + +[[package]] +name = "dashmap" +version = "5.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b799062aaf67eb976af3bdca031ee6f846d2f0a5710ddbb0d2efee33f3cc4760" +dependencies = [ + "cfg-if", + "num_cpus", + "parking_lot", +] + +[[package]] +name = "derive_builder" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d13202debe11181040ae9063d739fa32cfcaaebe2275fe387703460ae2365b30" +dependencies = [ + "derive_builder_macro", +] + +[[package]] +name = "derive_builder_core" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66e616858f6187ed828df7c64a6d71720d83767a7f19740b2d1b6fe6327b36e5" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "derive_builder_macro" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58a94ace95092c5acb1e97a7e846b310cfbd499652f72297da7493f618a98d73" +dependencies = [ + "derive_builder_core", + "syn", +] + +[[package]] +name = "derive_more" +version = "0.99.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "convert_case", + "proc-macro2", + "quote", + "rustc_version", + "syn", 
+] + +[[package]] +name = "difflib" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8" + +[[package]] +name = "downcast" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1435fa1053d8b2fbbe9be7e97eca7f33d37b28409959813daefc1446a14247f1" + +[[package]] +name = "either" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" + +[[package]] +name = "enum_dispatch" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd53b3fde38a39a06b2e66dc282f3e86191e53bd04cc499929c15742beae3df8" +dependencies = [ + "once_cell", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "fastrand" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3fcf0cee53519c866c09b5de1f6c56ff9d647101f81c1964fa632e148896cdf" +dependencies = [ + "instant", +] + +[[package]] +name = "fixedbitset" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37ab347416e802de484e4d03c7316c48f1ecb56574dfd4a46a80f173ce1de04d" + +[[package]] +name = "fixedbitset" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "279fb028e20b3c4c320317955b77c5e0c9701f05a1d309905d6fc702cdc5053e" + +[[package]] +name = "float-cmp" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "98de4bbd547a563b716d8dfa9aad1cb19bfab00f4fa09a6a4ed21dbcf44ce9c4" +dependencies = [ + "num-traits", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "form_urlencoded" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fc25a87fa4fd2094bffb06925852034d90a17f0d1e05197d4956d3555752191" +dependencies = [ + "matches", + "percent-encoding", +] + +[[package]] +name = "fragile" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8da1b8f89c5b5a5b7e59405cfcf0bb9588e5ed19f0b57a4cd542bbba3f164a6d" + +[[package]] +name = "futures" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28560757fe2bb34e79f907794bb6b22ae8b0e5c669b638a1132f2592b19035b4" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3dda0b6588335f360afc675d0564c17a77a2bda81ca178a4b6081bd86c7f0b" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0c8ff0461b82559810cdccfde3215c3f373807f5e5232b71479bff7bb2583d7" + +[[package]] +name = "futures-executor" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29d6d2ff5bb10fb95c85b8ce46538a2e5f5e7fdc755623a7d4529ab8a4ed9d2a" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f9d34af5a1aac6fb380f735fe510746c38067c5bf16c7fd250280503c971b2" + +[[package]] +name = "futures-macro" +version = "0.3.19" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dbd947adfffb0efc70599b3ddcf7b5597bb5fa9e245eb99f62b3a5f7bb8bd3c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-retry" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fde5a672a61f96552aa5ed9fd9c81c3fbdae4be9b1e205d6eaf17c83705adc0f" +dependencies = [ + "futures", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "futures-sink" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3055baccb68d74ff6480350f8d6eb8fcfa3aa11bdc1a1ae3afdd0514617d508" + +[[package]] +name = "futures-task" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ee7c6485c30167ce4dfb83ac568a849fe53274c831081476ee13e0dce1aad72" + +[[package]] +name = "futures-util" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b5cf40b47a271f77a8b1bec03ca09044d99d2372c0de244e66430761127164" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "getrandom" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "418d37c8b1d42553c93648be529cb70f920d3baf8ef469b74b9638df426e0b4c" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "ghost" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a5bcf1bbeab73aa4cf2fde60a846858dc036163c7c33bec309f8d17de785479" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "h2" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d9f1f717ddc7b2ba36df7e871fd88db79326551d3d6f1fc406fbfd28b582ff8e" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" +dependencies = [ + "ahash", +] + +[[package]] +name = "heck" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "http" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31f4c6746584866f0feabcc69893c5b51beef3831656a968ed7ae254cdc4fd03" +dependencies = [ + "bytes", + "fnv", + "itoa 1.0.1", +] + +[[package]] +name = "http-body" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ff4f84919677303da5f147645dbea6b1881f368d03ac84e1dc09031ebd7b2c6" +dependencies = [ + "bytes", + "http", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acd94fdbe1d4ff688b67b04eee2e17bd50995534a61539e45adfefb45e5e5503" + +[[package]] +name = "httpdate" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" + +[[package]] +name = "hyper" +version = "0.14.16" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7ec3e62bdc98a2f0393a5048e4c30ef659440ea6e0e572965103e72bd836f55" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa 0.4.8", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-timeout" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +dependencies = [ + "hyper", + "pin-project-lite", + "tokio", + "tokio-io-timeout", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" +dependencies = [ + "matches", + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "indexmap" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282a6247722caba404c065016bbfa522806e51714c34f5dfc3e4a3a46fcb4223" +dependencies = [ + "autocfg", + "hashbrown", +] + +[[package]] +name = "indoc" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47741a8bc60fb26eb8d6e0238bbb26d8575ff623fdc97b1a2c00c050b9684ed8" +dependencies = [ + "indoc-impl", + "proc-macro-hack", +] + +[[package]] +name = "indoc-impl" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce046d161f000fffde5f432a0d034d0341dc152643b2598ed5bfce44c4f3a8f0" +dependencies = [ + "proc-macro-hack", + "proc-macro2", + "quote", + "syn", + 
"unindent", +] + +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "inventory" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0eb5160c60ba1e809707918ee329adb99d222888155835c6feedba19f6c3fd4" +dependencies = [ + "ctor", + "ghost", + "inventory-impl", +] + +[[package]] +name = "inventory-impl" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e41b53715c6f0c4be49510bb82dee2c1e51c8586d885abe65396e82ed518548" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "itertools" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" + +[[package]] +name = "itoa" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35" + +[[package]] +name = "js-sys" +version = "0.3.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a38fc24e30fd564ce974c02bf1d337caddff65be6cc4735a1f7eab22a7440f04" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.116" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "565dbd88872dbe4cc8a46e527f26483c1d1f7afa6b884a3bd6cd893d4f98da74" + +[[package]] +name = "lock_api" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88943dd7ef4a2e5a4bfa2753aaab3013e34ce2533d1996fb18ef591e315e2b3b" +dependencies = [ + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "lru" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "274353858935c992b13c0ca408752e2121da852d07dec7ce5f108c77dfa14d1f" +dependencies = [ + "hashbrown", +] + +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata", +] + +[[package]] +name = "matches" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f" + +[[package]] +name = "memchr" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" + +[[package]] +name = "memoffset" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce" +dependencies = [ + "autocfg", +] + +[[package]] +name = "mio" +version = "0.7.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8067b404fe97c70829f082dec8bcf4f71225d7eaea1d8645349cb76fa06205cc" +dependencies = [ + "libc", + 
"log", + "miow", + "ntapi", + "winapi", +] + +[[package]] +name = "miow" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9f1c5b025cda876f66ef43a113f91ebc9f4ccef34843000e0adf6ebbab84e21" +dependencies = [ + "winapi", +] + +[[package]] +name = "mockall" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d4d70639a72f972725db16350db56da68266ca368b2a1fe26724a903ad3d6b8" +dependencies = [ + "cfg-if", + "downcast", + "fragile", + "lazy_static", + "mockall_derive", + "predicates", + "predicates-tree", +] + +[[package]] +name = "mockall_derive" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79ef208208a0dea3f72221e26e904cdc6db2e481d9ade89081ddd494f1dbaa6b" +dependencies = [ + "cfg-if", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "multimap" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" + +[[package]] +name = "normalize-line-endings" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be" + +[[package]] +name = "ntapi" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f6bb902e437b6d86e03cce10a7e2af662292c5dfef23b65899ea3ac9354ad44" +dependencies = [ + "winapi", +] + +[[package]] +name = "num-traits" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" +dependencies = [ + "autocfg", +] + +[[package]] +name = "num_cpus" +version = "1.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "once_cell" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5" + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "opentelemetry" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1cf9b1c4e9a6c4de793c632496fa490bdc0e1eea73f0c91394f7b6990935d22" +dependencies = [ + "async-trait", + "crossbeam-channel", + "dashmap 4.0.2", + "fnv", + "futures", + "js-sys", + "lazy_static", + "percent-encoding", + "pin-project", + "rand", + "thiserror", + "tokio", + "tokio-stream", +] + +[[package]] +name = "opentelemetry-otlp" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f19d4b43842433c420c548c985d158f5628bba5b518e0be64627926d19889992" +dependencies = [ + "async-trait", + "futures", + "http", + "opentelemetry", + "prost 0.8.0", + "thiserror", + "tokio", + "tonic 0.5.2", + "tonic-build 0.5.2", +] + +[[package]] +name = "opentelemetry-prometheus" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ee9c06c1366665e7d4dba6540a42ea48900a9c92dc5b963f3ae05fbba76dc63" +dependencies = [ + "opentelemetry", + "prometheus", + "protobuf", +] + +[[package]] +name = "parking_lot" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216" +dependencies = [ + "cfg-if", + "instant", + "libc", + "redox_syscall", + "smallvec", + "winapi", +] + +[[package]] +name = "paste" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45ca20c77d80be666aef2b45486da86238fabe33e38306bd3118fe4af33fa880" +dependencies = [ + "paste-impl", + "proc-macro-hack", +] + +[[package]] +name = "paste-impl" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d95a7db200b97ef370c8e6de0088252f7e0dfff7d047a28528e47456c0fc98b6" +dependencies = [ + "proc-macro-hack", +] + +[[package]] +name = "percent-encoding" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" + +[[package]] +name = "petgraph" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "467d164a6de56270bd7c4d070df81d07beace25012d5103ced4e9ff08d6afdb7" +dependencies = [ + "fixedbitset 0.2.0", + "indexmap", +] + +[[package]] +name = "petgraph" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a13a2fa9d0b63e5f22328828741e523766fff0ee9e779316902290dff3f824f" +dependencies = [ + "fixedbitset 0.4.1", + "indexmap", +] + +[[package]] +name = "pin-project" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58ad3879ad3baf4e44784bc6a718a8698867bb991f8ce24d1bcbe2cfb4c3a75e" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "744b6f092ba29c3650faf274db506afd39944f48420f6c86b17cfe0ee1cb36bb" +dependencies = 
[ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e280fbe77cc62c91527259e9442153f4688736748d24660126286329742b4c6c" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "ppv-lite86" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872" + +[[package]] +name = "predicates" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5aab5be6e4732b473071984b3164dbbfb7a3674d30ea5ff44410b6bcd960c3c" +dependencies = [ + "difflib", + "float-cmp", + "itertools", + "normalize-line-endings", + "predicates-core", + "regex", +] + +[[package]] +name = "predicates-core" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da1c2388b1513e1b605fcec39a95e0a9e8ef088f71443ef37099fa9ae6673fcb" + +[[package]] +name = "predicates-tree" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d86de6de25020a36c6d3643a86d9a6a9f552107c0559c60ea03551b5e16c032" +dependencies = [ + "predicates-core", + "termtree", +] + +[[package]] +name = "proc-macro-hack" +version = "0.5.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5" + +[[package]] +name = "proc-macro2" +version = "1.0.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029" +dependencies = [ + "unicode-xid", +] + +[[package]] +name = "prometheus" 
+version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5986aa8d62380092d2f50f8b1cdba9cb9b6731ffd4b25b51fd126b6c3e05b99c" +dependencies = [ + "cfg-if", + "fnv", + "lazy_static", + "memchr", + "parking_lot", + "protobuf", + "thiserror", +] + +[[package]] +name = "prost" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de5e2533f59d08fcf364fd374ebda0692a70bd6d7e66ef97f306f45c6c5d8020" +dependencies = [ + "bytes", + "prost-derive 0.8.0", +] + +[[package]] +name = "prost" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "444879275cb4fd84958b1a1d5420d15e6fcf7c235fe47f053c9c2a80aceb6001" +dependencies = [ + "bytes", + "prost-derive 0.9.0", +] + +[[package]] +name = "prost-build" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "355f634b43cdd80724ee7848f95770e7e70eefa6dcf14fea676216573b8fd603" +dependencies = [ + "bytes", + "heck", + "itertools", + "log", + "multimap", + "petgraph 0.5.1", + "prost 0.8.0", + "prost-types 0.8.0", + "tempfile", + "which", +] + +[[package]] +name = "prost-build" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62941722fb675d463659e49c4f3fe1fe792ff24fe5bbaa9c08cd3b98a1c354f5" +dependencies = [ + "bytes", + "heck", + "itertools", + "lazy_static", + "log", + "multimap", + "petgraph 0.6.0", + "prost 0.9.0", + "prost-types 0.9.0", + "regex", + "tempfile", + "which", +] + +[[package]] +name = "prost-derive" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "600d2f334aa05acb02a755e217ef1ab6dea4d51b58b7846588b747edec04efba" +dependencies = [ + "anyhow", + "itertools", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "prost-derive" +version = "0.9.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9cc1a3263e07e0bf68e96268f37665207b49560d98739662cdfaae215c720fe" +dependencies = [ + "anyhow", + "itertools", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "prost-types" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "603bbd6394701d13f3f25aada59c7de9d35a6a5887cfc156181234a44002771b" +dependencies = [ + "bytes", + "prost 0.8.0", +] + +[[package]] +name = "prost-types" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "534b7a0e836e3c482d2693070f982e39e7611da9695d4d1f5a4b186b51faef0a" +dependencies = [ + "bytes", + "prost 0.9.0", +] + +[[package]] +name = "protobuf" +version = "2.26.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00e95f7417529a121d3c1d0bd831fd86cc5d5bf7b77ae1449259db3d5ff8b3e7" + +[[package]] +name = "pyo3" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cf01dbf1c05af0a14c7779ed6f3aa9deac9c3419606ac9de537a2d649005720" +dependencies = [ + "cfg-if", + "indoc", + "libc", + "parking_lot", + "paste", + "pyo3-build-config", + "pyo3-macros", + "unindent", +] + +[[package]] +name = "pyo3-asyncio" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0897c7e36110a32b726b975359b2bbe90c37fcf1266046d3b1c08c616a47a886" +dependencies = [ + "futures", + "inventory", + "once_cell", + "pin-project-lite", + "pyo3", + "tokio", +] + +[[package]] +name = "pyo3-build-config" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbf9e4d128bfbddc898ad3409900080d8d5095c379632fbbfbb9c8cfb1fb852b" +dependencies = [ + "once_cell", +] + +[[package]] +name = "pyo3-macros" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "67701eb32b1f9a9722b4bc54b548ff9d7ebfded011c12daece7b9063be1fd755" +dependencies = [ + "pyo3-macros-backend", + "quote", + "syn", +] + +[[package]] +name = "pyo3-macros-backend" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f44f09e825ee49a105f2c7b23ebee50886a9aee0746f4dd5a704138a64b0218a" +dependencies = [ + "proc-macro2", + "pyo3-build-config", + "quote", + "syn", +] + +[[package]] +name = "quote" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e7573632e6454cf6b99d7aac4ccca54be06da05aca2ef7423d22d27d4d4bcd8" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", + "rand_hc", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" +dependencies = [ + "getrandom", +] + +[[package]] +name = "rand_hc" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d51e9f596de227fda2ea6c84607f5558e196eeaf43c986b724ba4fb8fdf497e7" +dependencies = [ + "rand_core", +] + +[[package]] +name = "redox_syscall" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff" +dependencies = [ + "bitflags", +] + +[[package]] +name = "regex" 
+version = "1.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.6.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" + +[[package]] +name = "remove_dir_all" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" +dependencies = [ + "winapi", +] + +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin", + "untrusted", + "web-sys", + "winapi", +] + +[[package]] +name = "ringbuf" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c60f3923939c33e6c543ddbff14d0ee6a407fcd186d560be37282559616adf3" +dependencies = [ + "cache-padded", +] + +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] + +[[package]] +name = "rustfsm" +version = "0.1.0" +dependencies = [ + "rustfsm_procmacro", + "rustfsm_trait", +] + +[[package]] +name = "rustfsm_procmacro" +version = "0.1.0" +dependencies = [ + "derive_more", + "proc-macro2", + "quote", + 
"rustfsm_trait", + "syn", +] + +[[package]] +name = "rustfsm_trait" +version = "0.1.0" + +[[package]] +name = "rustls" +version = "0.19.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35edb675feee39aec9c99fa5ff985081995a06d594114ae14cbe797ad7b7a6d7" +dependencies = [ + "base64", + "log", + "ring", + "sct", + "webpki", +] + +[[package]] +name = "rustls-native-certs" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a07b7c1885bd8ed3831c289b7870b13ef46fe0e856d288c30d9cc17d75a2092" +dependencies = [ + "openssl-probe", + "rustls", + "schannel", + "security-framework", +] + +[[package]] +name = "ryu" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f" + +[[package]] +name = "schannel" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f05ba609c234e60bee0d547fe94a4c7e9da733d1c962cf6e59efa4cd9c8bc75" +dependencies = [ + "lazy_static", + "winapi", +] + +[[package]] +name = "scopeguard" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" + +[[package]] +name = "sct" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b362b83898e0e69f38515b82ee15aa80636befe47c3b6d3d89a911e78fc228ce" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "security-framework" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fed7948b6c68acbb6e20c334f55ad635dc0f75506963de4464289fbd3b051ac" +dependencies = [ + "bitflags", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.6.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a57321bf8bc2362081b2599912d2961fe899c0efadf1b4b2f8d48b3e253bb96c" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "568a8e6258aa33c13358f81fd834adb854c6f7c9468520910a9b1e8fac068012" + +[[package]] +name = "serde" +version = "1.0.136" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.136" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.78" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d23c1ba4cf0efd44be32017709280b32d1cea5c3f1275c3b6d9e8bc54f758085" +dependencies = [ + "itoa 1.0.1", + "ryu", + "serde", +] + +[[package]] +name = "sharded-slab" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e51e73328dc4ac0c7ccbda3a494dfa03df1de2f46018127f60c693f2648455b0" +dependencies = [ + "libc", +] + +[[package]] +name = "slab" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9def91fd1e018fe007022791f865d0ccc9b3a0d5001e01aabb8b40e46000afb5" + +[[package]] +name = "slotmap" +version = "1.0.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1e08e261d0e8f5c43123b7adf3e4ca1690d655377ac93a03b2c9d3e98de1342" +dependencies = [ + "version_check", +] + +[[package]] +name = "smallvec" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83" + +[[package]] +name = "socket2" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66d72b759436ae32898a2af0a14218dbf55efde3feeb170eb623637db85ee1e0" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "syn" +version = "1.0.86" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a65b3f4ffa0092e9887669db0eae07941f023991ab58ea44da8fe8e2d511c6b" +dependencies = [ + "proc-macro2", + "quote", + "unicode-xid", +] + +[[package]] +name = "tempfile" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" +dependencies = [ + "cfg-if", + "fastrand", + "libc", + "redox_syscall", + "remove_dir_all", + "winapi", +] + +[[package]] +name = "temporal-client" +version = "0.1.0" +dependencies = [ + "async-trait", + "backoff", + "derive_builder", + "derive_more", + "futures", + "futures-retry", + "http", + "mockall", + "opentelemetry", + "prost-types 0.9.0", + "temporal-sdk-core-protos", + "thiserror", + "tokio", + "tonic 0.6.2", + "tower", + "tracing", + "url", + "uuid", +] + 
+[[package]] +name = "temporal-sdk-bridge" +version = "0.1.0" +dependencies = [ + "prost 0.9.0", + "prost-types 0.9.0", + "pyo3", + "pyo3-asyncio", + "temporal-client", + "temporal-sdk-core", + "temporal-sdk-core-api", + "temporal-sdk-core-protos", + "tokio", + "tonic 0.6.2", + "url", +] + +[[package]] +name = "temporal-sdk-core" +version = "0.1.0" +dependencies = [ + "anyhow", + "arc-swap", + "async-trait", + "base64", + "crossbeam", + "dashmap 5.0.0", + "derive_builder", + "derive_more", + "enum_dispatch", + "futures", + "http", + "hyper", + "itertools", + "lazy_static", + "log", + "lru", + "once_cell", + "opentelemetry", + "opentelemetry-otlp", + "opentelemetry-prometheus", + "parking_lot", + "prometheus", + "prost 0.9.0", + "prost-types 0.9.0", + "rand", + "ringbuf", + "rustfsm", + "serde", + "slotmap", + "temporal-client", + "temporal-sdk-core-api", + "temporal-sdk-core-protos", + "thiserror", + "tokio", + "tokio-stream", + "tokio-util", + "tonic 0.6.2", + "tonic-build 0.6.2", + "tracing", + "tracing-futures", + "tracing-opentelemetry", + "tracing-subscriber", + "url", + "uuid", +] + +[[package]] +name = "temporal-sdk-core-api" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "derive_builder", + "log", + "opentelemetry", + "prost-types 0.9.0", + "temporal-client", + "temporal-sdk-core-protos", + "thiserror", + "tonic 0.6.2", +] + +[[package]] +name = "temporal-sdk-core-protos" +version = "0.1.0" +dependencies = [ + "anyhow", + "base64", + "derive_more", + "prost 0.9.0", + "prost-types 0.9.0", + "rand", + "serde", + "serde_json", + "thiserror", + "tonic 0.6.2", + "tonic-build 0.6.2", + "uuid", +] + +[[package]] +name = "termtree" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "507e9898683b6c43a9aa55b64259b721b52ba226e0f3779137e50ad114a4c90b" + +[[package]] +name = "thiserror" +version = "1.0.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thread_local" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180" +dependencies = [ + "once_cell", +] + +[[package]] +name = "tinyvec" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c1c1d5a42b6245520c249549ec267180beaffcc0615401ac8e31853d4b6d8d2" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" + +[[package]] +name = "tokio" +version = "1.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c27a64b625de6d309e8c57716ba93021dccf1b3b5c97edd6d3dd2d2135afc0a" +dependencies = [ + "bytes", + "libc", + "memchr", + "mio", + "num_cpus", + "once_cell", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "tokio-macros", + "winapi", +] + +[[package]] +name = "tokio-io-timeout" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" +dependencies = [ + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-macros" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b557f72f448c511a979e2564e55d74e6c4432fc96ff4f6241bc6bded342643b7" +dependencies = [ + "proc-macro2", + 
"quote", + "syn", +] + +[[package]] +name = "tokio-rustls" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc6844de72e57df1980054b38be3a9f4702aba4858be64dd700181a8a6d0e1b6" +dependencies = [ + "rustls", + "tokio", + "webpki", +] + +[[package]] +name = "tokio-stream" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50145484efff8818b5ccd256697f36863f587da82cf8b409c53adf1e840798e3" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e99e1983e5d376cd8eb4b66604d2e99e79f5bd988c3055891dcd8c9e2604cc0" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "log", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tonic" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "796c5e1cd49905e65dd8e700d4cb1dffcbfdb4fc9d017de08c1a537afd83627c" +dependencies = [ + "async-stream", + "async-trait", + "base64", + "bytes", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-timeout", + "percent-encoding", + "pin-project", + "prost 0.8.0", + "prost-derive 0.8.0", + "tokio", + "tokio-stream", + "tokio-util", + "tower", + "tower-layer", + "tower-service", + "tracing", + "tracing-futures", +] + +[[package]] +name = "tonic" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff08f4649d10a70ffa3522ca559031285d8e421d727ac85c60825761818f5d0a" +dependencies = [ + "async-stream", + "async-trait", + "base64", + "bytes", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-timeout", + "percent-encoding", + "pin-project", + "prost 0.9.0", + "prost-derive 0.9.0", + "rustls-native-certs", + "tokio", + "tokio-rustls", + 
"tokio-stream", + "tokio-util", + "tower", + "tower-layer", + "tower-service", + "tracing", + "tracing-futures", +] + +[[package]] +name = "tonic-build" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12b52d07035516c2b74337d2ac7746075e7dcae7643816c1b12c5ff8a7484c08" +dependencies = [ + "proc-macro2", + "prost-build 0.8.0", + "quote", + "syn", +] + +[[package]] +name = "tonic-build" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9403f1bafde247186684b230dc6f38b5cd514584e8bec1dd32514be4745fa757" +dependencies = [ + "proc-macro2", + "prost-build 0.9.0", + "quote", + "syn", +] + +[[package]] +name = "tower" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5651b5f6860a99bd1adb59dbfe1db8beb433e73709d9032b413a77e2fb7c066a" +dependencies = [ + "futures-core", + "futures-util", + "indexmap", + "pin-project", + "pin-project-lite", + "rand", + "slab", + "tokio", + "tokio-stream", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "343bc9466d3fe6b0f960ef45960509f84480bf4fd96f92901afe7ff3df9d3a62" + +[[package]] +name = "tower-service" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6" + +[[package]] +name = "tracing" +version = "0.1.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "375a639232caf30edfc78e8d89b2d4c375515393e7af7e16f01cd96917fb2105" +dependencies = [ + "cfg-if", + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.18" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4f480b8f81512e825f337ad51e94c1eb5d3bbdf2b363dcd01e2b19a9ffe3f8e" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f4ed65637b8390770814083d20756f87bfa2c21bf2f110babdc5438351746e4" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "tracing-futures" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" +dependencies = [ + "pin-project", + "tracing", +] + +[[package]] +name = "tracing-log" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6923477a48e41c1951f1999ef8bb5a3023eb723ceadafe78ffb65dc366761e3" +dependencies = [ + "lazy_static", + "log", + "tracing-core", +] + +[[package]] +name = "tracing-opentelemetry" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ffbf13a0f8b054a4e59df3a173b818e9c6177c02789871f2073977fd0062076" +dependencies = [ + "opentelemetry", + "tracing", + "tracing-core", + "tracing-log", + "tracing-subscriber", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5312f325fe3588e277415f5a6cca1f4ccad0f248c4cd5a4bd33032d7286abc22" +dependencies = [ + "ansi_term", + "lazy_static", + "matchers", + "parking_lot", + "regex", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "try-lock" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642" + +[[package]] +name = "unicode-bidi" +version = "0.3.7" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a01404663e3db436ed2746d9fefef640d868edae3cceb81c3b8d5732fda678f" + +[[package]] +name = "unicode-normalization" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d54590932941a9e9266f0832deed84ebe1bf2e4c9e4a3554d393d18f5e854bf9" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-segmentation" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8895849a949e7845e06bd6dc1aa51731a103c42707010a5b591c0038fb73385b" + +[[package]] +name = "unicode-xid" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" + +[[package]] +name = "unindent" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f14ee04d9415b52b3aeab06258a3f07093182b88ba0f9b8d203f211a7a7d41c7" + +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + +[[package]] +name = "url" +version = "2.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a507c383b2d33b5fc35d1861e77e6b383d158b2da5e14fe51b83dfedf6fd578c" +dependencies = [ + "form_urlencoded", + "idna", + "matches", + "percent-encoding", +] + +[[package]] +name = "uuid" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" +dependencies = [ + "getrandom", +] + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + 
+[[package]] +name = "want" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" +dependencies = [ + "log", + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.10.2+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" + +[[package]] +name = "wasm-bindgen" +version = "0.2.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25f1af7423d8588a3d840681122e72e6a24ddbcb3f0ec385cac0d12d24256c06" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b21c0df030f5a177f3cba22e9bc4322695ec43e7257d865302900290bcdedca" +dependencies = [ + "bumpalo", + "lazy_static", + "log", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f4203d69e40a52ee523b2529a773d5ffc1dc0071801c87b3d270b471b80ed01" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa8a30d46208db204854cadbb5d4baf5fcf8071ba5bf48190c3e59937962ebc" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d958d035c4438e28c70e4321a2911302f10135ce78a9c7834c0cab4123d06a2" + +[[package]] +name = "web-sys" +version = "0.3.56" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c060b319f29dd25724f09a2ba1418f142f539b2be99fbf4d2d5a8f7330afb8eb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki" +version = "0.21.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8e38c0608262c46d4a56202ebabdeb094cef7e560ca7a226c6bf055188aa4ea" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "which" +version = "4.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a5a7e487e921cf220206864a94a89b6c6905bfc19f1057fa26a4cb360e5c1d2" +dependencies = [ + "either", + "lazy_static", + "libc", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/temporalio/bridge/Cargo.toml b/temporalio/bridge/Cargo.toml new file mode 100644 index 000000000..5edd55cf9 --- /dev/null +++ b/temporalio/bridge/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "temporal-sdk-bridge" +version = "0.1.0" +edition = "2021" + +[lib] +name = "temporal_sdk_bridge" +crate-type = ["cdylib"] + +[dependencies] +prost = "0.9" +prost-types = "0.9" +pyo3 = { version = "0.15", features = ["extension-module"] } +pyo3-asyncio = { version = "0.15", features = ["tokio-runtime"] } +temporal-client = { version = 
"0.1.0", path = "./sdk-core/client" } +temporal-sdk-core = { version = "0.1.0", path = "./sdk-core/core" } +temporal-sdk-core-api = { version = "0.1.0", path = "./sdk-core/core-api" } +temporal-sdk-core-protos = { version = "0.1.0", path = "./sdk-core/sdk-core-protos" } +tokio = "1.15" +tonic = "0.6" +url = "2.2" \ No newline at end of file diff --git a/temporalio/bridge/client.py b/temporalio/bridge/client.py new file mode 100644 index 000000000..7490c7e3b --- /dev/null +++ b/temporalio/bridge/client.py @@ -0,0 +1,67 @@ +import os +import socket +from dataclasses import dataclass, field +from typing import Mapping, Optional, Type, TypeVar + +import google.protobuf.message +import temporal_sdk_bridge + +import temporalio.api.workflowservice.v1 + + +@dataclass +class ClientTlsConfig: + server_root_ca_cert: Optional[bytes] + domain: Optional[str] + client_cert: Optional[bytes] + client_private_key: Optional[bytes] + + +@dataclass +class ClientRetryConfig: + initial_interval_millis: int = 100 + randomization_factor: float = 0.2 + multiplier: float = 1.5 + max_interval_millis: int = 5000 + max_elapsed_time_millis: Optional[int] = 10000 + max_retries: int = 10 + + +@dataclass +class ClientOptions: + target_url: str + client_name: str = "temporal-python" + # TODO(cretz): Take from importlib ref https://stackoverflow.com/a/54869712 + client_version: str = "0.1.0" + static_headers: Mapping[str, str] = field(default_factory=dict) + identity: str = f"{os.getpid()}@{socket.gethostname()}" + # TODO(cretz): Use proper name/version + worker_binary_id: str = "python-sdk@0.1.0" + tls_config: Optional[ClientTlsConfig] = None + retry_config: Optional[ClientRetryConfig] = None + + +ProtoMessage = TypeVar("ProtoMessage", bound=google.protobuf.message.Message) + + +class Client: + @staticmethod + async def connect(opts: ClientOptions) -> "Client": + return Client(await temporal_sdk_bridge.new_client(opts)) + + _ref: temporal_sdk_bridge.ClientRef + + def __init__(self, ref: 
temporal_sdk_bridge.ClientRef): + self._ref = ref + + async def rpc_call( + self, + rpc: str, + req: google.protobuf.message.Message, + resp_type: Type[ProtoMessage], + *, + retry: bool = False, + ) -> ProtoMessage: + resp = resp_type() + resp.ParseFromString(await self._ref.call(rpc, retry, req.SerializeToString())) + return resp diff --git a/temporalio/bridge/sdk-core b/temporalio/bridge/sdk-core index f58f6305b..9db7b0544 160000 --- a/temporalio/bridge/sdk-core +++ b/temporalio/bridge/sdk-core @@ -1 +1 @@ -Subproject commit f58f6305b8e4b4cc21bcdd9637d669e9930d718f +Subproject commit 9db7b0544b301469dcedfffcdab86f602dae2a04 diff --git a/temporalio/bridge/src/lib.rs b/temporalio/bridge/src/lib.rs new file mode 100644 index 000000000..3b775c171 --- /dev/null +++ b/temporalio/bridge/src/lib.rs @@ -0,0 +1,296 @@ +use pyo3::exceptions::{PyRuntimeError, PyValueError}; +use pyo3::prelude::*; +use std::collections::HashMap; +use std::time::Duration; +use temporal_client::WorkflowService; +use tonic; + +#[pymodule] +fn temporal_sdk_bridge(_py: Python, m: &PyModule) -> PyResult<()> { + m.add_class::()?; + m.add_function(wrap_pyfunction!(new_client, m)?)?; + Ok(()) +} + +type Client = temporal_client::RetryGateway< + temporal_client::ConfiguredClient, +>; + +#[pyclass] +pub struct ClientRef { + retry_client: Client, +} + +#[derive(FromPyObject)] +pub struct ClientOptions { + target_url: String, + client_name: String, + client_version: String, + static_headers: HashMap, + identity: String, + worker_binary_id: String, + tls_config: Option, + retry_config: Option, +} + +#[derive(FromPyObject)] +pub struct ClientTlsConfig { + server_root_ca_cert: Option>, + domain: Option, + client_cert: Option>, + client_private_key: Option>, +} + +#[derive(FromPyObject)] +pub struct ClientRetryConfig { + pub initial_interval_millis: u64, + pub randomization_factor: f64, + pub multiplier: f64, + pub max_interval_millis: u64, + pub max_elapsed_time_millis: Option, + pub max_retries: usize, +} + 
+#[pyfunction] +fn new_client(py: Python, opts: ClientOptions) -> PyResult<&PyAny> { + // TODO(cretz): Add metrics_meter? + let opts: temporal_client::ServerGatewayOptions = opts.try_into()?; + pyo3_asyncio::tokio::future_into_py(py, async move { + Ok(ClientRef { + retry_client: opts.connect_no_namespace(None).await.map_err(|err| { + PyRuntimeError::new_err(format!("Failed client connect: {}", err)) + })?, + }) + }) +} + +#[pymethods] +impl ClientRef { + fn call<'p>( + &self, + py: Python<'p>, + rpc: String, + retry: bool, + req: Vec, + ) -> PyResult<&'p PyAny> { + let mut retry_client = self.retry_client.clone(); + pyo3_asyncio::tokio::future_into_py(py, async move { + let bytes = match rpc.as_str() { + "count_workflow_executions" => { + rpc_call!(retry_client, retry, count_workflow_executions, req) + } + "deprecate_namespace" => rpc_call!(retry_client, retry, deprecate_namespace, req), + "describe_namespace" => rpc_call!(retry_client, retry, describe_namespace, req), + "describe_task_queue" => rpc_call!(retry_client, retry, describe_task_queue, req), + "describe_workflow_execution" => { + rpc_call!(retry_client, retry, describe_workflow_execution, req) + } + "get_cluster_info" => rpc_call!(retry_client, retry, get_cluster_info, req), + "get_search_attributes" => { + rpc_call!(retry_client, retry, get_search_attributes, req) + } + "get_workflow_execution_history" => { + rpc_call!(retry_client, retry, get_workflow_execution_history, req) + } + "list_archived_workflow_executions" => { + rpc_call!(retry_client, retry, list_archived_workflow_executions, req) + } + "list_closed_workflow_executions" => { + rpc_call!(retry_client, retry, list_closed_workflow_executions, req) + } + "list_namespaces" => rpc_call!(retry_client, retry, list_namespaces, req), + "list_open_workflow_executions" => { + rpc_call!(retry_client, retry, list_open_workflow_executions, req) + } + "list_task_queue_partitions" => { + rpc_call!(retry_client, retry, list_task_queue_partitions, req) + } + 
"list_workflow_executions" => { + rpc_call!(retry_client, retry, list_workflow_executions, req) + } + "poll_activity_task_queue" => { + rpc_call!(retry_client, retry, poll_activity_task_queue, req) + } + "poll_workflow_task_queue" => { + rpc_call!(retry_client, retry, poll_workflow_task_queue, req) + } + "query_workflow" => rpc_call!(retry_client, retry, query_workflow, req), + "record_activity_task_heartbeat" => { + rpc_call!(retry_client, retry, record_activity_task_heartbeat, req) + } + "record_activity_task_heartbeat_by_id" => rpc_call!( + retry_client, + retry, + record_activity_task_heartbeat_by_id, + req + ), + "register_namespace" => rpc_call!(retry_client, retry, register_namespace, req), + "request_cancel_workflow_execution" => { + rpc_call!(retry_client, retry, request_cancel_workflow_execution, req) + } + "reset_sticky_task_queue" => { + rpc_call!(retry_client, retry, reset_sticky_task_queue, req) + } + "reset_workflow_execution" => { + rpc_call!(retry_client, retry, reset_workflow_execution, req) + } + "respond_activity_task_canceled" => { + rpc_call!(retry_client, retry, respond_activity_task_canceled, req) + } + "respond_activity_task_canceled_by_id" => rpc_call!( + retry_client, + retry, + respond_activity_task_canceled_by_id, + req + ), + "respond_activity_task_completed" => { + rpc_call!(retry_client, retry, respond_activity_task_completed, req) + } + "respond_activity_task_completed_by_id" => rpc_call!( + retry_client, + retry, + respond_activity_task_completed_by_id, + req + ), + "respond_activity_task_failed" => { + rpc_call!(retry_client, retry, respond_activity_task_failed, req) + } + "respond_activity_task_failed_by_id" => { + rpc_call!(retry_client, retry, respond_activity_task_failed_by_id, req) + } + "respond_query_task_completed" => { + rpc_call!(retry_client, retry, respond_query_task_completed, req) + } + "respond_workflow_task_completed" => { + rpc_call!(retry_client, retry, respond_workflow_task_completed, req) + } + 
"respond_workflow_task_failed" => { + rpc_call!(retry_client, retry, respond_workflow_task_failed, req) + } + "scan_workflow_executions" => { + rpc_call!(retry_client, retry, scan_workflow_executions, req) + } + "signal_with_start_workflow_execution" => rpc_call!( + retry_client, + retry, + signal_with_start_workflow_execution, + req + ), + "signal_workflow_execution" => { + rpc_call!(retry_client, retry, signal_workflow_execution, req) + } + "start_workflow_execution" => { + rpc_call!(retry_client, retry, start_workflow_execution, req) + } + "terminate_workflow_execution" => { + rpc_call!(retry_client, retry, terminate_workflow_execution, req) + } + "update_namespace" => rpc_call!(retry_client, retry, update_namespace, req), + _ => return Err(PyValueError::new_err(format!("Unknown RPC call {}", rpc))), + }?; + let bytes: &[u8] = &bytes; + Ok(Python::with_gil(|py| bytes.into_py(py))) + }) + } +} + +fn rpc_req

(bytes: Vec) -> PyResult> +where + P: prost::Message, + P: Default, +{ + let proto = P::decode(&*bytes) + .map_err(|err| PyValueError::new_err(format!("Invalid proto: {}", err)))?; + Ok(tonic::Request::new(proto)) +} + +fn rpc_resp

(res: Result, tonic::Status>) -> PyResult> +where + P: prost::Message, + P: Default, +{ + match res { + Ok(resp) => Ok(resp.get_ref().encode_to_vec()), + // TODO(cretz): Better error struct here w/ all the details + Err(err) => Err(PyRuntimeError::new_err(format!("RPC failed: {}", err))), + } +} + +#[macro_export] +macro_rules! rpc_call { + ($retry_client:ident, $retry:ident, $call_name:ident, $req:ident) => { + if $retry { + rpc_resp($retry_client.$call_name(rpc_req($req)?).await) + } else { + rpc_resp($retry_client.into_inner().$call_name(rpc_req($req)?).await) + } + }; +} + +impl TryFrom for temporal_client::ServerGatewayOptions { + type Error = PyErr; + + fn try_from(opts: ClientOptions) -> PyResult { + let mut gateway_opts = temporal_client::ServerGatewayOptionsBuilder::default(); + gateway_opts + .target_url( + url::Url::parse(&opts.target_url) + .map_err(|err| PyValueError::new_err(format!("invalid target URL: {}", err)))?, + ) + .client_name(opts.client_name) + .client_version(opts.client_version) + .static_headers(opts.static_headers) + .identity(opts.identity) + .worker_binary_id(opts.worker_binary_id) + .retry_config( + opts.retry_config + .map_or(temporal_client::RetryConfig::default(), |c| c.into()), + ); + // Builder does not allow us to set option here, so we have to make + // a conditional to even call it + if let Some(tls_config) = opts.tls_config { + gateway_opts.tls_cfg(tls_config.try_into()?); + } + return gateway_opts + .build() + .map_err(|err| PyValueError::new_err(format!("Invalid client options: {}", err))); + } +} + +impl TryFrom for temporal_client::TlsConfig { + type Error = PyErr; + + fn try_from(conf: ClientTlsConfig) -> PyResult { + Ok(temporal_client::TlsConfig { + server_root_ca_cert: conf.server_root_ca_cert, + domain: conf.domain, + client_tls_config: match (conf.client_cert, conf.client_private_key) { + (None, None) => None, + (Some(client_cert), Some(client_private_key)) => { + Some(temporal_client::ClientTlsConfig { + 
client_cert, + client_private_key, + }) + } + _ => { + return Err(PyValueError::new_err( + "Must have both client cert and private key or neither", + )) + } + }, + }) + } +} + +impl From for temporal_client::RetryConfig { + fn from(conf: ClientRetryConfig) -> Self { + temporal_client::RetryConfig { + initial_interval: Duration::from_millis(conf.initial_interval_millis), + randomization_factor: conf.randomization_factor, + multiplier: conf.multiplier, + max_interval: Duration::from_millis(conf.max_interval_millis), + max_elapsed_time: conf.max_elapsed_time_millis.map(Duration::from_millis), + max_retries: conf.max_retries, + } + } +} diff --git a/temporalio/client.py b/temporalio/client.py new file mode 100644 index 000000000..068f328b7 --- /dev/null +++ b/temporalio/client.py @@ -0,0 +1,935 @@ +"""Client for accessing Temporal.""" + +import logging +import os +import socket +import uuid +from dataclasses import dataclass +from datetime import timedelta +from enum import IntEnum +from typing import Any, Generic, Iterable, Mapping, Optional, TypeVar, Union, cast + +import temporalio.api.common.v1 +import temporalio.api.enums.v1 +import temporalio.api.failure.v1 +import temporalio.api.history.v1 +import temporalio.api.taskqueue.v1 +import temporalio.api.workflowservice.v1 +import temporalio.common +import temporalio.converter +import temporalio.failure +import temporalio.workflow_service + +logger = logging.getLogger(__name__) + + +class WorkflowIDReusePolicy(IntEnum): + """How already-in-use workflow IDs are handled on start. + + See :py:class:`temporalio.api.enums.v1.WorkflowIdReusePolicy`. 
+ """ + + ALLOW_DUPLICATE = int( + temporalio.api.enums.v1.WorkflowIdReusePolicy.WORKFLOW_ID_REUSE_POLICY_ALLOW_DUPLICATE + ) + """See :py:attr:`temporalio.api.enums.v1.WorkflowIdReusePolicy.WORKFLOW_ID_REUSE_POLICY_ALLOW_DUPLICATE`.""" + + ALLOW_DUPLICATE_FAILED_ONLY = int( + temporalio.api.enums.v1.WorkflowIdReusePolicy.WORKFLOW_ID_REUSE_POLICY_ALLOW_DUPLICATE_FAILED_ONLY + ) + """See :py:attr:`temporalio.api.enums.v1.WorkflowIdReusePolicy.WORKFLOW_ID_REUSE_POLICY_ALLOW_DUPLICATE_FAILED_ONLY`.""" + + REJECT_DUPLICATE = int( + temporalio.api.enums.v1.WorkflowIdReusePolicy.WORKFLOW_ID_REUSE_POLICY_REJECT_DUPLICATE + ) + """See :py:attr:`temporalio.api.enums.v1.WorkflowIdReusePolicy.WORKFLOW_ID_REUSE_POLICY_REJECT_DUPLICATE`.""" + + +class WorkflowQueryRejectCondition(IntEnum): + """Whether a query should be rejected in certain conditions. + + See :py:class:`temporalio.api.enums.v1.QueryRejectCondition`. + """ + + NONE = int(temporalio.api.enums.v1.QueryRejectCondition.QUERY_REJECT_CONDITION_NONE) + """See :py:attr:`temporalio.api.enums.v1.QueryRejectCondition.QUERY_REJECT_CONDITION_NONE`.""" + + NOT_OPEN = int( + temporalio.api.enums.v1.QueryRejectCondition.QUERY_REJECT_CONDITION_NOT_OPEN + ) + """See :py:attr:`temporalio.api.enums.v1.QueryRejectCondition.QUERY_REJECT_CONDITION_NOT_OPEN`.""" + + NOT_COMPLETED_CLEANLY = int( + temporalio.api.enums.v1.QueryRejectCondition.QUERY_REJECT_CONDITION_NOT_COMPLETED_CLEANLY + ) + """See :py:attr:`temporalio.api.enums.v1.QueryRejectCondition.QUERY_REJECT_CONDITION_NOT_COMPLETED_CLEANLY`.""" + + +class WorkflowExecutionStatus(IntEnum): + """Status of a workflow execution. + + See :py:class:`temporalio.api.enums.v1.WorkflowExecutionStatus`. 
+ """ + + RUNNING = int( + temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_RUNNING + ) + """See :py:attr:`temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_RUNNING`.""" + + COMPLETED = int( + temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_COMPLETED + ) + """See :py:attr:`temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_COMPLETED`.""" + + FAILED = int( + temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_FAILED + ) + """See :py:attr:`temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_FAILED`.""" + + CANCELED = int( + temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_CANCELED + ) + """See :py:attr:`temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_CANCELED`.""" + + TERMINATED = int( + temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_TERMINATED + ) + """See :py:attr:`temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_TERMINATED`.""" + + CONTINUED_AS_NEW = int( + temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_CONTINUED_AS_NEW + ) + """See :py:attr:`temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_CONTINUED_AS_NEW`.""" + + TIMED_OUT = int( + temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_TIMED_OUT + ) + """See :py:attr:`temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_TIMED_OUT`.""" + + +class Client: + """Client for accessing Temporal. + + Most users will use :py:meth:`connect` to create a client. The + :py:attr:`service` property provides access to a raw gRPC client. To create + another client, like for a different namespace, :py:func:`Client` may be + directly instantiated with a :py:attr:`service` of another. 
+ """ + + @staticmethod + async def connect( + target_url: str, + *, + namespace: str = "default", + identity: str = f"{os.getpid()}@{socket.gethostname()}", + data_converter: temporalio.converter.DataConverter = temporalio.converter.default(), + interceptors: Iterable["Interceptor"] = [], + workflow_query_reject_condition: Optional[WorkflowQueryRejectCondition] = None, + ) -> "Client": + """Connect to a Temporal server. + + Args: + target_url: URL for the Temporal server. For local development, this + is often "http://localhost:7233". + namespace: Namespace to use for client calls. + identity: Identity to use for client calls. + data_converter: Data converter to use for all data conversions + to/from payloads. + interceptors: Set of interceptors that are chained together to allow + intercepting of client calls. The earlier interceptors wrap the + later ones. + workflow_query_reject_condition: When to reject a query. + """ + return Client( + await temporalio.workflow_service.WorkflowService.connect(target_url), + namespace=namespace, + identity=identity, + data_converter=data_converter, + interceptors=interceptors, + workflow_query_reject_condition=workflow_query_reject_condition, + ) + + def __init__( + self, + service: temporalio.workflow_service.WorkflowService, + *, + namespace: str = "default", + identity: str = f"{os.getpid()}@{socket.gethostname()}", + data_converter: temporalio.converter.DataConverter = temporalio.converter.default(), + interceptors: Iterable["Interceptor"] = [], + workflow_query_reject_condition: Optional[WorkflowQueryRejectCondition] = None, + ): + """Create a Temporal client from a workflow service. + + See :py:meth:`connect` for details on the parameters. 
+ """ + self._service = service + self._namespace = namespace + self._identity = identity + self._data_converter = data_converter + self._interceptors = interceptors + self._workflow_query_reject_condition = workflow_query_reject_condition + + # Iterate over interceptors in reverse building the impl + self._impl: OutboundInterceptor = _ClientImpl(self) + for interceptor in reversed(list(interceptors)): + self._impl = interceptor.intercept_client(self._impl) + + @property + def service(self) -> temporalio.workflow_service.WorkflowService: + """Raw gRPC service for this client.""" + return self._service + + @property + def namespace(self) -> str: + """Namespace used in calls by this client.""" + return self._namespace + + @property + def identity(self) -> str: + """Identity used in calls by this client.""" + return self._identity + + @property + def data_converter(self) -> temporalio.converter.DataConverter: + """Data converter used by this client.""" + return self._data_converter + + async def start_workflow( + self, + workflow: str, + *args: Any, + id: str, + task_queue: str, + execution_timeout: Optional[timedelta] = None, + run_timeout: Optional[timedelta] = None, + task_timeout: Optional[timedelta] = None, + id_reuse_policy: WorkflowIDReusePolicy = WorkflowIDReusePolicy.ALLOW_DUPLICATE, + retry_policy: Optional[temporalio.common.RetryPolicy] = None, + cron_schedule: str = "", + memo: Optional[Mapping[str, Any]] = None, + search_attributes: Optional[Mapping[str, Any]] = None, + header: Optional[Mapping[str, Any]] = None, + start_signal: Optional[str] = None, + start_signal_args: Iterable[Any] = [], + ) -> "WorkflowHandle[Any]": + """Start a workflow and return its handle. + + Args: + workflow: Name of the workflow to start. + args: Arguments for the workflow if any. + id: Unique identifier for the workflow execution. + task_queue: Task queue to run the workflow on. + execution_timeout: Total workflow execution timeout including + retries and continue as new. 
+ run_timeout: Timeout of a single workflow run. + task_timeout: Timeout of a single workflow task. + id_reuse_policy: How already-existing IDs are treated. + retry_policy: Retry policy for the workflow. + cron_schedule: See https://docs.temporal.io/docs/content/what-is-a-temporal-cron-job/ + memo: Memo for the workflow. + search_attributes: Search attributes for the workflow. + header: Header for the workflow. + start_signal: If present, this signal is sent as signal-with-start + instead of traditional workflow start. + start_signal_args: Arguments for start_signal if start_signal + present. + + Returns: + A workflow handle to the started/existing workflow. + :py:attr:`WorkflowHandle.run_id` will be populated with the current + run ID. + """ + return await self._impl.start_workflow( + StartWorkflowInput( + workflow=workflow, + args=args, + id=id, + task_queue=task_queue, + execution_timeout=execution_timeout, + run_timeout=run_timeout, + task_timeout=task_timeout, + id_reuse_policy=id_reuse_policy, + retry_policy=retry_policy, + cron_schedule=cron_schedule, + memo=memo, + search_attributes=search_attributes, + header=header, + start_signal=start_signal, + start_signal_args=start_signal_args, + ) + ) + + async def execute_workflow( + self, + workflow: str, + *args: Any, + id: str, + task_queue: str, + execution_timeout: Optional[timedelta] = None, + run_timeout: Optional[timedelta] = None, + task_timeout: Optional[timedelta] = None, + id_reuse_policy: WorkflowIDReusePolicy = WorkflowIDReusePolicy.ALLOW_DUPLICATE, + retry_policy: Optional[temporalio.common.RetryPolicy] = None, + cron_schedule: str = "", + memo: Optional[Mapping[str, Any]] = None, + search_attributes: Optional[Mapping[str, Any]] = None, + header: Optional[Mapping[str, Any]] = None, + start_signal: Optional[str] = None, + start_signal_args: list[Any] = [], + ) -> Any: + """Start a workflow and wait for completion. 
+ + This is a shortcut for :py:meth:`start_workflow` + + :py:meth:`WorkflowHandle.result`. + """ + return await ( + await self.start_workflow( + workflow, + *args, + task_queue=task_queue, + id=id, + execution_timeout=execution_timeout, + run_timeout=run_timeout, + task_timeout=task_timeout, + id_reuse_policy=id_reuse_policy, + retry_policy=retry_policy, + cron_schedule=cron_schedule, + memo=memo, + search_attributes=search_attributes, + header=header, + start_signal=start_signal, + start_signal_args=start_signal_args, + ) + ).result() + + def get_workflow_handle( + self, workflow_id: str, *, run_id: Optional[str] = None + ) -> "WorkflowHandle[Any]": + """Get a workflow handle to an existing workflow by its ID. + + Args: + workflow_id: Workflow ID to get a handle to. + run_id: Run ID that will be used for all calls. + """ + return WorkflowHandle(self, workflow_id, run_id=run_id) + + +T = TypeVar("T") + + +class WorkflowHandle(Generic[T]): + """Handle for interacting with a workflow. + + This is usually created via :py:meth:`Client.get_workflow_handle` or + returned from :py:meth:`Client.start_workflow`/:py:meth:`Client.execute_workflow`. + """ + + SELF_RUN_ID = "__" + + def __init__( + self, client: Client, id: str, *, run_id: Optional[str] = None + ) -> None: + """Create workflow handle.""" + self._client = client + self._id = id + self._run_id = run_id + + @property + def id(self) -> str: + """ID for the workflow.""" + return self._id + + @property + def run_id(self) -> Optional[str]: + """Run ID used for calls on this handle if present.""" + return self._run_id + + async def result( + self, *, starting_run_id: Optional[str] = SELF_RUN_ID, follow_runs: bool = True + ) -> T: + """Wait for result of the workflow. + + Args: + starting_run_id: Run ID to fetch result for. Defaults to using + :py:meth:`run_id`. If set to None or there is no + :py:meth:`run_id`, this will get the latest result for the + workflow ID. 
+ follow_runs: If true (default), workflow runs will be continually + fetched, until the most recent one is found. If false, the first + result is used. + + Returns: + Result of the workflow after being converted by the data converter. + + Raises: + Exception: Any failure of the workflow. + """ + if starting_run_id == WorkflowHandle.SELF_RUN_ID: + starting_run_id = self._run_id + req = temporalio.api.workflowservice.v1.GetWorkflowExecutionHistoryRequest( + namespace=self._client.namespace, + execution=temporalio.api.common.v1.WorkflowExecution( + workflow_id=self._id, run_id=starting_run_id or "" + ), + wait_new_event=True, + history_event_filter_type=temporalio.api.enums.v1.HistoryEventFilterType.HISTORY_EVENT_FILTER_TYPE_CLOSE_EVENT, + skip_archival=True, + ) + while True: + resp = await self._client.service.get_workflow_execution_history(req) + # Continually ask for pages until we get close + if len(resp.history.events) == 0: + req.next_page_token = resp.next_page_token + continue + elif len(resp.history.events) != 1: + raise RuntimeError( + f"Expected single close event, got {len(resp.history.events)}" + ) + event = resp.history.events[0] + if event.HasField("workflow_execution_completed_event_attributes"): + complete_attr = event.workflow_execution_completed_event_attributes + # Follow execution + if follow_runs and complete_attr.new_execution_run_id: + req.execution.run_id = complete_attr.new_execution_run_id + req.next_page_token = b"" + continue + # Ignoring anything after the first response like TypeScript + if not complete_attr.result: + return cast(T, None) + results = await self._client.data_converter.decode( + complete_attr.result.payloads + ) + if not results: + return cast(T, None) + elif len(results) > 1: + logger.warning("Expected single result, got %s", len(results)) + return cast(T, results[0]) + elif event.HasField("workflow_execution_failed_event_attributes"): + fail_attr = event.workflow_execution_failed_event_attributes + # Follow execution + 
if follow_runs and fail_attr.new_execution_run_id: + req.execution.run_id = fail_attr.new_execution_run_id + req.next_page_token = b"" + continue + raise WorkflowFailureError( + cause=await temporalio.failure.FailureError.from_proto( + fail_attr.failure, self._client.data_converter + ) + ) + elif event.HasField("workflow_execution_canceled_event_attributes"): + cancel_attr = event.workflow_execution_canceled_event_attributes + details = [] + if cancel_attr.details and cancel_attr.details.payloads: + details = await self._client.data_converter.decode( + cancel_attr.details.payloads + ) + raise WorkflowFailureError( + cause=temporalio.failure.FailureError( + "Workflow cancelled", + temporalio.failure.CancelledFailure(*details), + ) + ) + elif event.HasField("workflow_execution_terminated_event_attributes"): + term_attr = event.workflow_execution_terminated_event_attributes + details = [] + if term_attr.details and term_attr.details.payloads: + details = await self._client.data_converter.decode( + term_attr.details.payloads + ) + raise WorkflowFailureError( + cause=temporalio.failure.FailureError( + term_attr.reason or "Workflow terminated", + temporalio.failure.TerminatedFailure( + *details, + reason=term_attr.reason or None, + ), + ) + ) + elif event.HasField("workflow_execution_timed_out_event_attributes"): + time_attr = event.workflow_execution_timed_out_event_attributes + # Follow execution + if follow_runs and time_attr.new_execution_run_id: + req.execution.run_id = time_attr.new_execution_run_id + req.next_page_token = b"" + continue + raise WorkflowFailureError( + cause=temporalio.failure.FailureError( + "Workflow timed out", + temporalio.failure.TimeoutFailure( + temporalio.failure.TimeoutType.START_TO_CLOSE + ), + ) + ) + elif event.HasField("workflow_execution_continued_as_new_event_attributes"): + cont_attr = event.workflow_execution_continued_as_new_event_attributes + if not cont_attr.new_execution_run_id: + raise RuntimeError( + "Unexpectedly missing new 
run ID from continue as new" + ) + # Follow execution + if follow_runs: + req.execution.run_id = cont_attr.new_execution_run_id + req.next_page_token = b"" + continue + raise WorkflowContinuedAsNewError(cont_attr.new_execution_run_id) + else: + raise RuntimeError( + f"Unexpected close event attribute of {event.WhichOneof('attributes')}" + ) + + async def cancel( + self, + *, + run_id: Optional[str] = SELF_RUN_ID, + first_execution_run_id: Optional[None], + ) -> None: + """Cancel the workflow. + + Args: + run_id: Run ID to cancel. Defaults to using :py:meth:`run_id`. If + set to None or there is no :py:meth:`run_id`, this will cancel + the latest run for the workflow ID. + first_execution_run_id: First run ID that started the workflow. If + set, the cancellation makes sure that the workflow was started + with the given run ID. + + TODO(cretz): Raises + """ + if run_id == WorkflowHandle.SELF_RUN_ID: + run_id = self._run_id + await self._client._impl.cancel_workflow( + CancelWorkflowInput( + id=self._id, + run_id=run_id, + first_execution_run_id=first_execution_run_id, + ) + ) + + # TODO(cretz): Wrap the result in Python-friendlier type? + async def describe( + self, + *, + run_id: Optional[str] = SELF_RUN_ID, + ) -> temporalio.api.workflowservice.v1.DescribeWorkflowExecutionResponse: + """Get workflow details. + + Args: + run_id: Run ID to describe. Defaults to using :py:meth:`run_id`. If + set to None or there is no :py:meth:`run_id`, this will describe + the latest run for the workflow ID. + + Returns: + Workflow details. 
+ + TODO(cretz): Raises + """ + if run_id == WorkflowHandle.SELF_RUN_ID: + run_id = self._run_id + return await self._client.service.describe_workflow_execution( + temporalio.api.workflowservice.v1.DescribeWorkflowExecutionRequest( + namespace=self._client.namespace, + execution=temporalio.api.common.v1.WorkflowExecution( + workflow_id=self._id, + run_id=run_id or "", + ), + ) + ) + + async def query( + self, name: str, *args: Any, run_id: Optional[str] = SELF_RUN_ID + ) -> Any: + """Query the workflow. + + Args: + name: Query name on the workflow. + args: Query arguments. + run_id: Run ID to query. Defaults to using :py:meth:`run_id`. If set + to None or there is no :py:meth:`run_id`, this will query the + latest run for the workflow ID. + + Returns: + Result of the query. + + TODO(cretz): Raises + """ + if run_id == WorkflowHandle.SELF_RUN_ID: + run_id = self._run_id + return await self._client._impl.query_workflow( + QueryWorkflowInput( + id=self._id, + run_id=run_id, + query=name, + args=args, + reject_condition=self._client._workflow_query_reject_condition, + ) + ) + + async def signal( + self, name: str, *args: Any, run_id: Optional[str] = SELF_RUN_ID + ) -> None: + """Send a signal to the workflow. + + Args: + name: Signal name on the workflow. + args: Signal arguments. + run_id: Run ID to signal. Defaults to using :py:meth:`run_id`. If + set to None or there is no :py:meth:`run_id`, this will query + the latest run for the workflow ID. + + TODO(cretz): Raises + """ + if run_id == WorkflowHandle.SELF_RUN_ID: + run_id = self._run_id + await self._client._impl.signal_workflow( + SignalWorkflowInput( + id=self._id, + run_id=run_id, + signal=name, + args=args, + ) + ) + + async def terminate( + self, + *args: Any, + reason: Optional[str] = None, + run_id: Optional[str] = SELF_RUN_ID, + first_execution_run_id: Optional[None], + ) -> None: + """Terminate the workflow. + + Args: + args: Details to store on the termination. + reason: Reason for the termination. 
+ run_id: Run ID to terminate. Defaults to using :py:meth:`run_id`. If + set to None or there is no :py:meth:`run_id`, this will + terminate the latest run for the workflow ID. + first_execution_run_id: First run ID that started the workflow. If + set, the termination makes sure that the workflow was started + with the given run ID. + + TODO(cretz): Raises + """ + if run_id == WorkflowHandle.SELF_RUN_ID: + run_id = self._run_id + await self._client._impl.terminate_workflow( + TerminateWorkflowInput( + id=self._id, + run_id=run_id, + args=args, + reason=reason, + first_execution_run_id=first_execution_run_id, + ) + ) + + +@dataclass +class StartWorkflowInput: + """Input for :py:meth:`OutboundInterceptor.start_workflow`.""" + + workflow: str + args: Iterable[Any] + id: str + task_queue: str + execution_timeout: Optional[timedelta] + run_timeout: Optional[timedelta] + task_timeout: Optional[timedelta] + id_reuse_policy: WorkflowIDReusePolicy + retry_policy: Optional[temporalio.common.RetryPolicy] + cron_schedule: str + memo: Optional[Mapping[str, Any]] + search_attributes: Optional[Mapping[str, Any]] + header: Optional[Mapping[str, Any]] + start_signal: Optional[str] + start_signal_args: Iterable[Any] + + +@dataclass +class CancelWorkflowInput: + """Input for :py:meth:`OutboundInterceptor.cancel_workflow`.""" + + id: str + run_id: Optional[str] + first_execution_run_id: Optional[str] + + +@dataclass +class QueryWorkflowInput: + """Input for :py:meth:`OutboundInterceptor.query_workflow`.""" + + id: str + run_id: Optional[str] + query: str + args: Iterable[Any] + reject_condition: Optional[WorkflowQueryRejectCondition] + + +@dataclass +class SignalWorkflowInput: + """Input for :py:meth:`OutboundInterceptor.signal_workflow`.""" + + id: str + run_id: Optional[str] + signal: str + args: Iterable[Any] + + +@dataclass +class TerminateWorkflowInput: + """Input for :py:meth:`OutboundInterceptor.terminate_workflow`.""" + + id: str + run_id: Optional[str] + 
first_execution_run_id: Optional[str] + args: Iterable[Any] + reason: Optional[str] + + +class Interceptor: + """Interceptor for clients. + + This should be extended by any client interceptors. + """ + + def intercept_client(self, next: "OutboundInterceptor") -> "OutboundInterceptor": + """Method called for intercepting a client. + + Args: + next: The underlying outbound interceptor this interceptor should + delegate to. + + Returns: + The new interceptor that will be called for each client call. + """ + return next + + +class OutboundInterceptor: + """OutboundInterceptor for intercepting client calls. + + This should be extended by any client outbound interceptors. + """ + + def __init__(self, next: "OutboundInterceptor") -> None: + """Create the outbound interceptor. + + Args: + next: The next interceptor in the chain. The default implementation + of all calls is to delegate to the next interceptor. + """ + self.next = next + + async def start_workflow(self, input: StartWorkflowInput) -> WorkflowHandle[Any]: + """Called for every :py:meth:`Client.start_workflow` call.""" + return await self.next.start_workflow(input) + + async def cancel_workflow(self, input: CancelWorkflowInput) -> None: + """Called for every :py:meth:`WorkflowHandle.cancel` call.""" + await self.next.cancel_workflow(input) + + async def query_workflow(self, input: QueryWorkflowInput) -> Any: + """Called for every :py:meth:`WorkflowHandle.query` call.""" + return await self.next.query_workflow(input) + + async def signal_workflow(self, input: SignalWorkflowInput) -> None: + """Called for every :py:meth:`WorkflowHandle.signal` call.""" + await self.next.signal_workflow(input) + + async def terminate_workflow(self, input: TerminateWorkflowInput) -> None: + """Called for every :py:meth:`WorkflowHandle.terminate` call.""" + await self.next.terminate_workflow(input) + + +class _ClientImpl(OutboundInterceptor): + def __init__(self, client: Client) -> None: + # We are intentionally not calling the base 
class's __init__ here + self._client = client + + async def start_workflow(self, input: StartWorkflowInput) -> WorkflowHandle[Any]: + # Build request + req: Union[ + temporalio.api.workflowservice.v1.StartWorkflowExecutionRequest, + temporalio.api.workflowservice.v1.SignalWithStartWorkflowExecutionRequest, + ] + if input.start_signal is not None: + req = temporalio.api.workflowservice.v1.SignalWithStartWorkflowExecutionRequest( + signal_name=input.start_signal + ) + if input.start_signal_args: + req.signal_input.payloads.extend( + await self._client.data_converter.encode(input.start_signal_args) + ) + else: + req = temporalio.api.workflowservice.v1.StartWorkflowExecutionRequest() + req.namespace = self._client.namespace + req.workflow_id = input.id + req.workflow_type.name = input.workflow + req.task_queue.name = input.task_queue + if input.args: + req.input.payloads.extend( + await self._client.data_converter.encode(input.args) + ) + if input.execution_timeout is not None: + req.workflow_execution_timeout.FromTimedelta(input.execution_timeout) + if input.run_timeout is not None: + req.workflow_run_timeout.FromTimedelta(input.run_timeout) + if input.task_timeout is not None: + req.workflow_task_timeout.FromTimedelta(input.task_timeout) + req.identity = self._client.identity + req.request_id = str(uuid.uuid4()) + req.workflow_id_reuse_policy = cast( + "temporalio.api.enums.v1.WorkflowIdReusePolicy.ValueType", + int(input.id_reuse_policy), + ) + if input.retry_policy is not None: + input.retry_policy.apply_to_proto(req.retry_policy) + req.cron_schedule = input.cron_schedule + if input.memo is not None: + for k, v in input.memo.items(): + req.memo.fields[k] = (await self._client.data_converter.encode([v]))[0] + if input.search_attributes is not None: + for k, v in input.search_attributes.items(): + req.search_attributes.indexed_fields[k] = ( + await self._client.data_converter.encode([v]) + )[0] + if input.header is not None: + for k, v in input.header.items(): + 
req.header.fields[k] = (await self._client.data_converter.encode([v]))[ + 0 + ] + + # Start with signal or just normal start + resp: Union[ + temporalio.api.workflowservice.v1.SignalWithStartWorkflowExecutionResponse, + temporalio.api.workflowservice.v1.StartWorkflowExecutionResponse, + ] + if isinstance( + req, + temporalio.api.workflowservice.v1.SignalWithStartWorkflowExecutionRequest, + ): + resp = await self._client.service.signal_with_start_workflow_execution( + req, retry=True + ) + else: + resp = await self._client.service.start_workflow_execution(req, retry=True) + return WorkflowHandle(self._client, req.workflow_id, run_id=resp.run_id) + + async def cancel_workflow(self, input: CancelWorkflowInput) -> None: + await self._client.service.request_cancel_workflow_execution( + temporalio.api.workflowservice.v1.RequestCancelWorkflowExecutionRequest( + namespace=self._client.namespace, + workflow_execution=temporalio.api.common.v1.WorkflowExecution( + workflow_id=input.id, + run_id=input.run_id or "", + ), + identity=self._client.identity, + request_id=str(uuid.uuid4()), + first_execution_run_id=input.first_execution_run_id or "", + ), + retry=True, + ) + + async def query_workflow(self, input: QueryWorkflowInput) -> Any: + req = temporalio.api.workflowservice.v1.QueryWorkflowRequest( + namespace=self._client.namespace, + execution=temporalio.api.common.v1.WorkflowExecution( + workflow_id=input.id, + run_id=input.run_id or "", + ) + # TODO(cretz): Headers here and elsewhere + ) + if input.reject_condition: + req.query_reject_condition = cast( + "temporalio.api.enums.v1.QueryRejectCondition.ValueType", + int(input.reject_condition), + ) + req.query.query_type = input.query + if input.args: + req.query.query_args.payloads.extend( + await self._client.data_converter.encode(input.args) + ) + # TODO(cretz): Wrap error + resp = await self._client.service.query_workflow(req, retry=True) + if resp.HasField("query_rejected"): + raise WorkflowQueryRejectedError( + 
WorkflowExecutionStatus(resp.query_rejected.status) + if resp.query_rejected.status + else None + ) + if not resp.query_result.payloads: + return None + results = await self._client.data_converter.decode(resp.query_result.payloads) + if not results: + return None + elif len(results) > 1: + logger.warning("Expected single query result, got %s", len(results)) + return results[0] + + async def signal_workflow(self, input: SignalWorkflowInput) -> None: + req = temporalio.api.workflowservice.v1.SignalWorkflowExecutionRequest( + namespace=self._client.namespace, + workflow_execution=temporalio.api.common.v1.WorkflowExecution( + workflow_id=input.id, + run_id=input.run_id or "", + ), + signal_name=input.signal, + identity=self._client.identity, + request_id=str(uuid.uuid4()), + # TODO(cretz): Headers here and elsewhere + ) + if input.args: + req.input.payloads.extend( + await self._client.data_converter.encode(input.args) + ) + await self._client.service.signal_workflow_execution(req, retry=True) + + async def terminate_workflow(self, input: TerminateWorkflowInput) -> None: + req = temporalio.api.workflowservice.v1.TerminateWorkflowExecutionRequest( + namespace=self._client.namespace, + workflow_execution=temporalio.api.common.v1.WorkflowExecution( + workflow_id=input.id, + run_id=input.run_id or "", + ), + reason=input.reason or "", + identity=self._client.identity, + first_execution_run_id=input.first_execution_run_id or "", + ) + if input.args: + req.details.payloads.extend( + await self._client.data_converter.encode(input.args) + ) + await self._client.service.terminate_workflow_execution(req, retry=True) + + +class WorkflowFailureError(Exception): + """Error that occurs when a workflow is unsuccessful.""" + + def __init__(self, *, cause: temporalio.failure.FailureError) -> None: + """Create workflow failure error.""" + super().__init__("Workflow execution failed") + # TODO(cretz): Confirm setting this __cause__ is acceptable + self.__cause__ = cause + + +class 
WorkflowContinuedAsNewError(Exception): + """Error that occurs when a workflow was continued as new.""" + + def __init__(self, new_execution_run_id: str) -> None: + """Create workflow continue as new error.""" + super().__init__("Workflow continued as new") + self._new_execution_run_id = new_execution_run_id + + +class WorkflowQueryRejectedError(Exception): + """Error that occurs when a query was rejected.""" + + def __init__(self, status: Optional[WorkflowExecutionStatus]) -> None: + """Create workflow query rejected error.""" + super().__init__(f"Query rejected, status: {status}") + self._status = status + + @property + def status(self) -> Optional[WorkflowExecutionStatus]: + """Get workflow execution status causing rejection.""" + return self._status diff --git a/temporalio/common/__init__.py b/temporalio/common/__init__.py new file mode 100644 index 000000000..e1445662f --- /dev/null +++ b/temporalio/common/__init__.py @@ -0,0 +1,3 @@ +"""Common utilities.""" + +from .retry_policy import RetryPolicy diff --git a/temporalio/common/retry_policy.py b/temporalio/common/retry_policy.py new file mode 100644 index 000000000..7c4b1829d --- /dev/null +++ b/temporalio/common/retry_policy.py @@ -0,0 +1,18 @@ +from dataclasses import dataclass +from datetime import timedelta +from typing import Optional + +import temporalio.api.common.v1 + + +@dataclass +class RetryPolicy: + initial_interval: timedelta + randomization_factor: float + multiplier: float + max_interval: timedelta + max_elapsed_time: Optional[timedelta] + max_retries: int + + def apply_to_proto(self, proto: temporalio.api.common.v1.RetryPolicy) -> None: + raise NotImplementedError diff --git a/temporalio/converter.py b/temporalio/converter.py new file mode 100644 index 000000000..6f4bb2ba0 --- /dev/null +++ b/temporalio/converter.py @@ -0,0 +1,421 @@ +"""Base converter and default implementations for conversion to/from values/payloads.""" + +import dataclasses +import inspect +import json +from abc import ABC, 
abstractmethod +from typing import Any, Iterable, List, Mapping, Optional, Type + +import google.protobuf.json_format +import google.protobuf.message +import google.protobuf.symbol_database + +import temporalio.api.common.v1 + + +class DataConverter(ABC): + """Base converter to/from multiple payloads/values.""" + + @abstractmethod + async def encode( + self, values: Iterable[Any] + ) -> List[temporalio.api.common.v1.Payload]: + """Encode values into payloads. + + Args: + values: Values to be converted. + + Returns: + Converted payloads. Note, this does not have to be the same number + as values given, but at least one must be present. + + Raises: + Exception: Any issue during conversion. + """ + raise NotImplementedError + + @abstractmethod + async def decode( + self, + payloads: Iterable[temporalio.api.common.v1.Payload], + type_hints: Optional[List[Type]] = None, + ) -> List[Any]: + """Decode payloads into values. + + Args: + payloads: Payloads to convert to Python values. + type_hints: Types that are expected if any. This may not have any + types if there are no annotations on the target. If this is + present, it must have the exact same length as payloads even if + the values are just "object". + + Returns: + Collection of Python values. Note, this does not have to be the same + number as values given, but at least one must be present. + + Raises: + Exception: Any issue during conversion. + """ + raise NotImplementedError + + +class PayloadConverter(ABC): + """Base converter to/from single payload/value.""" + + @property + @abstractmethod + def encoding(self) -> str: + """Encoding for the payload this converter works with.""" + raise NotImplementedError + + @abstractmethod + async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: + """Encode a single value to a payload or None. + + Args: + value: Value to be converted. + + Returns: + Payload of the value or None if unable to convert. 
+ + Raises: + TypeError: Value is not the expected type. + ValueError: Value is of the expected type but otherwise incorrect. + RuntimeError: General error during encoding. + """ + raise NotImplementedError + + @abstractmethod + async def decode( + self, + payload: temporalio.api.common.v1.Payload, + type_hint: Optional[Type] = None, + ) -> Any: + """Decode a single payload to a Python value or raise exception. + + Args: + payload: Payload to convert to Python value. + type_hints: Type that is expected if any. This may not have a type + if there are no annotations on the target. + + Return: + The decoded value from the payload. Since the encoding is checked by + the caller, this should raise an exception if the payload cannot be + converted. + + Raises: + RuntimeError: General error during decoding. + """ + raise NotImplementedError + + +class CompositeDataConverter(DataConverter): + """Composite data converter that delegates to a list of payload converters. + + Encoding/decoding are attempted on each payload converter successively until + it succeeds. + + Attributes: + converters: List of payload converters to delegate to, in order. + """ + + converters: Mapping[bytes, PayloadConverter] + + def __init__(self, *converters: PayloadConverter) -> None: + """Initializes the data converter. + + Args: + converters: Payload converters to delegate to, in order. + """ + # Insertion order preserved here + self.converters = {c.encoding.encode(): c for c in converters} + + async def encode( + self, values: Iterable[Any] + ) -> List[temporalio.api.common.v1.Payload]: + """Encode values trying each converter. + + See base class. Always returns the same number of payloads as values. 
+ + Raises: + RuntimeError: No known converter + """ + payloads = [] + for index, value in enumerate(values): + # We intentionally attempt these serially just in case a stateful + # converter may rely on the previous values + payload = None + for converter in self.converters.values(): + payload = await converter.encode(value) + if payload is not None: + break + if payload is None: + raise RuntimeError( + f"Value at index {index} of type {type(value)} has no known converter" + ) + payloads.append(payload) + return payloads + + async def decode( + self, + payloads: Iterable[temporalio.api.common.v1.Payload], + type_hints: Optional[List[Type]] = None, + ) -> List[Any]: + """Decode values trying each converter. + + See base class. Always returns the same number of values as payloads. + + Raises: + KeyError: Unknown payload encoding + RuntimeError: Error during decode + """ + values = [] + for index, payload in enumerate(payloads): + encoding = payload.metadata.get("encoding", b"") + converter = self.converters.get(encoding) + if converter is None: + raise KeyError(f"Unknown payload encoding {encoding.decode()}") + type_hint = None + if type_hints is not None: + type_hint = type_hints[index] + try: + values.append(await converter.decode(payload, type_hint)) + except RuntimeError as err: + raise RuntimeError( + f"Payload at index {index} with encoding {encoding.decode()} could not be converted" + ) from err + return values + + +class BinaryNullPayloadConverter(PayloadConverter): + """Converter for 'binary/null' payloads supporting None values.""" + + @property + def encoding(self) -> str: + """See base class.""" + return "binary/null" + + async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: + """See base class.""" + if value is None: + return temporalio.api.common.v1.Payload( + metadata={"encoding": self.encoding.encode()} + ) + return None + + async def decode( + self, + payload: temporalio.api.common.v1.Payload, + type_hint: Optional[Type] 
class BinaryNullPayloadConverter(PayloadConverter):
    """Converter for 'binary/null' payloads supporting None values."""

    @property
    def encoding(self) -> str:
        """See base class."""
        return "binary/null"

    async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]:
        """See base class."""
        if value is not None:
            return None
        # A null payload carries only the encoding metadata, no data bytes
        return temporalio.api.common.v1.Payload(
            metadata={"encoding": self.encoding.encode()}
        )

    async def decode(
        self,
        payload: temporalio.api.common.v1.Payload,
        type_hint: Optional[Type] = None,
    ) -> Any:
        """See base class."""
        if payload.data:
            raise RuntimeError("Expected empty data set for binary/null")
        return None


class BinaryPlainPayloadConverter(PayloadConverter):
    """Converter for 'binary/plain' payloads supporting bytes values."""

    @property
    def encoding(self) -> str:
        """See base class."""
        return "binary/plain"

    async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]:
        """See base class."""
        if not isinstance(value, bytes):
            return None
        return temporalio.api.common.v1.Payload(
            metadata={"encoding": self.encoding.encode()}, data=value
        )

    async def decode(
        self,
        payload: temporalio.api.common.v1.Payload,
        type_hint: Optional[Type] = None,
    ) -> Any:
        """See base class."""
        return payload.data


_sym_db = google.protobuf.symbol_database.Default()


class JSONProtoPayloadConverter(PayloadConverter):
    """Converter for 'json/protobuf' payloads supporting protobuf Message values."""

    @property
    def encoding(self) -> str:
        """See base class."""
        return "json/protobuf"

    async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]:
        """See base class."""
        if not isinstance(value, google.protobuf.message.Message):
            return None
        if value.DESCRIPTOR is None:
            return None
        # MessageToJson has no compact mode, so we go through a dict and let
        # json.dumps strip the spaces and newlines
        json_str = json.dumps(
            google.protobuf.json_format.MessageToDict(value),
            separators=(",", ":"),
            sort_keys=True,
        )
        return temporalio.api.common.v1.Payload(
            metadata={
                "encoding": self.encoding.encode(),
                "messageType": value.DESCRIPTOR.full_name.encode(),
            },
            data=json_str.encode(),
        )

    async def decode(
        self,
        payload: temporalio.api.common.v1.Payload,
        type_hint: Optional[Type] = None,
    ) -> Any:
        """See base class."""
        message_name = payload.metadata.get("messageType", b"").decode()
        try:
            # Look up the concrete message class by full name, then parse the
            # JSON payload into a fresh instance
            message = _sym_db.GetSymbol(message_name)()
            return google.protobuf.json_format.Parse(payload.data, message)
        except KeyError as err:
            raise RuntimeError(f"Unknown Protobuf type {message_name}") from err
        except google.protobuf.json_format.ParseError as err:
            raise RuntimeError("Failed parsing") from err


class BinaryProtoPayloadConverter(PayloadConverter):
    """Converter for 'binary/protobuf' payloads supporting protobuf Message values."""

    @property
    def encoding(self) -> str:
        """See base class."""
        return "binary/protobuf"

    async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]:
        """See base class."""
        if not isinstance(value, google.protobuf.message.Message):
            return None
        if value.DESCRIPTOR is None:
            return None
        return temporalio.api.common.v1.Payload(
            metadata={
                "encoding": self.encoding.encode(),
                "messageType": value.DESCRIPTOR.full_name.encode(),
            },
            data=value.SerializeToString(),
        )

    async def decode(
        self,
        payload: temporalio.api.common.v1.Payload,
        type_hint: Optional[Type] = None,
    ) -> Any:
        """See base class."""
        message_name = payload.metadata.get("messageType", b"").decode()
        try:
            message = _sym_db.GetSymbol(message_name)()
            message.ParseFromString(payload.data)
            return message
        except KeyError as err:
            raise RuntimeError(f"Unknown Protobuf type {message_name}") from err
        except google.protobuf.message.DecodeError as err:
            raise RuntimeError("Failed parsing") from err


class JSONPlainPayloadConverter(PayloadConverter):
    """Converter for 'json/plain' payloads supporting common Python values.

    This supports all values that :py:func:`json.dump` supports and also adds
    encoding support for :py:mod:`dataclasses` by converting them using
    :py:mod:`dataclasses.asdict`. Note that on decode, if there is a type hint,
    it will be used to construct the data class.
    """

    # Optional custom JSON encoder/decoder classes and the encoding name
    _encoder: Optional[Type[json.JSONEncoder]]
    _decoder: Optional[Type[json.JSONDecoder]]
    _encoding: str

    def __init__(
        self,
        *,
        encoder: Optional[Type[json.JSONEncoder]] = None,
        decoder: Optional[Type[json.JSONDecoder]] = None,
        encoding: str = "json/plain",
    ) -> None:
        """Initialize a JSON data converter.

        Args:
            encoder: Custom encoder class object to use.
            decoder: Custom decoder class object to use.
            encoding: Encoding name to use.
        """
        super().__init__()
        self._encoder = encoder
        self._decoder = decoder
        self._encoding = encoding

    @property
    def encoding(self) -> str:
        """See base class."""
        return self._encoding

    async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]:
        """See base class."""
        # Data classes are converted to plain dicts before serialization
        if dataclasses.is_dataclass(value):
            value = dataclasses.asdict(value)
        try:
            data = json.dumps(
                value, cls=self._encoder, separators=(",", ":"), sort_keys=True
            ).encode()
        except (RuntimeError, TypeError, ValueError):
            # JSON encode errors are intentionally swallowed so another
            # converter may attempt the value
            return None
        return temporalio.api.common.v1.Payload(
            metadata={"encoding": self._encoding.encode()}, data=data
        )

    async def decode(
        self,
        payload: temporalio.api.common.v1.Payload,
        type_hint: Optional[Type] = None,
    ) -> Any:
        """See base class."""
        try:
            decoded = json.loads(payload.data, cls=self._decoder)
        except json.JSONDecodeError as err:
            raise RuntimeError("Failed parsing") from err
        # If the result is a dict and the hint is a data class, rebuild the
        # data class from the dict's keys
        if (
            isinstance(decoded, dict)
            and inspect.isclass(type_hint)
            and dataclasses.is_dataclass(type_hint)
        ):
            decoded = type_hint(**decoded)
        return decoded


# TODO(cretz): Should this be a var that can be changed instead? If so, can it
# be replaced _after_ client creation? We'd just have to fallback to this
# default at conversion time instead of instantiation time.
def default() -> CompositeDataConverter:
    """Default converter compatible with other Temporal SDKs.

    This handles None, bytes, all protobuf message types, and any type that
    :py:func:`json.dump` accepts. In addition, this supports encoding
    :py:mod:`dataclasses` and also decoding them provided the data class is in
    the type hint.

    Returns:
        A new composite converter. Order matters: converters are tried in
        order on encode, so JSON protobuf takes precedence over binary
        protobuf.
    """
    return CompositeDataConverter(
        BinaryNullPayloadConverter(),
        BinaryPlainPayloadConverter(),
        JSONProtoPayloadConverter(),
        BinaryProtoPayloadConverter(),
        JSONPlainPayloadConverter(),
    )


# --- temporalio/failure.py (new file) ---

from enum import IntEnum
from typing import Any, List, Optional

import temporalio.api.enums.v1
import temporalio.api.failure.v1
import temporalio.converter


class FailureError(Exception):
    """Base error for Temporal failures.

    NOTE(review): stub — every member currently raises NotImplementedError.
    """

    @staticmethod
    async def from_proto(
        failure: temporalio.api.failure.v1.Failure,
        data_converter: temporalio.converter.DataConverter,
    ) -> "FailureError":
        """Create a failure error from its protobuf form (not implemented)."""
        raise NotImplementedError

    def __init__(self, message: str, failure: "Failure") -> None:
        """Initialize with a message and underlying failure (not implemented)."""
        super().__init__(message)
        raise NotImplementedError


class Failure:
    """Failure with optional details and optional protobuf source.

    NOTE(review): stub — not yet implemented.
    """

    def __init__(
        self,
        *details: Any,
        proto_failure: Optional[temporalio.api.failure.v1.Failure] = None
    ) -> None:
        raise NotImplementedError


class CancelledFailure(Failure):
    """Failure subclass for cancellation (stub — not yet implemented)."""

    def __init__(self, *details: Any) -> None:
        super().__init__(*details)
        raise NotImplementedError


class TerminatedFailure(Failure):
    """Failure subclass for termination with an optional reason (stub)."""

    def __init__(self, *details: Any, reason: Optional[str]) -> None:
        super().__init__(*details)
        raise NotImplementedError
class TimeoutFailure(Failure):
    """Failure subclass for a timeout with optional last-heartbeat details.

    NOTE(review): stub — not yet implemented.
    """

    def __init__(
        self, type: TimeoutType, last_heartbeat_details: Optional[List[Any]] = None
    ) -> None:
        super().__init__()
        raise NotImplementedError


# --- temporalio/workflow_service.py (new file) ---

from abc import ABC, abstractmethod
from typing import Generic, Type, TypeVar

import google.protobuf.message

import temporalio.api.workflowservice.v1
import temporalio.bridge.client

# Bounded type vars tie each call's request protobuf type to its response
# protobuf type.
WorkflowServiceRequest = TypeVar(
    "WorkflowServiceRequest", bound=google.protobuf.message.Message
)
WorkflowServiceResponse = TypeVar(
    "WorkflowServiceResponse", bound=google.protobuf.message.Message
)


def _camel_case(snake_name: str) -> str:
    """Convert a snake_case RPC name to the CamelCase protobuf class prefix.

    E.g. ``record_activity_task_heartbeat_by_id`` ->
    ``RecordActivityTaskHeartbeatById``.
    """
    return "".join(part.title() for part in snake_name.split("_"))


class WorkflowService(ABC):
    """Abstract raw gRPC workflow service client.

    One awaitable call attribute is created per RPC at construction time, e.g.
    ``await service.start_workflow_execution(req)``. Subclasses only need to
    implement :py:meth:`_rpc_call`.
    """

    # Snake-case RPC names. The request/response protobuf classes for each
    # call are derived from the CamelCase form of its name, e.g.
    # "query_workflow" -> QueryWorkflowRequest / QueryWorkflowResponse.
    _RPC_NAMES = (
        "count_workflow_executions",
        "deprecate_namespace",
        "describe_namespace",
        "describe_task_queue",
        "describe_workflow_execution",
        "get_cluster_info",
        "get_search_attributes",
        "get_workflow_execution_history",
        "list_archived_workflow_executions",
        "list_closed_workflow_executions",
        "list_namespaces",
        "list_open_workflow_executions",
        "list_task_queue_partitions",
        "list_workflow_executions",
        "poll_activity_task_queue",
        "poll_workflow_task_queue",
        "query_workflow",
        "record_activity_task_heartbeat",
        "record_activity_task_heartbeat_by_id",
        "register_namespace",
        "request_cancel_workflow_execution",
        "reset_sticky_task_queue",
        "reset_workflow_execution",
        "respond_activity_task_canceled",
        "respond_activity_task_canceled_by_id",
        "respond_activity_task_completed",
        "respond_activity_task_completed_by_id",
        "respond_activity_task_failed",
        "respond_activity_task_failed_by_id",
        "respond_query_task_completed",
        "respond_workflow_task_completed",
        "respond_workflow_task_failed",
        "scan_workflow_executions",
        "signal_with_start_workflow_execution",
        "signal_workflow_execution",
        "start_workflow_execution",
        "terminate_workflow_execution",
        "update_namespace",
    )

    @staticmethod
    async def connect(target_url: str) -> "WorkflowService":
        """Connect to the workflow service at the given URL.

        Args:
            target_url: URL of the Temporal server frontend.

        Returns:
            A connected bridge-backed workflow service.
        """
        return await BridgeWorkflowService.connect(target_url=target_url)

    def __init__(self) -> None:
        """Create one call object per known RPC.

        Replaces ~38 hand-written, near-identical ``_new_call`` assignments
        with a table-driven loop over :py:attr:`_RPC_NAMES`. NOTE: attributes
        are created dynamically, so static type checkers cannot see them;
        behavior at runtime is identical to explicit assignment.
        """
        super().__init__()
        wsv1 = temporalio.api.workflowservice.v1
        for call_name in self._RPC_NAMES:
            camel = _camel_case(call_name)
            setattr(
                self,
                call_name,
                self._new_call(
                    call_name,
                    getattr(wsv1, camel + "Request"),
                    getattr(wsv1, camel + "Response"),
                ),
            )

    @abstractmethod
    async def _rpc_call(
        self,
        rpc: str,
        req: google.protobuf.message.Message,
        resp_type: Type[WorkflowServiceResponse],
        *,
        retry: bool = False,
    ) -> WorkflowServiceResponse:
        """Perform the named RPC with the given request (subclass hook)."""
        raise NotImplementedError

    def _new_call(
        self,
        name: str,
        req_type: Type[WorkflowServiceRequest],
        resp_type: Type[WorkflowServiceResponse],
    ) -> "WorkflowServiceCall[WorkflowServiceRequest, WorkflowServiceResponse]":
        """Build a typed, awaitable call object bound to this service."""
        return WorkflowServiceCall(self, name, req_type, resp_type)


class WorkflowServiceCall(Generic[WorkflowServiceRequest, WorkflowServiceResponse]):
    """Callable wrapper around a single workflow service RPC."""

    def __init__(
        self,
        service: WorkflowService,
        name: str,
        req_type: Type[WorkflowServiceRequest],
        resp_type: Type[WorkflowServiceResponse],
    ) -> None:
        """Initialize the call.

        Args:
            service: Service this call is bound to.
            name: Snake-case RPC name.
            req_type: Request protobuf class.
            resp_type: Response protobuf class.
        """
        self.service = service
        self.name = name
        # FIX: req_type was accepted but silently discarded; keep it so
        # callers/introspection can see the expected request class.
        self.req_type = req_type
        self.resp_type = resp_type

    async def __call__(
        self, req: WorkflowServiceRequest, *, retry: bool = False
    ) -> WorkflowServiceResponse:
        """Invoke the RPC, optionally with the service's retry behavior."""
        return await self.service._rpc_call(self.name, req, self.resp_type, retry=retry)


class BridgeWorkflowService(WorkflowService):
    """Workflow service implementation backed by the Rust bridge client."""

    @staticmethod
    async def connect(target_url: str) -> "BridgeWorkflowService":
        """Connect the underlying bridge client and wrap it."""
        return BridgeWorkflowService(
            await temporalio.bridge.client.Client.connect(
                temporalio.bridge.client.ClientOptions(target_url=target_url)
            )
        )

    # Underlying Rust bridge client used for all RPCs
    _bridge_client: temporalio.bridge.client.Client

    def __init__(self, bridge_client: temporalio.bridge.client.Client) -> None:
        """Initialize with an already-connected bridge client."""
        super().__init__()
        self._bridge_client = bridge_client

    async def _rpc_call(
        self,
        rpc: str,
        req: google.protobuf.message.Message,
        resp_type: Type[WorkflowServiceResponse],
        *,
        retry: bool = False,
    ) -> WorkflowServiceResponse:
        """Delegate the RPC to the bridge client. See base class."""
        return await self._bridge_client.rpc_call(rpc, req, resp_type, retry=retry)
# --- tests/test_client.py (new file) ---

import uuid

import temporalio.client


async def test_client_simple():
    """Smoke test: connect and start a workflow against a local server."""
    client = await temporalio.client.Client.connect("http://localhost:7233")
    # BUG FIX: the f-strings used `{uuid.uuid4}` (the function object, giving
    # a constant "<function uuid4 at 0x...>" repr) instead of `{uuid.uuid4()}`
    # (a fresh random UUID per run).
    handle = await client.start_workflow(
        "my-workflow",
        "arg1",
        id=f"my-workflow-id-{uuid.uuid4()}",
        task_queue=f"my-workflow-id-{uuid.uuid4()}",
    )
    assert handle.run_id
    print(f"Workflow created with run ID: {handle.run_id}")


# --- tests/test_converter.py (new file) ---

from dataclasses import dataclass

import pytest

import temporalio.api.common.v1
import temporalio.converter


async def test_default():
    """Round-trip the default composite converter over all supported types."""

    async def assert_payload(
        input,
        expected_encoding,
        expected_data,
        *,
        expected_decoded_input=None,
        type_hint=None
    ):
        # Encode a single value and check its payload's encoding and data,
        # then decode it back and check equality with the original (or with
        # expected_decoded_input when decoding is lossy, e.g. data classes
        # without a type hint).
        payloads = await temporalio.converter.default().encode([input])
        assert len(payloads) == 1
        if isinstance(expected_encoding, str):
            expected_encoding = expected_encoding.encode()
        assert payloads[0].metadata["encoding"] == expected_encoding
        if isinstance(expected_data, str):
            expected_data = expected_data.encode()
        assert payloads[0].data == expected_data
        actual_inputs = await temporalio.converter.default().decode(
            payloads, [type_hint]
        )
        assert len(actual_inputs) == 1
        if expected_decoded_input is None:
            expected_decoded_input = input
        assert actual_inputs[0] == expected_decoded_input
        return payloads[0]

    # Basic types
    await assert_payload(None, "binary/null", "")
    await assert_payload(b"some binary", "binary/plain", "some binary")
    payload = await assert_payload(
        temporalio.api.common.v1.WorkflowExecution(workflow_id="id1", run_id="id2"),
        "json/protobuf",
        '{"runId":"id2","workflowId":"id1"}',
    )
    assert (
        payload.metadata["messageType"] == b"temporal.api.common.v1.WorkflowExecution"
    )
    await assert_payload(
        {"foo": "bar", "baz": "qux"}, "json/plain", '{"baz":"qux","foo":"bar"}'
    )
    await assert_payload("somestr", "json/plain", '"somestr"')
    await assert_payload(1234, "json/plain", "1234")
    await assert_payload(12.34, "json/plain", "12.34")
    await assert_payload(True, "json/plain", "true")
    await assert_payload(False, "json/plain", "false")

    # Unknown type must raise with a descriptive message
    with pytest.raises(RuntimeError) as excinfo:

        class NonSerializableClass:
            pass

        await assert_payload(NonSerializableClass(), None, None)
    assert "has no known converter" in str(excinfo.value)

    @dataclass
    class MyDataClass:
        foo: str
        bar: int

    # Data class without type hint decodes to a plain dict
    await assert_payload(
        MyDataClass(foo="somestr", bar=123),
        "json/plain",
        '{"bar":123,"foo":"somestr"}',
        expected_decoded_input={"foo": "somestr", "bar": 123},
    )

    # Data class with type hint reconstructs the class
    await assert_payload(
        MyDataClass(foo="somestr", bar=123),
        "json/plain",
        '{"bar":123,"foo":"somestr"}',
        type_hint=MyDataClass,
    )


async def test_binary_proto():
    """Exercise the binary proto converter directly.

    Tested separately because the default composite never selects it: JSON
    proto appears earlier in the converter list and takes precedence.
    """
    conv = temporalio.converter.BinaryProtoPayloadConverter()
    proto = temporalio.api.common.v1.WorkflowExecution(workflow_id="id1", run_id="id2")
    payload = await conv.encode(proto)
    assert payload.metadata["encoding"] == b"binary/protobuf"
    assert (
        payload.metadata["messageType"] == b"temporal.api.common.v1.WorkflowExecution"
    )
    assert payload.data == proto.SerializeToString()
    decoded = await conv.decode(payload)
    assert decoded == proto