diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml
new file mode 100644
index 00000000..f2e1fc6f
--- /dev/null
+++ b/.github/workflows/publish-pypi.yml
@@ -0,0 +1,25 @@
+# workflow for re-running publishing to PyPI in case it fails for some reason
+# you can run this workflow by navigating to https://www.github.com/Finch-API/finch-api-python/actions/workflows/publish-pypi.yml
+name: Publish PyPI
+on:
+ workflow_dispatch:
+
+jobs:
+ publish:
+ name: publish
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: Set up Python
+ uses: actions/setup-python@v4
+ with:
+ python-version: '3.7'
+
+ - name: Publish to PyPI
+ run: |
+ pipx install poetry
+ bash ./bin/publish-pypi
+ env:
+ PYPI_TOKEN: ${{ secrets.FINCH_PYPI_TOKEN }}
diff --git a/.github/workflows/release-doctor.yml b/.github/workflows/release-doctor.yml
new file mode 100644
index 00000000..eaae9f1c
--- /dev/null
+++ b/.github/workflows/release-doctor.yml
@@ -0,0 +1,20 @@
+name: Release Doctor
+on:
+ pull_request:
+ workflow_dispatch:
+
+jobs:
+ release_doctor:
+ name: release doctor
+ runs-on: ubuntu-latest
+ if: github.event_name == 'workflow_dispatch' || startsWith(github.head_ref, 'release-please') || github.head_ref == 'next'
+
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: Check release environment
+ run: |
+ bash ./bin/check-release-environment
+ env:
+ STAINLESS_API_KEY: ${{ secrets.STAINLESS_API_KEY }}
+ PYPI_TOKEN: ${{ secrets.FINCH_PYPI_TOKEN }}
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 00000000..c20d8c99
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,34 @@
+name: Release
+on:
+ push:
+ branches:
+ - main
+
+jobs:
+ release:
+ name: release
+ if: github.ref == 'refs/heads/main' && github.repository == 'Finch-API/finch-api-python'
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v3
+
+ - uses: stainless-api/trigger-release-please@v1
+ id: release
+ with:
+ repo: ${{ github.event.repository.full_name }}
+ stainless-api-key: ${{ secrets.STAINLESS_API_KEY }}
+
+ - name: Set up Python
+ if: ${{ steps.release.outputs.releases_created }}
+ uses: actions/setup-python@v4
+ with:
+ python-version: '3.7'
+
+ - name: Publish to PyPI
+ if: ${{ steps.release.outputs.releases_created }}
+ run: |
+ pipx install poetry
+ bash ./bin/publish-pypi
+ env:
+ PYPI_TOKEN: ${{ secrets.FINCH_PYPI_TOKEN }}
diff --git a/.gitignore b/.gitignore
index 1521c8b7..3e9b0e57 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,9 @@
+.vscode
+_dev
+
+__pycache__
+.mypy_cache
+
dist
+
+.env
diff --git a/.release-please-manifest.json b/.release-please-manifest.json
new file mode 100644
index 00000000..1332969b
--- /dev/null
+++ b/.release-please-manifest.json
@@ -0,0 +1,3 @@
+{
+ ".": "0.0.1"
+}
\ No newline at end of file
diff --git a/.stats.yml b/.stats.yml
new file mode 100644
index 00000000..d8ad62fa
--- /dev/null
+++ b/.stats.yml
@@ -0,0 +1 @@
+configured_endpoints: 27
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 00000000..6e22ceda
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2023 Finch
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/README.md b/README.md
index 264341ad..f7c97805 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,271 @@
# Finch Python API Library
-Placeholder package for the Finch SDK.
+[![PyPI version](https://img.shields.io/pypi/v/finch-api.svg)](https://pypi.org/project/finch-api/)
+
+The Finch Python library provides convenient access to the Finch REST API from any Python 3.7+
+application. It includes type definitions for all request params and response fields,
+and offers both synchronous and asynchronous clients powered by [httpx](https://github.com/encode/httpx).
## Documentation
The API documentation can be found [here](https://developer.tryfinch.com/).
+
+## Installation
+
+```sh
+pip install finch-api
+```
+
+## Usage
+
+```python
+from finch import Finch
+
+finch = Finch(
+ access_token="my access token",
+)
+
+page = finch.hris.directory.list_individuals()
+directory = page.individuals[0]
+print(directory.ein)
+```
+
+## Async Usage
+
+Simply import `AsyncFinch` instead of `Finch` and use `await` with each API call:
+
+```python
+import asyncio
+
+from finch import AsyncFinch
+
+finch = AsyncFinch(
+ access_token="my access token",
+)
+
+
+async def main():
+ page = await finch.hris.directory.list_individuals()
+ print(page.individuals[0].ein)
+
+
+asyncio.run(main())
+```
+
+Functionality between the synchronous and asynchronous clients is otherwise identical.
+
+## Using Types
+
+Nested request parameters are [TypedDicts](https://docs.python.org/3/library/typing.html#typing.TypedDict), while responses are [Pydantic](https://pydantic-docs.helpmanual.io/) models. This helps provide autocomplete and documentation within your editor.
+
+If you would like to see type errors in VS Code to help catch bugs earlier, set `python.analysis.typeCheckingMode` to `"basic"`.
+
+## Pagination
+
+List methods in the Finch API are paginated.
+
+This library provides auto-paginating iterators with each list response, so you do not have to request successive pages manually:
+
+```python
+from finch import Finch
+
+finch = Finch()
+
+all_jobs = []
+# Automatically fetches more pages as needed.
+for job in finch.ats.jobs.list():
+ # Do something with job here
+ all_jobs.append(job)
+print(all_jobs)
+```
+
+Or, asynchronously:
+
+```python
+import asyncio
+from finch import AsyncFinch
+
+finch = AsyncFinch()
+
+
+async def main() -> None:
+ all_jobs = []
+ # Iterate through items across all pages, issuing requests as needed.
+ async for job in finch.ats.jobs.list():
+ all_jobs.append(job)
+ print(all_jobs)
+
+
+asyncio.run(main())
+```
+
+Alternatively, you can use the `.has_next_page()`, `.next_page_info()`, or `.get_next_page()` methods for more granular control working with pages:
+
+```python
+first_page = await finch.ats.jobs.list()
+if first_page.has_next_page():
+ print(f"will fetch next page using these details: {first_page.next_page_info()}")
+ next_page = await first_page.get_next_page()
+ print(f"number of items we just fetched: {len(next_page.jobs)}")
+
+# Remove `await` for non-async usage.
+```
+
+Or just work directly with the returned data:
+
+```python
+first_page = await finch.ats.jobs.list()
+
+print(
+ f"the current start offset for this page: {first_page.paging.offset}"
+) # => "the current start offset for this page: 1"
+for job in first_page.jobs:
+ print(job.id)
+
+# Remove `await` for non-async usage.
+```
+
+## Nested params
+
+Nested parameters are dictionaries, typed using `TypedDict`, for example:
+
+```python
+from finch import Finch
+
+finch = Finch()
+
+finch.hris.directory.list_individuals(
+ path_params=[],
+ params={},
+)
+```
+
+## Handling errors
+
+When the library is unable to connect to the API (e.g., due to network connection problems or a timeout), a subclass of `finch.APIConnectionError` is raised.
+
+When the API returns a non-success status code (i.e., 4xx or 5xx
+response), a subclass of `finch.APIStatusError` will be raised, containing `status_code` and `response` properties.
+
+All errors inherit from `finch.APIError`.
+
+```python
+from finch import Finch
+
+finch = Finch()
+
+try:
+ finch.hris.directory.list_individuals()
+except finch.APIConnectionError as e:
+ print("The server could not be reached")
+ print(e.__cause__) # an underlying Exception, likely raised within httpx.
+except finch.RateLimitError as e:
+ print("A 429 status code was received; we should back off a bit.")
+except finch.APIStatusError as e:
+ print("Another non-200-range status code was received")
+ print(e.status_code)
+ print(e.response)
+```
+
+Error codes are as follows:
+
+| Status Code | Error Type |
+| ----------- | -------------------------- |
+| 400 | `BadRequestError` |
+| 401 | `AuthenticationError` |
+| 403 | `PermissionDeniedError` |
+| 404 | `NotFoundError` |
+| 422 | `UnprocessableEntityError` |
+| 429 | `RateLimitError` |
+| >=500 | `InternalServerError` |
+| N/A | `APIConnectionError` |
+
+### Retries
+
+Certain errors will be automatically retried 2 times by default, with a short exponential backoff.
+Connection errors (for example, due to a network connectivity problem), 409 Conflict, 429 Rate Limit,
+and >=500 Internal errors will all be retried by default.
+
+You can use the `max_retries` option to configure or disable this:
+
+```python
+from finch import Finch
+
+# Configure the default for all requests:
+finch = Finch(
+ # default is 2
+ max_retries=0,
+)
+
+# Or, configure per-request:
+finch.with_options(max_retries=5).hris.directory.list_individuals()
+```
+
+### Timeouts
+
+Requests time out after 60 seconds by default. You can configure this with a `timeout` option,
+which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/#fine-tuning-the-configuration):
+
+```python
+import httpx
+
+from finch import Finch
+
+# Configure the default for all requests:
+finch = Finch(
+ # default is 60s
+ timeout=20.0,
+)
+
+# More granular control:
+finch = Finch(
+ timeout=httpx.Timeout(60.0, read=5.0, write=10.0, connect=2.0),
+)
+
+# Override per-request:
+finch.with_options(timeout=5.0).hris.directory.list_individuals()
+```
+
+On timeout, an `APITimeoutError` is thrown.
+
+Note that requests which time out will be [retried twice by default](#retries).
+
+## Default Headers
+
+We automatically send the `Finch-API-Version` header set to `2020-09-17`.
+
+If you need to, you can override it by setting default headers per-request or on the client object.
+
+```python
+from finch import Finch
+
+finch = Finch(
+    default_headers={"Finch-API-Version": "My-Custom-Value"},
+)
+```
+
+## Advanced: Configuring custom URLs, proxies, and transports
+
+You can configure the following keyword arguments when instantiating the client:
+
+```python
+import httpx
+from finch import Finch
+
+finch = Finch(
+ # Use a custom base URL
+ base_url="http://my.test.server.example.com:8083",
+ proxies="http://my.test.proxy.example.com",
+ transport=httpx.HTTPTransport(local_address="0.0.0.0"),
+)
+```
+
+See the httpx documentation for information about the [`proxies`](https://www.python-httpx.org/advanced/#http-proxying) and [`transport`](https://www.python-httpx.org/advanced/#custom-transports) keyword arguments.
+
+## Status
+
+This package is in beta. Its internals and interfaces are not stable and subject to change without a major semver bump;
+please reach out if you rely on any undocumented behavior.
+
+We are keen for your feedback; please email us at [founders@tryfinch.com](mailto:founders@tryfinch.com) or open an issue with questions,
+bugs, or suggestions.
+
+## Requirements
+
+Python 3.7 or higher.
\ No newline at end of file
diff --git a/api.md b/api.md
new file mode 100644
index 00000000..25f4d089
--- /dev/null
+++ b/api.md
@@ -0,0 +1,225 @@
+# Top Level
+
+Custom Methods:
+
+- `get_access_token`
+- `get_auth_url`
+
+# ATS
+
+## Candidates
+
+Types:
+
+```python
+from finch.types.ats import Candidate
+```
+
+Methods:
+
+- client.ats.candidates.retrieve(candidate_id) -> Candidate
+- client.ats.candidates.list(\*\*params) -> SyncCandidatesPage[Candidate]
+
+## Applications
+
+Types:
+
+```python
+from finch.types.ats import Application
+```
+
+Methods:
+
+- client.ats.applications.retrieve(application_id) -> Application
+- client.ats.applications.list(\*\*params) -> SyncApplicationsPage[Application]
+
+## Stages
+
+Types:
+
+```python
+from finch.types.ats import Stage
+```
+
+Methods:
+
+- client.ats.stages.list() -> SyncSinglePage[Stage]
+
+## Jobs
+
+Types:
+
+```python
+from finch.types.ats import Job
+```
+
+Methods:
+
+- client.ats.jobs.retrieve(job_id) -> Job
+- client.ats.jobs.list(\*\*params) -> SyncJobsPage[Job]
+
+## Offers
+
+Types:
+
+```python
+from finch.types.ats import Offer
+```
+
+Methods:
+
+- client.ats.offers.retrieve(offer_id) -> Offer
+- client.ats.offers.list(\*\*params) -> SyncOffersPage[Offer]
+
+# HRIS
+
+Types:
+
+```python
+from finch.types import Income, Location, Money, Paging
+```
+
+## CompanyResource
+
+Types:
+
+```python
+from finch.types.hris import Company
+```
+
+Methods:
+
+- client.hris.company.retrieve() -> Company
+
+## Payments
+
+Types:
+
+```python
+from finch.types.hris import Payment
+```
+
+Methods:
+
+- client.hris.payments.list(\*\*params) -> SyncSinglePage[Payment]
+
+## PayStatements
+
+Types:
+
+```python
+from finch.types.hris import (
+ PayStatement,
+ PayStatementResponse,
+ PayStatementResponseBody,
+)
+```
+
+Methods:
+
+- client.hris.pay_statements.retrieve_many(\*\*params) -> SyncResponsesPage[PayStatementResponse]
+
+## Directory
+
+Types:
+
+```python
+from finch.types.hris import IndividualInDirectory
+```
+
+Methods:
+
+- client.hris.directory.list_individuals(\*\*params) -> SyncIndividualsPage[IndividualInDirectory]
+
+## Individuals
+
+Types:
+
+```python
+from finch.types.hris import Individual, IndividualResponse
+```
+
+Methods:
+
+- client.hris.individuals.retrieve_many(\*\*params) -> SyncResponsesPage[IndividualResponse]
+
+### EmploymentData
+
+Types:
+
+```python
+from finch.types.hris.individuals import EmploymentData, EmploymentDataResponse
+```
+
+Methods:
+
+- client.hris.individuals.employment_data.retrieve_many(\*\*params) -> SyncResponsesPage[EmploymentDataResponse]
+
+## Benefits
+
+Types:
+
+```python
+from finch.types.hris import (
+ BenefitFrequency,
+ BenefitType,
+ BenfitContribution,
+ CompanyBenefit,
+ CreateCompanyBenefitsResponse,
+ SupportedBenefit,
+ UpdateCompanyBenefitResponse,
+)
+```
+
+Methods:
+
+- client.hris.benefits.create(\*\*params) -> CreateCompanyBenefitsResponse
+- client.hris.benefits.retrieve(benefit_id) -> CompanyBenefit
+- client.hris.benefits.update(benefit_id, \*\*params) -> UpdateCompanyBenefitResponse
+- client.hris.benefits.list() -> SyncSinglePage[CompanyBenefit]
+- client.hris.benefits.list_supported_benefits() -> SyncSinglePage[SupportedBenefit]
+
+### Individuals
+
+Types:
+
+```python
+from finch.types.hris.benefits import (
+ EnrolledIndividual,
+ IndividualBenefit,
+ UnenrolledIndividual,
+ IndividualEnrolledIDsResponse,
+)
+```
+
+Methods:
+
+- client.hris.benefits.individuals.enroll_many(benefit_id, \*\*params) -> SyncSinglePage[EnrolledIndividual]
+- client.hris.benefits.individuals.enrolled_ids(benefit_id) -> IndividualEnrolledIDsResponse
+- client.hris.benefits.individuals.retrieve_many_benefits(benefit_id, \*\*params) -> SyncSinglePage[IndividualBenefit]
+- client.hris.benefits.individuals.unenroll(benefit_id, \*\*params) -> SyncSinglePage[UnenrolledIndividual]
+
+# Providers
+
+Types:
+
+```python
+from finch.types import Provider
+```
+
+Methods:
+
+- client.providers.list() -> SyncSinglePage[Provider]
+
+# Account
+
+Types:
+
+```python
+from finch.types import DisconnectResponse, Introspection
+```
+
+Methods:
+
+- client.account.disconnect() -> DisconnectResponse
+- client.account.introspect() -> Introspection
\ No newline at end of file
diff --git a/bin/blacken-docs.py b/bin/blacken-docs.py
new file mode 100644
index 00000000..45d0ad12
--- /dev/null
+++ b/bin/blacken-docs.py
@@ -0,0 +1,251 @@
+# fork of https://github.com/asottile/blacken-docs implementing https://github.com/asottile/blacken-docs/issues/170
+from __future__ import annotations
+
+import re
+import argparse
+import textwrap
+import contextlib
+from typing import Match, Optional, Sequence, Generator, NamedTuple, cast
+
+import black
+from black.mode import TargetVersion
+from black.const import DEFAULT_LINE_LENGTH
+
+MD_RE = re.compile(
+    r"(?P<before>^(?P<indent> *)```\s*python\n)" r"(?P<code>.*?)" r"(?P<after>^(?P=indent)```\s*$)",
+    re.DOTALL | re.MULTILINE,
+)
+MD_PYCON_RE = re.compile(
+    r"(?P<before>^(?P<indent> *)```\s*pycon\n)" r"(?P<code>.*?)" r"(?P<after>^(?P=indent)```.*$)",
+    re.DOTALL | re.MULTILINE,
+)
+RST_PY_LANGS = frozenset(("python", "py", "sage", "python3", "py3", "numpy"))
+BLOCK_TYPES = "(code|code-block|sourcecode|ipython)"
+DOCTEST_TYPES = "(testsetup|testcleanup|testcode)"
+RST_RE = re.compile(
+    rf"(?P<before>"
+    rf"^(?P<indent> *)\.\. ("
+    rf"jupyter-execute::|"
+    rf"{BLOCK_TYPES}:: (?P<lang>\w+)|"
+    rf"{DOCTEST_TYPES}::.*"
+    rf")\n"
+    rf"((?P=indent) +:.*\n)*"
+    rf"\n*"
+    rf")"
+    rf"(?P<code>(^((?P=indent) +.*)?\n)+)",
+    re.MULTILINE,
+)
+RST_PYCON_RE = re.compile(
+    r"(?P<before>"
+    r"(?P<indent> *)\.\. ((code|code-block):: pycon|doctest::.*)\n"
+    r"((?P=indent) +:.*\n)*"
+    r"\n*"
+    r")"
+    r"(?P<code>(^((?P=indent) +.*)?(\n|$))+)",
+    re.MULTILINE,
+)
+PYCON_PREFIX = ">>> "
+PYCON_CONTINUATION_PREFIX = "..."
+PYCON_CONTINUATION_RE = re.compile(
+ rf"^{re.escape(PYCON_CONTINUATION_PREFIX)}( |$)",
+)
+LATEX_RE = re.compile(
+    r"(?P<before>^(?P<indent> *)\\begin{minted}{python}\n)"
+    r"(?P<code>.*?)"
+    r"(?P<after>^(?P=indent)\\end{minted}\s*$)",
+    re.DOTALL | re.MULTILINE,
+)
+LATEX_PYCON_RE = re.compile(
+    r"(?P<before>^(?P<indent> *)\\begin{minted}{pycon}\n)" r"(?P<code>.*?)" r"(?P<after>^(?P=indent)\\end{minted}\s*$)",
+    re.DOTALL | re.MULTILINE,
+)
+PYTHONTEX_LANG = r"(?P<lang>pyblock|pycode|pyconsole|pyverbatim)"
+PYTHONTEX_RE = re.compile(
+    rf"(?P<before>^(?P<indent> *)\\begin{{{PYTHONTEX_LANG}}}\n)"
+    rf"(?P<code>.*?)"
+    rf"(?P<after>^(?P=indent)\\end{{(?P=lang)}}\s*$)",
+    re.DOTALL | re.MULTILINE,
+)
+INDENT_RE = re.compile("^ +(?=[^ ])", re.MULTILINE)
+TRAILING_NL_RE = re.compile(r"\n+\Z", re.MULTILINE)
+
+
+class CodeBlockError(NamedTuple):
+ offset: int
+ exc: Exception
+
+
+def format_str(
+ src: str,
+ black_mode: black.FileMode,
+) -> tuple[str, Sequence[CodeBlockError]]:
+ errors: list[CodeBlockError] = []
+
+ @contextlib.contextmanager
+ def _collect_error(match: Match[str]) -> Generator[None, None, None]:
+ try:
+ yield
+ except Exception as e:
+ errors.append(CodeBlockError(match.start(), e))
+
+ def _md_match(match: Match[str]) -> str:
+ code = textwrap.dedent(match["code"])
+ with _collect_error(match):
+ code = black.format_str(code, mode=black_mode)
+ code = textwrap.indent(code, match["indent"])
+ return f'{match["before"]}{code}{match["after"]}'
+
+ def _rst_match(match: Match[str]) -> str:
+ lang = match["lang"]
+ if lang is not None and lang not in RST_PY_LANGS:
+ return match[0]
+ min_indent = min(INDENT_RE.findall(match["code"]))
+ trailing_ws_match = TRAILING_NL_RE.search(match["code"])
+ assert trailing_ws_match
+ trailing_ws = trailing_ws_match.group()
+ code = textwrap.dedent(match["code"])
+ with _collect_error(match):
+ code = black.format_str(code, mode=black_mode)
+ code = textwrap.indent(code, min_indent)
+ return f'{match["before"]}{code.rstrip()}{trailing_ws}'
+
+ def _pycon_match(match: Match[str]) -> str:
+ code = ""
+ fragment = cast(Optional[str], None)
+
+ def finish_fragment() -> None:
+ nonlocal code
+ nonlocal fragment
+
+ if fragment is not None:
+ with _collect_error(match):
+ fragment = black.format_str(fragment, mode=black_mode)
+ fragment_lines = fragment.splitlines()
+ code += f"{PYCON_PREFIX}{fragment_lines[0]}\n"
+ for line in fragment_lines[1:]:
+ # Skip blank lines to handle Black adding a blank above
+ # functions within blocks. A blank line would end the REPL
+ # continuation prompt.
+ #
+ # >>> if True:
+ # ... def f():
+ # ... pass
+ # ...
+ if line:
+ code += f"{PYCON_CONTINUATION_PREFIX} {line}\n"
+ if fragment_lines[-1].startswith(" "):
+ code += f"{PYCON_CONTINUATION_PREFIX}\n"
+ fragment = None
+
+ indentation = None
+ for line in match["code"].splitlines():
+ orig_line, line = line, line.lstrip()
+ if indentation is None and line:
+ indentation = len(orig_line) - len(line)
+ continuation_match = PYCON_CONTINUATION_RE.match(line)
+ if continuation_match and fragment is not None:
+ fragment += line[continuation_match.end() :] + "\n"
+ else:
+ finish_fragment()
+ if line.startswith(PYCON_PREFIX):
+ fragment = line[len(PYCON_PREFIX) :] + "\n"
+ else:
+ code += orig_line[indentation:] + "\n"
+ finish_fragment()
+ return code
+
+ def _md_pycon_match(match: Match[str]) -> str:
+ code = _pycon_match(match)
+ code = textwrap.indent(code, match["indent"])
+ return f'{match["before"]}{code}{match["after"]}'
+
+ def _rst_pycon_match(match: Match[str]) -> str:
+ code = _pycon_match(match)
+ min_indent = min(INDENT_RE.findall(match["code"]))
+ code = textwrap.indent(code, min_indent)
+ return f'{match["before"]}{code}'
+
+ def _latex_match(match: Match[str]) -> str:
+ code = textwrap.dedent(match["code"])
+ with _collect_error(match):
+ code = black.format_str(code, mode=black_mode)
+ code = textwrap.indent(code, match["indent"])
+ return f'{match["before"]}{code}{match["after"]}'
+
+ def _latex_pycon_match(match: Match[str]) -> str:
+ code = _pycon_match(match)
+ code = textwrap.indent(code, match["indent"])
+ return f'{match["before"]}{code}{match["after"]}'
+
+ src = MD_RE.sub(_md_match, src)
+ src = MD_PYCON_RE.sub(_md_pycon_match, src)
+ src = RST_RE.sub(_rst_match, src)
+ src = RST_PYCON_RE.sub(_rst_pycon_match, src)
+ src = LATEX_RE.sub(_latex_match, src)
+ src = LATEX_PYCON_RE.sub(_latex_pycon_match, src)
+ src = PYTHONTEX_RE.sub(_latex_match, src)
+ return src, errors
+
+
+def format_file(
+ filename: str,
+ black_mode: black.FileMode,
+ skip_errors: bool,
+) -> int:
+ with open(filename, encoding="UTF-8") as f:
+ contents = f.read()
+ new_contents, errors = format_str(contents, black_mode)
+ for error in errors:
+ lineno = contents[: error.offset].count("\n") + 1
+        print(f"{filename}:{lineno}: code block parse error {error.exc}")
+ if errors and not skip_errors:
+ return 1
+ if contents != new_contents:
+        print(f"{filename}: Rewriting...")
+ with open(filename, "w", encoding="UTF-8") as f:
+ f.write(new_contents)
+ return 0
+ else:
+ return 0
+
+
+def main(argv: Sequence[str] | None = None) -> int:
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ "-l",
+ "--line-length",
+ type=int,
+ default=DEFAULT_LINE_LENGTH,
+ )
+ parser.add_argument(
+ "-t",
+ "--target-version",
+ action="append",
+ type=lambda v: TargetVersion[v.upper()],
+ default=[],
+ help=f"choices: {[v.name.lower() for v in TargetVersion]}",
+ dest="target_versions",
+ )
+ parser.add_argument(
+ "-S",
+ "--skip-string-normalization",
+ action="store_true",
+ )
+ parser.add_argument("-E", "--skip-errors", action="store_true")
+ parser.add_argument("filenames", nargs="*")
+ args = parser.parse_args(argv)
+
+ black_mode = black.FileMode(
+ target_versions=set(args.target_versions),
+ line_length=args.line_length,
+ string_normalization=not args.skip_string_normalization,
+ )
+
+ retv = 0
+ for filename in args.filenames:
+ retv |= format_file(filename, black_mode, skip_errors=args.skip_errors)
+ return retv
+
+
+if __name__ == "__main__":
+ raise SystemExit(main())
diff --git a/bin/check-release-environment b/bin/check-release-environment
new file mode 100644
index 00000000..201ba680
--- /dev/null
+++ b/bin/check-release-environment
@@ -0,0 +1,25 @@
+#!/usr/bin/env bash
+
+errors=()
+
+if [ -z "${STAINLESS_API_KEY}" ]; then
+ errors+=("The STAINLESS_API_KEY secret has not been set. Please contact Stainless for an API key & set it in your organisation secrets on GitHub.")
+fi
+
+if [ -z "${PYPI_TOKEN}" ]; then
+ errors+=("The FINCH_PYPI_TOKEN secret has not been set. Please set it in either this repository's secrets or your organisation secrets.")
+fi
+
+len=${#errors[@]}
+
+if [[ $len -gt 0 ]]; then
+ echo -e "Found the following errors in the release environment:\n"
+
+ for error in "${errors[@]}"; do
+ echo -e "- $error\n"
+ done
+
+ exit 1
+fi
+
+echo "The environment is ready to push releases!"
diff --git a/bin/check-test-server b/bin/check-test-server
new file mode 100755
index 00000000..34efa9da
--- /dev/null
+++ b/bin/check-test-server
@@ -0,0 +1,50 @@
+#!/usr/bin/env bash
+
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[0;33m'
+NC='\033[0m' # No Color
+
+function prism_is_running() {
+ curl --silent "http://localhost:4010" >/dev/null 2>&1
+}
+
+function is_overriding_api_base_url() {
+ [ -n "$API_BASE_URL" ]
+}
+
+if is_overriding_api_base_url ; then
+ # If someone is running the tests against the live API, we can trust they know
+ # what they're doing and exit early.
+ echo -e "${GREEN}✔ Running tests against ${API_BASE_URL}${NC}"
+
+ exit 0
+elif prism_is_running ; then
+ echo -e "${GREEN}✔ Mock prism server is running with your OpenAPI spec${NC}"
+ echo
+
+ exit 0
+else
+ echo -e "${RED}ERROR:${NC} The test suite will not run without a mock Prism server"
+ echo -e "running against your OpenAPI spec."
+ echo
+ echo -e "${YELLOW}To fix:${NC}"
+ echo
+ echo -e "1. Install Prism (requires Node 16+):"
+ echo
+ echo -e " With npm:"
+ echo -e " \$ ${YELLOW}npm install -g @stoplight/prism-cli${NC}"
+ echo
+ echo -e " With yarn:"
+ echo -e " \$ ${YELLOW}yarn global add @stoplight/prism-cli${NC}"
+ echo
+ echo -e "2. Run the mock server"
+ echo
+ echo -e " To run the server, pass in the path of your OpenAPI"
+ echo -e " spec to the prism command:"
+ echo
+ echo -e " \$ ${YELLOW}prism mock path/to/your.openapi.yml${NC}"
+ echo
+
+ exit 1
+fi
diff --git a/bin/publish-pypi b/bin/publish-pypi
new file mode 100644
index 00000000..ea5ab2ca
--- /dev/null
+++ b/bin/publish-pypi
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+set -eux
+poetry config pypi-token.pypi "$PYPI_TOKEN"
+poetry publish --build
diff --git a/bin/test b/bin/test
new file mode 100755
index 00000000..ac284456
--- /dev/null
+++ b/bin/test
@@ -0,0 +1,3 @@
+#!/usr/bin/env bash
+
+bin/check-test-server && poetry run pytest "$@"
diff --git a/mypy.ini b/mypy.ini
new file mode 100644
index 00000000..8ea2d5af
--- /dev/null
+++ b/mypy.ini
@@ -0,0 +1,43 @@
+[mypy]
+pretty = True
+show_error_codes = True
+exclude = _dev
+
+strict_equality = True
+implicit_reexport = True
+check_untyped_defs = True
+no_implicit_optional = True
+
+warn_return_any = True
+warn_unreachable = True
+warn_unused_configs = True
+
+# Turn these options off as it could cause conflicts
+# with the Pyright options.
+warn_unused_ignores = False
+warn_redundant_casts = False
+
+disallow_any_generics = True
+disallow_untyped_defs = True
+disallow_untyped_calls = True
+disallow_subclassing_any = True
+disallow_incomplete_defs = True
+disallow_untyped_decorators = True
+cache_fine_grained = True
+
+# By default, mypy reports an error if you assign a value to the result
+# of a function call that doesn't return anything. We do this in our test
+# cases:
+# ```
+# result = ...
+# assert result is None
+# ```
+# Changing this codegen to make mypy happy would increase complexity
+# and would not be worth it.
+disable_error_code = func-returns-value
+
+# https://github.com/python/mypy/issues/12162
+# Per-module overrides in mypy.ini use the [mypy-<pattern>] section form.
+[mypy-black.files.*]
+ignore_errors = true
+ignore_missing_imports = true
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 00000000..5df5bfbc
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,814 @@
+# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand.
+
+[[package]]
+name = "anyio"
+version = "3.6.2"
+description = "High level compatibility layer for multiple asynchronous event loop implementations"
+category = "main"
+optional = false
+python-versions = ">=3.6.2"
+files = [
+ {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"},
+ {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"},
+]
+
+[package.dependencies]
+idna = ">=2.8"
+sniffio = ">=1.1"
+typing-extensions = {version = "*", markers = "python_version < \"3.8\""}
+
+[package.extras]
+doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
+test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"]
+trio = ["trio (>=0.16,<0.22)"]
+
+[[package]]
+name = "atomicwrites"
+version = "1.4.1"
+description = "Atomic file writes."
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"},
+]
+
+[[package]]
+name = "attrs"
+version = "22.1.0"
+description = "Classes Without Boilerplate"
+category = "dev"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"},
+ {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"},
+]
+
+[package.extras]
+dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"]
+docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
+tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"]
+tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
+
+[[package]]
+name = "black"
+version = "22.10.0"
+description = "The uncompromising code formatter."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"},
+ {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"},
+ {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"},
+ {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"},
+ {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"},
+ {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"},
+ {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"},
+ {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"},
+ {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"},
+ {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"},
+ {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"},
+ {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"},
+ {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"},
+ {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"},
+ {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"},
+ {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"},
+ {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"},
+ {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"},
+ {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"},
+ {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"},
+ {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"},
+]
+
+[package.dependencies]
+click = ">=8.0.0"
+mypy-extensions = ">=0.4.3"
+pathspec = ">=0.9.0"
+platformdirs = ">=2"
+tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""}
+typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""}
+typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
+
+[package.extras]
+colorama = ["colorama (>=0.4.3)"]
+d = ["aiohttp (>=3.7.4)"]
+jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
+uvloop = ["uvloop (>=0.15.2)"]
+
+[[package]]
+name = "certifi"
+version = "2022.9.24"
+description = "Python package for providing Mozilla's CA Bundle."
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"},
+ {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"},
+]
+
+[[package]]
+name = "click"
+version = "8.1.3"
+description = "Composable command line interface toolkit"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"},
+ {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+category = "dev"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+files = [
+ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "distro"
+version = "1.8.0"
+description = "Distro - an OS platform information API"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "distro-1.8.0-py3-none-any.whl", hash = "sha256:99522ca3e365cac527b44bde033f64c6945d90eb9f769703caaec52b09bbd3ff"},
+ {file = "distro-1.8.0.tar.gz", hash = "sha256:02e111d1dc6a50abb8eed6bf31c3e48ed8b0830d1ea2a1b78c61765c2513fdd8"},
+]
+
+[[package]]
+name = "h11"
+version = "0.12.0"
+description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "h11-0.12.0-py3-none-any.whl", hash = "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6"},
+ {file = "h11-0.12.0.tar.gz", hash = "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"},
+]
+
+[[package]]
+name = "httpcore"
+version = "0.15.0"
+description = "A minimal low-level HTTP client."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "httpcore-0.15.0-py3-none-any.whl", hash = "sha256:1105b8b73c025f23ff7c36468e4432226cbb959176eab66864b8e31c4ee27fa6"},
+ {file = "httpcore-0.15.0.tar.gz", hash = "sha256:18b68ab86a3ccf3e7dc0f43598eaddcf472b602aba29f9aa6ab85fe2ada3980b"},
+]
+
+[package.dependencies]
+anyio = ">=3.0.0,<4.0.0"
+certifi = "*"
+h11 = ">=0.11,<0.13"
+sniffio = ">=1.0.0,<2.0.0"
+
+[package.extras]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (>=1.0.0,<2.0.0)"]
+
+[[package]]
+name = "httpx"
+version = "0.23.0"
+description = "The next generation HTTP client."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "httpx-0.23.0-py3-none-any.whl", hash = "sha256:42974f577483e1e932c3cdc3cd2303e883cbfba17fe228b0f63589764d7b9c4b"},
+ {file = "httpx-0.23.0.tar.gz", hash = "sha256:f28eac771ec9eb4866d3fb4ab65abd42d38c424739e80c08d8d20570de60b0ef"},
+]
+
+[package.dependencies]
+certifi = "*"
+httpcore = ">=0.15.0,<0.16.0"
+rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]}
+sniffio = "*"
+
+[package.extras]
+brotli = ["brotli", "brotlicffi"]
+cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (>=1.0.0,<2.0.0)"]
+
+[[package]]
+name = "idna"
+version = "3.4"
+description = "Internationalized Domain Names in Applications (IDNA)"
+category = "main"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
+ {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
+]
+
+[[package]]
+name = "importlib-metadata"
+version = "5.0.0"
+description = "Read metadata from Python packages"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "importlib_metadata-5.0.0-py3-none-any.whl", hash = "sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43"},
+ {file = "importlib_metadata-5.0.0.tar.gz", hash = "sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab"},
+]
+
+[package.dependencies]
+typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
+zipp = ">=0.5"
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"]
+perf = ["ipython"]
+testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"]
+
+[[package]]
+name = "iniconfig"
+version = "1.1.1"
+description = "iniconfig: brain-dead simple config-ini parsing"
+category = "dev"
+optional = false
+python-versions = "*"
+files = [
+ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
+ {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
+]
+
+[[package]]
+name = "isort"
+version = "5.10.1"
+description = "A Python utility / library to sort Python imports."
+category = "dev"
+optional = false
+python-versions = ">=3.6.1,<4.0"
+files = [
+ {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"},
+ {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"},
+]
+
+[package.extras]
+colors = ["colorama (>=0.4.3,<0.5.0)"]
+pipfile-deprecated-finder = ["pipreqs", "requirementslib"]
+plugins = ["setuptools"]
+requirements-deprecated-finder = ["pip-api", "pipreqs"]
+
+[[package]]
+name = "mypy"
+version = "1.1.1"
+description = "Optional static typing for Python"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "mypy-1.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39c7119335be05630611ee798cc982623b9e8f0cff04a0b48dfc26100e0b97af"},
+ {file = "mypy-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61bf08362e93b6b12fad3eab68c4ea903a077b87c90ac06c11e3d7a09b56b9c1"},
+ {file = "mypy-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbb19c9f662e41e474e0cff502b7064a7edc6764f5262b6cd91d698163196799"},
+ {file = "mypy-1.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:315ac73cc1cce4771c27d426b7ea558fb4e2836f89cb0296cbe056894e3a1f78"},
+ {file = "mypy-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5cb14ff9919b7df3538590fc4d4c49a0f84392237cbf5f7a816b4161c061829e"},
+ {file = "mypy-1.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:26cdd6a22b9b40b2fd71881a8a4f34b4d7914c679f154f43385ca878a8297389"},
+ {file = "mypy-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b5f81b40d94c785f288948c16e1f2da37203c6006546c5d947aab6f90aefef2"},
+ {file = "mypy-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21b437be1c02712a605591e1ed1d858aba681757a1e55fe678a15c2244cd68a5"},
+ {file = "mypy-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d809f88734f44a0d44959d795b1e6f64b2bbe0ea4d9cc4776aa588bb4229fc1c"},
+ {file = "mypy-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:a380c041db500e1410bb5b16b3c1c35e61e773a5c3517926b81dfdab7582be54"},
+ {file = "mypy-1.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b7c7b708fe9a871a96626d61912e3f4ddd365bf7f39128362bc50cbd74a634d5"},
+ {file = "mypy-1.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1c10fa12df1232c936830839e2e935d090fc9ee315744ac33b8a32216b93707"},
+ {file = "mypy-1.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0a28a76785bf57655a8ea5eb0540a15b0e781c807b5aa798bd463779988fa1d5"},
+ {file = "mypy-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:ef6a01e563ec6a4940784c574d33f6ac1943864634517984471642908b30b6f7"},
+ {file = "mypy-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d64c28e03ce40d5303450f547e07418c64c241669ab20610f273c9e6290b4b0b"},
+ {file = "mypy-1.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64cc3afb3e9e71a79d06e3ed24bb508a6d66f782aff7e56f628bf35ba2e0ba51"},
+ {file = "mypy-1.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce61663faf7a8e5ec6f456857bfbcec2901fbdb3ad958b778403f63b9e606a1b"},
+ {file = "mypy-1.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2b0c373d071593deefbcdd87ec8db91ea13bd8f1328d44947e88beae21e8d5e9"},
+ {file = "mypy-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:2888ce4fe5aae5a673386fa232473014056967f3904f5abfcf6367b5af1f612a"},
+ {file = "mypy-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:19ba15f9627a5723e522d007fe708007bae52b93faab00f95d72f03e1afa9598"},
+ {file = "mypy-1.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:59bbd71e5c58eed2e992ce6523180e03c221dcd92b52f0e792f291d67b15a71c"},
+ {file = "mypy-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9401e33814cec6aec8c03a9548e9385e0e228fc1b8b0a37b9ea21038e64cdd8a"},
+ {file = "mypy-1.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b398d8b1f4fba0e3c6463e02f8ad3346f71956b92287af22c9b12c3ec965a9f"},
+ {file = "mypy-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:69b35d1dcb5707382810765ed34da9db47e7f95b3528334a3c999b0c90fe523f"},
+ {file = "mypy-1.1.1-py3-none-any.whl", hash = "sha256:4e4e8b362cdf99ba00c2b218036002bdcdf1e0de085cdb296a49df03fb31dfc4"},
+ {file = "mypy-1.1.1.tar.gz", hash = "sha256:ae9ceae0f5b9059f33dbc62dea087e942c0ccab4b7a003719cb70f9b8abfa32f"},
+]
+
+[package.dependencies]
+mypy-extensions = ">=1.0.0"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""}
+typing-extensions = ">=3.10"
+
+[package.extras]
+dmypy = ["psutil (>=4.0)"]
+install-types = ["pip"]
+python2 = ["typed-ast (>=1.4.0,<2)"]
+reports = ["lxml"]
+
+[[package]]
+name = "mypy-extensions"
+version = "1.0.0"
+description = "Type system extensions for programs checked with the mypy type checker."
+category = "dev"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
+ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
+]
+
+[[package]]
+name = "nodeenv"
+version = "1.7.0"
+description = "Node.js virtual environment builder"
+category = "dev"
+optional = false
+python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
+files = [
+ {file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"},
+ {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"},
+]
+
+[package.dependencies]
+setuptools = "*"
+
+[[package]]
+name = "packaging"
+version = "21.3"
+description = "Core utilities for Python packages"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
+ {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
+]
+
+[package.dependencies]
+pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
+
+[[package]]
+name = "pathspec"
+version = "0.10.1"
+description = "Utility library for gitignore style pattern matching of file paths."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pathspec-0.10.1-py3-none-any.whl", hash = "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93"},
+ {file = "pathspec-0.10.1.tar.gz", hash = "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d"},
+]
+
+[[package]]
+name = "platformdirs"
+version = "2.5.2"
+description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
+ {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"},
+]
+
+[package.extras]
+docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"]
+test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"]
+
+[[package]]
+name = "pluggy"
+version = "1.0.0"
+description = "plugin and hook calling mechanisms for python"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
+ {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
+]
+
+[package.dependencies]
+importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
+
+[package.extras]
+dev = ["pre-commit", "tox"]
+testing = ["pytest", "pytest-benchmark"]
+
+[[package]]
+name = "py"
+version = "1.11.0"
+description = "library with cross-python path, ini-parsing, io, code, log facilities"
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
+ {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
+]
+
+[[package]]
+name = "pydantic"
+version = "1.10.2"
+description = "Data validation and settings management using python type hints"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pydantic-1.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb6ad4489af1bac6955d38ebcb95079a836af31e4c4f74aba1ca05bb9f6027bd"},
+ {file = "pydantic-1.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1f5a63a6dfe19d719b1b6e6106561869d2efaca6167f84f5ab9347887d78b98"},
+ {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:352aedb1d71b8b0736c6d56ad2bd34c6982720644b0624462059ab29bd6e5912"},
+ {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19b3b9ccf97af2b7519c42032441a891a5e05c68368f40865a90eb88833c2559"},
+ {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9069e1b01525a96e6ff49e25876d90d5a563bc31c658289a8772ae186552236"},
+ {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:355639d9afc76bcb9b0c3000ddcd08472ae75318a6eb67a15866b87e2efa168c"},
+ {file = "pydantic-1.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:ae544c47bec47a86bc7d350f965d8b15540e27e5aa4f55170ac6a75e5f73b644"},
+ {file = "pydantic-1.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a4c805731c33a8db4b6ace45ce440c4ef5336e712508b4d9e1aafa617dc9907f"},
+ {file = "pydantic-1.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d49f3db871575e0426b12e2f32fdb25e579dea16486a26e5a0474af87cb1ab0a"},
+ {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37c90345ec7dd2f1bcef82ce49b6235b40f282b94d3eec47e801baf864d15525"},
+ {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b5ba54d026c2bd2cb769d3468885f23f43710f651688e91f5fb1edcf0ee9283"},
+ {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05e00dbebbe810b33c7a7362f231893183bcc4251f3f2ff991c31d5c08240c42"},
+ {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2d0567e60eb01bccda3a4df01df677adf6b437958d35c12a3ac3e0f078b0ee52"},
+ {file = "pydantic-1.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:c6f981882aea41e021f72779ce2a4e87267458cc4d39ea990729e21ef18f0f8c"},
+ {file = "pydantic-1.10.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4aac8e7103bf598373208f6299fa9a5cfd1fc571f2d40bf1dd1955a63d6eeb5"},
+ {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a7b66c3f499108b448f3f004801fcd7d7165fb4200acb03f1c2402da73ce4c"},
+ {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bedf309630209e78582ffacda64a21f96f3ed2e51fbf3962d4d488e503420254"},
+ {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9300fcbebf85f6339a02c6994b2eb3ff1b9c8c14f502058b5bf349d42447dcf5"},
+ {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:216f3bcbf19c726b1cc22b099dd409aa371f55c08800bcea4c44c8f74b73478d"},
+ {file = "pydantic-1.10.2-cp37-cp37m-win_amd64.whl", hash = "sha256:dd3f9a40c16daf323cf913593083698caee97df2804aa36c4b3175d5ac1b92a2"},
+ {file = "pydantic-1.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b97890e56a694486f772d36efd2ba31612739bc6f3caeee50e9e7e3ebd2fdd13"},
+ {file = "pydantic-1.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9cabf4a7f05a776e7793e72793cd92cc865ea0e83a819f9ae4ecccb1b8aa6116"},
+ {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06094d18dd5e6f2bbf93efa54991c3240964bb663b87729ac340eb5014310624"},
+ {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc78cc83110d2f275ec1970e7a831f4e371ee92405332ebfe9860a715f8336e1"},
+ {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ee433e274268a4b0c8fde7ad9d58ecba12b069a033ecc4645bb6303c062d2e9"},
+ {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c2abc4393dea97a4ccbb4ec7d8658d4e22c4765b7b9b9445588f16c71ad9965"},
+ {file = "pydantic-1.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:0b959f4d8211fc964772b595ebb25f7652da3f22322c007b6fed26846a40685e"},
+ {file = "pydantic-1.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c33602f93bfb67779f9c507e4d69451664524389546bacfe1bee13cae6dc7488"},
+ {file = "pydantic-1.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5760e164b807a48a8f25f8aa1a6d857e6ce62e7ec83ea5d5c5a802eac81bad41"},
+ {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6eb843dcc411b6a2237a694f5e1d649fc66c6064d02b204a7e9d194dff81eb4b"},
+ {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b8795290deaae348c4eba0cebb196e1c6b98bdbe7f50b2d0d9a4a99716342fe"},
+ {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e0bedafe4bc165ad0a56ac0bd7695df25c50f76961da29c050712596cf092d6d"},
+ {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e05aed07fa02231dbf03d0adb1be1d79cabb09025dd45aa094aa8b4e7b9dcda"},
+ {file = "pydantic-1.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:c1ba1afb396148bbc70e9eaa8c06c1716fdddabaf86e7027c5988bae2a829ab6"},
+ {file = "pydantic-1.10.2-py3-none-any.whl", hash = "sha256:1b6ee725bd6e83ec78b1aa32c5b1fa67a3a65badddde3976bca5fe4568f27709"},
+ {file = "pydantic-1.10.2.tar.gz", hash = "sha256:91b8e218852ef6007c2b98cd861601c6a09f1aa32bbbb74fab5b1c33d4a1e410"},
+]
+
+[package.dependencies]
+typing-extensions = ">=4.1.0"
+
+[package.extras]
+dotenv = ["python-dotenv (>=0.10.4)"]
+email = ["email-validator (>=1.0.3)"]
+
+[[package]]
+name = "pyparsing"
+version = "3.0.9"
+description = "pyparsing module - Classes and methods to define and execute parsing grammars"
+category = "dev"
+optional = false
+python-versions = ">=3.6.8"
+files = [
+ {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"},
+ {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"},
+]
+
+[package.extras]
+diagrams = ["jinja2", "railroad-diagrams"]
+
+[[package]]
+name = "pyright"
+version = "1.1.297"
+description = "Command line wrapper for pyright"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pyright-1.1.297-py3-none-any.whl", hash = "sha256:3fd6528280eb649f8b64b7ece55299f01e340d29f4cf257da876957e3ee24062"},
+ {file = "pyright-1.1.297.tar.gz", hash = "sha256:89082de2fbd240fa75767b57824f4d8516f2fb9005047265a67b895547c6272f"},
+]
+
+[package.dependencies]
+nodeenv = ">=1.6.0"
+typing-extensions = {version = ">=3.7", markers = "python_version < \"3.8\""}
+
+[package.extras]
+all = ["twine (>=3.4.1)"]
+dev = ["twine (>=3.4.1)"]
+
+[[package]]
+name = "pytest"
+version = "7.1.1"
+description = "pytest: simple powerful testing with Python"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-7.1.1-py3-none-any.whl", hash = "sha256:92f723789a8fdd7180b6b06483874feca4c48a5c76968e03bb3e7f806a1869ea"},
+ {file = "pytest-7.1.1.tar.gz", hash = "sha256:841132caef6b1ad17a9afde46dc4f6cfa59a05f9555aae5151f73bdf2820ca63"},
+]
+
+[package.dependencies]
+atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
+attrs = ">=19.2.0"
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
+iniconfig = "*"
+packaging = "*"
+pluggy = ">=0.12,<2.0"
+py = ">=1.8.2"
+tomli = ">=1.0.0"
+
+[package.extras]
+testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
+
+[[package]]
+name = "pytest-asyncio"
+version = "0.18.3"
+description = "Pytest support for asyncio"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-asyncio-0.18.3.tar.gz", hash = "sha256:7659bdb0a9eb9c6e3ef992eef11a2b3e69697800ad02fb06374a210d85b29f91"},
+ {file = "pytest_asyncio-0.18.3-1-py3-none-any.whl", hash = "sha256:16cf40bdf2b4fb7fc8e4b82bd05ce3fbcd454cbf7b92afc445fe299dabb88213"},
+ {file = "pytest_asyncio-0.18.3-py3-none-any.whl", hash = "sha256:8fafa6c52161addfd41ee7ab35f11836c5a16ec208f93ee388f752bea3493a84"},
+]
+
+[package.dependencies]
+pytest = ">=6.1.0"
+typing-extensions = {version = ">=3.7.2", markers = "python_version < \"3.8\""}
+
+[package.extras]
+testing = ["coverage (==6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (==0.931)", "pytest-trio (>=0.7.0)"]
+
+[[package]]
+name = "python-dateutil"
+version = "2.8.2"
+description = "Extensions to the standard Python datetime module"
+category = "dev"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+files = [
+ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
+ {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
+]
+
+[package.dependencies]
+six = ">=1.5"
+
+[[package]]
+name = "respx"
+version = "0.19.2"
+description = "A utility for mocking out the Python HTTPX and HTTP Core libraries."
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "respx-0.19.2-py2.py3-none-any.whl", hash = "sha256:417f986fec599b9cc6531e93e494b7a75d1cb7bccff9dde5b53edc51f7954494"},
+ {file = "respx-0.19.2.tar.gz", hash = "sha256:f3d210bb4de0ccc4c5afabeb87c3c1b03b3765a9c1a73eb042a07bb18ac33705"},
+]
+
+[package.dependencies]
+httpx = ">=0.21.0"
+
+[[package]]
+name = "rfc3986"
+version = "1.5.0"
+description = "Validating URI References per RFC 3986"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+ {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"},
+ {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"},
+]
+
+[package.dependencies]
+idna = {version = "*", optional = true, markers = "extra == \"idna2008\""}
+
+[package.extras]
+idna2008 = ["idna"]
+
+[[package]]
+name = "ruff"
+version = "0.0.239"
+description = "An extremely fast Python linter, written in Rust."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "ruff-0.0.239-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:de9830cfcca81c5d25233bf4be6f2f95deb58a7eaf2295f91280de97a54240d7"},
+ {file = "ruff-0.0.239-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:71c05f627fb9efb0859af248fe00f61391266d14e9406c10236dec7db1b1bfe5"},
+ {file = "ruff-0.0.239-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b1caea095c22067abc685b8452be5a9079ca27b88bffd80730f9e4ca8f81e7c"},
+ {file = "ruff-0.0.239-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:97293bae6da5bd82672473f6d43421de7dd27fe8a8b1b3c79ffeda37bcb492b4"},
+ {file = "ruff-0.0.239-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25c3a84f8db9281385685041e98b67afd4a0bb5a872e897331af8229eb28b965"},
+ {file = "ruff-0.0.239-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:0056b7e1975a3ad7ef7eb9df4ca9f905661caf924c6843ecbe4e10719cd6cd0b"},
+ {file = "ruff-0.0.239-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b38916c62eb2587743d2ec8e1f03a43eb5889cf4a56cb55f5eb3bffee5260e32"},
+ {file = "ruff-0.0.239-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7afe3bc50339b916b75a587ed299859fae9c1056ed73d35531c0af610dbe67f6"},
+ {file = "ruff-0.0.239-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:daac673996ecbef77bccf8cd03189c60302568b7aec669e352aa2d8925b074b2"},
+ {file = "ruff-0.0.239-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d3e4bb03333b0c8012ad7d27246a7a95dba3d6ec8796c172af055a179a86b61c"},
+ {file = "ruff-0.0.239-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:801a209887f777811caf9fd4513eec9b505e139d1b0844e1eacd1b4683b0eba2"},
+ {file = "ruff-0.0.239-py3-none-musllinux_1_2_i686.whl", hash = "sha256:216592db3bd93c260cc329d3237548f17fea1f43f87673ab5cdeb04d548fbea9"},
+ {file = "ruff-0.0.239-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1e523edf92c5e888d93a145ef20ad548fca4da7cbd7a8321b3539967a03b7caf"},
+ {file = "ruff-0.0.239-py3-none-win32.whl", hash = "sha256:e54233e7d97d5b705582f673dc3184c7b16c42f7eac3be707c0adf716e457293"},
+ {file = "ruff-0.0.239-py3-none-win_amd64.whl", hash = "sha256:a1bdd3b5ea60b160d3ceb2f8fef4d88af58b7932d4ed46c85c586ef2f277b71d"},
+ {file = "ruff-0.0.239.tar.gz", hash = "sha256:bbb1fe64d4641ce7e5855bbebd9e81f6a596501bcb67842600186b3aa9b950cf"},
+]
+
+[[package]]
+name = "setuptools"
+version = "67.4.0"
+description = "Easily download, build, install, upgrade, and uninstall Python packages"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "setuptools-67.4.0-py3-none-any.whl", hash = "sha256:f106dee1b506dee5102cc3f3e9e68137bbad6d47b616be7991714b0c62204251"},
+ {file = "setuptools-67.4.0.tar.gz", hash = "sha256:e5fd0a713141a4a105412233c63dc4e17ba0090c8e8334594ac790ec97792330"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
+testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
+
+[[package]]
+name = "six"
+version = "1.16.0"
+description = "Python 2 and 3 compatibility utilities"
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
+ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+]
+
+[[package]]
+name = "sniffio"
+version = "1.3.0"
+description = "Sniff out which async library your code is running under"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"},
+ {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"},
+]
+
+[[package]]
+name = "time-machine"
+version = "2.9.0"
+description = "Travel through time in your tests."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "time-machine-2.9.0.tar.gz", hash = "sha256:60222d43f6e93a926adc36ed37a54bc8e4d0d8d1c4d449096afcfe85086129c2"},
+ {file = "time_machine-2.9.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fd72c0b2e7443fff6e4481991742b72c17f73735e5fdd176406ca48df187a5c9"},
+ {file = "time_machine-2.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5657e0e6077cf15b37f0d8cf78e868113bbb3ecccc60064c40fe52d8166ca8b1"},
+ {file = "time_machine-2.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bfa82614a98ecee70272bb6038d210b2ad7b2a6b8a678b400c34bdaf776802a7"},
+ {file = "time_machine-2.9.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4380bd6697cc7db3c9e6843f24779ac0550affa9d9a8e5f9e5d5cc139cb6583"},
+ {file = "time_machine-2.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6211beee9f5dace08b1bbbb1fb09e34a69c52d87eea676729f14c8660481dff6"},
+ {file = "time_machine-2.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:68ec8b83197db32c7a12da5f6b83c91271af3ed7f5dc122d2900a8de01dff9f0"},
+ {file = "time_machine-2.9.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c5dbc8b87cdc7be070a499f2bd1cd405c7f647abeb3447dfd397639df040bc64"},
+ {file = "time_machine-2.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:948ca690f9770ad4a93fa183061c11346505598f5f0b721965bc85ec83bb103d"},
+ {file = "time_machine-2.9.0-cp310-cp310-win32.whl", hash = "sha256:f92d5d2eb119a6518755c4c9170112094c706d1c604460f50afc1308eeb97f0e"},
+ {file = "time_machine-2.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:cb51432652ad663b4cbd631c73c90f9e94f463382b86c0b6b854173700512a70"},
+ {file = "time_machine-2.9.0-cp310-cp310-win_arm64.whl", hash = "sha256:8976b7b1f7de13598b655d459f5640f90f3cd587283e1b914a22e45946c5485b"},
+ {file = "time_machine-2.9.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6463e302c96eb8c691c4340e281bd54327a213b924fa189aea81accf7e7f78df"},
+ {file = "time_machine-2.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6b632d60aa0883dc7292ac3d32050604d26ec2bbd5c4d42fb0de3b4ef17343e2"},
+ {file = "time_machine-2.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d329578abe47ce95baa015ef3825acebb1b73b5fa6f818fdf2d4685a00ca457f"},
+ {file = "time_machine-2.9.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ba5fc2655749066d68986de8368984dad4082db2fbeade78f40506dc5b65672"},
+ {file = "time_machine-2.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49df5eea2160068e5b2bf28c22fc4c5aea00862ad88ddc3b62fc0f0683e97538"},
+ {file = "time_machine-2.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8830510adbf0a231184da277db9de1d55ef93ed228a575d217aaee295505abf1"},
+ {file = "time_machine-2.9.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b16a2129f9146faa080bfd1b53447761f7386ec5c72890c827a65f33ab200336"},
+ {file = "time_machine-2.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a2cf80e5deaaa68c6cefb25303a4c870490b4e7591ed8e2435a65728920bc097"},
+ {file = "time_machine-2.9.0-cp311-cp311-win32.whl", hash = "sha256:fe013942ab7f3241fcbe66ee43222d47f499d1e0cb69e913791c52e638ddd7f0"},
+ {file = "time_machine-2.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:1d0ab46ce8a60baf9d86525694bf698fed9efefd22b8cbe1ca3e74abbb3239e1"},
+ {file = "time_machine-2.9.0-cp311-cp311-win_arm64.whl", hash = "sha256:4f3755d9342ca1f1019418db52072272dfd75eb818fa4726fa8aabe208b38c26"},
+ {file = "time_machine-2.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9ee553f7732fa51e019e3329a6984593184c4e0410af1e73d91ce38a5d4b34ab"},
+ {file = "time_machine-2.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:359c806e5b9a7a3c73dbb808d19dca297f5504a5eefdc5d031db8d918f43e364"},
+ {file = "time_machine-2.9.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e2a90b8300812d8d774f2d2fc216fec3c7d94132ac589e062489c395061f16c"},
+ {file = "time_machine-2.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36dde844d28549929fab171d683c28a8db1c206547bcf6b7aca77319847d2046"},
+ {file = "time_machine-2.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:728263611d7940fda34d21573bd2b3f1491bdb52dbf75c5fe6c226dfe4655201"},
+ {file = "time_machine-2.9.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8bcc86b5a07ea9745f26dfad958dde0a4f56748c2ae0c9a96200a334d1b55055"},
+ {file = "time_machine-2.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b9c36240876622b7f2f9e11bf72f100857c0a1e1a59af2da3d5067efea62c37"},
+ {file = "time_machine-2.9.0-cp37-cp37m-win32.whl", hash = "sha256:eaf334477bc0a9283d5150a56be8670a07295ef676e5b5a7f086952929d1a56b"},
+ {file = "time_machine-2.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:8e797e5a2a99d1b237183e52251abfc1ad85c376278b39d1aca76a451a97861a"},
+ {file = "time_machine-2.9.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:69898aed9b2315a90f5855343d9aa34d05fa06032e2e3bb14f2528941ec89dc1"},
+ {file = "time_machine-2.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c01dbc3671d0649023daf623e952f9f0b4d904d57ab546d6d35a4aeb14915e8d"},
+ {file = "time_machine-2.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f080f6f7ca8cfca43bc5639288aebd0a273b4b5bd0acff609c2318728b13a18"},
+ {file = "time_machine-2.9.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8670cb5cfda99f483d60de6ce56ceb0ec5d359193e79e4688e1c3c9db3937383"},
+ {file = "time_machine-2.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f97ed8bc5b517844a71030f74e9561de92f4902c306e6ccc8331a5b0c8dd0e00"},
+ {file = "time_machine-2.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bdbe785e046d124f73cca603ee37d5fae0b15dc4c13702488ad19de56aae08ba"},
+ {file = "time_machine-2.9.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:fcdef7687aed5c4331c9808f4a414a41987441c3e7a2ba554e4dccfa4218e788"},
+ {file = "time_machine-2.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f6e79643368828d4651146a486be5a662846ac223ab5e2c73ddd519acfcc243c"},
+ {file = "time_machine-2.9.0-cp38-cp38-win32.whl", hash = "sha256:bb15b2b79b00d3f6cf7d62096f5e782fa740ecedfe0540c09f1d1e4d3d7b81ba"},
+ {file = "time_machine-2.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:3ff5148e2e73392db8418a1fe2f0b06f4a0e76772933502fb61e4c3000b5324e"},
+ {file = "time_machine-2.9.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8367fd03f2d7349c7fc20f14de186974eaca2502c64b948212de663742c8fd11"},
+ {file = "time_machine-2.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4b55654aaeaba380fcd6c004b8ada2978fdd4ece1e61e6b9717c6d4cc7fbbcd9"},
+ {file = "time_machine-2.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae4e3f02ab5dabb35adca606237c7e1a515c86d69c0b7092bbe0e1cfe5cffc61"},
+ {file = "time_machine-2.9.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:010a58a8de1120308befae19e6c9de2ef5ca5206635cea33cb264998725cc027"},
+ {file = "time_machine-2.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b32addbf56639a9a8261fb62f8ea83473447671c83ca2c017ab1eabf4841157f"},
+ {file = "time_machine-2.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:372a97da01db89533d2f4ce50bbd908e5c56df7b8cfd6a005b177d0b14dc2938"},
+ {file = "time_machine-2.9.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b8faff03231ee55d5a216ce3e9171c5205459f866f54d4b5ee8aa1d860e4ce11"},
+ {file = "time_machine-2.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:748d701228e646c224f2adfa6a11b986cd4aa90f1b8c13ef4534a3919c796bc0"},
+ {file = "time_machine-2.9.0-cp39-cp39-win32.whl", hash = "sha256:d79d374e32488c76cdb06fbdd4464083aeaa715ddca3e864bac7c7760eb03729"},
+ {file = "time_machine-2.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:cc6bf01211b5ea40f633d5502c5aa495b415ebaff66e041820997dae70a508e1"},
+ {file = "time_machine-2.9.0-cp39-cp39-win_arm64.whl", hash = "sha256:3ce445775fcf7cb4040cfdba4b7c4888e7fd98bbcccfe1dc3fa8a798ed1f1d24"},
+]
+
+[package.dependencies]
+python-dateutil = "*"
+
+[[package]]
+name = "tomli"
+version = "2.0.1"
+description = "A lil' TOML parser"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
+ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
+]
+
+[[package]]
+name = "typed-ast"
+version = "1.5.4"
+description = "a fork of Python 2 and 3 ast modules with type comment support"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"},
+ {file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"},
+ {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"},
+ {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"},
+ {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"},
+ {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"},
+ {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"},
+ {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"},
+ {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"},
+ {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"},
+ {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"},
+ {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"},
+ {file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"},
+ {file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"},
+ {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"},
+ {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"},
+ {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"},
+ {file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"},
+ {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"},
+ {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"},
+ {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"},
+ {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"},
+ {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"},
+ {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"},
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.4.0"
+description = "Backported and Experimental Type Hints for Python 3.7+"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"},
+ {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"},
+]
+
+[[package]]
+name = "zipp"
+version = "3.10.0"
+description = "Backport of pathlib-compatible object wrapper for zip files"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "zipp-3.10.0-py3-none-any.whl", hash = "sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1"},
+ {file = "zipp-3.10.0.tar.gz", hash = "sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"]
+testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
+
+[metadata]
+lock-version = "2.0"
+python-versions = "^3.7"
+content-hash = "cab46dc3be2a1bc422f10fb0046e47c31006fbcf9a0830168b236c5ab92a3c0d"
diff --git a/pyproject.toml b/pyproject.toml
index f42f1bf9..7894508d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,5 +1,5 @@
[tool.poetry]
-name = "finch.py"
+name = "finch-api"
version = "0.0.1"
description = "Client library for the Finch API"
readme = "README.md"
@@ -18,15 +18,17 @@ typing-extensions = ">= 4.1.1"
anyio = ">= 3.5.0"
distro = ">= 1.7.0"
-[tool.poetry.dev-dependencies]
-mypy = "^0.982"
-black = "^22.1.0"
-respx = "^0.19.2"
-pytest = "^7.1.1"
-pytest-asyncio = "^0.18.3"
-pyright = "^1.1.277"
-isort = "^5.10.1"
-autoflake = "^1.4"
+
+[tool.poetry.group.dev.dependencies]
+pyright = "1.1.297"
+mypy = "1.1.1"
+black = "22.10.0"
+respx = "0.19.2"
+pytest = "7.1.1"
+pytest-asyncio = "0.18.3"
+ruff = "0.0.239"
+isort = "5.10.1"
+time-machine = "^2.9.0"
[build-system]
requires = ["poetry-core>=1.0.0"]
@@ -43,6 +45,9 @@ xfail_strict = true
log_cli = true
log_level = "INFO"
asyncio_mode = "auto"
+filterwarnings = [
+ "error"
+]
[tool.pyright]
# this enables practically every flag given by pyright.
@@ -62,3 +67,29 @@ reportPrivateUsage = false
profile = "black"
length_sort = true
extra_standard_library = ["typing_extensions"]
+
+[tool.ruff]
+line-length = 120
+format = "grouped"
+target-version = "py37"
+select = [
+ # remove unused imports
+ "F401",
+ # bare except statements
+ "E722",
+ # print statements
+ "T201",
+ "T203",
+]
+unfixable = [
+ # disable auto fix for print statements
+ "T201",
+ "T203",
+]
+ignore-init-module-imports = true
+
+
+[tool.ruff.per-file-ignores]
+"bin/**.py" = ["T201", "T203"]
+"tests/**.py" = ["T201", "T203"]
+"examples/**.py" = ["T201", "T203"]
diff --git a/release-please-config.json b/release-please-config.json
new file mode 100644
index 00000000..eb41d1e6
--- /dev/null
+++ b/release-please-config.json
@@ -0,0 +1,14 @@
+{
+ "include-v-in-tag": true,
+ "bump-minor-pre-major": true,
+ "bump-patch-for-minor-pre-major": true,
+ "pull-request-header": "Automated Release PR",
+ "packages": {
+ ".": {}
+ },
+ "$schema": "https://raw.githubusercontent.com/stainless-api/release-please/main/schemas/config.json",
+ "release-type": "python",
+ "extra-files": [
+ "src/finch/_version.py"
+ ]
+}
\ No newline at end of file
diff --git a/src/finch/__init__.py b/src/finch/__init__.py
index cf5cf2d9..1d5b4bf5 100644
--- a/src/finch/__init__.py
+++ b/src/finch/__init__.py
@@ -1 +1,77 @@
-# Placeholder package for the Finch SDK
+# File generated from our OpenAPI spec by Stainless.
+
+from . import types
+from ._types import NoneType, Transport, ProxiesTypes
+from ._utils import file_from_path
+from ._client import (
+ Finch,
+ Client,
+ Stream,
+ Timeout,
+ Transport,
+ AsyncFinch,
+ AsyncClient,
+ AsyncStream,
+ ProxiesTypes,
+ RequestOptions,
+)
+from ._version import __title__, __version__
+from ._exceptions import (
+ APIError,
+ ConflictError,
+ NotFoundError,
+ APIStatusError,
+ RateLimitError,
+ APITimeoutError,
+ BadRequestError,
+ APIConnectionError,
+ AuthenticationError,
+ InternalServerError,
+ PermissionDeniedError,
+ UnprocessableEntityError,
+ APIResponseValidationError,
+)
+
+__all__ = [
+ "types",
+ "__version__",
+ "__title__",
+ "NoneType",
+ "Transport",
+ "ProxiesTypes",
+ "APIError",
+ "APIConnectionError",
+ "APIResponseValidationError",
+ "APIStatusError",
+ "APITimeoutError",
+ "AuthenticationError",
+ "BadRequestError",
+ "ConflictError",
+ "InternalServerError",
+ "NotFoundError",
+ "PermissionDeniedError",
+ "RateLimitError",
+ "UnprocessableEntityError",
+ "Timeout",
+ "RequestOptions",
+ "Client",
+ "AsyncClient",
+ "Stream",
+ "AsyncStream",
+ "Finch",
+ "AsyncFinch",
+ "file_from_path",
+]
+
+# Update the __module__ attribute for exported symbols so that
+# error messages point to this module instead of the module
+# it was originally defined in, e.g.
+# finch._base_exceptions.NotFoundError -> finch.NotFoundError
+__locals = locals()
+for __name in __all__:
+ if not __name.startswith("__"):
+ try:
+ setattr(__locals[__name], "__module__", "finch")
+ except (TypeError, AttributeError):
+ # Some of our exported symbols are builtins which we can't set attributes for.
+ pass
diff --git a/src/finch/_base_client.py b/src/finch/_base_client.py
new file mode 100644
index 00000000..43e23178
--- /dev/null
+++ b/src/finch/_base_client.py
@@ -0,0 +1,1431 @@
+from __future__ import annotations
+
+import json
+import time
+import uuid
+import inspect
+import platform
+from random import random
+from typing import (
+ Any,
+ Dict,
+ List,
+ Type,
+ Union,
+ Generic,
+ Mapping,
+ TypeVar,
+ Iterable,
+ Iterator,
+ Optional,
+ Generator,
+ AsyncIterator,
+ cast,
+ overload,
+)
+from functools import lru_cache
+from typing_extensions import Literal, get_origin
+
+import anyio
+import httpx
+import distro
+import pydantic
+from httpx import URL, Limits
+from pydantic import PrivateAttr
+
+from . import _base_exceptions as exceptions
+from ._qs import Querystring
+from ._types import (
+ NOT_GIVEN,
+ Body,
+ Omit,
+ Query,
+ ModelT,
+ Headers,
+ Timeout,
+ NoneType,
+ NotGiven,
+ Transport,
+ AnyMapping,
+ ProxiesTypes,
+ RequestFiles,
+ RequestOptions,
+ UnknownResponse,
+ ModelBuilderProtocol,
+)
+from ._utils import is_dict, is_mapping
+from ._models import (
+ BaseModel,
+ GenericModel,
+ FinalRequestOptions,
+ validate_type,
+ construct_type,
+)
+from ._base_exceptions import (
+ APIStatusError,
+ APITimeoutError,
+ APIConnectionError,
+ APIResponseValidationError,
+)
+
+# TODO: make base page type vars covariant
+SyncPageT = TypeVar("SyncPageT", bound="BaseSyncPage[Any]")
+AsyncPageT = TypeVar("AsyncPageT", bound="BaseAsyncPage[Any]")
+
+
+ResponseT = TypeVar(
+ "ResponseT",
+ bound=Union[
+ str,
+ None,
+ BaseModel,
+ List[Any],
+ Dict[str, Any],
+ httpx.Response,
+ UnknownResponse,
+ ModelBuilderProtocol,
+ ],
+)
+
+_T = TypeVar("_T")
+_T_co = TypeVar("_T_co", covariant=True)
+
+DEFAULT_TIMEOUT = Timeout(timeout=60.0, connect=5.0)
+DEFAULT_MAX_RETRIES = 2
+DEFAULT_LIMITS = Limits(max_connections=100, max_keepalive_connections=20)
+
+
+class StopStreaming(Exception):
+ """Raised internally when processing of a streamed response should be stopped."""
+
+
+class Stream(Generic[ResponseT]):
+ response: httpx.Response
+
+ def __init__(
+ self,
+ *,
+ cast_to: type[ResponseT],
+ response: httpx.Response,
+ client: SyncAPIClient,
+ ) -> None:
+ self.response = response
+ self._cast_to = cast_to
+ self._client = client
+ self._iterator = self.__iter()
+
+ def __next__(self) -> ResponseT:
+ return self._iterator.__next__()
+
+ def __iter__(self) -> Iterator[ResponseT]:
+ for item in self._iterator:
+ yield item
+
+ def __iter(self) -> Iterator[ResponseT]:
+ cast_to = self._cast_to
+ response = self.response
+ process_line = self._client._process_stream_line
+ process_data = self._client._process_response_data
+
+ awaiting_ping_data = False
+ for raw_line in response.iter_lines():
+ if not raw_line or raw_line == "\n":
+ continue
+
+ if raw_line.startswith("event: ping"):
+ awaiting_ping_data = True
+ continue
+ if awaiting_ping_data:
+ awaiting_ping_data = False
+ continue
+
+ try:
+ line = process_line(raw_line)
+ except StopStreaming:
+ # we are done!
+ break
+
+ yield process_data(data=json.loads(line), cast_to=cast_to, response=response)
+
+
+class AsyncStream(Generic[ResponseT]):
+ response: httpx.Response
+
+ def __init__(
+ self,
+ *,
+ cast_to: type[ResponseT],
+ response: httpx.Response,
+ client: AsyncAPIClient,
+ ) -> None:
+ self.response = response
+ self._cast_to = cast_to
+ self._client = client
+ self._iterator = self.__iter()
+
+ async def __anext__(self) -> ResponseT:
+ return await self._iterator.__anext__()
+
+ async def __aiter__(self) -> AsyncIterator[ResponseT]:
+ async for item in self._iterator:
+ yield item
+
+ async def __iter(self) -> AsyncIterator[ResponseT]:
+ cast_to = self._cast_to
+ response = self.response
+ process_line = self._client._process_stream_line
+ process_data = self._client._process_response_data
+
+ awaiting_ping_data = False
+ async for raw_line in response.aiter_lines():
+ if not raw_line or raw_line == "\n":
+ continue
+
+ if raw_line.startswith("event: ping"):
+ awaiting_ping_data = True
+ continue
+ if awaiting_ping_data:
+ awaiting_ping_data = False
+ continue
+
+ try:
+ line = process_line(raw_line)
+ except StopStreaming:
+ # we are done!
+ break
+
+ yield process_data(data=json.loads(line), cast_to=cast_to, response=response)
+
+
+class PageInfo:
+ """Stores the necesary information to build the request to retrieve the next page.
+
+ Either `url` or `params` must be set.
+ """
+
+ url: URL | NotGiven
+ params: Query | NotGiven
+
+ @overload
+ def __init__(
+ self,
+ *,
+ url: URL,
+ ) -> None:
+ ...
+
+ @overload
+ def __init__(
+ self,
+ *,
+ params: Query,
+ ) -> None:
+ ...
+
+ def __init__(
+ self,
+ *,
+ url: URL | NotGiven = NOT_GIVEN,
+ params: Query | NotGiven = NOT_GIVEN,
+ ) -> None:
+ self.url = url
+ self.params = params
+
+
class BasePage(GenericModel, Generic[ModelT]):
    """Shared behaviour for sync and async pagination pages."""

    # request options used to fetch this page; reused when building the next request
    _options: FinalRequestOptions = PrivateAttr()
    # the model type each item on the page is parsed into
    _model: Type[ModelT] = PrivateAttr()

    def has_next_page(self) -> bool:
        """Whether another page can be requested after this one."""
        items = self._get_page_items()
        if not items:
            # an empty page means the results are exhausted
            return False
        return self.next_page_info() is not None

    def next_page_info(self) -> Optional[PageInfo]:
        """Overridden by concrete page classes; `None` means no further page."""
        ...

    def _get_page_items(self) -> Iterable[ModelT]:  # type: ignore[empty-body]
        """Overridden by concrete page classes to return this page's items."""
        ...

    def _params_from_url(self, url: URL) -> httpx.QueryParams:
        # TODO: do we have to preprocess params here?
        return httpx.QueryParams(cast(Any, self._options.params)).merge(url.params)

    def _info_to_options(self, info: PageInfo) -> FinalRequestOptions:
        """Translate a `PageInfo` into the request options for the next page."""
        options = self._options.copy()

        if not isinstance(info.params, NotGiven):
            # next-page params are merged over the current ones
            options.params = {**options.params, **info.params}
            return options

        if not isinstance(info.url, NotGiven):
            params = self._params_from_url(info.url)
            url = info.url.copy_with(params=params)
            options.params = dict(url.params)
            options.url = str(url)
            return options

        raise ValueError("Unexpected PageInfo state")
+
+
class BaseSyncPage(BasePage[ModelT], Generic[ModelT]):
    """Synchronous page; iterating it walks every item across all pages."""

    _client: SyncAPIClient = pydantic.PrivateAttr()

    def _set_private_attributes(
        self,
        client: SyncAPIClient,
        model: Type[ModelT],
        options: FinalRequestOptions,
    ) -> None:
        """Attach the request state that pydantic validation cannot carry."""
        self._model = model
        self._client = client
        self._options = options

    # Pydantic uses a custom `__iter__` method to support casting BaseModels
    # to dictionaries. e.g. dict(model).
    # As we want to support `for item in page`, this is inherently incompatible
    # with the default pydantic behaviour. It is not possible to support both
    # use cases at once. Fortunately, this is not a big deal as all other pydantic
    # methods should continue to work as expected as there is an alternative method
    # to cast a model to a dictionary, model.dict(), which is used internally
    # by pydantic.
    def __iter__(self) -> Iterator[ModelT]:  # type: ignore
        for page in self.iter_pages():
            for item in page._get_page_items():
                yield item

    def iter_pages(self: SyncPageT) -> Iterator[SyncPageT]:
        """Yield this page and then each subsequent page, fetched lazily."""
        page = self
        while True:
            yield page
            if page.has_next_page():
                page = page.get_next_page()
            else:
                return

    def get_next_page(self: SyncPageT) -> SyncPageT:
        """Fetch and return the page after this one.

        Raises:
            RuntimeError: if there is no next page.
        """
        info = self.next_page_info()
        if not info:
            raise RuntimeError(
                "No next page expected; please check `.has_next_page()` before calling `.get_next_page()`."
            )

        options = self._info_to_options(info)
        return self._client._request_api_list(self._model, page=self.__class__, options=options)
+
+
class AsyncPaginator(Generic[ModelT, AsyncPageT]):
    """Awaitable returned by async list endpoints.

    `await` it to get the first page, or `async for` over it to iterate
    items across every page.
    """

    def __init__(
        self,
        client: AsyncAPIClient,
        options: FinalRequestOptions,
        page_cls: Type[AsyncPageT],
        model: Type[ModelT],
    ) -> None:
        self._model = model
        self._client = client
        self._options = options
        self._page_cls = page_cls

    def __await__(self) -> Generator[Any, None, AsyncPageT]:
        return self._get_page().__await__()

    async def _get_page(self) -> AsyncPageT:
        """Fetch the first page and wire up its private pagination state."""
        page = await self._client.request(self._page_cls, self._options)
        page._set_private_attributes(  # pyright: ignore[reportPrivateUsage]
            model=self._model,
            options=self._options,
            client=self._client,
        )
        return page

    async def __aiter__(self) -> AsyncIterator[ModelT]:
        # https://github.com/microsoft/pyright/issues/3464
        page = cast(
            AsyncPageT,
            await self,  # type: ignore
        )
        async for item in page:
            yield item
+
+
class BaseAsyncPage(BasePage[ModelT], Generic[ModelT]):
    """Asynchronous page; `async for` over it walks every item across all pages."""

    _client: AsyncAPIClient = pydantic.PrivateAttr()

    def _set_private_attributes(
        self,
        model: Type[ModelT],
        client: AsyncAPIClient,
        options: FinalRequestOptions,
    ) -> None:
        """Attach the request state that pydantic validation cannot carry."""
        self._model = model
        self._client = client
        self._options = options

    async def __aiter__(self) -> AsyncIterator[ModelT]:
        async for page in self.iter_pages():
            for item in page._get_page_items():
                yield item

    async def iter_pages(self: AsyncPageT) -> AsyncIterator[AsyncPageT]:
        """Yield this page and then each subsequent page, fetched lazily."""
        page = self
        while True:
            yield page
            if page.has_next_page():
                page = await page.get_next_page()
            else:
                return

    async def get_next_page(self: AsyncPageT) -> AsyncPageT:
        """Fetch and return the page after this one.

        Raises:
            RuntimeError: if there is no next page.
        """
        info = self.next_page_info()
        if not info:
            raise RuntimeError(
                "No next page expected; please check `.has_next_page()` before calling `.get_next_page()`."
            )

        options = self._info_to_options(info)
        return await self._client._request_api_list(self._model, page=self.__class__, options=options)
+
+
class BaseClient:
    """Shared plumbing for the sync and async API clients: request building,
    response parsing, retry policy and error mapping."""

    _client: httpx.Client | httpx.AsyncClient
    _version: str
    max_retries: int
    timeout: Union[float, Timeout, None]
    _limits: httpx.Limits
    _strict_response_validation: bool
    _idempotency_header: str | None

    def __init__(
        self,
        *,
        version: str,
        _strict_response_validation: bool,
        max_retries: int = DEFAULT_MAX_RETRIES,
        timeout: float | Timeout | None = DEFAULT_TIMEOUT,
        limits: httpx.Limits,
        custom_headers: Mapping[str, str] | None = None,
        custom_query: Mapping[str, object] | None = None,
    ) -> None:
        self._version = version
        self.max_retries = max_retries
        self.timeout = timeout
        self._limits = limits
        self._custom_headers = custom_headers or {}
        self._custom_query = custom_query or {}
        self._strict_response_validation = _strict_response_validation
        # subclasses may set this to a header name to enable automatic idempotency keys
        self._idempotency_header = None
+
    def _make_status_error_from_response(
        self,
        request: httpx.Request,
        response: httpx.Response,
    ) -> APIStatusError:
        """Build a typed status error, using the decoded JSON body when possible."""
        err_text = response.text.strip()
        body = err_text

        try:
            body = json.loads(err_text)
            err_msg = f"Error code: {response.status_code} - {body}"
        except Exception:
            # non-JSON body: fall back to the raw text, or just the status code
            err_msg = err_text or f"Error code: {response.status_code}"

        return self._make_status_error(err_msg, body=body, request=request, response=response)
+
+ def _make_status_error(
+ self,
+ err_msg: str,
+ *,
+ body: object,
+ request: httpx.Request,
+ response: httpx.Response,
+ ) -> APIStatusError:
+ if response.status_code == 400:
+ return exceptions.BadRequestError(err_msg, request=request, response=response, body=body)
+ if response.status_code == 401:
+ return exceptions.AuthenticationError(err_msg, request=request, response=response, body=body)
+ if response.status_code == 403:
+ return exceptions.PermissionDeniedError(err_msg, request=request, response=response, body=body)
+ if response.status_code == 404:
+ return exceptions.NotFoundError(err_msg, request=request, response=response, body=body)
+ if response.status_code == 409:
+ return exceptions.ConflictError(err_msg, request=request, response=response, body=body)
+ if response.status_code == 422:
+ return exceptions.UnprocessableEntityError(err_msg, request=request, response=response, body=body)
+ if response.status_code == 429:
+ return exceptions.RateLimitError(err_msg, request=request, response=response, body=body)
+ if response.status_code >= 500:
+ return exceptions.InternalServerError(err_msg, request=request, response=response, body=body)
+ return APIStatusError(err_msg, request=request, response=response, body=body)
+
+ def _remaining_retries(
+ self,
+ remaining_retries: Optional[int],
+ options: FinalRequestOptions,
+ ) -> int:
+ return remaining_retries if remaining_retries is not None else options.get_max_retries(self.max_retries)
+
    def _build_headers(self, options: FinalRequestOptions) -> httpx.Headers:
        """Merge default and per-request headers, adding an idempotency key
        for non-GET requests when the subclass has configured a header name."""
        custom_headers = options.headers or {}
        headers_dict = _merge_mappings(self.default_headers, custom_headers)
        self._validate_headers(headers_dict, custom_headers)

        headers = httpx.Headers(headers_dict)

        idempotency_header = self._idempotency_header
        if idempotency_header and options.method.lower() != "get" and idempotency_header not in headers:
            # generate a key once and store it on the options so retries reuse it
            if not options.idempotency_key:
                options.idempotency_key = self._idempotency_key()

            headers[idempotency_header] = options.idempotency_key

        return headers
+
    def _build_request(
        self,
        options: FinalRequestOptions,
    ) -> httpx.Request:
        """Translate our request options into an `httpx.Request`."""
        headers = self._build_headers(options)

        kwargs: dict[str, Any] = {}

        # merge `extra_json` into the main JSON body when both are given
        json_data = options.json_data
        if options.extra_json is not None:
            if json_data is None:
                json_data = cast(Body, options.extra_json)
            elif is_mapping(json_data):
                json_data = _merge_mappings(json_data, options.extra_json)
            else:
                raise RuntimeError(f"Unexpected JSON data type, {type(json_data)}, cannot merge with `extra_body`")

        params = _merge_mappings(self._custom_query, options.params)

        # If the given Content-Type header is multipart/form-data then it
        # has to be removed so that httpx can generate the header with
        # additional information for us as it has to be in this form
        # for the server to be able to correctly parse the request:
        # multipart/form-data; boundary=---abc--
        if headers.get("Content-Type") == "multipart/form-data":
            headers.pop("Content-Type")

            # As we are now sending multipart/form-data instead of application/json
            # we need to tell httpx to use it, https://www.python-httpx.org/advanced/#multipart-file-encoding
            if json_data:
                if not is_dict(json_data):
                    raise TypeError(
                        f"Expected query input to be a dictionary for multipart requests but got {type(json_data)} instead."
                    )
                kwargs["data"] = self._serialize_multipartform(json_data)

        # TODO: report this error to httpx
        return self._client.build_request(  # pyright: ignore[reportUnknownMemberType]
            headers=headers,
            timeout=self.timeout if isinstance(options.timeout, NotGiven) else options.timeout,
            method=options.method,
            url=options.url,
            # the `Query` type that we use is incompatible with qs'
            # `Params` type as it needs to be typed as `Mapping[str, object]`
            # so that passing a `TypedDict` doesn't cause an error.
            # https://github.com/microsoft/pyright/issues/3526#event-6715453066
            params=self.qs.stringify(cast(Mapping[str, Any], params)) if params else None,
            json=json_data,
            files=options.files,
            **kwargs,
        )
+
    def _serialize_multipartform(self, data: Mapping[object, object]) -> dict[str, object]:
        """Flatten nested JSON-style data into the flat key/value mapping that
        multipart form encoding requires (arrays use bracket notation)."""
        items = self.qs.stringify_items(
            # TODO: type ignore is required as stringify_items is well typed but we can't be
            # well typed without heavy validation.
            data,  # type: ignore
            array_format="brackets",
        )
        serialized: dict[str, object] = {}
        for key, value in items:
            if key in serialized:
                raise ValueError(f"Duplicate key encountered: {key}; This behaviour is not supported")
            serialized[key] = value
        return serialized
+
+ def _process_response(
+ self,
+ *,
+ cast_to: Type[ResponseT],
+ options: FinalRequestOptions,
+ response: httpx.Response,
+ ) -> ResponseT:
+ if cast_to is NoneType:
+ return cast(ResponseT, None)
+
+ if cast_to == str:
+ return cast(ResponseT, response.text)
+
+ origin = get_origin(cast_to) or cast_to
+
+ if inspect.isclass(origin) and issubclass(origin, httpx.Response):
+ # Because of the invariance of our ResponseT TypeVar, users can subclass httpx.Response
+ # and pass that class to our request functions. We cannot change the variance to be either
+ # covariant or contravariant as that makes our usage of ResponseT illegal. We could construct
+ # the response class ourselves but that is something that should be supported directly in httpx
+ # as it would be easy to incorrectly construct the Response object due to the multitude of arguments.
+ if cast_to != httpx.Response:
+ raise ValueError(f"Subclasses of httpx.Response cannot be passed to `cast_to`")
+ return cast(ResponseT, response)
+
+ # The check here is necessary as we are subverting the the type system
+ # with casts as the relationship between TypeVars and Types are very strict
+ # which means we must return *exactly* what was input or transform it in a
+ # way that retains the TypeVar state. As we cannot do that in this function
+ # then we have to resort to using `cast`. At the time of writing, we know this
+ # to be safe as we have handled all the types that could be bound to the
+ # `ResponseT` TypeVar, however if that TypeVar is ever updated in the future, then
+ # this function would become unsafe but a type checker would not report an error.
+ if (
+ cast_to is not UnknownResponse
+ and not origin is list
+ and not origin is dict
+ and not origin is Union
+ and not issubclass(origin, BaseModel)
+ ):
+ raise RuntimeError(
+ f"Invalid state, expected {cast_to} to be a subclass type of {BaseModel}, {dict}, {list} or {Union}."
+ )
+
+ # split is required to handle cases where additional information is included
+ # in the response, e.g. application/json; charset=utf-8
+ content_type, *_ = response.headers.get("content-type").split(";")
+ if content_type != "application/json":
+ raise ValueError(
+ f"Expected Content-Type response header to be `application/json` but received {content_type} instead."
+ )
+
+ data = response.json()
+ return self._process_response_data(data=data, cast_to=cast_to, response=response)
+
    def _process_response_data(
        self,
        *,
        data: object,
        cast_to: type[ResponseT],
        response: httpx.Response,
    ) -> ResponseT:
        """Convert already-decoded response data into `cast_to`.

        In strict mode the data is fully validated against the type;
        otherwise it is constructed permissively.
        """
        if data is None:
            return cast(ResponseT, None)

        if cast_to is UnknownResponse:
            return cast(ResponseT, data)

        # classes implementing ModelBuilderProtocol construct themselves from
        # the raw response (e.g. pagination pages)
        if inspect.isclass(cast_to) and issubclass(cast_to, ModelBuilderProtocol):
            return cast(ResponseT, cast_to.build(response=response, data=data))

        if self._strict_response_validation:
            return cast(ResponseT, validate_type(type_=cast_to, value=data))

        return cast(ResponseT, construct_type(type_=cast_to, value=data))
+
    def _process_stream_line(self, contents: str) -> str:
        """Pre-process an individual line from a streaming response."""
        # sentinel line marking the end of the stream
        if contents == "data: [DONE]\n":
            raise StopStreaming()

        # strip the SSE "data: " field prefix
        if contents.startswith("data: "):
            return contents[6:]

        return contents
+
    @property
    def qs(self) -> Querystring:
        """Query-string serializer; subclasses may override the format."""
        return Querystring()
+
    @property
    def custom_auth(self) -> httpx.Auth | None:
        """Optional httpx auth hook; `None` by default, overridden by subclasses."""
        return None
+
    @property
    def auth_headers(self) -> dict[str, str]:
        """Authentication headers; empty by default, overridden by subclasses."""
        return {}
+
    @property
    def default_headers(self) -> dict[str, str | Omit]:
        """Headers sent with every request; later entries override earlier ones."""
        return {
            "Content-Type": "application/json",
            "User-Agent": self.user_agent,
            **self.platform_headers(),
            **self.auth_headers,
            **self._custom_headers,
        }
+
    def _validate_headers(self, headers: Headers, custom_headers: Headers) -> None:
        """Validate the given default headers and custom headers.

        Does nothing by default.
        """
        return
+
    @property
    def user_agent(self) -> str:
        """User-Agent string derived from the concrete client class and version."""
        return f"{self.__class__.__name__}/Python {self._version}"
+
    @property
    def base_url(self) -> URL:
        """Base URL that relative request paths are resolved against."""
        return self._client.base_url
+
    @lru_cache(maxsize=None)
    def platform_headers(self) -> Dict[str, str]:
        """Telemetry headers describing the runtime environment.

        NOTE(review): `lru_cache` on an instance method keeps `self` alive in
        the cache for the life of the process; consider caching the
        platform/arch lookups at module level instead — confirm before changing.
        """
        return {
            "X-Stainless-Lang": "python",
            "X-Stainless-Package-Version": self._version,
            "X-Stainless-OS": str(get_platform()),
            "X-Stainless-Arch": str(get_architecture()),
            "X-Stainless-Runtime": platform.python_implementation(),
            "X-Stainless-Runtime-Version": platform.python_version(),
        }
+
+ def _calculate_retry_timeout(
+ self,
+ remaining_retries: int,
+ options: FinalRequestOptions,
+ response_headers: Optional[httpx.Headers] = None,
+ ) -> float:
+ max_retries = options.get_max_retries(self.max_retries)
+ try:
+ # About the Retry-After header: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After
+ #
+ # TODO: we may want to handle the case where the header is using the http-date syntax: "Retry-After:
+ # ". See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After#syntax for
+ # details.
+ retry_after = -1 if response_headers is None else int(response_headers.get("retry-after"))
+ except Exception:
+ retry_after = -1
+
+ # If the API asks us to wait a certain amount of time (and it's a reasonable amount), just do what it says.
+ if 0 < retry_after <= 60:
+ return retry_after
+
+ initial_retry_delay = 0.5
+ max_retry_delay = 2.0
+ nb_retries = max_retries - remaining_retries
+
+ # Apply exponential backoff, but not more than the max.
+ sleep_seconds = min(initial_retry_delay * pow(nb_retries - 1, 2), max_retry_delay)
+
+ # Apply some jitter, plus-or-minus half a second.
+ jitter = random() - 0.5
+ timeout = sleep_seconds + jitter
+ return timeout if timeout >= 0 else 0
+
+ def _should_retry(self, response: httpx.Response) -> bool:
+ # Note: this is not a standard header
+ should_retry_header = response.headers.get("x-should-retry")
+
+ # If the server explicitly says whether or not to retry, obey.
+ if should_retry_header == "true":
+ return True
+ if should_retry_header == "false":
+ return False
+
+ # Retry on lock timeouts.
+ if response.status_code == 409:
+ return True
+
+ # Retry on rate limits.
+ if response.status_code == 429:
+ return True
+
+ # Retry internal errors.
+ if response.status_code >= 500:
+ return True
+
+ return False
+
+ def _idempotency_key(self) -> str:
+ return f"stainless-python-retry-{uuid.uuid4()}"
+
+
class SyncAPIClient(BaseClient):
    """Synchronous HTTP client built on `httpx.Client`."""

    _client: httpx.Client

    def __init__(
        self,
        *,
        version: str,
        base_url: str,
        max_retries: int = DEFAULT_MAX_RETRIES,
        timeout: float | Timeout | None = DEFAULT_TIMEOUT,
        transport: Transport | None = None,
        proxies: ProxiesTypes | None = None,
        limits: Limits | None = DEFAULT_LIMITS,
        custom_headers: Mapping[str, str] | None = None,
        custom_query: Mapping[str, object] | None = None,
        _strict_response_validation: bool,
    ) -> None:
        # guard against an explicit `limits=None`
        limits = limits or DEFAULT_LIMITS
        super().__init__(
            version=version,
            limits=limits,
            timeout=timeout,
            max_retries=max_retries,
            custom_query=custom_query,
            custom_headers=custom_headers,
            _strict_response_validation=_strict_response_validation,
        )
        self._client = httpx.Client(
            base_url=base_url,
            timeout=timeout,
            proxies=proxies,  # type: ignore
            transport=transport,  # type: ignore
            limits=limits,
            headers={"Accept": "application/json"},
        )
+
    # overloads narrow the return type based on the `stream` flag
    @overload
    def request(
        self,
        cast_to: Type[ResponseT],
        options: FinalRequestOptions,
        remaining_retries: Optional[int] = None,
        *,
        stream: Literal[True],
    ) -> Stream[ResponseT]:
        ...

    @overload
    def request(
        self,
        cast_to: Type[ResponseT],
        options: FinalRequestOptions,
        remaining_retries: Optional[int] = None,
        *,
        stream: Literal[False] = False,
    ) -> ResponseT:
        ...

    @overload
    def request(
        self,
        cast_to: Type[ResponseT],
        options: FinalRequestOptions,
        remaining_retries: Optional[int] = None,
        *,
        stream: bool = False,
    ) -> ResponseT | Stream[ResponseT]:
        ...

    def request(
        self,
        cast_to: Type[ResponseT],
        options: FinalRequestOptions,
        remaining_retries: Optional[int] = None,
        *,
        stream: bool = False,
    ) -> ResponseT | Stream[ResponseT]:
        """Send an API request; returns a `Stream` wrapper when `stream=True`."""
        return self._request(
            cast_to=cast_to,
            options=options,
            stream=stream,
            remaining_retries=remaining_retries,
        )
+
    def _request(
        self,
        *,
        cast_to: Type[ResponseT],
        options: FinalRequestOptions,
        remaining_retries: int | None,
        stream: bool,
    ) -> ResponseT | Stream[ResponseT]:
        """Send the request, retrying retryable failures, and parse the result.

        Raises:
            APIStatusError subclass: for non-retryable 4xx/5xx responses.
            APITimeoutError / APIConnectionError: on exhausted transport failures.
            APIResponseValidationError: when strict validation rejects the body.
        """
        retries = self._remaining_retries(remaining_retries, options)
        request = self._build_request(options)

        try:
            response = self._client.send(request, auth=self.custom_auth, stream=stream)
            response.raise_for_status()
        except httpx.HTTPStatusError as err:  # thrown on 4xx and 5xx status code
            if retries > 0 and self._should_retry(err.response):
                return self._retry_request(options, cast_to, retries, err.response.headers, stream=stream)

            # If the response is streamed then we need to explicitly read the response
            # to completion before attempting to access the response text.
            err.response.read()
            raise self._make_status_error_from_response(request, err.response) from None
        except httpx.TimeoutException as err:
            if retries > 0:
                return self._retry_request(options, cast_to, retries, stream=stream)
            raise APITimeoutError(request=request) from err
        except Exception as err:
            if retries > 0:
                return self._retry_request(options, cast_to, retries, stream=stream)
            raise APIConnectionError(request=request) from err

        if stream:
            return Stream(cast_to=cast_to, response=response, client=self)

        try:
            rsp = self._process_response(cast_to=cast_to, options=options, response=response)
        except pydantic.ValidationError as err:
            raise APIResponseValidationError(request=request, response=response) from err

        return rsp
+
    def _retry_request(
        self,
        options: FinalRequestOptions,
        cast_to: Type[ResponseT],
        remaining_retries: int,
        response_headers: Optional[httpx.Headers] = None,
        *,
        stream: bool,
    ) -> ResponseT | Stream[ResponseT]:
        """Sleep for the computed backoff, then re-issue the request."""
        remaining = remaining_retries - 1
        timeout = self._calculate_retry_timeout(remaining, options, response_headers)

        # In a synchronous context we are blocking the entire thread. Up to the library user to run the client in a
        # different thread if necessary.
        time.sleep(timeout)

        return self._request(
            options=options,
            cast_to=cast_to,
            remaining_retries=remaining,
            stream=stream,
        )
+
    def _request_api_list(
        self,
        model: Type[ModelT],
        page: Type[SyncPageT],
        options: FinalRequestOptions,
    ) -> SyncPageT:
        """Fetch a page and attach the state needed for further pagination."""
        resp = cast(SyncPageT, self.request(page, options, stream=False))
        resp._set_private_attributes(  # pyright: ignore[reportPrivateUsage]
            client=self,
            model=model,
            options=options,
        )
        return resp
+
    def get(
        self,
        path: str,
        *,
        cast_to: Type[ResponseT],
        options: RequestOptions = {},
    ) -> ResponseT:
        """Issue a GET request to `path` and parse the response as `cast_to`."""
        opts = FinalRequestOptions.construct(method="get", url=path, **options)
        # cast is required because mypy complains about returning Any even though
        # it understands the type variables
        return cast(ResponseT, self.request(cast_to, opts, stream=False))
+
    # overloads narrow the return type based on the `stream` flag
    @overload
    def post(
        self,
        path: str,
        *,
        cast_to: Type[ResponseT],
        body: Body | None = None,
        options: RequestOptions = {},
        files: RequestFiles | None = None,
        stream: Literal[False] = False,
    ) -> ResponseT:
        ...

    @overload
    def post(
        self,
        path: str,
        *,
        cast_to: Type[ResponseT],
        body: Body | None = None,
        options: RequestOptions = {},
        files: RequestFiles | None = None,
        stream: Literal[True],
    ) -> Stream[ResponseT]:
        ...

    @overload
    def post(
        self,
        path: str,
        *,
        cast_to: Type[ResponseT],
        body: Body | None = None,
        options: RequestOptions = {},
        files: RequestFiles | None = None,
        stream: bool,
    ) -> ResponseT | Stream[ResponseT]:
        ...

    def post(
        self,
        path: str,
        *,
        cast_to: Type[ResponseT],
        body: Body | None = None,
        options: RequestOptions = {},
        files: RequestFiles | None = None,
        stream: bool = False,
    ) -> ResponseT | Stream[ResponseT]:
        """Issue a POST request with an optional JSON body and/or files."""
        opts = FinalRequestOptions.construct(method="post", url=path, json_data=body, files=files, **options)
        return cast(ResponseT, self.request(cast_to, opts, stream=stream))
+
    def patch(
        self,
        path: str,
        *,
        cast_to: Type[ResponseT],
        body: Body | None = None,
        options: RequestOptions = {},
    ) -> ResponseT:
        """Issue a PATCH request with an optional JSON body."""
        opts = FinalRequestOptions.construct(method="patch", url=path, json_data=body, **options)
        return cast(ResponseT, self.request(cast_to, opts))
+
    def put(
        self,
        path: str,
        *,
        cast_to: Type[ResponseT],
        body: Body | None = None,
        files: RequestFiles | None = None,
        options: RequestOptions = {},
    ) -> ResponseT:
        """Issue a PUT request with an optional JSON body and/or files."""
        opts = FinalRequestOptions.construct(method="put", url=path, json_data=body, files=files, **options)
        return cast(ResponseT, self.request(cast_to, opts))
+
    def delete(
        self,
        path: str,
        *,
        cast_to: Type[ResponseT],
        body: Body | None = None,
        options: RequestOptions = {},
    ) -> ResponseT:
        """Issue a DELETE request with an optional JSON body."""
        opts = FinalRequestOptions.construct(method="delete", url=path, json_data=body, **options)
        return cast(ResponseT, self.request(cast_to, opts))
+
    def get_api_list(
        self,
        path: str,
        *,
        model: Type[ModelT],
        page: Type[SyncPageT],
        body: Body | None = None,
        options: RequestOptions = {},
        method: str = "get",
    ) -> SyncPageT:
        """Request a paginated list endpoint and return its first page."""
        opts = FinalRequestOptions.construct(method=method, url=path, json_data=body, **options)
        return self._request_api_list(model, page, opts)
+
+
class AsyncAPIClient(BaseClient):
    """Asynchronous HTTP client built on `httpx.AsyncClient`."""

    _client: httpx.AsyncClient

    def __init__(
        self,
        *,
        version: str,
        base_url: str,
        _strict_response_validation: bool,
        max_retries: int = DEFAULT_MAX_RETRIES,
        timeout: float | Timeout | None = DEFAULT_TIMEOUT,
        transport: Transport | None = None,
        proxies: ProxiesTypes | None = None,
        limits: Limits | None = DEFAULT_LIMITS,
        custom_headers: Mapping[str, str] | None = None,
        custom_query: Mapping[str, object] | None = None,
    ) -> None:
        # guard against an explicit `limits=None`
        limits = limits or DEFAULT_LIMITS
        super().__init__(
            version=version,
            limits=limits,
            timeout=timeout,
            max_retries=max_retries,
            custom_query=custom_query,
            custom_headers=custom_headers,
            _strict_response_validation=_strict_response_validation,
        )
        self._client = httpx.AsyncClient(
            base_url=base_url,
            timeout=timeout,
            proxies=proxies,  # type: ignore
            transport=transport,  # type: ignore
            limits=limits,
            headers={"Accept": "application/json"},
        )
+
    # overloads narrow the return type based on the `stream` flag
    @overload
    async def request(
        self,
        cast_to: Type[ResponseT],
        options: FinalRequestOptions,
        *,
        stream: Literal[False] = False,
        remaining_retries: Optional[int] = None,
    ) -> ResponseT:
        ...

    @overload
    async def request(
        self,
        cast_to: Type[ResponseT],
        options: FinalRequestOptions,
        *,
        stream: Literal[True],
        remaining_retries: Optional[int] = None,
    ) -> AsyncStream[ResponseT]:
        ...

    @overload
    async def request(
        self,
        cast_to: Type[ResponseT],
        options: FinalRequestOptions,
        *,
        stream: bool,
        remaining_retries: Optional[int] = None,
    ) -> ResponseT | AsyncStream[ResponseT]:
        ...

    async def request(
        self,
        cast_to: Type[ResponseT],
        options: FinalRequestOptions,
        *,
        stream: bool = False,
        remaining_retries: Optional[int] = None,
    ) -> ResponseT | AsyncStream[ResponseT]:
        """Send an API request; returns an `AsyncStream` wrapper when `stream=True`."""
        return await self._request(
            cast_to=cast_to,
            options=options,
            stream=stream,
            remaining_retries=remaining_retries,
        )
+
    async def _request(
        self,
        cast_to: Type[ResponseT],
        options: FinalRequestOptions,
        *,
        stream: bool,
        remaining_retries: int | None,
    ) -> ResponseT | AsyncStream[ResponseT]:
        """Send the request, retrying retryable failures, and parse the result.

        Raises:
            APIStatusError subclass: for non-retryable 4xx/5xx responses.
            httpx.ReadTimeout: deliberately propagated without retry (see below).
            APITimeoutError / APIConnectionError: on exhausted transport failures.
            APIResponseValidationError: when strict validation rejects the body.
        """
        retries = self._remaining_retries(remaining_retries, options)
        request = self._build_request(options)

        try:
            response = await self._client.send(request, auth=self.custom_auth, stream=stream)
            response.raise_for_status()
        except httpx.HTTPStatusError as err:  # thrown on 4xx and 5xx status code
            if retries > 0 and self._should_retry(err.response):
                return await self._retry_request(options, cast_to, retries, err.response.headers, stream=stream)

            # If the response is streamed then we need to explicitly read the response
            # to completion before attempting to access the response text.
            await err.response.aread()
            raise self._make_status_error_from_response(request, err.response) from None
        except httpx.ConnectTimeout as err:
            if retries > 0:
                return await self._retry_request(options, cast_to, retries, stream=stream)
            raise APITimeoutError(request=request) from err
        except httpx.ReadTimeout as err:
            # We explicitly do not retry on ReadTimeout errors as this means
            # that the server processing the request has taken 60 seconds
            # (our default timeout). This likely indicates that something
            # is not working as expected on the server side.
            raise
        except httpx.TimeoutException as err:
            if retries > 0:
                return await self._retry_request(options, cast_to, retries, stream=stream)
            raise APITimeoutError(request=request) from err
        except Exception as err:
            if retries > 0:
                return await self._retry_request(options, cast_to, retries, stream=stream)
            raise APIConnectionError(request=request) from err

        if stream:
            return AsyncStream(cast_to=cast_to, response=response, client=self)

        try:
            rsp = self._process_response(cast_to=cast_to, options=options, response=response)
        except pydantic.ValidationError as err:
            raise APIResponseValidationError(request=request, response=response) from err

        return rsp
+
    async def _retry_request(
        self,
        options: FinalRequestOptions,
        cast_to: Type[ResponseT],
        remaining_retries: int,
        response_headers: Optional[httpx.Headers] = None,
        *,
        stream: bool,
    ) -> ResponseT | AsyncStream[ResponseT]:
        """Sleep for the computed backoff (without blocking the event loop),
        then re-issue the request."""
        remaining = remaining_retries - 1
        timeout = self._calculate_retry_timeout(remaining, options, response_headers)

        await anyio.sleep(timeout)

        return await self._request(
            options=options,
            cast_to=cast_to,
            remaining_retries=remaining,
            stream=stream,
        )
+
    def _request_api_list(
        self,
        model: Type[ModelT],
        page: Type[AsyncPageT],
        options: FinalRequestOptions,
    ) -> AsyncPaginator[ModelT, AsyncPageT]:
        """Return an awaitable paginator; no request is sent until awaited/iterated."""
        return AsyncPaginator(client=self, options=options, page_cls=page, model=model)
+
    async def get(
        self,
        path: str,
        *,
        cast_to: Type[ResponseT],
        options: RequestOptions = {},
    ) -> ResponseT:
        """Issue a GET request to `path` and parse the response as `cast_to`."""
        opts = FinalRequestOptions.construct(method="get", url=path, **options)
        return await self.request(cast_to, opts)
+
    # overloads narrow the return type based on the `stream` flag
    @overload
    async def post(
        self,
        path: str,
        *,
        cast_to: Type[ResponseT],
        body: Body | None = None,
        files: RequestFiles | None = None,
        options: RequestOptions = {},
        stream: Literal[False] = False,
    ) -> ResponseT:
        ...

    @overload
    async def post(
        self,
        path: str,
        *,
        cast_to: Type[ResponseT],
        body: Body | None = None,
        files: RequestFiles | None = None,
        options: RequestOptions = {},
        stream: Literal[True],
    ) -> AsyncStream[ResponseT]:
        ...

    @overload
    async def post(
        self,
        path: str,
        *,
        cast_to: Type[ResponseT],
        body: Body | None = None,
        files: RequestFiles | None = None,
        options: RequestOptions = {},
        stream: bool,
    ) -> ResponseT | AsyncStream[ResponseT]:
        ...

    async def post(
        self,
        path: str,
        *,
        cast_to: Type[ResponseT],
        body: Body | None = None,
        files: RequestFiles | None = None,
        options: RequestOptions = {},
        stream: bool = False,
    ) -> ResponseT | AsyncStream[ResponseT]:
        """Issue a POST request with an optional JSON body and/or files."""
        opts = FinalRequestOptions.construct(method="post", url=path, json_data=body, files=files, **options)
        return await self.request(cast_to, opts, stream=stream)
+
    async def patch(
        self,
        path: str,
        *,
        cast_to: Type[ResponseT],
        body: Body | None = None,
        options: RequestOptions = {},
    ) -> ResponseT:
        """Issue a PATCH request with an optional JSON body."""
        opts = FinalRequestOptions.construct(method="patch", url=path, json_data=body, **options)
        return await self.request(cast_to, opts)
+
    async def put(
        self,
        path: str,
        *,
        cast_to: Type[ResponseT],
        body: Body | None = None,
        files: RequestFiles | None = None,
        options: RequestOptions = {},
    ) -> ResponseT:
        """Issue a PUT request with an optional JSON body and/or files."""
        opts = FinalRequestOptions.construct(method="put", url=path, json_data=body, files=files, **options)
        return await self.request(cast_to, opts)
+
    async def delete(
        self,
        path: str,
        *,
        cast_to: Type[ResponseT],
        body: Body | None = None,
        options: RequestOptions = {},
    ) -> ResponseT:
        """Issue a DELETE request with an optional JSON body."""
        opts = FinalRequestOptions.construct(method="delete", url=path, json_data=body, **options)
        return await self.request(cast_to, opts)
+
    def get_api_list(
        self,
        path: str,
        *,
        # TODO: support paginating `str`
        model: Type[ModelT],
        page: Type[AsyncPageT],
        body: Body | None = None,
        options: RequestOptions = {},
        method: str = "get",
    ) -> AsyncPaginator[ModelT, AsyncPageT]:
        """Return a paginator for a list endpoint; nothing is sent until awaited."""
        opts = FinalRequestOptions.construct(method=method, url=path, json_data=body, **options)
        return self._request_api_list(model, page, opts)
+
+
def make_request_options(
    *,
    query: Query | None = None,
    extra_headers: Headers | None = None,
    extra_query: Query | None = None,
    extra_body: Body | None = None,
    idempotency_key: str | None = None,
    timeout: float | None | NotGiven = NOT_GIVEN,
) -> RequestOptions:
    """Create a dict of type RequestOptions, including only the keys whose
    value was actually given."""
    options: RequestOptions = {}

    if extra_headers is not None:
        options["headers"] = extra_headers
    if extra_body is not None:
        options["extra_json"] = cast(AnyMapping, extra_body)

    # `query` seeds the params; `extra_query` entries take precedence on conflict
    if query is not None:
        options["params"] = query
    if extra_query is not None:
        merged_params = {**options.get("params", {}), **extra_query}
        options["params"] = merged_params

    if not isinstance(timeout, NotGiven):
        options["timeout"] = timeout
    if idempotency_key is not None:
        options["idempotency_key"] = idempotency_key

    return options
+
+
class OtherPlatform:
    """Wrapper for a platform name not covered by the known Literal values."""

    def __init__(self, name: str) -> None:
        self.name = name

    def __str__(self) -> str:
        return "Other:{}".format(self.name)
+
+
# Best-effort host platform identifier: either one of the known Literal
# values or an `OtherPlatform` wrapper around the raw platform string.
Platform = Union[
    OtherPlatform,
    Literal[
        "MacOS",
        "Linux",
        "Windows",
        "FreeBSD",
        "OpenBSD",
        "iOS",
        "Android",
        "Unknown",
    ],
]
+
+
def get_platform() -> Platform:
    """Best-effort detection of the host operating system / platform."""
    system = platform.system().lower()
    platform_name = platform.platform().lower()
    if "iphone" in platform_name or "ipad" in platform_name:
        # Tested using Python3IDE on an iPhone 11 and Pythonista on an iPad 7
        # system is Darwin and platform_name is a string like:
        # - Darwin-21.6.0-iPhone12,1-64bit
        # - Darwin-21.6.0-iPad7,11-64bit
        return "iOS"

    if system == "darwin":
        return "MacOS"

    if system == "windows":
        return "Windows"

    if "android" in platform_name:
        # Tested using Pydroid 3
        # system is Linux and platform_name is a string like 'Linux-5.10.81-android12-9-00001-geba40aecb3b7-ab8534902-aarch64-with-libc'
        return "Android"

    if system == "linux":
        # https://distro.readthedocs.io/en/latest/#distro.id
        distro_id = distro.id()
        if distro_id == "freebsd":
            return "FreeBSD"

        if distro_id == "openbsd":
            return "OpenBSD"

        return "Linux"

    if platform_name:
        return OtherPlatform(platform_name)

    return "Unknown"
+
+
class OtherArch:
    """Wrapper for a CPU architecture name not covered by the known Literal values."""

    def __init__(self, name: str) -> None:
        self.name = name

    def __str__(self) -> str:
        return "other:{}".format(self.name)
+
+
+Arch = Union[OtherArch, Literal["x32", "x64", "arm", "arm64", "unknown"]]
+
+
def get_architecture() -> Arch:
    """Best-effort detection of the host CPU architecture."""
    bitness, _linkage = platform.architecture()
    machine = platform.machine().lower()

    if machine in {"arm64", "aarch64"}:
        return "arm64"

    # TODO: untested
    if machine == "arm":
        return "arm"

    if machine == "x86_64":
        return "x64"

    # TODO: untested
    if bitness == "32bit":
        return "x32"

    return OtherArch(machine) if machine else "unknown"
+
+
def _merge_mappings(
    obj1: Mapping[_T_co, Union[_T, Omit]],
    obj2: Mapping[_T_co, Union[_T, Omit]],
) -> Dict[_T_co, _T]:
    """Merge two mappings of the same type, removing any values that are instances of `Omit`.

    In cases with duplicate keys the second mapping takes precedence.
    """
    combined: Dict[_T_co, Union[_T, Omit]] = {}
    combined.update(obj1)
    combined.update(obj2)

    # filter *after* merging so an `Omit` in obj2 also drops a real value from obj1
    result: Dict[_T_co, _T] = {}
    for key, value in combined.items():
        if isinstance(value, Omit):
            continue
        result[key] = value
    return result
diff --git a/src/finch/_base_exceptions.py b/src/finch/_base_exceptions.py
new file mode 100644
index 00000000..aac00103
--- /dev/null
+++ b/src/finch/_base_exceptions.py
@@ -0,0 +1,117 @@
+from typing_extensions import Literal
+
+from httpx import Request, Response
+
+
+class APIError(Exception):
+ message: str
+ request: Request
+
+ def __init__(self, message: str, request: Request) -> None:
+ super().__init__(message)
+ self.request = request
+ self.message = message
+
+
+class APIResponseValidationError(APIError):
+ response: Response
+ status_code: int
+
+ def __init__(self, request: Request, response: Response) -> None:
+ super().__init__("Data returned by API invalid for expected schema.", request)
+ self.response = response
+ self.status_code = response.status_code
+
+
+class APIStatusError(APIError):
+ """Raised when an API response has a status code of 4xx or 5xx."""
+
+ response: Response
+ status_code: int
+
+ body: object
+ """The API response body.
+
+ If the API responded with a valid JSON structure then this property will be the decoded result.
+ If it isn't a valid JSON structure then this will be the raw response.
+ """
+
+ def __init__(self, message: str, *, request: Request, response: Response, body: object) -> None:
+ super().__init__(message, request)
+ self.response = response
+ self.status_code = response.status_code
+ self.body = body
+
+
+class BadRequestError(APIStatusError):
+ status_code: Literal[400]
+
+ def __init__(self, message: str, *, request: Request, response: Response, body: object) -> None:
+ super().__init__(message, request=request, response=response, body=body)
+ self.status_code = 400
+
+
+class AuthenticationError(APIStatusError):
+ status_code: Literal[401]
+
+ def __init__(self, message: str, *, request: Request, response: Response, body: object) -> None:
+ super().__init__(message, request=request, response=response, body=body)
+ self.status_code = 401
+
+
+class PermissionDeniedError(APIStatusError):
+ status_code: Literal[403]
+
+ def __init__(self, message: str, *, request: Request, response: Response, body: object) -> None:
+ super().__init__(message, request=request, response=response, body=body)
+ self.status_code = 403
+
+
+class NotFoundError(APIStatusError):
+ status_code: Literal[404]
+
+ def __init__(self, message: str, *, request: Request, response: Response, body: object) -> None:
+ super().__init__(message, request=request, response=response, body=body)
+ self.status_code = 404
+
+
+class ConflictError(APIStatusError):
+ status_code: Literal[409]
+
+ def __init__(self, message: str, *, request: Request, response: Response, body: object) -> None:
+ super().__init__(message, request=request, response=response, body=body)
+ self.status_code = 409
+
+
+class UnprocessableEntityError(APIStatusError):
+ status_code: Literal[422]
+
+ def __init__(self, message: str, *, request: Request, response: Response, body: object) -> None:
+ super().__init__(message, request=request, response=response, body=body)
+ self.status_code = 422
+
+
+class RateLimitError(APIStatusError):
+ status_code: Literal[429]
+
+ def __init__(self, message: str, *, request: Request, response: Response, body: object) -> None:
+ super().__init__(message, request=request, response=response, body=body)
+ self.status_code = 429
+
+
+class InternalServerError(APIStatusError):
+ status_code: int
+
+ def __init__(self, message: str, *, request: Request, response: Response, body: object) -> None:
+ super().__init__(message, request=request, response=response, body=body)
+ self.status_code = response.status_code
+
+
+class APIConnectionError(APIError):
+ def __init__(self, request: Request, message: str = "Connection error.") -> None:
+ super().__init__(message, request)
+
+
+class APITimeoutError(APIConnectionError):
+ def __init__(self, request: Request) -> None:
+ super().__init__(request, "Request timed out.")
diff --git a/src/finch/_client.py b/src/finch/_client.py
new file mode 100644
index 00000000..4cdb3b70
--- /dev/null
+++ b/src/finch/_client.py
@@ -0,0 +1,485 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+import os
+from typing import Union, Mapping, Optional
+
+import httpx
+
+from . import resources
+from ._qs import Querystring
+from ._types import (
+ NOT_GIVEN,
+ Omit,
+ Headers,
+ Timeout,
+ NotGiven,
+ Transport,
+ ProxiesTypes,
+ RequestOptions,
+)
+from ._version import __version__
+from ._base_client import DEFAULT_LIMITS, DEFAULT_TIMEOUT, DEFAULT_MAX_RETRIES
+from ._base_client import Stream as Stream
+from ._base_client import AsyncStream as AsyncStream
+from ._base_client import SyncAPIClient, AsyncAPIClient
+
+__all__ = [
+ "Timeout",
+ "Transport",
+ "ProxiesTypes",
+ "RequestOptions",
+ "resources",
+ "Finch",
+ "AsyncFinch",
+ "Client",
+ "AsyncClient",
+]
+
+
+class Finch(SyncAPIClient):
+ ats: resources.ATS
+ hris: resources.HRIS
+ providers: resources.Providers
+ account: resources.Account
+
+ # client options
+ access_token: str | None
+ client_id: str | None
+ client_secret: str | None
+
+ def __init__(
+ self,
+ *,
+ client_id: str | None = None,
+ client_secret: str | None = None,
+ base_url: Optional[str] = None,
+ access_token: Optional[str] = None,
+ timeout: Union[float, Timeout, None] = DEFAULT_TIMEOUT,
+ max_retries: int = DEFAULT_MAX_RETRIES,
+ default_headers: Mapping[str, str] | None = None,
+ default_query: Mapping[str, object] | None = None,
+ # See httpx documentation for [custom transports](https://www.python-httpx.org/advanced/#custom-transports)
+ transport: Optional[Transport] = None,
+ # See httpx documentation for [proxies](https://www.python-httpx.org/advanced/#http-proxying)
+ proxies: Optional[ProxiesTypes] = None,
+ # See httpx documentation for [limits](https://www.python-httpx.org/advanced/#pool-limit-configuration)
+ connection_pool_limits: httpx.Limits | None = DEFAULT_LIMITS,
+ # Enable or disable schema validation for data returned by the API.
+ # When enabled an error APIResponseValidationError is raised
+ # if the API responds with invalid data for the expected schema.
+ #
+ # This parameter may be removed or changed in the future.
+ # If you rely on this feature, please open a GitHub issue
+ # outlining your use-case to help us decide if it should be
+ # part of our public interface in the future.
+ _strict_response_validation: bool = False,
+ ) -> None:
+ """Construct a new synchronous Finch client instance.
+
+ This automatically infers the following arguments from their corresponding environment variables if they are not provided:
+ - `client_id` from `FINCH_CLIENT_ID`
+ - `client_secret` from `FINCH_CLIENT_SECRET`
+ """
+
+ if base_url is None:
+ base_url = "https://api.tryfinch.com"
+
+ super().__init__(
+ version=__version__,
+ base_url=base_url,
+ max_retries=max_retries,
+ timeout=timeout,
+ transport=transport,
+ proxies=proxies,
+ limits=connection_pool_limits,
+ custom_headers=default_headers,
+ custom_query=default_query,
+ _strict_response_validation=_strict_response_validation,
+ )
+
+ self.access_token = access_token
+
+ client_id_envvar = os.environ.get("FINCH_CLIENT_ID", None)
+ self.client_id = client_id or client_id_envvar or None
+
+ client_secret_envvar = os.environ.get("FINCH_CLIENT_SECRET", None)
+ self.client_secret = client_secret or client_secret_envvar or None
+
+ self.ats = resources.ATS(self)
+ self.hris = resources.HRIS(self)
+ self.providers = resources.Providers(self)
+ self.account = resources.Account(self)
+
+ @property
+ def qs(self) -> Querystring:
+ return Querystring(array_format="comma")
+
+ @property
+ def auth_headers(self) -> dict[str, str]:
+ access_token = self.access_token
+ if access_token is None:
+ return {}
+ return {"Authorization": f"Bearer {access_token}"}
+
+ @property
+ def default_headers(self) -> dict[str, str | Omit]:
+ return {
+ **super().default_headers,
+ "Finch-API-Version": "2020-09-17",
+ **self._custom_headers,
+ }
+
+ def _validate_headers(self, headers: Headers, custom_headers: Headers) -> None:
+ if self.access_token and headers.get("Authorization"):
+ return
+ if isinstance(custom_headers.get("Authorization"), Omit):
+ return
+
+ raise TypeError(
+ '"Could not resolve authentication method. Expected the access_token to be set. Or for the `Authorization` headers to be explicitly omitted"'
+ )
+
+ def copy(
+ self,
+ *,
+ client_id: str | None = None,
+ client_secret: str | None = None,
+ access_token: str | None = None,
+ base_url: str | None = None,
+ timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
+ connection_pool_limits: httpx.Limits | NotGiven = NOT_GIVEN,
+ max_retries: int | NotGiven = NOT_GIVEN,
+ default_headers: Mapping[str, str] | None = None,
+ set_default_headers: Mapping[str, str] | None = None,
+ default_query: Mapping[str, object] | None = None,
+ set_default_query: Mapping[str, object] | None = None,
+ ) -> Finch:
+ """
+ Create a new client instance re-using the same options given to the current client with optional overriding.
+
+ It should be noted that this does not share the underlying httpx client class which may lead
+ to performance issues.
+ """
+ if default_headers is not None and set_default_headers is not None:
+ raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive")
+
+ if default_query is not None and set_default_query is not None:
+ raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive")
+
+ headers = self._custom_headers
+ if default_headers is not None:
+ headers = {**headers, **default_headers}
+ elif set_default_headers is not None:
+ headers = set_default_headers
+
+ params = self._custom_query
+ if default_query is not None:
+ params = {**params, **default_query}
+ elif set_default_query is not None:
+ params = set_default_query
+
+ # TODO: share the same httpx client between instances
+ return self.__class__(
+ client_id=client_id or self.client_id,
+ client_secret=client_secret or self.client_secret,
+ base_url=base_url or str(self.base_url),
+ access_token=access_token or self.access_token,
+ timeout=self.timeout if isinstance(timeout, NotGiven) else timeout,
+ connection_pool_limits=self._limits
+ if isinstance(connection_pool_limits, NotGiven)
+ else connection_pool_limits,
+ max_retries=self.max_retries if isinstance(max_retries, NotGiven) else max_retries,
+ default_headers=headers,
+ default_query=params,
+ )
+
+ # Alias for `copy` for nicer inline usage, e.g.
+ # client.with_options(timeout=10).foo.create(...)
+ with_options = copy
+
+ def get_access_token(
+ self,
+ code: str,
+ *,
+ redirect_uri: str,
+ ) -> str:
+ """Returns an access token for the Finch API given an authorization code.
+
+ An
+ authorization code can be obtained by visiting the url returned by
+ get_auth_url().
+ """
+ if self.client_id is None:
+ raise ValueError("Expected client_id to be set in order to call get_access_token")
+
+ if self.client_secret is None:
+ raise ValueError("Expected client_secret to be set in order to call get_access_token")
+
+ response = self.post(
+ "/auth/token",
+ body={
+ "client_id": self.client_id,
+ "client_secret": self.client_secret,
+ "code": code,
+ "redirect_uri": redirect_uri,
+ },
+ options={"headers": {"Authorization": Omit()}},
+ cast_to=httpx.Response,
+ )
+ data = response.json()
+ return str(data["access_token"])
+
+ def get_auth_url(
+ self,
+ *,
+ products: str,
+ redirect_uri: str,
+ sandbox: bool,
+ ) -> str:
+ """
+ Returns the authorization URL which can be visited in order to obtain an
+ authorization code from Finch. The authorization code can then be exchanged for
+ an access token for the Finch API by calling get_access_token().
+ """
+ if self.client_id is None:
+ raise ValueError("Expected the client_id to be set in order to call get_auth_url")
+
+ return str(
+ httpx.URL(
+ "https://connect.tryfinch.com/authorize",
+ params={
+ "client_id": self.client_id,
+ "products": products,
+ "redirect_uri": redirect_uri,
+ "sandbox": sandbox,
+ },
+ )
+ )
+
+
+class AsyncFinch(AsyncAPIClient):
+ ats: resources.AsyncATS
+ hris: resources.AsyncHRIS
+ providers: resources.AsyncProviders
+ account: resources.AsyncAccount
+
+ # client options
+ access_token: str | None
+ client_id: str | None
+ client_secret: str | None
+
+ def __init__(
+ self,
+ *,
+ client_id: str | None = None,
+ client_secret: str | None = None,
+ base_url: Optional[str] = None,
+ access_token: Optional[str] = None,
+ timeout: Union[float, Timeout, None] = DEFAULT_TIMEOUT,
+ max_retries: int = DEFAULT_MAX_RETRIES,
+ default_headers: Mapping[str, str] | None = None,
+ default_query: Mapping[str, object] | None = None,
+ # See httpx documentation for [custom transports](https://www.python-httpx.org/advanced/#custom-transports)
+ transport: Optional[Transport] = None,
+ # See httpx documentation for [proxies](https://www.python-httpx.org/advanced/#http-proxying)
+ proxies: Optional[ProxiesTypes] = None,
+ # See httpx documentation for [limits](https://www.python-httpx.org/advanced/#pool-limit-configuration)
+ connection_pool_limits: httpx.Limits | None = DEFAULT_LIMITS,
+ # Enable or disable schema validation for data returned by the API.
+ # When enabled an error APIResponseValidationError is raised
+ # if the API responds with invalid data for the expected schema.
+ #
+ # This parameter may be removed or changed in the future.
+ # If you rely on this feature, please open a GitHub issue
+ # outlining your use-case to help us decide if it should be
+ # part of our public interface in the future.
+ _strict_response_validation: bool = False,
+ ) -> None:
+ """Construct a new async Finch client instance.
+
+ This automatically infers the following arguments from their corresponding environment variables if they are not provided:
+ - `client_id` from `FINCH_CLIENT_ID`
+ - `client_secret` from `FINCH_CLIENT_SECRET`
+ """
+
+ if base_url is None:
+ base_url = "https://api.tryfinch.com"
+
+ super().__init__(
+ version=__version__,
+ base_url=base_url,
+ max_retries=max_retries,
+ timeout=timeout,
+ transport=transport,
+ proxies=proxies,
+ limits=connection_pool_limits,
+ custom_headers=default_headers,
+ custom_query=default_query,
+ _strict_response_validation=_strict_response_validation,
+ )
+
+ self.access_token = access_token
+
+ client_id_envvar = os.environ.get("FINCH_CLIENT_ID", None)
+ self.client_id = client_id or client_id_envvar or None
+
+ client_secret_envvar = os.environ.get("FINCH_CLIENT_SECRET", None)
+ self.client_secret = client_secret or client_secret_envvar or None
+
+ self.ats = resources.AsyncATS(self)
+ self.hris = resources.AsyncHRIS(self)
+ self.providers = resources.AsyncProviders(self)
+ self.account = resources.AsyncAccount(self)
+
+ @property
+ def qs(self) -> Querystring:
+ return Querystring(array_format="comma")
+
+ @property
+ def auth_headers(self) -> dict[str, str]:
+ access_token = self.access_token
+ if access_token is None:
+ return {}
+ return {"Authorization": f"Bearer {access_token}"}
+
+ @property
+ def default_headers(self) -> dict[str, str | Omit]:
+ return {
+ **super().default_headers,
+ "Finch-API-Version": "2020-09-17",
+ **self._custom_headers,
+ }
+
+ def _validate_headers(self, headers: Headers, custom_headers: Headers) -> None:
+ if self.access_token and headers.get("Authorization"):
+ return
+ if isinstance(custom_headers.get("Authorization"), Omit):
+ return
+
+ raise TypeError(
+ '"Could not resolve authentication method. Expected the access_token to be set. Or for the `Authorization` headers to be explicitly omitted"'
+ )
+
+ def copy(
+ self,
+ *,
+ client_id: str | None = None,
+ client_secret: str | None = None,
+ access_token: str | None = None,
+ base_url: str | None = None,
+ timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
+ connection_pool_limits: httpx.Limits | NotGiven = NOT_GIVEN,
+ max_retries: int | NotGiven = NOT_GIVEN,
+ default_headers: Mapping[str, str] | None = None,
+ set_default_headers: Mapping[str, str] | None = None,
+ default_query: Mapping[str, object] | None = None,
+ set_default_query: Mapping[str, object] | None = None,
+ ) -> AsyncFinch:
+ """
+ Create a new client instance re-using the same options given to the current client with optional overriding.
+
+ It should be noted that this does not share the underlying httpx client class which may lead
+ to performance issues.
+ """
+ if default_headers is not None and set_default_headers is not None:
+ raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive")
+
+ if default_query is not None and set_default_query is not None:
+ raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive")
+
+ headers = self._custom_headers
+ if default_headers is not None:
+ headers = {**headers, **default_headers}
+ elif set_default_headers is not None:
+ headers = set_default_headers
+
+ params = self._custom_query
+ if default_query is not None:
+ params = {**params, **default_query}
+ elif set_default_query is not None:
+ params = set_default_query
+
+ # TODO: share the same httpx client between instances
+ return self.__class__(
+ client_id=client_id or self.client_id,
+ client_secret=client_secret or self.client_secret,
+ base_url=base_url or str(self.base_url),
+ access_token=access_token or self.access_token,
+ timeout=self.timeout if isinstance(timeout, NotGiven) else timeout,
+ connection_pool_limits=self._limits
+ if isinstance(connection_pool_limits, NotGiven)
+ else connection_pool_limits,
+ max_retries=self.max_retries if isinstance(max_retries, NotGiven) else max_retries,
+ default_headers=headers,
+ default_query=params,
+ )
+
+ # Alias for `copy` for nicer inline usage, e.g.
+ # client.with_options(timeout=10).foo.create(...)
+ with_options = copy
+
+ async def get_access_token(
+ self,
+ code: str,
+ *,
+ redirect_uri: str,
+ ) -> str:
+ """Returns an access token for the Finch API given an authorization code.
+
+ An
+ authorization code can be obtained by visiting the url returned by
+ get_auth_url().
+ """
+ if self.client_id is None:
+ raise ValueError("Expected client_id to be set in order to call get_access_token")
+
+ if self.client_secret is None:
+ raise ValueError("Expected client_secret to be set in order to call get_access_token")
+
+ response = await self.post(
+ "/auth/token",
+ body={
+ "client_id": self.client_id,
+ "client_secret": self.client_secret,
+ "code": code,
+ "redirect_uri": redirect_uri,
+ },
+ options={"headers": {"Authorization": Omit()}},
+ cast_to=httpx.Response,
+ )
+ data = response.json()
+ return str(data["access_token"])
+
+ def get_auth_url(
+ self,
+ *,
+ products: str,
+ redirect_uri: str,
+ sandbox: bool,
+ ) -> str:
+ """
+ Returns the authorization URL which can be visited in order to obtain an
+ authorization code from Finch. The authorization code can then be exchanged for
+ an access token for the Finch API by calling get_access_token().
+ """
+ if self.client_id is None:
+ raise ValueError("Expected the client_id to be set in order to call get_auth_url")
+
+ return str(
+ httpx.URL(
+ "https://connect.tryfinch.com/authorize",
+ params={
+ "client_id": self.client_id,
+ "products": products,
+ "redirect_uri": redirect_uri,
+ "sandbox": sandbox,
+ },
+ )
+ )
+
+
+Client = Finch
+
+AsyncClient = AsyncFinch
diff --git a/src/finch/_exceptions.py b/src/finch/_exceptions.py
new file mode 100644
index 00000000..ffdf6a23
--- /dev/null
+++ b/src/finch/_exceptions.py
@@ -0,0 +1,31 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from ._base_exceptions import APIError as APIError
+from ._base_exceptions import ConflictError as ConflictError
+from ._base_exceptions import NotFoundError as NotFoundError
+from ._base_exceptions import APIStatusError as APIStatusError
+from ._base_exceptions import RateLimitError as RateLimitError
+from ._base_exceptions import APITimeoutError as APITimeoutError
+from ._base_exceptions import BadRequestError as BadRequestError
+from ._base_exceptions import APIConnectionError as APIConnectionError
+from ._base_exceptions import AuthenticationError as AuthenticationError
+from ._base_exceptions import InternalServerError as InternalServerError
+from ._base_exceptions import PermissionDeniedError as PermissionDeniedError
+from ._base_exceptions import UnprocessableEntityError as UnprocessableEntityError
+from ._base_exceptions import APIResponseValidationError as APIResponseValidationError
+
+__all__ = [
+ "APIError",
+ "APIConnectionError",
+ "APIResponseValidationError",
+ "APIStatusError",
+ "APITimeoutError",
+ "AuthenticationError",
+ "BadRequestError",
+ "ConflictError",
+ "InternalServerError",
+ "NotFoundError",
+ "PermissionDeniedError",
+ "RateLimitError",
+ "UnprocessableEntityError",
+]
diff --git a/src/finch/_models.py b/src/finch/_models.py
new file mode 100644
index 00000000..faab75dd
--- /dev/null
+++ b/src/finch/_models.py
@@ -0,0 +1,235 @@
+from __future__ import annotations
+
+import inspect
+from typing import Any, Type, Union, Generic, TypeVar, cast
+from datetime import date, datetime
+from typing_extensions import final
+
+import pydantic
+import pydantic.generics
+from pydantic import Extra
+from pydantic.fields import ModelField
+from pydantic.typing import get_args, is_union, get_origin, is_literal_type
+from pydantic.datetime_parse import parse_date
+
+from ._types import (
+ Body,
+ Query,
+ ModelT,
+ Headers,
+ Timeout,
+ NotGiven,
+ AnyMapping,
+ RequestFiles,
+)
+from ._utils import is_list, is_mapping, parse_datetime, strip_not_given
+
+__all__ = ["BaseModel", "GenericModel"]
+
+_T = TypeVar("_T")
+
+
+class BaseModel(pydantic.BaseModel):
+ class Config(pydantic.BaseConfig):
+ extra: Extra = Extra.allow
+
+ def __str__(self) -> str:
+ return f'{self.__repr_name__()}({self.__repr_str__(", ")})'
+
+ # Override the 'construct' method in a way that supports recursive parsing without validation.
+ # Based on https://github.com/samuelcolvin/pydantic/issues/1168#issuecomment-817742836.
+ @classmethod
+ def construct(
+ cls: Type[ModelT],
+ _fields_set: set[str] | None = None,
+ **values: object,
+ ) -> ModelT:
+ m = cls.__new__(cls)
+ fields_values: dict[str, object] = {}
+
+ config = cls.__config__
+
+ for name, field in cls.__fields__.items():
+ key = field.alias
+ if key not in values and config.allow_population_by_field_name:
+ key = name
+
+ if key in values:
+ value = values[key]
+ fields_values[name] = _construct_field(value=value, field=field)
+ elif not field.required:
+ fields_values[name] = field.get_default()
+
+ for key, value in values.items():
+ if key not in cls.__fields__:
+ fields_values[key] = value
+
+ object.__setattr__(m, "__dict__", fields_values)
+ if _fields_set is None:
+ _fields_set = set(fields_values.keys())
+ object.__setattr__(m, "__fields_set__", _fields_set)
+ m._init_private_attributes()
+ return m
+
+
+def _construct_field(value: object, field: ModelField) -> object:
+ if value is None:
+ return field.get_default()
+
+ return construct_type(value=value, type_=field.outer_type_)
+
+
+def construct_type(*, value: object, type_: type) -> object:
+ """Loose coercion to the expected type with construction of nested values.
+
+ If the given value does not match the expected type then it is returned as-is.
+ """
+
+ # we need to use the origin class for any types that are subscripted generics
+ # e.g. Dict[str, object]
+ origin = get_origin(type_) or type_
+ args = get_args(type_)
+
+ if is_union(origin):
+ new_value, error = _create_pydantic_field(type_).validate(value, {}, loc="")
+ if not error:
+ return new_value
+
+ # if the data is not valid, use the first variant that doesn't fail while deserializing
+ for variant in args:
+ try:
+ return construct_type(value=value, type_=variant)
+ except Exception:
+ continue
+
+ raise RuntimeError(f"Could not convert data into a valid instance of {type_}")
+
+ if origin == dict:
+ if not is_mapping(value):
+ return value
+
+ _, items_type = get_args(type_) # Dict[_, items_type]
+ return {key: construct_type(value=item, type_=items_type) for key, item in value.items()}
+
+ if not is_literal_type(type_) and (issubclass(origin, BaseModel) or issubclass(origin, GenericModel)):
+ if is_list(value):
+ return [cast(Any, type_).construct(**entry) if is_mapping(entry) else entry for entry in value]
+
+ if is_mapping(value):
+ if issubclass(type_, BaseModel):
+ return type_.construct(**value) # type: ignore[arg-type]
+
+ return cast(Any, type_).construct(**value)
+
+ if origin == list:
+ if not is_list(value):
+ return value
+
+ inner_type = args[0] # List[inner_type]
+ return [construct_type(value=entry, type_=inner_type) for entry in value]
+
+ if origin == float:
+ try:
+ return float(cast(Any, value))
+ except Exception:
+ return value
+
+ if origin == int:
+ try:
+ return int(cast(Any, value))
+ except Exception:
+ return value
+
+ if type_ == datetime:
+ try:
+ return parse_datetime(value) # type: ignore
+ except Exception:
+ return value
+
+ if type_ == date:
+ try:
+ return parse_date(value) # type: ignore
+ except Exception:
+ return value
+
+ return value
+
+
+def validate_type(*, type_: type[_T], value: object) -> _T:
+ """Strict validation that the given value matches the expected type"""
+ if inspect.isclass(type_) and issubclass(type_, pydantic.BaseModel):
+ return cast(_T, type_.parse_obj(value))
+
+ model = _create_pydantic_model(type_).validate(value)
+ return cast(_T, model.__root__)
+
+
+def _create_pydantic_model(type_: _T) -> Type[RootModel[_T]]:
+ return RootModel[type_] # type: ignore
+
+
+def _create_pydantic_field(type_: type) -> ModelField:
+ # TODO: benchmark this
+ model_type = cast(Type[RootModel[object]], RootModel[type_]) # type: ignore
+ return model_type.__fields__["__root__"]
+
+
+class GenericModel(BaseModel, pydantic.generics.GenericModel):
+ pass
+
+
+class RootModel(GenericModel, Generic[_T]):
+ """Used as a placeholder to easily convert runtime types to a Pydantic format
+ to provide validation.
+
+ For example:
+ ```py
+ validated = RootModel[int](__root__='5').__root__
+ # validated: 5
+ ```
+ """
+
+ __root__: _T
+
+
+@final
+class FinalRequestOptions(pydantic.BaseModel):
+ method: str
+ url: str
+ params: Query = {}
+ headers: Union[Headers, NotGiven] = NotGiven()
+ max_retries: Union[int, NotGiven] = NotGiven()
+ timeout: Union[float, Timeout, None, NotGiven] = NotGiven()
+ files: Union[RequestFiles, None] = None
+ idempotency_key: Union[str, None] = None
+
+ # It should be noted that we cannot use `json` here as that would override
+ # a BaseModel method in an incompatible fashion.
+ json_data: Union[Body, None] = None
+ extra_json: Union[AnyMapping, None] = None
+
+ class Config(pydantic.BaseConfig):
+ arbitrary_types_allowed: bool = True
+
+ def get_max_retries(self, max_retries: int) -> int:
+ if isinstance(self.max_retries, NotGiven):
+ return max_retries
+ return self.max_retries
+
+ # override the `construct` method so that we can run custom transformations.
+ # this is necessary as we don't want to do any actual runtime type checking
+ # (which means we can't use validators) but we do want to ensure that `NotGiven`
+ # values are not present
+ @classmethod
+ def construct(
+ cls,
+ _fields_set: set[str] | None = None,
+ **values: object,
+ ) -> FinalRequestOptions:
+ kwargs = {
+ # we unconditionally call `strip_not_given` on any value
+ # as it will just ignore any non-mapping types
+ key: strip_not_given(value)
+ for key, value in values.items()
+ }
+ return super().construct(_fields_set, **kwargs)
diff --git a/src/finch/_qs.py b/src/finch/_qs.py
new file mode 100644
index 00000000..e7aa3e13
--- /dev/null
+++ b/src/finch/_qs.py
@@ -0,0 +1,148 @@
+from __future__ import annotations
+
+from typing import Any, List, Tuple, Union, Mapping, TypeVar
+from urllib.parse import parse_qs, urlencode
+from typing_extensions import Literal, get_args
+
+from ._types import NOT_GIVEN, NotGiven, NotGivenOr
+from ._utils import flatten
+
+_T = TypeVar("_T")
+
+
+ArrayFormat = Literal["comma", "repeat", "indices", "brackets"]
+NestedFormat = Literal["dots", "brackets"]
+
+PrimitiveData = Union[str, int, float, bool, None]
+# this should be Data = Union[PrimitiveData, "List[Data]", "Tuple[Data]", "Mapping[str, Data]"]
+# https://github.com/microsoft/pyright/issues/3555
+Data = Union[PrimitiveData, List[Any], Tuple[Any], "Mapping[str, Any]"]
+Params = Mapping[str, Data]
+
+
+class Querystring:
+ array_format: ArrayFormat
+ nested_format: NestedFormat
+
+ def __init__(
+ self,
+ *,
+ array_format: ArrayFormat = "repeat",
+ nested_format: NestedFormat = "brackets",
+ ) -> None:
+ self.array_format = array_format
+ self.nested_format = nested_format
+
+ def parse(self, query: str) -> Mapping[str, object]:
+ # TODO
+ return parse_qs(query)
+
+ def stringify(
+ self,
+ params: Params,
+ *,
+ array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN,
+ nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN,
+ ) -> str:
+ return urlencode(
+ self.stringify_items(
+ params,
+ array_format=array_format,
+ nested_format=nested_format,
+ )
+ )
+
+ def stringify_items(
+ self,
+ params: Params,
+ *,
+ array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN,
+ nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN,
+ ) -> list[tuple[str, str]]:
+ opts = Options(
+ qs=self,
+ array_format=array_format,
+ nested_format=nested_format,
+ )
+ return flatten([self._stringify_item(key, value, opts) for key, value in params.items()])
+
+ def _stringify_item(
+ self,
+ key: str,
+ value: Data,
+ opts: Options,
+ ) -> list[tuple[str, str]]:
+ if isinstance(value, Mapping):
+ items: list[tuple[str, str]] = []
+ nested_format = opts.nested_format
+ for subkey, subvalue in value.items():
+ items.extend(
+ self._stringify_item(
+ # TODO: error if unknown format
+ f"{key}.{subkey}" if nested_format == "dots" else f"{key}[{subkey}]",
+ subvalue,
+ opts,
+ )
+ )
+ return items
+
+ if isinstance(value, (list, tuple)):
+ array_format = opts.array_format
+ if array_format == "comma":
+ # TODO: support list of objects?
+ return [
+ (key, ",".join(self._primitive_value_to_str(item) for item in value if item is not None)),
+ ]
+ elif array_format == "repeat":
+ items = []
+ for item in value:
+ items.extend(self._stringify_item(key, item, opts))
+ return items
+ elif array_format == "indices":
+ raise NotImplementedError("The array indices format is not supported yet")
+ elif array_format == "brackets":
+ items = []
+ key = key + "[]"
+ for item in value:
+ items.extend(self._stringify_item(key, item, opts))
+ return items
+ else:
+ raise NotImplementedError(
+ f"Unknown array_format value: {array_format}, choose from {', '.join(get_args(ArrayFormat))}"
+ )
+
+ serialised = self._primitive_value_to_str(value)
+ if not serialised:
+ return []
+ return [(key, serialised)]
+
+ def _primitive_value_to_str(self, value: PrimitiveData) -> str:
+ # copied from httpx
+ if value is True:
+ return "true"
+ elif value is False:
+ return "false"
+ elif value is None:
+ return ""
+ return str(value)
+
+
+_qs = Querystring()
+parse = _qs.parse
+stringify = _qs.stringify
+stringify_items = _qs.stringify_items
+
+
+class Options:
+ array_format: ArrayFormat
+ nested_format: NestedFormat
+
+ def __init__(
+ self,
+ qs: Querystring = _qs,
+ *,
+ array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN,
+ nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN,
+ ) -> None:
+ self.array_format = qs.array_format if isinstance(array_format, NotGiven) else array_format
+ self.nested_format = qs.nested_format if isinstance(nested_format, NotGiven) else nested_format
diff --git a/src/finch/_resource.py b/src/finch/_resource.py
new file mode 100644
index 00000000..2e07d57a
--- /dev/null
+++ b/src/finch/_resource.py
@@ -0,0 +1,34 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._client import Finch, AsyncFinch
+
+
+class SyncAPIResource:
+    """Base class for synchronous API resource groups.
+
+    Holds the shared client and aliases its request helpers so subclasses can
+    call `self._get(...)`, `self._post(...)`, etc. directly.
+    """
+
+    _client: Finch
+
+    def __init__(self, client: Finch) -> None:
+        self._client = client
+        self._get = client.get
+        self._post = client.post
+        self._patch = client.patch
+        self._put = client.put
+        self._delete = client.delete
+        self._get_api_list = client.get_api_list
+
+
+class AsyncAPIResource:
+    """Base class for asynchronous API resource groups.
+
+    Holds the shared async client and aliases its request helpers so subclasses
+    can call `self._get(...)`, `self._post(...)`, etc. directly.
+    """
+
+    _client: AsyncFinch
+
+    def __init__(self, client: AsyncFinch) -> None:
+        self._client = client
+        self._get = client.get
+        self._post = client.post
+        self._patch = client.patch
+        self._put = client.put
+        self._delete = client.delete
+        self._get_api_list = client.get_api_list
diff --git a/src/finch/_types.py b/src/finch/_types.py
new file mode 100644
index 00000000..c1ca74bf
--- /dev/null
+++ b/src/finch/_types.py
@@ -0,0 +1,145 @@
+from __future__ import annotations
+
+from typing import (
+ IO,
+ TYPE_CHECKING,
+ Dict,
+ Type,
+ Tuple,
+ Union,
+ Mapping,
+ TypeVar,
+ Optional,
+ Sequence,
+)
+from typing_extensions import Literal, Protocol, TypedDict, runtime_checkable
+
+import pydantic
+from httpx import Proxy, Timeout, Response, BaseTransport
+
+Transport = BaseTransport
+Query = Mapping[str, object]
+Body = object
+AnyMapping = Mapping[str, object]
+# TypeVar for response model types; bound so only pydantic models qualify.
+ModelT = TypeVar("ModelT", bound=pydantic.BaseModel)
+_T = TypeVar("_T")
+
+# Approximates httpx internal ProxiesTypes and RequestFiles types
+ProxiesTypes = Union[str, Proxy, Dict[str, Union[None, str, Proxy]]]
+FileContent = Union[IO[bytes], bytes]
+FileTypes = Union[
+    # file (or bytes)
+    FileContent,
+    # (filename, file (or bytes))
+    Tuple[Optional[str], FileContent],
+    # (filename, file (or bytes), content_type)
+    Tuple[Optional[str], FileContent, Optional[str]],
+    # (filename, file (or bytes), content_type, headers)
+    Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]],
+]
+RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]]
+
+
+# Workaround to support (cast_to: Type[ResponseT]) -> ResponseT
+# where ResponseT includes `None`. In order to support directly
+# passing `None`, overloads would have to be defined for every
+# method that uses `ResponseT` which would lead to an unacceptable
+# amount of code duplication and make it unreadable. See _base_client.py
+# for example usage.
+#
+# This unfortunately means that you will either have
+# to import this type and pass it explicitly:
+#
+# from finch import NoneType
+# client.get('/foo', cast_to=NoneType)
+#
+# or build it yourself:
+#
+# client.get('/foo', cast_to=type(None))
+if TYPE_CHECKING:
+ NoneType: Type[None]
+else:
+ NoneType = type(None)
+
+
+class RequestOptions(TypedDict, total=False):
+    """Per-request option overrides; every key is optional (`total=False`)."""
+
+    headers: Headers
+    max_retries: int
+    timeout: float | Timeout | None
+    params: Query
+    extra_json: AnyMapping
+    idempotency_key: str
+
+
+# Sentinel class used when the response type is an object with an unknown schema
+class UnknownResponse:
+    """Cast target for responses whose schema is not known ahead of time."""
+
+    ...
+
+
+# Sentinel class used until PEP 0661 is accepted
+class NotGiven:
+    """
+    A sentinel singleton class used to distinguish omitted keyword arguments
+    from those passed in with the value None (which may have different behavior).
+
+    For example:
+
+    ```py
+    def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ...
+
+    get(timeout=1) # 1s timeout
+    get(timeout=None) # No timeout
+    get() # Default timeout behavior, which may not be statically known at the method definition.
+    ```
+    """
+
+    def __bool__(self) -> Literal[False]:
+        # Falsy so an omitted argument behaves like an absent value in boolean checks.
+        return False
+
+
+# Union alias: either a real value of type _T or the NOT_GIVEN sentinel below.
+NotGivenOr = Union[_T, NotGiven]
+NOT_GIVEN = NotGiven()
+
+
+class Omit:
+    """In certain situations you need to be able to represent a case where a default value has
+    to be explicitly removed and `None` is not an appropriate substitute, for example:
+
+    ```py
+    # the default `Content-Type` header is `application/json`; it will be sent unless overridden
+    client.post('/upload/files', files={'file': b'my raw file content'})
+
+    # you can't explicitly override the header as it has to be dynamically generated
+    # to look something like: 'multipart/form-data; boundary=0d8382fcf5f8c3be01ca2e11002d2983'
+    client.post(..., headers={'Content-Type': 'multipart/form-data'})
+
+    # instead you can remove the default `application/json` header by passing Omit
+    client.post(..., headers={'Content-Type': Omit()})
+    ```
+    """
+
+    def __bool__(self) -> Literal[False]:
+        # Falsy so header values of Omit() read as "not present" in boolean checks.
+        return False
+
+
+@runtime_checkable
+class ModelBuilderProtocol(Protocol):
+    """Protocol for model classes that construct themselves from a raw response plus parsed data."""
+
+    @classmethod
+    def build(
+        cls: type[_T],
+        *,
+        response: Response,
+        data: object,
+    ) -> _T:
+        ...
+
+
+# Header values may be Omit() to explicitly remove a default header.
+Headers = Mapping[str, Union[str, Omit]]
+
+
+class HeadersLikeProtocol(Protocol):
+    """Minimal headers interface: only `get`, returning the value or None."""
+
+    def get(self, __key: str) -> str | None:
+        ...
+
+
+HeadersLike = Union[Headers, HeadersLikeProtocol]
diff --git a/src/finch/_utils/__init__.py b/src/finch/_utils/__init__.py
new file mode 100644
index 00000000..05a82c73
--- /dev/null
+++ b/src/finch/_utils/__init__.py
@@ -0,0 +1,25 @@
+from ._utils import flatten as flatten
+from ._utils import is_dict as is_dict
+from ._utils import is_list as is_list
+from ._utils import is_mapping as is_mapping
+from ._utils import parse_date as parse_date
+from ._utils import coerce_float as coerce_float
+from ._utils import is_list_type as is_list_type
+from ._utils import removeprefix as removeprefix
+from ._utils import removesuffix as removesuffix
+from ._utils import extract_files as extract_files
+from ._utils import is_union_type as is_union_type
+from ._utils import required_args as required_args
+from ._utils import coerce_boolean as coerce_boolean
+from ._utils import coerce_integer as coerce_integer
+from ._utils import file_from_path as file_from_path
+from ._utils import parse_datetime as parse_datetime
+from ._utils import strip_not_given as strip_not_given
+from ._utils import deepcopy_minimal as deepcopy_minimal
+from ._utils import extract_type_arg as extract_type_arg
+from ._utils import is_required_type as is_required_type
+from ._utils import is_annotated_type as is_annotated_type
+from ._utils import strip_annotated_type as strip_annotated_type
+from ._transform import PropertyInfo as PropertyInfo
+from ._transform import transform as transform
+from ._transform import maybe_transform as maybe_transform
diff --git a/src/finch/_utils/_transform.py b/src/finch/_utils/_transform.py
new file mode 100644
index 00000000..5bb03ea3
--- /dev/null
+++ b/src/finch/_utils/_transform.py
@@ -0,0 +1,205 @@
+from __future__ import annotations
+
+from typing import Any, List, Mapping, TypeVar, cast
+from datetime import date, datetime
+from typing_extensions import Literal, get_args, get_type_hints
+
+from pydantic.typing import is_typeddict
+
+from ._utils import (
+ is_list,
+ is_mapping,
+ is_list_type,
+ is_union_type,
+ extract_type_arg,
+ is_required_type,
+ is_annotated_type,
+ strip_annotated_type,
+)
+
+_T = TypeVar("_T")
+
+
+# TODO: support for drilling globals() and locals()
+# TODO: ensure works correctly with forward references in all cases
+
+
+PropertyFormat = Literal["iso8601", "custom"]
+
+
+class PropertyInfo:
+    """Metadata class to be used in Annotated types to provide information about a given type.
+
+    For example:
+
+    class MyParams(TypedDict):
+        account_holder_name: Annotated[str, PropertyInfo(alias='accountHolderName')]
+
+    This means that {'account_holder_name': 'Robert'} will be transformed to {'accountHolderName': 'Robert'} before being sent to the API.
+    """
+
+    # Over-the-wire key name the field is renamed to (None = keep as-is).
+    alias: str | None
+    # How date/datetime values are serialised: "iso8601" or "custom".
+    format: PropertyFormat | None
+    # strftime template; only consulted when format == "custom".
+    format_template: str | None
+
+    def __init__(
+        self,
+        *,
+        alias: str | None = None,
+        format: PropertyFormat | None = None,
+        format_template: str | None = None,
+    ) -> None:
+        self.alias = alias
+        self.format = format
+        self.format_template = format_template
+
+    def __repr__(self) -> str:
+        return f"{self.__class__.__name__}(alias='{self.alias}', format={self.format}, format_template='{self.format_template}')"
+
+
+def maybe_transform(
+ data: Mapping[str, object] | List[Any] | None,
+ expected_type: object,
+) -> Any | None:
+ """Wrapper over `transform()` that allows `None` to be passed.
+
+ See `transform()` for more details.
+ """
+ if data is None:
+ return None
+ return transform(data, expected_type)
+
+
+# Wrapper over _transform_recursive providing fake types
+def transform(
+ data: _T,
+ expected_type: object,
+) -> _T:
+ """Transform dictionaries based off of type information from the given type, for example:
+
+ ```py
+ class Params(TypedDict, total=False):
+ card_id: Required[Annotated[str, PropertyInfo(alias='cardID')]]
+
+ transformed = transform({'card_id': ''}, Params)
+ # {'cardID': ''}
+ ```
+
+ Any keys / data that does not have type information given will be included as is.
+
+ It should be noted that the transformations that this function does are not represented in the type system.
+ """
+ transformed = _transform_recursive(data, annotation=cast(type, expected_type))
+ return cast(_T, transformed)
+
+
+def _get_annoted_type(type_: type) -> type | None:
+ """If the given type is an `Annotated` type then it is returned, if not `None` is returned.
+
+ This also unwraps the type when applicable, e.g. `Required[Annotated[T, ...]]`
+ """
+ if is_required_type(type_):
+ # Unwrap `Required[Annotated[T, ...]]` to `Annotated[T, ...]`
+ type_ = get_args(type_)[0]
+
+ if is_annotated_type(type_):
+ return type_
+
+ return None
+
+
+def _maybe_transform_key(key: str, type_: type) -> str:
+ annotated_type = _get_annoted_type(type_)
+ if annotated_type is None:
+ # no `Annotated` definition for this type, no transformation needed
+ return key
+
+ # ignore the first argument as it is the actual type
+ annotations = get_args(type_)[1:]
+ for annotation in annotations:
+ if isinstance(annotation, PropertyInfo) and annotation.alias is not None:
+ return annotation.alias
+
+ return key
+
+
+def _transform_recursive(
+    data: object,
+    *,
+    annotation: type,
+    inner_type: type | None = None,
+) -> object:
+    """Transform the given data against the expected type.
+
+    Args:
+        annotation: The direct type annotation given to the particular piece of data.
+            This may or may not be wrapped in metadata types, e.g. `Required[T]`, `Annotated[T, ...]` etc
+
+        inner_type: If applicable, this is the "inside" type. This is useful in certain cases where the outside type
+            is a container type such as `List[T]`. In that case `inner_type` should be set to `T` so that each entry in
+            the list can be transformed using the metadata from the container type.
+
+            Defaults to the same value as the `annotation` argument.
+    """
+    if inner_type is None:
+        inner_type = annotation
+
+    stripped_type = strip_annotated_type(inner_type)
+    # TypedDicts drive key renaming / value formatting via their field annotations.
+    if is_typeddict(stripped_type) and is_mapping(data):
+        return _transform_typeddict(data, stripped_type)
+
+    # Lists recurse element-wise, carrying the container's metadata via `annotation`.
+    if is_list_type(stripped_type) and is_list(data):
+        inner_type = extract_type_arg(stripped_type, 0)
+        return [_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data]
+
+    if is_union_type(stripped_type):
+        # For union types we run the transformation against all subtypes to ensure that everything is transformed.
+        #
+        # TODO: there may be edge cases where the same normalized field name will transform to two different names
+        # in different subtypes.
+        for subtype in get_args(stripped_type):
+            data = _transform_recursive(data, annotation=annotation, inner_type=subtype)
+        return data
+
+    # Scalar / leaf value: apply any PropertyInfo format from the annotation.
+    return _transform_value(data, annotation)
+
+
+def _transform_value(data: object, type_: type) -> object:
+ annotated_type = _get_annoted_type(type_)
+ if annotated_type is None:
+ return data
+
+ # ignore the first argument as it is the actual type
+ annotations = get_args(annotated_type)[1:]
+ for annotation in annotations:
+ if isinstance(annotation, PropertyInfo) and annotation.format is not None:
+ return _format_data(data, annotation.format, annotation.format_template)
+
+ return data
+
+
+def _format_data(data: object, format_: PropertyFormat, format_template: str | None) -> object:
+ if isinstance(data, (date, datetime)):
+ if format_ == "iso8601":
+ return data.isoformat()
+
+ if format_ == "custom" and format_template is not None:
+ return data.strftime(format_template)
+
+ return data
+
+
+def _transform_typeddict(
+ data: Mapping[str, object],
+ expected_type: type,
+) -> Mapping[str, object]:
+ result: dict[str, object] = {}
+ annotations = get_type_hints(expected_type, include_extras=True)
+ for key, value in data.items():
+ type_ = annotations.get(key)
+ if type_ is None:
+ # we do not have a type annotation for this field, leave it as is
+ result[key] = value
+ else:
+ result[_maybe_transform_key(key, type_)] = _transform_recursive(value, annotation=type_)
+ return result
diff --git a/src/finch/_utils/_utils.py b/src/finch/_utils/_utils.py
new file mode 100644
index 00000000..04bee66c
--- /dev/null
+++ b/src/finch/_utils/_utils.py
@@ -0,0 +1,325 @@
+from __future__ import annotations
+
+import os
+import inspect
+import functools
+from typing import Any, Mapping, TypeVar, Callable, Iterable, Sequence, cast, overload
+from pathlib import Path
+from typing_extensions import Required, Annotated, TypeGuard, get_args, get_origin
+
+from pydantic.typing import is_union as _is_union
+
+# re-export for forwards compat
+from pydantic.datetime_parse import parse_date as parse_date
+from pydantic.datetime_parse import parse_datetime as parse_datetime
+
+from .._types import NotGiven, FileTypes
+
+_T = TypeVar("_T")
+# Any callable; used below to type decorators that return the wrapped function unchanged.
+CallableT = TypeVar("CallableT", bound=Callable[..., Any])
+
+
+def flatten(t: Iterable[Iterable[_T]]) -> list[_T]:
+ return [item for sublist in t for item in sublist]
+
+
+def extract_files(
+    # TODO: this needs to take Dict but variance issues.....
+    # create protocol type ?
+    query: Mapping[str, object],
+    *,
+    paths: Sequence[Sequence[str]],
+) -> list[tuple[str, FileTypes]]:
+    """Collect the file entries found at each of `paths` inside `query`.
+
+    Returns `(flattened_key, file)` pairs. Note: mutates `query` in place —
+    the final path segment is popped out of its containing dict.
+    """
+    files: list[tuple[str, FileTypes]] = []
+    for path in paths:
+        files.extend(_extract_items(query, path, index=0, flattened_key=None))
+    return files
+
+
+def _extract_items(
+    obj: object,
+    path: Sequence[str],
+    *,
+    index: int,
+    # TODO: rename
+    flattened_key: str | None,
+) -> list[tuple[str, FileTypes]]:
+    """Recursively walk `obj` along `path`, popping and returning file entries.
+
+    `flattened_key` accumulates the multipart field name in bracket notation
+    (dict steps append `[key]`, list steps append `[]`); an empty path segment
+    ("") selects list elements.
+    """
+    try:
+        key = path[index]
+    except IndexError:
+        # We have exhausted the path, return the entry we found.
+        if not isinstance(obj, bytes) and not isinstance(obj, tuple):
+            raise RuntimeError(
+                f"Expected entry at {flattened_key} to be bytes or a tuple but received {type(obj)} instead."
+            )
+
+        # TODO: validate obj more?
+        assert flattened_key is not None
+        return [(flattened_key, cast(FileTypes, obj))]
+
+    index += 1
+    if is_dict(obj):
+        try:
+            # We are at the last entry in the path so we must remove the field
+            if (len(path)) == index:
+                item = obj.pop(key)
+            else:
+                item = obj[key]
+        except KeyError:
+            # Key was not present in the dictionary, this is not indicative of an error
+            # as the given path may not point to a required field. We also do not want
+            # to enforce required fields as the API may differ from the spec in some cases.
+            return []
+        if flattened_key is None:
+            flattened_key = key
+        else:
+            flattened_key += f"[{key}]"
+        return _extract_items(
+            item,
+            path,
+            index=index,
+            flattened_key=flattened_key,
+        )
+    elif is_list(obj):
+        # Only an empty path segment ("") descends into list elements.
+        if key != "":
+            return []
+
+        return flatten(
+            [
+                _extract_items(
+                    item,
+                    path,
+                    index=index,
+                    flattened_key=flattened_key + "[]" if flattened_key is not None else "[]",
+                )
+                for item in obj
+            ]
+        )
+
+    # Something unexpected was passed, just ignore it.
+    return []
+
+
+# Type safe methods for narrowing types with TypeVars.
+# The default narrowing for isinstance(obj, dict) is dict[unknown, unknown],
+# however this causes Pyright to rightfully report errors. As we know we don't
+# care about the contained types we can safely use `object` in its place.
+
+
+def is_mapping(obj: object) -> TypeGuard[Mapping[str, object]]:
+    """TypeGuard: True when `obj` is any Mapping."""
+    return isinstance(obj, Mapping)
+
+
+def is_dict(obj: object) -> TypeGuard[dict[object, object]]:
+    """TypeGuard: True when `obj` is a plain dict."""
+    return isinstance(obj, dict)
+
+
+def is_list(obj: object) -> TypeGuard[list[object]]:
+    """TypeGuard: True when `obj` is a plain list."""
+    return isinstance(obj, list)
+
+
+def is_annotated_type(typ: type) -> bool:
+    """True for `Annotated[...]` types."""
+    return get_origin(typ) == Annotated
+
+
+def is_list_type(typ: type) -> bool:
+    """True for bare `list` or parameterised `List[...]` types."""
+    return (get_origin(typ) or typ) == list
+
+
+def is_union_type(typ: type) -> bool:
+    """True for union types, as determined by pydantic's `is_union` helper."""
+    return _is_union(get_origin(typ))
+
+
+def is_required_type(typ: type) -> bool:
+    """True for TypedDict `Required[...]` wrappers."""
+    return get_origin(typ) == Required
+
+
+# Extracts T from Annotated[T, ...] or from Required[Annotated[T, ...]]
+def strip_annotated_type(typ: type) -> type:
+ if is_required_type(typ) or is_annotated_type(typ):
+ return strip_annotated_type(cast(type, get_args(typ)[0]))
+
+ return typ
+
+
+def extract_type_arg(typ: type, index: int) -> type:
+ args = get_args(typ)
+ try:
+ return cast(type, args[index])
+ except IndexError:
+ raise RuntimeError(f"Expected type {typ} to have a type argument at index {index} but it did not")
+
+
+def deepcopy_minimal(item: _T) -> _T:
+ """Minimal reimplementation of copy.deepcopy() that will only copy certain object types:
+
+ - mappings, e.g. `dict`
+ - list
+
+ This is done for performance reasons.
+ """
+ if is_mapping(item):
+ return cast(_T, {k: deepcopy_minimal(v) for k, v in item.items()})
+ if is_list(item):
+ return cast(_T, [deepcopy_minimal(entry) for entry in item])
+ return item
+
+
+# copied from https://github.com/Rapptz/RoboDanny
+def human_join(seq: Sequence[str], *, delim: str = ", ", final: str = "or") -> str:
+ size = len(seq)
+ if size == 0:
+ return ""
+
+ if size == 1:
+ return seq[0]
+
+ if size == 2:
+ return f"{seq[0]} {final} {seq[1]}"
+
+ return delim.join(seq[:-1]) + f" {final} {seq[-1]}"
+
+
+def quote(string: str) -> str:
+ """Add single quotation marks around the given string. Does *not* do any escaping."""
+ return "'" + string + "'"
+
+
+def required_args(*variants: Sequence[str]) -> Callable[[CallableT], CallableT]:
+    """Decorator to enforce a given set of arguments or variants of arguments are passed to the decorated function.
+
+    Useful for enforcing runtime validation of overloaded functions.
+
+    Example usage:
+    ```py
+    @overload
+    def foo(*, a: str) -> str:
+        ...
+
+    @overload
+    def foo(*, b: bool) -> str:
+        ...
+
+    # This enforces the same constraints that a static type checker would
+    # i.e. that either a or b must be passed to the function
+    @required_args(['a'], ['b'])
+    def foo(*, a: str | None = None, b: bool | None = None) -> str:
+        ...
+    ```
+    """
+
+    def inner(func: CallableT) -> CallableT:
+        params = inspect.signature(func).parameters
+        # Names of parameters that may be supplied positionally, in declaration order,
+        # so positional call arguments can be mapped back to parameter names.
+        positional = [
+            name
+            for name, param in params.items()
+            if param.kind
+            in {
+                param.POSITIONAL_ONLY,
+                param.POSITIONAL_OR_KEYWORD,
+            }
+        ]
+
+        @functools.wraps(func)
+        def wrapper(*args: object, **kwargs: object) -> object:
+            # Collect every parameter name the caller actually supplied.
+            given_params: set[str] = set()
+            for i, _ in enumerate(args):
+                try:
+                    given_params.add(positional[i])
+                except IndexError:
+                    raise TypeError(f"{func.__name__}() takes {len(positional)} argument(s) but {len(args)} were given")
+
+            for key in kwargs.keys():
+                given_params.add(key)
+
+            # A call is valid if at least one variant is fully satisfied.
+            for variant in variants:
+                matches = all((param in given_params for param in variant))
+                if matches:
+                    break
+            else:  # no break
+                if len(variants) > 1:
+                    variations = human_join(
+                        ["(" + human_join([quote(arg) for arg in variant], final="and") + ")" for variant in variants]
+                    )
+                    msg = f"Missing required arguments; Expected either {variations} arguments to be given"
+                else:
+                    # TODO: this error message is not deterministic
+                    missing = list(set(variants[0]) - given_params)
+                    if len(missing) > 1:
+                        msg = f"Missing required arguments: {human_join([quote(arg) for arg in missing])}"
+                    else:
+                        msg = f"Missing required argument: {quote(missing[0])}"
+                raise TypeError(msg)
+            return func(*args, **kwargs)
+
+        return wrapper  # type: ignore
+
+    return inner
+
+
+# Key/value TypeVars for the mapping overload of `strip_not_given` below.
+_K = TypeVar("_K")
+_V = TypeVar("_V")
+
+
+@overload
+def strip_not_given(obj: None) -> None:
+    ...
+
+
+@overload
+def strip_not_given(obj: Mapping[_K, _V | NotGiven]) -> dict[_K, _V]:
+    ...
+
+
+@overload
+def strip_not_given(obj: object) -> object:
+    ...
+
+
+def strip_not_given(obj: object | None) -> object:
+    """Remove all top-level keys where their values are instances of `NotGiven`"""
+    if obj is None:
+        return None
+
+    if not is_mapping(obj):
+        return obj
+
+    # Shallow filter: nested mappings are not touched.
+    return {key: value for key, value in obj.items() if not isinstance(value, NotGiven)}
+
+
+def coerce_integer(val: str) -> int:
+    """Parse a base-10 integer string."""
+    return int(val, base=10)
+
+
+def coerce_float(val: str) -> float:
+    """Parse a float string."""
+    return float(val)
+
+
+def coerce_boolean(val: str) -> bool:
+ return val == "true" or val == "1" or val == "on"
+
+
+def removeprefix(string: str, prefix: str) -> str:
+    """Remove a prefix from a string.
+
+    Backport of `str.removeprefix` for Python < 3.9
+    """
+    if string.startswith(prefix):
+        # len(prefix) may be 0; slicing from 0 returns the string unchanged.
+        return string[len(prefix) :]
+    return string
+
+
+def removesuffix(string: str, suffix: str) -> str:
+ """Remove a suffix from a string.
+
+ Backport of `str.removesuffix` for Python < 3.9
+ """
+ if string.endswith(suffix):
+ return string[: -len(suffix)]
+ return string
+
+
+def file_from_path(path: str) -> FileTypes:
+    """Read the file at `path` fully into memory and return a (filename, contents) tuple."""
+    contents = Path(path).read_bytes()
+    file_name = os.path.basename(path)
+    return (file_name, contents)
diff --git a/src/finch/_version.py b/src/finch/_version.py
new file mode 100644
index 00000000..5a2dad9e
--- /dev/null
+++ b/src/finch/_version.py
@@ -0,0 +1,4 @@
+# File generated from our OpenAPI spec by Stainless.
+
+__title__ = "finch"
+__version__ = "0.0.1" # x-release-please-version
diff --git a/src/finch/pagination.py b/src/finch/pagination.py
new file mode 100644
index 00000000..d0b046ea
--- /dev/null
+++ b/src/finch/pagination.py
@@ -0,0 +1,390 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import List, Type, Generic, Mapping, TypeVar, Optional
+
+from httpx import Response
+
+from ._types import ModelT
+from ._models import BaseModel
+from ._base_client import BasePage, PageInfo, BaseSyncPage, BaseAsyncPage
+
+__all__ = [
+    "SyncSinglePage",
+    "AsyncSinglePage",
+    "SyncResponsesPage",
+    "AsyncResponsesPage",
+    "SyncIndividualsPage",
+    "AsyncIndividualsPage",
+    "SyncCandidatesPage",
+    "AsyncCandidatesPage",
+    "SyncApplicationsPage",
+    "AsyncApplicationsPage",
+    "SyncJobsPage",
+    "AsyncJobsPage",
+    "SyncOffersPage",
+    "AsyncOffersPage",
+]
+
+# TypeVar for the concrete page model constructed by the `build` classmethods.
+_BaseModelT = TypeVar("_BaseModelT", bound=BaseModel)
+
+
+class SyncSinglePage(BaseSyncPage[ModelT], BasePage[ModelT], Generic[ModelT]):
+ items: List[ModelT]
+
+ def _get_page_items(self) -> List[ModelT]:
+ return self.items
+
+ def next_page_info(self) -> None:
+ """
+ This page represents a response that isn't actually paginated at the API level
+ so there will never be a next page.
+ """
+ return None
+
+ @classmethod
+ def build(cls: Type[_BaseModelT], *, response: Response, data: object) -> _BaseModelT:
+ return cls.construct(
+ **{
+ **(data if isinstance(data, Mapping) else {"items": data}),
+ }
+ )
+
+
+class AsyncSinglePage(BaseAsyncPage[ModelT], BasePage[ModelT], Generic[ModelT]):
+ items: List[ModelT]
+
+ def _get_page_items(self) -> List[ModelT]:
+ return self.items
+
+ def next_page_info(self) -> None:
+ """
+ This page represents a response that isn't actually paginated at the API level
+ so there will never be a next page.
+ """
+ return None
+
+ @classmethod
+ def build(cls: Type[_BaseModelT], *, response: Response, data: object) -> _BaseModelT:
+ return cls.construct(
+ **{
+ **(data if isinstance(data, Mapping) else {"items": data}),
+ }
+ )
+
+
+class SyncResponsesPage(BaseSyncPage[ModelT], BasePage[ModelT], Generic[ModelT]):
+ responses: List[ModelT]
+
+ def _get_page_items(self) -> List[ModelT]:
+ return self.responses
+
+ def next_page_info(self) -> None:
+ """
+ This page represents a response that isn't actually paginated at the API level
+ so there will never be a next page.
+ """
+ return None
+
+
+class AsyncResponsesPage(BaseAsyncPage[ModelT], BasePage[ModelT], Generic[ModelT]):
+ responses: List[ModelT]
+
+ def _get_page_items(self) -> List[ModelT]:
+ return self.responses
+
+ def next_page_info(self) -> None:
+ """
+ This page represents a response that isn't actually paginated at the API level
+ so there will never be a next page.
+ """
+ return None
+
+
+class IndividualsPagePaging(BaseModel):
+    """Offset-pagination metadata returned alongside a page of individuals."""
+
+    count: Optional[int]
+    """The total number of elements for the entire query (not just the given page)"""
+
+    offset: Optional[int]
+    """The current start index of the returned list of elements"""
+
+
+class SyncIndividualsPage(BaseSyncPage[ModelT], BasePage[ModelT], Generic[ModelT]):
+ paging: IndividualsPagePaging
+ individuals: List[ModelT]
+
+ def _get_page_items(self) -> List[ModelT]:
+ return self.individuals
+
+ def next_page_info(self) -> Optional[PageInfo]:
+ offset = self.paging.offset
+ if offset is None:
+ return None
+
+ length = len(self.individuals)
+ current_count = offset + length
+
+ total_count = self.paging.count
+ if total_count is None:
+ return None
+
+ if current_count < total_count:
+ return PageInfo(params={"offset": current_count})
+
+ return None
+
+
+class AsyncIndividualsPage(BaseAsyncPage[ModelT], BasePage[ModelT], Generic[ModelT]):
+ paging: IndividualsPagePaging
+ individuals: List[ModelT]
+
+ def _get_page_items(self) -> List[ModelT]:
+ return self.individuals
+
+ def next_page_info(self) -> Optional[PageInfo]:
+ offset = self.paging.offset
+ if offset is None:
+ return None
+
+ length = len(self.individuals)
+ current_count = offset + length
+
+ total_count = self.paging.count
+ if total_count is None:
+ return None
+
+ if current_count < total_count:
+ return PageInfo(params={"offset": current_count})
+
+ return None
+
+
+class CandidatesPagePaging(BaseModel):
+    """Offset-pagination metadata returned alongside a page of candidates."""
+
+    count: Optional[int]
+    """The total number of elements for the entire query (not just the given page)"""
+
+    offset: Optional[int]
+    """The current start index of the returned list of elements"""
+
+
+class SyncCandidatesPage(BaseSyncPage[ModelT], BasePage[ModelT], Generic[ModelT]):
+ paging: CandidatesPagePaging
+ candidates: List[ModelT]
+
+ def _get_page_items(self) -> List[ModelT]:
+ return self.candidates
+
+ def next_page_info(self) -> Optional[PageInfo]:
+ offset = self.paging.offset
+ if offset is None:
+ return None
+
+ length = len(self.candidates)
+ current_count = offset + length
+
+ total_count = self.paging.count
+ if total_count is None:
+ return None
+
+ if current_count < total_count:
+ return PageInfo(params={"offset": current_count})
+
+ return None
+
+
+class AsyncCandidatesPage(BaseAsyncPage[ModelT], BasePage[ModelT], Generic[ModelT]):
+ paging: CandidatesPagePaging
+ candidates: List[ModelT]
+
+ def _get_page_items(self) -> List[ModelT]:
+ return self.candidates
+
+ def next_page_info(self) -> Optional[PageInfo]:
+ offset = self.paging.offset
+ if offset is None:
+ return None
+
+ length = len(self.candidates)
+ current_count = offset + length
+
+ total_count = self.paging.count
+ if total_count is None:
+ return None
+
+ if current_count < total_count:
+ return PageInfo(params={"offset": current_count})
+
+ return None
+
+
+class ApplicationsPagePaging(BaseModel):
+    """Offset-pagination metadata returned alongside a page of applications."""
+
+    count: Optional[int]
+    """The total number of elements for the entire query (not just the given page)"""
+
+    offset: Optional[int]
+    """The current start index of the returned list of elements"""
+
+
+class SyncApplicationsPage(BaseSyncPage[ModelT], BasePage[ModelT], Generic[ModelT]):
+ paging: ApplicationsPagePaging
+ applications: List[ModelT]
+
+ def _get_page_items(self) -> List[ModelT]:
+ return self.applications
+
+ def next_page_info(self) -> Optional[PageInfo]:
+ offset = self.paging.offset
+ if offset is None:
+ return None
+
+ length = len(self.applications)
+ current_count = offset + length
+
+ total_count = self.paging.count
+ if total_count is None:
+ return None
+
+ if current_count < total_count:
+ return PageInfo(params={"offset": current_count})
+
+ return None
+
+
+class AsyncApplicationsPage(BaseAsyncPage[ModelT], BasePage[ModelT], Generic[ModelT]):
+ paging: ApplicationsPagePaging
+ applications: List[ModelT]
+
+ def _get_page_items(self) -> List[ModelT]:
+ return self.applications
+
+ def next_page_info(self) -> Optional[PageInfo]:
+ offset = self.paging.offset
+ if offset is None:
+ return None
+
+ length = len(self.applications)
+ current_count = offset + length
+
+ total_count = self.paging.count
+ if total_count is None:
+ return None
+
+ if current_count < total_count:
+ return PageInfo(params={"offset": current_count})
+
+ return None
+
+
+class JobsPagePaging(BaseModel):
+    """Offset-pagination metadata returned alongside a page of jobs."""
+
+    count: Optional[int]
+    """The total number of elements for the entire query (not just the given page)"""
+
+    offset: Optional[int]
+    """The current start index of the returned list of elements"""
+
+
+class SyncJobsPage(BaseSyncPage[ModelT], BasePage[ModelT], Generic[ModelT]):
+ paging: JobsPagePaging
+ jobs: List[ModelT]
+
+ def _get_page_items(self) -> List[ModelT]:
+ return self.jobs
+
+ def next_page_info(self) -> Optional[PageInfo]:
+ offset = self.paging.offset
+ if offset is None:
+ return None
+
+ length = len(self.jobs)
+ current_count = offset + length
+
+ total_count = self.paging.count
+ if total_count is None:
+ return None
+
+ if current_count < total_count:
+ return PageInfo(params={"offset": current_count})
+
+ return None
+
+
+class AsyncJobsPage(BaseAsyncPage[ModelT], BasePage[ModelT], Generic[ModelT]):
+ paging: JobsPagePaging
+ jobs: List[ModelT]
+
+ def _get_page_items(self) -> List[ModelT]:
+ return self.jobs
+
+ def next_page_info(self) -> Optional[PageInfo]:
+ offset = self.paging.offset
+ if offset is None:
+ return None
+
+ length = len(self.jobs)
+ current_count = offset + length
+
+ total_count = self.paging.count
+ if total_count is None:
+ return None
+
+ if current_count < total_count:
+ return PageInfo(params={"offset": current_count})
+
+ return None
+
+
+class OffersPagePaging(BaseModel):
+    """Offset-pagination metadata returned alongside a page of offers."""
+
+    count: Optional[int]
+    """The total number of elements for the entire query (not just the given page)"""
+
+    offset: Optional[int]
+    """The current start index of the returned list of elements"""
+
+
+class SyncOffersPage(BaseSyncPage[ModelT], BasePage[ModelT], Generic[ModelT]):
+ paging: OffersPagePaging
+ offers: List[ModelT]
+
+ def _get_page_items(self) -> List[ModelT]:
+ return self.offers
+
+ def next_page_info(self) -> Optional[PageInfo]:
+ offset = self.paging.offset
+ if offset is None:
+ return None
+
+ length = len(self.offers)
+ current_count = offset + length
+
+ total_count = self.paging.count
+ if total_count is None:
+ return None
+
+ if current_count < total_count:
+ return PageInfo(params={"offset": current_count})
+
+ return None
+
+
+class AsyncOffersPage(BaseAsyncPage[ModelT], BasePage[ModelT], Generic[ModelT]):
+ paging: OffersPagePaging
+ offers: List[ModelT]
+
+ def _get_page_items(self) -> List[ModelT]:
+ return self.offers
+
+ def next_page_info(self) -> Optional[PageInfo]:
+ offset = self.paging.offset
+ if offset is None:
+ return None
+
+ length = len(self.offers)
+ current_count = offset + length
+
+ total_count = self.paging.count
+ if total_count is None:
+ return None
+
+ if current_count < total_count:
+ return PageInfo(params={"offset": current_count})
+
+ return None
diff --git a/src/finch/py.typed b/src/finch/py.typed
new file mode 100644
index 00000000..e69de29b
diff --git a/src/finch/resources/__init__.py b/src/finch/resources/__init__.py
new file mode 100644
index 00000000..ee82a021
--- /dev/null
+++ b/src/finch/resources/__init__.py
@@ -0,0 +1,8 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from .ats import ATS, AsyncATS
+from .hris import HRIS, AsyncHRIS
+from .account import Account, AsyncAccount
+from .providers import Providers, AsyncProviders
+
+__all__ = ["ATS", "AsyncATS", "HRIS", "AsyncHRIS", "Providers", "AsyncProviders", "Account", "AsyncAccount"]
diff --git a/src/finch/resources/account.py b/src/finch/resources/account.py
new file mode 100644
index 00000000..db678453
--- /dev/null
+++ b/src/finch/resources/account.py
@@ -0,0 +1,98 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from ..types import Introspection, DisconnectResponse
+from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from .._resource import SyncAPIResource, AsyncAPIResource
+from .._base_client import make_request_options
+
+__all__ = ["Account", "AsyncAccount"]
+
+
+class Account(SyncAPIResource):
+    def disconnect(
+        self,
+        *,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | None | NotGiven = NOT_GIVEN,
+    ) -> DisconnectResponse:
+        """
+        Disconnect an employer from your application and invalidate all `access_token`s
+        associated with the employer. We require applications to implement the
+        Disconnect endpoint for billing and security purposes.
+        """
+        return self._post(
+            "/disconnect",
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=DisconnectResponse,  # response body is parsed into this model
+        )
+
+    def introspect(
+        self,
+        *,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | None | NotGiven = NOT_GIVEN,
+    ) -> Introspection:
+        """Read account information associated with an `access_token`"""
+        return self._get(
+            "/introspect",
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=Introspection,  # response body is parsed into this model
+        )
+
+
+class AsyncAccount(AsyncAPIResource):
+    async def disconnect(
+        self,
+        *,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | None | NotGiven = NOT_GIVEN,
+    ) -> DisconnectResponse:
+        """
+        Disconnect an employer from your application and invalidate all `access_token`s
+        associated with the employer. We require applications to implement the
+        Disconnect endpoint for billing and security purposes.
+        """
+        return await self._post(
+            "/disconnect",
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=DisconnectResponse,  # response body is parsed into this model
+        )
+
+    async def introspect(
+        self,
+        *,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | None | NotGiven = NOT_GIVEN,
+    ) -> Introspection:
+        """Read account information associated with an `access_token`"""
+        return await self._get(
+            "/introspect",
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=Introspection,  # response body is parsed into this model
+        )
diff --git a/src/finch/resources/ats/__init__.py b/src/finch/resources/ats/__init__.py
new file mode 100644
index 00000000..4d718632
--- /dev/null
+++ b/src/finch/resources/ats/__init__.py
@@ -0,0 +1,23 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from .ats import ATS, AsyncATS
+from .jobs import Jobs, AsyncJobs
+from .offers import Offers, AsyncOffers
+from .stages import Stages, AsyncStages
+from .candidates import Candidates, AsyncCandidates
+from .applications import Applications, AsyncApplications
+
+__all__ = [
+ "Candidates",
+ "AsyncCandidates",
+ "Applications",
+ "AsyncApplications",
+ "Stages",
+ "AsyncStages",
+ "Jobs",
+ "AsyncJobs",
+ "Offers",
+ "AsyncOffers",
+ "ATS",
+ "AsyncATS",
+]
diff --git a/src/finch/resources/ats/applications.py b/src/finch/resources/ats/applications.py
new file mode 100644
index 00000000..615e0bc2
--- /dev/null
+++ b/src/finch/resources/ats/applications.py
@@ -0,0 +1,150 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._utils import maybe_transform
+from ..._resource import SyncAPIResource, AsyncAPIResource
+from ...types.ats import Application, application_list_params
+from ...pagination import SyncApplicationsPage, AsyncApplicationsPage
+from ..._base_client import AsyncPaginator, make_request_options
+
+__all__ = ["Applications", "AsyncApplications"]
+
+
+class Applications(SyncAPIResource):
+    def retrieve(
+        self,
+        application_id: str,
+        *,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | None | NotGiven = NOT_GIVEN,
+    ) -> Application:
+        """Gets an application from an organization."""
+        return self._get(
+            f"/ats/applications/{application_id}",
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=Application,  # response body is parsed into this model
+        )
+
+    def list(
+        self,
+        *,
+        limit: int | NotGiven = NOT_GIVEN,
+        offset: int | NotGiven = NOT_GIVEN,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | None | NotGiven = NOT_GIVEN,
+    ) -> SyncApplicationsPage[Application]:
+        """
+        Gets all of an organization's applications.
+
+        Args:
+          limit: Number of applications to return (defaults to all)
+
+          offset: Index to start from (defaults to 0)
+
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        return self._get_api_list(
+            "/ats/applications",
+            page=SyncApplicationsPage[Application],
+            options=make_request_options(
+                extra_headers=extra_headers,
+                extra_query=extra_query,
+                extra_body=extra_body,
+                timeout=timeout,
+                query=maybe_transform(  # NOTE(review): presumably filters out NOT_GIVEN values — confirm in _utils
+                    {
+                        "limit": limit,
+                        "offset": offset,
+                    },
+                    application_list_params.ApplicationListParams,
+                ),
+            ),
+            model=Application,
+        )
+
+
+class AsyncApplications(AsyncAPIResource):
+    async def retrieve(
+        self,
+        application_id: str,
+        *,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | None | NotGiven = NOT_GIVEN,
+    ) -> Application:
+        """Gets an application from an organization."""
+        return await self._get(
+            f"/ats/applications/{application_id}",
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=Application,  # response body is parsed into this model
+        )
+
+    def list(  # note: plain def — returns an AsyncPaginator rather than awaiting the request here
+        self,
+        *,
+        limit: int | NotGiven = NOT_GIVEN,
+        offset: int | NotGiven = NOT_GIVEN,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | None | NotGiven = NOT_GIVEN,
+    ) -> AsyncPaginator[Application, AsyncApplicationsPage[Application]]:
+        """
+        Gets all of an organization's applications.
+
+        Args:
+          limit: Number of applications to return (defaults to all)
+
+          offset: Index to start from (defaults to 0)
+
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        return self._get_api_list(
+            "/ats/applications",
+            page=AsyncApplicationsPage[Application],
+            options=make_request_options(
+                extra_headers=extra_headers,
+                extra_query=extra_query,
+                extra_body=extra_body,
+                timeout=timeout,
+                query=maybe_transform(  # NOTE(review): presumably filters out NOT_GIVEN values — confirm in _utils
+                    {
+                        "limit": limit,
+                        "offset": offset,
+                    },
+                    application_list_params.ApplicationListParams,
+                ),
+            ),
+            model=Application,
+        )
diff --git a/src/finch/resources/ats/ats.py b/src/finch/resources/ats/ats.py
new file mode 100644
index 00000000..843f7e20
--- /dev/null
+++ b/src/finch/resources/ats/ats.py
@@ -0,0 +1,49 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from .jobs import Jobs, AsyncJobs
+from .offers import Offers, AsyncOffers
+from .stages import Stages, AsyncStages
+from .candidates import Candidates, AsyncCandidates
+from ..._resource import SyncAPIResource, AsyncAPIResource
+from .applications import Applications, AsyncApplications
+
+if TYPE_CHECKING:
+ from ..._client import Finch, AsyncFinch
+
+__all__ = ["ATS", "AsyncATS"]
+
+
+class ATS(SyncAPIResource):
+    candidates: Candidates
+    applications: Applications
+    stages: Stages
+    jobs: Jobs
+    offers: Offers
+
+    def __init__(self, client: Finch) -> None:
+        super().__init__(client)
+        self.candidates = Candidates(client)  # every sub-resource is built over the same client instance
+        self.applications = Applications(client)
+        self.stages = Stages(client)
+        self.jobs = Jobs(client)
+        self.offers = Offers(client)
+
+
+class AsyncATS(AsyncAPIResource):
+    candidates: AsyncCandidates
+    applications: AsyncApplications
+    stages: AsyncStages
+    jobs: AsyncJobs
+    offers: AsyncOffers
+
+    def __init__(self, client: AsyncFinch) -> None:
+        super().__init__(client)
+        self.candidates = AsyncCandidates(client)  # every sub-resource is built over the same client instance
+        self.applications = AsyncApplications(client)
+        self.stages = AsyncStages(client)
+        self.jobs = AsyncJobs(client)
+        self.offers = AsyncOffers(client)
diff --git a/src/finch/resources/ats/candidates.py b/src/finch/resources/ats/candidates.py
new file mode 100644
index 00000000..045b23b9
--- /dev/null
+++ b/src/finch/resources/ats/candidates.py
@@ -0,0 +1,162 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._utils import maybe_transform
+from ..._resource import SyncAPIResource, AsyncAPIResource
+from ...types.ats import Candidate, candidate_list_params
+from ...pagination import SyncCandidatesPage, AsyncCandidatesPage
+from ..._base_client import AsyncPaginator, make_request_options
+
+__all__ = ["Candidates", "AsyncCandidates"]
+
+
+class Candidates(SyncAPIResource):
+    def retrieve(
+        self,
+        candidate_id: str,
+        *,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | None | NotGiven = NOT_GIVEN,
+    ) -> Candidate:
+        """Gets a candidate from an organization.
+
+        A candidate represents an individual
+        associated with one or more applications.
+        """
+        return self._get(
+            f"/ats/candidates/{candidate_id}",
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=Candidate,  # response body is parsed into this model
+        )
+
+    def list(
+        self,
+        *,
+        limit: int | NotGiven = NOT_GIVEN,
+        offset: int | NotGiven = NOT_GIVEN,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | None | NotGiven = NOT_GIVEN,
+    ) -> SyncCandidatesPage[Candidate]:
+        """Gets all of an organization's candidates.
+
+        A candidate represents an individual
+        associated with one or more applications.
+
+        Args:
+          limit: Number of candidates to return (defaults to all)
+
+          offset: Index to start from (defaults to 0)
+
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        return self._get_api_list(
+            "/ats/candidates",
+            page=SyncCandidatesPage[Candidate],
+            options=make_request_options(
+                extra_headers=extra_headers,
+                extra_query=extra_query,
+                extra_body=extra_body,
+                timeout=timeout,
+                query=maybe_transform(  # NOTE(review): presumably filters out NOT_GIVEN values — confirm in _utils
+                    {
+                        "limit": limit,
+                        "offset": offset,
+                    },
+                    candidate_list_params.CandidateListParams,
+                ),
+            ),
+            model=Candidate,
+        )
+
+
+class AsyncCandidates(AsyncAPIResource):
+    async def retrieve(
+        self,
+        candidate_id: str,
+        *,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | None | NotGiven = NOT_GIVEN,
+    ) -> Candidate:
+        """Gets a candidate from an organization.
+
+        A candidate represents an individual
+        associated with one or more applications.
+        """
+        return await self._get(
+            f"/ats/candidates/{candidate_id}",
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=Candidate,  # response body is parsed into this model
+        )
+
+    def list(  # note: plain def — returns an AsyncPaginator rather than awaiting the request here
+        self,
+        *,
+        limit: int | NotGiven = NOT_GIVEN,
+        offset: int | NotGiven = NOT_GIVEN,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | None | NotGiven = NOT_GIVEN,
+    ) -> AsyncPaginator[Candidate, AsyncCandidatesPage[Candidate]]:
+        """Gets all of an organization's candidates.
+
+        A candidate represents an individual
+        associated with one or more applications.
+
+        Args:
+          limit: Number of candidates to return (defaults to all)
+
+          offset: Index to start from (defaults to 0)
+
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        return self._get_api_list(
+            "/ats/candidates",
+            page=AsyncCandidatesPage[Candidate],
+            options=make_request_options(
+                extra_headers=extra_headers,
+                extra_query=extra_query,
+                extra_body=extra_body,
+                timeout=timeout,
+                query=maybe_transform(  # NOTE(review): presumably filters out NOT_GIVEN values — confirm in _utils
+                    {
+                        "limit": limit,
+                        "offset": offset,
+                    },
+                    candidate_list_params.CandidateListParams,
+                ),
+            ),
+            model=Candidate,
+        )
diff --git a/src/finch/resources/ats/jobs.py b/src/finch/resources/ats/jobs.py
new file mode 100644
index 00000000..38e323fa
--- /dev/null
+++ b/src/finch/resources/ats/jobs.py
@@ -0,0 +1,150 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._utils import maybe_transform
+from ..._resource import SyncAPIResource, AsyncAPIResource
+from ...types.ats import Job, job_list_params
+from ...pagination import SyncJobsPage, AsyncJobsPage
+from ..._base_client import AsyncPaginator, make_request_options
+
+__all__ = ["Jobs", "AsyncJobs"]
+
+
+class Jobs(SyncAPIResource):
+    def retrieve(
+        self,
+        job_id: str,
+        *,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | None | NotGiven = NOT_GIVEN,
+    ) -> Job:
+        """Gets a job from an organization."""
+        return self._get(
+            f"/ats/jobs/{job_id}",
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=Job,  # response body is parsed into this model
+        )
+
+    def list(
+        self,
+        *,
+        limit: int | NotGiven = NOT_GIVEN,
+        offset: int | NotGiven = NOT_GIVEN,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | None | NotGiven = NOT_GIVEN,
+    ) -> SyncJobsPage[Job]:
+        """
+        Gets all of an organization's jobs.
+
+        Args:
+          limit: Number of jobs to return (defaults to all)
+
+          offset: Index to start from (defaults to 0)
+
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        return self._get_api_list(
+            "/ats/jobs",
+            page=SyncJobsPage[Job],
+            options=make_request_options(
+                extra_headers=extra_headers,
+                extra_query=extra_query,
+                extra_body=extra_body,
+                timeout=timeout,
+                query=maybe_transform(  # NOTE(review): presumably filters out NOT_GIVEN values — confirm in _utils
+                    {
+                        "limit": limit,
+                        "offset": offset,
+                    },
+                    job_list_params.JobListParams,
+                ),
+            ),
+            model=Job,
+        )
+
+
+class AsyncJobs(AsyncAPIResource):
+    async def retrieve(
+        self,
+        job_id: str,
+        *,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | None | NotGiven = NOT_GIVEN,
+    ) -> Job:
+        """Gets a job from an organization."""
+        return await self._get(
+            f"/ats/jobs/{job_id}",
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=Job,  # response body is parsed into this model
+        )
+
+    def list(  # note: plain def — returns an AsyncPaginator rather than awaiting the request here
+        self,
+        *,
+        limit: int | NotGiven = NOT_GIVEN,
+        offset: int | NotGiven = NOT_GIVEN,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | None | NotGiven = NOT_GIVEN,
+    ) -> AsyncPaginator[Job, AsyncJobsPage[Job]]:
+        """
+        Gets all of an organization's jobs.
+
+        Args:
+          limit: Number of jobs to return (defaults to all)
+
+          offset: Index to start from (defaults to 0)
+
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        return self._get_api_list(
+            "/ats/jobs",
+            page=AsyncJobsPage[Job],
+            options=make_request_options(
+                extra_headers=extra_headers,
+                extra_query=extra_query,
+                extra_body=extra_body,
+                timeout=timeout,
+                query=maybe_transform(  # NOTE(review): presumably filters out NOT_GIVEN values — confirm in _utils
+                    {
+                        "limit": limit,
+                        "offset": offset,
+                    },
+                    job_list_params.JobListParams,
+                ),
+            ),
+            model=Job,
+        )
diff --git a/src/finch/resources/ats/offers.py b/src/finch/resources/ats/offers.py
new file mode 100644
index 00000000..771e234b
--- /dev/null
+++ b/src/finch/resources/ats/offers.py
@@ -0,0 +1,150 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._utils import maybe_transform
+from ..._resource import SyncAPIResource, AsyncAPIResource
+from ...types.ats import Offer, offer_list_params
+from ...pagination import SyncOffersPage, AsyncOffersPage
+from ..._base_client import AsyncPaginator, make_request_options
+
+__all__ = ["Offers", "AsyncOffers"]
+
+
+class Offers(SyncAPIResource):
+    def retrieve(
+        self,
+        offer_id: str,
+        *,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | None | NotGiven = NOT_GIVEN,
+    ) -> Offer:
+        """Get a single offer from an organization."""
+        return self._get(
+            f"/ats/offers/{offer_id}",
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=Offer,  # response body is parsed into this model
+        )
+
+    def list(
+        self,
+        *,
+        limit: int | NotGiven = NOT_GIVEN,
+        offset: int | NotGiven = NOT_GIVEN,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | None | NotGiven = NOT_GIVEN,
+    ) -> SyncOffersPage[Offer]:
+        """
+        Get all offers put out by an organization.
+
+        Args:
+          limit: Number of offers to return (defaults to all)
+
+          offset: Index to start from (defaults to 0)
+
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        return self._get_api_list(
+            "/ats/offers",
+            page=SyncOffersPage[Offer],
+            options=make_request_options(
+                extra_headers=extra_headers,
+                extra_query=extra_query,
+                extra_body=extra_body,
+                timeout=timeout,
+                query=maybe_transform(  # NOTE(review): presumably filters out NOT_GIVEN values — confirm in _utils
+                    {
+                        "limit": limit,
+                        "offset": offset,
+                    },
+                    offer_list_params.OfferListParams,
+                ),
+            ),
+            model=Offer,
+        )
+
+
+class AsyncOffers(AsyncAPIResource):
+    async def retrieve(
+        self,
+        offer_id: str,
+        *,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | None | NotGiven = NOT_GIVEN,
+    ) -> Offer:
+        """Get a single offer from an organization."""
+        return await self._get(
+            f"/ats/offers/{offer_id}",
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=Offer,  # response body is parsed into this model
+        )
+
+    def list(  # note: plain def — returns an AsyncPaginator rather than awaiting the request here
+        self,
+        *,
+        limit: int | NotGiven = NOT_GIVEN,
+        offset: int | NotGiven = NOT_GIVEN,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | None | NotGiven = NOT_GIVEN,
+    ) -> AsyncPaginator[Offer, AsyncOffersPage[Offer]]:
+        """
+        Get all offers put out by an organization.
+
+        Args:
+          limit: Number of offers to return (defaults to all)
+
+          offset: Index to start from (defaults to 0)
+
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        return self._get_api_list(
+            "/ats/offers",
+            page=AsyncOffersPage[Offer],
+            options=make_request_options(
+                extra_headers=extra_headers,
+                extra_query=extra_query,
+                extra_body=extra_body,
+                timeout=timeout,
+                query=maybe_transform(  # NOTE(review): presumably filters out NOT_GIVEN values — confirm in _utils
+                    {
+                        "limit": limit,
+                        "offset": offset,
+                    },
+                    offer_list_params.OfferListParams,
+                ),
+            ),
+            model=Offer,
+        )
diff --git a/src/finch/resources/ats/stages.py b/src/finch/resources/ats/stages.py
new file mode 100644
index 00000000..0974e2de
--- /dev/null
+++ b/src/finch/resources/ats/stages.py
@@ -0,0 +1,67 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._resource import SyncAPIResource, AsyncAPIResource
+from ...types.ats import Stage
+from ...pagination import SyncSinglePage, AsyncSinglePage
+from ..._base_client import AsyncPaginator, make_request_options
+
+__all__ = ["Stages", "AsyncStages"]
+
+
+class Stages(SyncAPIResource):
+    def list(
+        self,
+        *,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | None | NotGiven = NOT_GIVEN,
+    ) -> SyncSinglePage[Stage]:
+        """Get all job stages for an organization.
+
+        Depending on the system, some jobs may
+        have stages specific to that job. Other job stages may apply broadly to all jobs
+        in the system. Use the `job_id` to determine whether a job applies specifically
+        to a job.
+        """
+        return self._get_api_list(
+            "/ats/stages",
+            page=SyncSinglePage[Stage],  # single-page endpoint: no limit/offset params, unlike the other ATS lists
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            model=Stage,
+        )
+
+
+class AsyncStages(AsyncAPIResource):
+    def list(  # note: plain def — returns an AsyncPaginator rather than awaiting the request here
+        self,
+        *,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | None | NotGiven = NOT_GIVEN,
+    ) -> AsyncPaginator[Stage, AsyncSinglePage[Stage]]:
+        """Get all job stages for an organization.
+
+        Depending on the system, some jobs may
+        have stages specific to that job. Other job stages may apply broadly to all jobs
+        in the system. Use the `job_id` to determine whether a job applies specifically
+        to a job.
+        """
+        return self._get_api_list(
+            "/ats/stages",
+            page=AsyncSinglePage[Stage],  # single-page endpoint: no limit/offset params, unlike the other ATS lists
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            model=Stage,
+        )
diff --git a/src/finch/resources/hris/__init__.py b/src/finch/resources/hris/__init__.py
new file mode 100644
index 00000000..c712adf5
--- /dev/null
+++ b/src/finch/resources/hris/__init__.py
@@ -0,0 +1,26 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from .hris import HRIS, AsyncHRIS
+from .company import CompanyResource, AsyncCompanyResource
+from .benefits import Benefits, AsyncBenefits
+from .payments import Payments, AsyncPayments
+from .directory import Directory, AsyncDirectory
+from .individuals import Individuals, AsyncIndividuals
+from .pay_statements import PayStatements, AsyncPayStatements
+
+__all__ = [
+ "CompanyResource",
+ "AsyncCompanyResource",
+ "Payments",
+ "AsyncPayments",
+ "PayStatements",
+ "AsyncPayStatements",
+ "Directory",
+ "AsyncDirectory",
+ "Individuals",
+ "AsyncIndividuals",
+ "Benefits",
+ "AsyncBenefits",
+ "HRIS",
+ "AsyncHRIS",
+]
diff --git a/src/finch/resources/hris/benefits/__init__.py b/src/finch/resources/hris/benefits/__init__.py
new file mode 100644
index 00000000..d1f80294
--- /dev/null
+++ b/src/finch/resources/hris/benefits/__init__.py
@@ -0,0 +1,6 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from .benefits import Benefits, AsyncBenefits
+from .individuals import Individuals, AsyncIndividuals
+
+__all__ = ["Individuals", "AsyncIndividuals", "Benefits", "AsyncBenefits"]
diff --git a/src/finch/resources/hris/benefits/benefits.py b/src/finch/resources/hris/benefits/benefits.py
new file mode 100644
index 00000000..f85ba3a3
--- /dev/null
+++ b/src/finch/resources/hris/benefits/benefits.py
@@ -0,0 +1,355 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ...._utils import maybe_transform
+from .individuals import Individuals, AsyncIndividuals
+from ...._resource import SyncAPIResource, AsyncAPIResource
+from ....pagination import SyncSinglePage, AsyncSinglePage
+from ....types.hris import (
+ BenefitType,
+ CompanyBenefit,
+ BenefitFrequency,
+ SupportedBenefit,
+ UpdateCompanyBenefitResponse,
+ CreateCompanyBenefitsResponse,
+ benefit_create_params,
+ benefit_update_params,
+)
+from ...._base_client import AsyncPaginator, make_request_options
+
+if TYPE_CHECKING:
+ from ...._client import Finch, AsyncFinch
+
+__all__ = ["Benefits", "AsyncBenefits"]
+
+
+class Benefits(SyncAPIResource):
+ individuals: Individuals
+
+ def __init__(self, client: Finch) -> None:
+ super().__init__(client)
+ self.individuals = Individuals(client)
+
+ def create(
+ self,
+ *,
+ description: str | NotGiven = NOT_GIVEN,
+ frequency: BenefitFrequency | NotGiven = NOT_GIVEN,
+ type: BenefitType | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> CreateCompanyBenefitsResponse:
+ """
+ **Availability: Automated and Assisted Benefits providers**
+
+ Creates a new company-wide benefit. Please use the `/meta` endpoint to view
+ available types for each provider.
+
+ Args:
+ type: Type of benefit.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._post(
+ "/employer/benefits",
+ body=maybe_transform(
+ {
+ "type": type,
+ "description": description,
+ "frequency": frequency,
+ },
+ benefit_create_params.BenefitCreateParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=CreateCompanyBenefitsResponse,
+ )
+
+ def retrieve(
+ self,
+ benefit_id: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> CompanyBenefit:
+ """
+ **Availability: Automated Benefits providers only**
+
+ Retrieves benefit information for a given benefit
+ """
+ return self._get(
+ f"/employer/benefits/{benefit_id}",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=CompanyBenefit,
+ )
+
+ def update(
+ self,
+ benefit_id: str,
+ *,
+ description: str | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> UpdateCompanyBenefitResponse:
+ """
+ **Availability: Automated and Assisted Benefits providers**
+
+ Updates an existing company-wide benefit
+
+ Args:
+ description: Updated name or description.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._post(
+ f"/employer/benefits/{benefit_id}",
+ body=maybe_transform({"description": description}, benefit_update_params.BenefitUpdateParams),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=UpdateCompanyBenefitResponse,
+ )
+
+ def list(
+ self,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> SyncSinglePage[CompanyBenefit]:
+ """
+ **Availability: Automated Benefits providers only**
+
+ List all company-wide benefits.
+ """
+ return self._get_api_list(
+ "/employer/benefits",
+ page=SyncSinglePage[CompanyBenefit],
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ model=CompanyBenefit,
+ )
+
+ def list_supported_benefits(
+ self,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> SyncSinglePage[SupportedBenefit]:
+ """
+ **Availability: Automated and Assisted Benefits providers**
+
+ Lists available types and configurations for the provider associated with the
+ access token.
+ """
+ return self._get_api_list(
+ "/employer/benefits/meta",
+ page=SyncSinglePage[SupportedBenefit],
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ model=SupportedBenefit,
+ )
+
+
+class AsyncBenefits(AsyncAPIResource):
+ individuals: AsyncIndividuals
+
+ def __init__(self, client: AsyncFinch) -> None:
+ super().__init__(client)
+ self.individuals = AsyncIndividuals(client)
+
+ async def create(
+ self,
+ *,
+ description: str | NotGiven = NOT_GIVEN,
+ frequency: BenefitFrequency | NotGiven = NOT_GIVEN,
+ type: BenefitType | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> CreateCompanyBenefitsResponse:
+ """
+ **Availability: Automated and Assisted Benefits providers**
+
+ Creates a new company-wide benefit. Please use the `/meta` endpoint to view
+ available types for each provider.
+
+ Args:
+ type: Type of benefit.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return await self._post(
+ "/employer/benefits",
+ body=maybe_transform(
+ {
+ "type": type,
+ "description": description,
+ "frequency": frequency,
+ },
+ benefit_create_params.BenefitCreateParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=CreateCompanyBenefitsResponse,
+ )
+
+ async def retrieve(
+ self,
+ benefit_id: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> CompanyBenefit:
+ """
+ **Availability: Automated Benefits providers only**
+
+ Retrieves benefit information for a given benefit
+ """
+ return await self._get(
+ f"/employer/benefits/{benefit_id}",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=CompanyBenefit,
+ )
+
+ async def update(
+ self,
+ benefit_id: str,
+ *,
+ description: str | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> UpdateCompanyBenefitResponse:
+ """
+ **Availability: Automated and Assisted Benefits providers**
+
+ Updates an existing company-wide benefit
+
+ Args:
+ description: Updated name or description.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return await self._post(
+ f"/employer/benefits/{benefit_id}",
+ body=maybe_transform({"description": description}, benefit_update_params.BenefitUpdateParams),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=UpdateCompanyBenefitResponse,
+ )
+
+ def list(
+ self,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> AsyncPaginator[CompanyBenefit, AsyncSinglePage[CompanyBenefit]]:
+ """
+ **Availability: Automated Benefits providers only**
+
+ List all company-wide benefits.
+ """
+ return self._get_api_list(
+ "/employer/benefits",
+ page=AsyncSinglePage[CompanyBenefit],
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ model=CompanyBenefit,
+ )
+
+ def list_supported_benefits(
+ self,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> AsyncPaginator[SupportedBenefit, AsyncSinglePage[SupportedBenefit]]:
+ """
+ **Availability: Automated and Assisted Benefits providers**
+
+ Lists available types and configurations for the provider associated with the
+ access token.
+ """
+ return self._get_api_list(
+ "/employer/benefits/meta",
+ page=AsyncSinglePage[SupportedBenefit],
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ model=SupportedBenefit,
+ )
diff --git a/src/finch/resources/hris/benefits/individuals.py b/src/finch/resources/hris/benefits/individuals.py
new file mode 100644
index 00000000..d2d25b67
--- /dev/null
+++ b/src/finch/resources/hris/benefits/individuals.py
@@ -0,0 +1,338 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from typing import List
+
+from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ...._utils import maybe_transform
+from ...._resource import SyncAPIResource, AsyncAPIResource
+from ....pagination import SyncSinglePage, AsyncSinglePage
+from ...._base_client import AsyncPaginator, make_request_options
+from ....types.hris.benefits import (
+ IndividualBenefit,
+ EnrolledIndividual,
+ UnenrolledIndividual,
+ IndividualEnrolledIDsResponse,
+ individual_unenroll_params,
+ individual_enroll_many_params,
+ individual_retrieve_many_benefits_params,
+)
+
+__all__ = ["Individuals", "AsyncIndividuals"]
+
+
+class Individuals(SyncAPIResource):
+ def enroll_many(
+ self,
+ benefit_id: str,
+ *,
+ individuals: List[individual_enroll_many_params.IndividualEnrollManyParam],
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> SyncSinglePage[EnrolledIndividual]:
+ """
+ **Availability: Automated and Assisted Benefits providers**
+
+ Enroll an individual into a benefit. If the employee is already enrolled, the
+ enrollment amounts will be adjusted.
+
+
+
+ > Making changes to an individual's benefits may have tax consequences based on
+ > IRS regulations. Please consult a tax expert to ensure all changes being made
+ > to the system are compliant with local, state, and federal law.
+
+ Args:
+ individuals: Array of the individual_id to enroll and a configuration object.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._get_api_list(
+ f"/employer/benefits/{benefit_id}/individuals",
+ page=SyncSinglePage[EnrolledIndividual],
+ body=maybe_transform(individuals, individual_enroll_many_params.IndividualEnrollManyParams),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ model=EnrolledIndividual,
+ method="post",
+ )
+
+ def enrolled_ids(
+ self,
+ benefit_id: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> IndividualEnrolledIDsResponse:
+ """
+ **Availability: Automated Benefits providers only**
+
+ Lists individuals currently enrolled in a given benefit.
+ """
+ return self._get(
+ f"/employer/benefits/{benefit_id}/enrolled",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=IndividualEnrolledIDsResponse,
+ )
+
+ def retrieve_many_benefits(
+ self,
+ benefit_id: str,
+ *,
+ individual_ids: str | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> SyncSinglePage[IndividualBenefit]:
+ """
+ **Availability: Automated Benefits providers only**
+
+ Get enrolled benefit information for the given individuals.
+
+ Args:
+ individual_ids: comma-delimited list of stable Finch uuids for each individual. If empty,
+ defaults to all individuals
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._get_api_list(
+ f"/employer/benefits/{benefit_id}/individuals",
+ page=SyncSinglePage[IndividualBenefit],
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=maybe_transform(
+ {"individual_ids": individual_ids},
+ individual_retrieve_many_benefits_params.IndividualRetrieveManyBenefitsParams,
+ ),
+ ),
+ model=IndividualBenefit,
+ )
+
+ def unenroll(
+ self,
+ benefit_id: str,
+ *,
+ individual_ids: List[str] | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> SyncSinglePage[UnenrolledIndividual]:
+ """
+ **Availability: Automated and Assisted Benefits providers**
+
+ Unenroll individuals from a benefit
+
+ Args:
+ individual_ids: Array of individual_ids to unenroll.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._get_api_list(
+ f"/employer/benefits/{benefit_id}/individuals",
+ page=SyncSinglePage[UnenrolledIndividual],
+ body=maybe_transform(
+ {"individual_ids": individual_ids}, individual_unenroll_params.IndividualUnenrollParams
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ model=UnenrolledIndividual,
+ method="delete",
+ )
+
+
+class AsyncIndividuals(AsyncAPIResource):
+ def enroll_many(
+ self,
+ benefit_id: str,
+ *,
+ individuals: List[individual_enroll_many_params.IndividualEnrollManyParam],
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> AsyncPaginator[EnrolledIndividual, AsyncSinglePage[EnrolledIndividual]]:
+ """
+ **Availability: Automated and Assisted Benefits providers**
+
+ Enroll an individual into a benefit. If the employee is already enrolled, the
+ enrollment amounts will be adjusted.
+
+
+
+ > Making changes to an individual's benefits may have tax consequences based on
+ > IRS regulations. Please consult a tax expert to ensure all changes being made
+ > to the system are compliant with local, state, and federal law.
+
+ Args:
+ individuals: Array of the individual_id to enroll and a configuration object.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._get_api_list(
+ f"/employer/benefits/{benefit_id}/individuals",
+ page=AsyncSinglePage[EnrolledIndividual],
+ body=maybe_transform(individuals, individual_enroll_many_params.IndividualEnrollManyParams),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ model=EnrolledIndividual,
+ method="post",
+ )
+
+ async def enrolled_ids(
+ self,
+ benefit_id: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> IndividualEnrolledIDsResponse:
+ """
+ **Availability: Automated Benefits providers only**
+
+ Lists individuals currently enrolled in a given benefit.
+ """
+ return await self._get(
+ f"/employer/benefits/{benefit_id}/enrolled",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=IndividualEnrolledIDsResponse,
+ )
+
+ def retrieve_many_benefits(
+ self,
+ benefit_id: str,
+ *,
+ individual_ids: str | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> AsyncPaginator[IndividualBenefit, AsyncSinglePage[IndividualBenefit]]:
+ """
+ **Availability: Automated Benefits providers only**
+
+ Get enrolled benefit information for the given individuals.
+
+ Args:
+ individual_ids: comma-delimited list of stable Finch uuids for each individual. If empty,
+ defaults to all individuals
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._get_api_list(
+ f"/employer/benefits/{benefit_id}/individuals",
+ page=AsyncSinglePage[IndividualBenefit],
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=maybe_transform(
+ {"individual_ids": individual_ids},
+ individual_retrieve_many_benefits_params.IndividualRetrieveManyBenefitsParams,
+ ),
+ ),
+ model=IndividualBenefit,
+ )
+
+ def unenroll(
+ self,
+ benefit_id: str,
+ *,
+ individual_ids: List[str] | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> AsyncPaginator[UnenrolledIndividual, AsyncSinglePage[UnenrolledIndividual]]:
+ """
+ **Availability: Automated and Assisted Benefits providers**
+
+ Unenroll individuals from a benefit
+
+ Args:
+ individual_ids: Array of individual_ids to unenroll.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._get_api_list(
+ f"/employer/benefits/{benefit_id}/individuals",
+ page=AsyncSinglePage[UnenrolledIndividual],
+ body=maybe_transform(
+ {"individual_ids": individual_ids}, individual_unenroll_params.IndividualUnenrollParams
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ model=UnenrolledIndividual,
+ method="delete",
+ )
diff --git a/src/finch/resources/hris/company.py b/src/finch/resources/hris/company.py
new file mode 100644
index 00000000..30de8618
--- /dev/null
+++ b/src/finch/resources/hris/company.py
@@ -0,0 +1,52 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._resource import SyncAPIResource, AsyncAPIResource
+from ...types.hris import Company
+from ..._base_client import make_request_options
+
+__all__ = ["CompanyResource", "AsyncCompanyResource"]
+
+
+class CompanyResource(SyncAPIResource):
+ def retrieve(
+ self,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> Company:
+ """Read basic company data"""
+ return self._get(
+ "/employer/company",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=Company,
+ )
+
+
+class AsyncCompanyResource(AsyncAPIResource):
+ async def retrieve(
+ self,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> Company:
+ """Read basic company data"""
+ return await self._get(
+ "/employer/company",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=Company,
+ )
diff --git a/src/finch/resources/hris/directory.py b/src/finch/resources/hris/directory.py
new file mode 100644
index 00000000..0076a97c
--- /dev/null
+++ b/src/finch/resources/hris/directory.py
@@ -0,0 +1,110 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._utils import maybe_transform
+from ..._resource import SyncAPIResource, AsyncAPIResource
+from ...pagination import SyncIndividualsPage, AsyncIndividualsPage
+from ...types.hris import IndividualInDirectory, directory_list_individuals_params
+from ..._base_client import AsyncPaginator, make_request_options
+
+__all__ = ["Directory", "AsyncDirectory"]
+
+
+class Directory(SyncAPIResource):
+ def list_individuals(
+ self,
+ *,
+ limit: int | NotGiven = NOT_GIVEN,
+ offset: int | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> SyncIndividualsPage[IndividualInDirectory]:
+ """
+ Read company directory and organization structure
+
+ Args:
+ limit: Number of employees to return (defaults to all)
+
+ offset: Index to start from (defaults to 0)
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._get_api_list(
+ "/employer/directory",
+ page=SyncIndividualsPage[IndividualInDirectory],
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=maybe_transform(
+ {
+ "limit": limit,
+ "offset": offset,
+ },
+ directory_list_individuals_params.DirectoryListIndividualsParams,
+ ),
+ ),
+ model=IndividualInDirectory,
+ )
+
+
+class AsyncDirectory(AsyncAPIResource):
+ def list_individuals(
+ self,
+ *,
+ limit: int | NotGiven = NOT_GIVEN,
+ offset: int | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> AsyncPaginator[IndividualInDirectory, AsyncIndividualsPage[IndividualInDirectory]]:
+ """
+ Read company directory and organization structure
+
+ Args:
+ limit: Number of employees to return (defaults to all)
+
+ offset: Index to start from (defaults to 0)
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._get_api_list(
+ "/employer/directory",
+ page=AsyncIndividualsPage[IndividualInDirectory],
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=maybe_transform(
+ {
+ "limit": limit,
+ "offset": offset,
+ },
+ directory_list_individuals_params.DirectoryListIndividualsParams,
+ ),
+ ),
+ model=IndividualInDirectory,
+ )
diff --git a/src/finch/resources/hris/hris.py b/src/finch/resources/hris/hris.py
new file mode 100644
index 00000000..b0fbfd06
--- /dev/null
+++ b/src/finch/resources/hris/hris.py
@@ -0,0 +1,54 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from .company import CompanyResource, AsyncCompanyResource
+from .benefits import Benefits, AsyncBenefits
+from .payments import Payments, AsyncPayments
+from .directory import Directory, AsyncDirectory
+from ..._resource import SyncAPIResource, AsyncAPIResource
+from .individuals import Individuals, AsyncIndividuals
+from .pay_statements import PayStatements, AsyncPayStatements
+
+if TYPE_CHECKING:
+ from ..._client import Finch, AsyncFinch
+
+__all__ = ["HRIS", "AsyncHRIS"]
+
+
+class HRIS(SyncAPIResource):
+ company: CompanyResource
+ payments: Payments
+ pay_statements: PayStatements
+ directory: Directory
+ individuals: Individuals
+ benefits: Benefits
+
+ def __init__(self, client: Finch) -> None:
+ super().__init__(client)
+ self.company = CompanyResource(client)
+ self.payments = Payments(client)
+ self.pay_statements = PayStatements(client)
+ self.directory = Directory(client)
+ self.individuals = Individuals(client)
+ self.benefits = Benefits(client)
+
+
+class AsyncHRIS(AsyncAPIResource):
+ company: AsyncCompanyResource
+ payments: AsyncPayments
+ pay_statements: AsyncPayStatements
+ directory: AsyncDirectory
+ individuals: AsyncIndividuals
+ benefits: AsyncBenefits
+
+ def __init__(self, client: AsyncFinch) -> None:
+ super().__init__(client)
+ self.company = AsyncCompanyResource(client)
+ self.payments = AsyncPayments(client)
+ self.pay_statements = AsyncPayStatements(client)
+ self.directory = AsyncDirectory(client)
+ self.individuals = AsyncIndividuals(client)
+ self.benefits = AsyncBenefits(client)
diff --git a/src/finch/resources/hris/individuals/__init__.py b/src/finch/resources/hris/individuals/__init__.py
new file mode 100644
index 00000000..e8c1f441
--- /dev/null
+++ b/src/finch/resources/hris/individuals/__init__.py
@@ -0,0 +1,6 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from .individuals import Individuals, AsyncIndividuals
+from .employment_data import EmploymentData, AsyncEmploymentData
+
+__all__ = ["EmploymentData", "AsyncEmploymentData", "Individuals", "AsyncIndividuals"]
diff --git a/src/finch/resources/hris/individuals/employment_data.py b/src/finch/resources/hris/individuals/employment_data.py
new file mode 100644
index 00000000..05ea5540
--- /dev/null
+++ b/src/finch/resources/hris/individuals/employment_data.py
@@ -0,0 +1,105 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from typing import List
+
+from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ...._utils import maybe_transform
+from ...._resource import SyncAPIResource, AsyncAPIResource
+from ....pagination import SyncResponsesPage, AsyncResponsesPage
+from ...._base_client import AsyncPaginator, make_request_options
+from ....types.hris.individuals import (
+ EmploymentDataResponse,
+ employment_data_retrieve_many_params,
+)
+
+__all__ = ["EmploymentData", "AsyncEmploymentData"]
+
+
+class EmploymentData(SyncAPIResource):
+ def retrieve_many(
+ self,
+ *,
+ requests: List[employment_data_retrieve_many_params.Request],
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> SyncResponsesPage[EmploymentDataResponse]:
+ """
+ Read individual employment and income data
+
+ Note: Income information is returned as reported by the provider. This may not
+ always be annualized income, but may be in units of bi-weekly, semi-monthly,
+ daily, etc, depending on what information the provider returns.
+
+ Args:
+ requests: The array of batch requests.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._get_api_list(
+ "/employer/employment",
+ page=SyncResponsesPage[EmploymentDataResponse],
+ body=maybe_transform(
+ {"requests": requests}, employment_data_retrieve_many_params.EmploymentDataRetrieveManyParams
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ model=EmploymentDataResponse,
+ method="post",
+ )
+
+
+class AsyncEmploymentData(AsyncAPIResource):
+ def retrieve_many(
+ self,
+ *,
+ requests: List[employment_data_retrieve_many_params.Request],
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> AsyncPaginator[EmploymentDataResponse, AsyncResponsesPage[EmploymentDataResponse]]:
+ """
+ Read individual employment and income data
+
+ Note: Income information is returned as reported by the provider. This may not
+ always be annualized income, but may be in units of bi-weekly, semi-monthly,
+ daily, etc, depending on what information the provider returns.
+
+ Args:
+ requests: The array of batch requests.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._get_api_list(
+ "/employer/employment",
+ page=AsyncResponsesPage[EmploymentDataResponse],
+ body=maybe_transform(
+ {"requests": requests}, employment_data_retrieve_many_params.EmploymentDataRetrieveManyParams
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ model=EmploymentDataResponse,
+ method="post",
+ )
diff --git a/src/finch/resources/hris/individuals/individuals.py b/src/finch/resources/hris/individuals/individuals.py
new file mode 100644
index 00000000..5e2a3d58
--- /dev/null
+++ b/src/finch/resources/hris/individuals/individuals.py
@@ -0,0 +1,116 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, List, Optional
+
+from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ...._utils import maybe_transform
+from ...._resource import SyncAPIResource, AsyncAPIResource
+from ....pagination import SyncResponsesPage, AsyncResponsesPage
+from ....types.hris import IndividualResponse, individual_retrieve_many_params
+from ...._base_client import AsyncPaginator, make_request_options
+from .employment_data import EmploymentData, AsyncEmploymentData
+
+if TYPE_CHECKING:
+ from ...._client import Finch, AsyncFinch
+
+__all__ = ["Individuals", "AsyncIndividuals"]
+
+
+class Individuals(SyncAPIResource):
+ employment_data: EmploymentData
+
+ def __init__(self, client: Finch) -> None:
+ super().__init__(client)
+ self.employment_data = EmploymentData(client)
+
+ def retrieve_many(
+ self,
+ *,
+ options: Optional[individual_retrieve_many_params.Options] | NotGiven = NOT_GIVEN,
+ requests: List[individual_retrieve_many_params.Request] | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> SyncResponsesPage[IndividualResponse]:
+ """
+ Read individual data, excluding income and employment data
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._get_api_list(
+ "/employer/individual",
+ page=SyncResponsesPage[IndividualResponse],
+ body=maybe_transform(
+ {
+ "requests": requests,
+ "options": options,
+ },
+ individual_retrieve_many_params.IndividualRetrieveManyParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ model=IndividualResponse,
+ method="post",
+ )
+
+
+class AsyncIndividuals(AsyncAPIResource):
+ employment_data: AsyncEmploymentData
+
+ def __init__(self, client: AsyncFinch) -> None:
+ super().__init__(client)
+ self.employment_data = AsyncEmploymentData(client)
+
+ def retrieve_many(
+ self,
+ *,
+ options: Optional[individual_retrieve_many_params.Options] | NotGiven = NOT_GIVEN,
+ requests: List[individual_retrieve_many_params.Request] | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> AsyncPaginator[IndividualResponse, AsyncResponsesPage[IndividualResponse]]:
+ """
+ Read individual data, excluding income and employment data
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._get_api_list(
+ "/employer/individual",
+ page=AsyncResponsesPage[IndividualResponse],
+ body=maybe_transform(
+ {
+ "requests": requests,
+ "options": options,
+ },
+ individual_retrieve_many_params.IndividualRetrieveManyParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ model=IndividualResponse,
+ method="post",
+ )
diff --git a/src/finch/resources/hris/pay_statements.py b/src/finch/resources/hris/pay_statements.py
new file mode 100644
index 00000000..6a78a424
--- /dev/null
+++ b/src/finch/resources/hris/pay_statements.py
@@ -0,0 +1,100 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from typing import List
+
+from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._utils import maybe_transform
+from ..._resource import SyncAPIResource, AsyncAPIResource
+from ...pagination import SyncResponsesPage, AsyncResponsesPage
+from ...types.hris import PayStatementResponse, pay_statement_retrieve_many_params
+from ..._base_client import AsyncPaginator, make_request_options
+
+__all__ = ["PayStatements", "AsyncPayStatements"]
+
+
+class PayStatements(SyncAPIResource):
+ def retrieve_many(
+ self,
+ *,
+ requests: List[pay_statement_retrieve_many_params.Request],
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> SyncResponsesPage[PayStatementResponse]:
+ """
+ Read detailed pay statements for each individual.
+
+ Deduction and contribution types are supported by the payroll systems that
+ support Benefits.
+
+ Args:
+ requests: The array of batch requests.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._get_api_list(
+ "/employer/pay-statement",
+ page=SyncResponsesPage[PayStatementResponse],
+ body=maybe_transform(
+ {"requests": requests}, pay_statement_retrieve_many_params.PayStatementRetrieveManyParams
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ model=PayStatementResponse,
+ method="post",
+ )
+
+
+class AsyncPayStatements(AsyncAPIResource):
+ def retrieve_many(
+ self,
+ *,
+ requests: List[pay_statement_retrieve_many_params.Request],
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> AsyncPaginator[PayStatementResponse, AsyncResponsesPage[PayStatementResponse]]:
+ """
+ Read detailed pay statements for each individual.
+
+ Deduction and contribution types are supported by the payroll systems that
+ support Benefits.
+
+ Args:
+ requests: The array of batch requests.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._get_api_list(
+ "/employer/pay-statement",
+ page=AsyncResponsesPage[PayStatementResponse],
+ body=maybe_transform(
+ {"requests": requests}, pay_statement_retrieve_many_params.PayStatementRetrieveManyParams
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ model=PayStatementResponse,
+ method="post",
+ )
diff --git a/src/finch/resources/hris/payments.py b/src/finch/resources/hris/payments.py
new file mode 100644
index 00000000..2eba7fe6
--- /dev/null
+++ b/src/finch/resources/hris/payments.py
@@ -0,0 +1,117 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from typing import Union
+from datetime import date
+
+from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._utils import maybe_transform
+from ..._resource import SyncAPIResource, AsyncAPIResource
+from ...pagination import SyncSinglePage, AsyncSinglePage
+from ...types.hris import Payment, payment_list_params
+from ..._base_client import AsyncPaginator, make_request_options
+
+__all__ = ["Payments", "AsyncPayments"]
+
+
+class Payments(SyncAPIResource):
+ def list(
+ self,
+ *,
+ end_date: Union[str, date],
+ start_date: Union[str, date],
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> SyncSinglePage[Payment]:
+ """
+ Read payroll and contractor related payments by the company.
+
+ Args:
+ end_date: The end date to retrieve payments by a company (inclusive) in `YYYY-MM-DD`
+ format.
+
+ start_date: The start date to retrieve payments by a company (inclusive) in `YYYY-MM-DD`
+ format.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._get_api_list(
+ "/employer/payment",
+ page=SyncSinglePage[Payment],
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=maybe_transform(
+ {
+ "start_date": start_date,
+ "end_date": end_date,
+ },
+ payment_list_params.PaymentListParams,
+ ),
+ ),
+ model=Payment,
+ )
+
+
+class AsyncPayments(AsyncAPIResource):
+ def list(
+ self,
+ *,
+ end_date: Union[str, date],
+ start_date: Union[str, date],
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> AsyncPaginator[Payment, AsyncSinglePage[Payment]]:
+ """
+ Read payroll and contractor related payments by the company.
+
+ Args:
+ end_date: The end date to retrieve payments by a company (inclusive) in `YYYY-MM-DD`
+ format.
+
+ start_date: The start date to retrieve payments by a company (inclusive) in `YYYY-MM-DD`
+ format.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._get_api_list(
+ "/employer/payment",
+ page=AsyncSinglePage[Payment],
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=maybe_transform(
+ {
+ "start_date": start_date,
+ "end_date": end_date,
+ },
+ payment_list_params.PaymentListParams,
+ ),
+ ),
+ model=Payment,
+ )
diff --git a/src/finch/resources/providers.py b/src/finch/resources/providers.py
new file mode 100644
index 00000000..57d70a3b
--- /dev/null
+++ b/src/finch/resources/providers.py
@@ -0,0 +1,55 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from ..types import Provider
+from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from .._resource import SyncAPIResource, AsyncAPIResource
+from ..pagination import SyncSinglePage, AsyncSinglePage
+from .._base_client import AsyncPaginator, make_request_options
+
+__all__ = ["Providers", "AsyncProviders"]
+
+
+class Providers(SyncAPIResource):
+ def list(
+ self,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> SyncSinglePage[Provider]:
+ """Return details on all available payroll and HR systems."""
+ return self._get_api_list(
+ "/providers",
+ page=SyncSinglePage[Provider],
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ model=Provider,
+ )
+
+
+class AsyncProviders(AsyncAPIResource):
+ def list(
+ self,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | None | NotGiven = NOT_GIVEN,
+ ) -> AsyncPaginator[Provider, AsyncSinglePage[Provider]]:
+ """Return details on all available payroll and HR systems."""
+ return self._get_api_list(
+ "/providers",
+ page=AsyncSinglePage[Provider],
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ model=Provider,
+ )
diff --git a/src/finch/types/__init__.py b/src/finch/types/__init__.py
new file mode 100644
index 00000000..dc60674b
--- /dev/null
+++ b/src/finch/types/__init__.py
@@ -0,0 +1,11 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from .money import Money as Money
+from .income import Income as Income
+from .paging import Paging as Paging
+from .location import Location as Location
+from .provider import Provider as Provider
+from .introspection import Introspection as Introspection
+from .disconnect_response import DisconnectResponse as DisconnectResponse
diff --git a/src/finch/types/ats/__init__.py b/src/finch/types/ats/__init__.py
new file mode 100644
index 00000000..8ead8f42
--- /dev/null
+++ b/src/finch/types/ats/__init__.py
@@ -0,0 +1,13 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from .job import Job as Job
+from .offer import Offer as Offer
+from .stage import Stage as Stage
+from .candidate import Candidate as Candidate
+from .application import Application as Application
+from .job_list_params import JobListParams as JobListParams
+from .offer_list_params import OfferListParams as OfferListParams
+from .candidate_list_params import CandidateListParams as CandidateListParams
+from .application_list_params import ApplicationListParams as ApplicationListParams
diff --git a/src/finch/types/ats/application.py b/src/finch/types/ats/application.py
new file mode 100644
index 00000000..e08f4eb5
--- /dev/null
+++ b/src/finch/types/ats/application.py
@@ -0,0 +1,29 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import Optional
+from datetime import datetime
+
+from ..._models import BaseModel
+from ...types.ats import stage
+
+__all__ = ["Application", "RejectedReason"]
+
+
+class RejectedReason(BaseModel):
+ text: Optional[str]
+
+
+class Application(BaseModel):
+ candidate_id: str
+
+ id: str
+
+ job_id: str
+
+ offer_id: Optional[str]
+
+ rejected_at: Optional[datetime]
+
+ rejected_reason: Optional[RejectedReason]
+
+ stage: Optional[stage.Stage]
diff --git a/src/finch/types/ats/application_list_params.py b/src/finch/types/ats/application_list_params.py
new file mode 100644
index 00000000..c6581adf
--- /dev/null
+++ b/src/finch/types/ats/application_list_params.py
@@ -0,0 +1,15 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from typing_extensions import TypedDict
+
+__all__ = ["ApplicationListParams"]
+
+
+class ApplicationListParams(TypedDict, total=False):
+ limit: int
+ """Number of applications to return (defaults to all)"""
+
+ offset: int
+ """Index to start from (defaults to 0)"""
diff --git a/src/finch/types/ats/candidate.py b/src/finch/types/ats/candidate.py
new file mode 100644
index 00000000..1792a092
--- /dev/null
+++ b/src/finch/types/ats/candidate.py
@@ -0,0 +1,41 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import List, Optional
+from datetime import datetime
+
+from ..._models import BaseModel
+
+__all__ = ["Candidate", "Email", "PhoneNumber"]
+
+
+class Email(BaseModel):
+ data: Optional[str]
+
+ type: Optional[str]
+
+
+class PhoneNumber(BaseModel):
+ data: Optional[str]
+
+ type: Optional[str]
+
+
+class Candidate(BaseModel):
+ application_ids: List[str]
+ """Array of Finch uuids corresponding to `application`s for this individual"""
+
+ created_at: datetime
+
+ emails: List[Email]
+
+ first_name: Optional[str]
+
+ full_name: Optional[str]
+
+ id: str
+
+ last_activity_at: datetime
+
+ last_name: Optional[str]
+
+ phone_numbers: List[PhoneNumber]
diff --git a/src/finch/types/ats/candidate_list_params.py b/src/finch/types/ats/candidate_list_params.py
new file mode 100644
index 00000000..c7cac955
--- /dev/null
+++ b/src/finch/types/ats/candidate_list_params.py
@@ -0,0 +1,15 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from typing_extensions import TypedDict
+
+__all__ = ["CandidateListParams"]
+
+
+class CandidateListParams(TypedDict, total=False):
+ limit: int
+ """Number of candidates to return (defaults to all)"""
+
+ offset: int
+ """Index to start from (defaults to 0)"""
diff --git a/src/finch/types/ats/job.py b/src/finch/types/ats/job.py
new file mode 100644
index 00000000..5e53b52a
--- /dev/null
+++ b/src/finch/types/ats/job.py
@@ -0,0 +1,43 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import List, Optional
+from datetime import datetime
+from typing_extensions import Literal
+
+from ..._models import BaseModel
+
+__all__ = ["Job", "Department", "HiringTeam", "HiringTeamHiringManager", "HiringTeamRecruiter"]
+
+
+class Department(BaseModel):
+ name: Optional[str]
+
+
+class HiringTeamHiringManager(BaseModel):
+ name: Optional[str]
+
+
+class HiringTeamRecruiter(BaseModel):
+ name: Optional[str]
+
+
+class HiringTeam(BaseModel):
+ hiring_managers: Optional[List[HiringTeamHiringManager]]
+
+ recruiters: Optional[List[HiringTeamRecruiter]]
+
+
+class Job(BaseModel):
+ closed_at: Optional[datetime]
+
+ created_at: Optional[datetime]
+
+ department: Department
+
+ hiring_team: HiringTeam
+
+ id: str
+
+ name: Optional[str]
+
+ status: Optional[Literal["open", "closed", "on_hold", "draft", "archived"]]
diff --git a/src/finch/types/ats/job_list_params.py b/src/finch/types/ats/job_list_params.py
new file mode 100644
index 00000000..d0a3c034
--- /dev/null
+++ b/src/finch/types/ats/job_list_params.py
@@ -0,0 +1,15 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from typing_extensions import TypedDict
+
+__all__ = ["JobListParams"]
+
+
+class JobListParams(TypedDict, total=False):
+ limit: int
+ """Number of jobs to return (defaults to all)"""
+
+ offset: int
+ """Index to start from (defaults to 0)"""
diff --git a/src/finch/types/ats/offer.py b/src/finch/types/ats/offer.py
new file mode 100644
index 00000000..17f7e7ac
--- /dev/null
+++ b/src/finch/types/ats/offer.py
@@ -0,0 +1,26 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from datetime import datetime
+from typing_extensions import Literal
+
+from ..._models import BaseModel
+
+__all__ = ["Offer"]
+
+
+class Offer(BaseModel):
+ application_id: str
+
+ candidate_id: str
+
+ created_at: datetime
+
+ id: str
+
+ job_id: str
+
+ status: Literal[
+ "signed", "rejected", "draft", "approval-sent", "approved", "sent", "sent-manually", "opened", "archived"
+ ]
+
+ updated_at: datetime
diff --git a/src/finch/types/ats/offer_list_params.py b/src/finch/types/ats/offer_list_params.py
new file mode 100644
index 00000000..3df4dd5f
--- /dev/null
+++ b/src/finch/types/ats/offer_list_params.py
@@ -0,0 +1,15 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from typing_extensions import TypedDict
+
+__all__ = ["OfferListParams"]
+
+
+class OfferListParams(TypedDict, total=False):
+ limit: int
+ """Number of offers to return (defaults to all)"""
+
+ offset: int
+ """Index to start from (defaults to 0)"""
diff --git a/src/finch/types/ats/stage.py b/src/finch/types/ats/stage.py
new file mode 100644
index 00000000..aa68f561
--- /dev/null
+++ b/src/finch/types/ats/stage.py
@@ -0,0 +1,19 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import Optional
+
+from ..._models import BaseModel
+
+__all__ = ["Stage"]
+
+
+class Stage(BaseModel):
+ id: Optional[str]
+
+ job_id: Optional[str]
+ """The job id that this stage applies to, if applicable.
+
+ Not all systems enumerate stages specific to jobs.
+ """
+
+ name: Optional[str]
diff --git a/src/finch/types/disconnect_response.py b/src/finch/types/disconnect_response.py
new file mode 100644
index 00000000..f33e239b
--- /dev/null
+++ b/src/finch/types/disconnect_response.py
@@ -0,0 +1,10 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from .._models import BaseModel
+
+__all__ = ["DisconnectResponse"]
+
+
+class DisconnectResponse(BaseModel):
+ status: str
+ """If the request is successful, Finch will return “success” (HTTP 200 status)."""
diff --git a/src/finch/types/hris/__init__.py b/src/finch/types/hris/__init__.py
new file mode 100644
index 00000000..e3045ae3
--- /dev/null
+++ b/src/finch/types/hris/__init__.py
@@ -0,0 +1,37 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from .company import Company as Company
+from .payment import Payment as Payment
+from .individual import Individual as Individual
+from .benefit_type import BenefitType as BenefitType
+from .pay_statement import PayStatement as PayStatement
+from .company_benefit import CompanyBenefit as CompanyBenefit
+from .benefit_frequency import BenefitFrequency as BenefitFrequency
+from .supported_benefit import SupportedBenefit as SupportedBenefit
+from .benfit_contribution import BenfitContribution as BenfitContribution
+from .individual_response import IndividualResponse as IndividualResponse
+from .payment_list_params import PaymentListParams as PaymentListParams
+from .benefit_create_params import BenefitCreateParams as BenefitCreateParams
+from .benefit_update_params import BenefitUpdateParams as BenefitUpdateParams
+from .pay_statement_response import PayStatementResponse as PayStatementResponse
+from .individual_in_directory import IndividualInDirectory as IndividualInDirectory
+from .pay_statement_response_body import (
+ PayStatementResponseBody as PayStatementResponseBody,
+)
+from .individual_retrieve_many_params import (
+ IndividualRetrieveManyParams as IndividualRetrieveManyParams,
+)
+from .update_company_benefit_response import (
+ UpdateCompanyBenefitResponse as UpdateCompanyBenefitResponse,
+)
+from .create_company_benefits_response import (
+ CreateCompanyBenefitsResponse as CreateCompanyBenefitsResponse,
+)
+from .directory_list_individuals_params import (
+ DirectoryListIndividualsParams as DirectoryListIndividualsParams,
+)
+from .pay_statement_retrieve_many_params import (
+ PayStatementRetrieveManyParams as PayStatementRetrieveManyParams,
+)
diff --git a/src/finch/types/hris/benefit_create_params.py b/src/finch/types/hris/benefit_create_params.py
new file mode 100644
index 00000000..195e2ee3
--- /dev/null
+++ b/src/finch/types/hris/benefit_create_params.py
@@ -0,0 +1,39 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from typing import Optional
+from typing_extensions import Literal, TypedDict
+
+__all__ = ["BenefitCreateParams"]
+
+
+class BenefitCreateParams(TypedDict, total=False):
+ description: str
+
+ frequency: Optional[Literal["one_time", "every_paycheck"]]
+
+ type: Optional[
+ Literal[
+ "401k",
+ "401k_roth",
+ "401k_loan",
+ "403b",
+ "403b_roth",
+ "457",
+ "457_roth",
+ "s125_medical",
+ "s125_dental",
+ "s125_vision",
+ "hsa_pre",
+ "hsa_post",
+ "fsa_medical",
+ "fsa_dependent_care",
+ "simple_ira",
+ "simple",
+ "commuter",
+ "custom_post_tax",
+ "custom_pre_tax",
+ ]
+ ]
+ """Type of benefit."""
diff --git a/src/finch/types/hris/benefit_frequency.py b/src/finch/types/hris/benefit_frequency.py
new file mode 100644
index 00000000..b4a7414f
--- /dev/null
+++ b/src/finch/types/hris/benefit_frequency.py
@@ -0,0 +1,8 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import Optional
+from typing_extensions import Literal
+
+__all__ = ["BenefitFrequency"]
+
+BenefitFrequency = Optional[Literal["one_time", "every_paycheck"]]
diff --git a/src/finch/types/hris/benefit_type.py b/src/finch/types/hris/benefit_type.py
new file mode 100644
index 00000000..81735c9c
--- /dev/null
+++ b/src/finch/types/hris/benefit_type.py
@@ -0,0 +1,30 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import Optional
+from typing_extensions import Literal
+
+__all__ = ["BenefitType"]
+
+BenefitType = Optional[
+ Literal[
+ "401k",
+ "401k_roth",
+ "401k_loan",
+ "403b",
+ "403b_roth",
+ "457",
+ "457_roth",
+ "s125_medical",
+ "s125_dental",
+ "s125_vision",
+ "hsa_pre",
+ "hsa_post",
+ "fsa_medical",
+ "fsa_dependent_care",
+ "simple_ira",
+ "simple",
+ "commuter",
+ "custom_post_tax",
+ "custom_pre_tax",
+ ]
+]
diff --git a/src/finch/types/hris/benefit_update_params.py b/src/finch/types/hris/benefit_update_params.py
new file mode 100644
index 00000000..00abf779
--- /dev/null
+++ b/src/finch/types/hris/benefit_update_params.py
@@ -0,0 +1,12 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from typing_extensions import TypedDict
+
+__all__ = ["BenefitUpdateParams"]
+
+
+class BenefitUpdateParams(TypedDict, total=False):
+ description: str
+ """Updated name or description."""
diff --git a/src/finch/types/hris/benefits/__init__.py b/src/finch/types/hris/benefits/__init__.py
new file mode 100644
index 00000000..9745ef54
--- /dev/null
+++ b/src/finch/types/hris/benefits/__init__.py
@@ -0,0 +1,19 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from .individual_benefit import IndividualBenefit as IndividualBenefit
+from .enrolled_individual import EnrolledIndividual as EnrolledIndividual
+from .unenrolled_individual import UnenrolledIndividual as UnenrolledIndividual
+from .individual_unenroll_params import (
+ IndividualUnenrollParams as IndividualUnenrollParams,
+)
+from .individual_enroll_many_params import (
+ IndividualEnrollManyParams as IndividualEnrollManyParams,
+)
+from .individual_enrolled_ids_response import (
+ IndividualEnrolledIDsResponse as IndividualEnrolledIDsResponse,
+)
+from .individual_retrieve_many_benefits_params import (
+ IndividualRetrieveManyBenefitsParams as IndividualRetrieveManyBenefitsParams,
+)
diff --git a/src/finch/types/hris/benefits/enrolled_individual.py b/src/finch/types/hris/benefits/enrolled_individual.py
new file mode 100644
index 00000000..3fbaf64e
--- /dev/null
+++ b/src/finch/types/hris/benefits/enrolled_individual.py
@@ -0,0 +1,28 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import Optional
+from typing_extensions import Literal
+
+from ...._models import BaseModel
+
+__all__ = ["EnrolledIndividual", "Body"]
+
+
+class Body(BaseModel):
+ finch_code: Optional[str]
+ """A descriptive identifier for the response"""
+
+ message: Optional[str]
+ """Short description in English that provides more information about the response."""
+
+ name: Optional[str]
+ """Identifier indicating whether the benefit was newly enrolled or updated."""
+
+
+class EnrolledIndividual(BaseModel):
+ body: Optional[Body]
+
+ code: Optional[Literal[200, 201, 404, 403]]
+ """HTTP status code. One of 200, 201, 403, or 404."""
+
+ individual_id: Optional[str]
diff --git a/src/finch/types/hris/benefits/individual_benefit.py b/src/finch/types/hris/benefits/individual_benefit.py
new file mode 100644
index 00000000..e549e80a
--- /dev/null
+++ b/src/finch/types/hris/benefits/individual_benefit.py
@@ -0,0 +1,37 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import Optional
+from typing_extensions import Literal
+
+from ...._models import BaseModel
+from ....types.hris import benfit_contribution
+
+__all__ = ["IndividualBenefit", "Body"]
+
+
+class Body(BaseModel):
+ annual_maximum: Optional[int]
+ """
+ If the benefit supports annual maximum, the amount in cents for this individual.
+ """
+
+ catch_up: Optional[bool]
+ """
+ If the benefit supports catch up (401k, 403b, etc.), whether catch up is enabled
+ for this individual.
+ """
+
+ company_contribution: Optional[benfit_contribution.BenfitContribution]
+
+ employee_deduction: Optional[benfit_contribution.BenfitContribution]
+
+ hsa_contribution_limit: Optional[Literal["individual", "family"]]
+ """Type for HSA contribution limit if the benefit is a HSA."""
+
+
+class IndividualBenefit(BaseModel):
+ body: Optional[Body]
+
+ code: Optional[int]
+
+ individual_id: Optional[str]
diff --git a/src/finch/types/hris/benefits/individual_enroll_many_params.py b/src/finch/types/hris/benefits/individual_enroll_many_params.py
new file mode 100644
index 00000000..62f2a2f2
--- /dev/null
+++ b/src/finch/types/hris/benefits/individual_enroll_many_params.py
@@ -0,0 +1,18 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from typing import List
+from typing_extensions import TypedDict
+
+__all__ = ["IndividualEnrollManyParam", "IndividualEnrollManyParams"]
+
+
+class IndividualEnrollManyParam(TypedDict, total=False):
+ configuration: object
+
+ individual_id: str
+ """Finch id (uuidv4) for the individual to enroll"""
+
+
+IndividualEnrollManyParams = List[IndividualEnrollManyParam]
diff --git a/src/finch/types/hris/benefits/individual_enrolled_ids_response.py b/src/finch/types/hris/benefits/individual_enrolled_ids_response.py
new file mode 100644
index 00000000..a7281461
--- /dev/null
+++ b/src/finch/types/hris/benefits/individual_enrolled_ids_response.py
@@ -0,0 +1,13 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import List
+
+from ...._models import BaseModel
+
+__all__ = ["IndividualEnrolledIDsResponse"]
+
+
+class IndividualEnrolledIDsResponse(BaseModel):
+ benefit_id: str
+
+ individual_ids: List[str]
diff --git a/src/finch/types/hris/benefits/individual_retrieve_many_benefits_params.py b/src/finch/types/hris/benefits/individual_retrieve_many_benefits_params.py
new file mode 100644
index 00000000..6905dac9
--- /dev/null
+++ b/src/finch/types/hris/benefits/individual_retrieve_many_benefits_params.py
@@ -0,0 +1,15 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from typing_extensions import TypedDict
+
+__all__ = ["IndividualRetrieveManyBenefitsParams"]
+
+
+class IndividualRetrieveManyBenefitsParams(TypedDict, total=False):
+ individual_ids: str
+ """comma-delimited list of stable Finch uuids for each individual.
+
+ If empty, defaults to all individuals
+ """
diff --git a/src/finch/types/hris/benefits/individual_unenroll_params.py b/src/finch/types/hris/benefits/individual_unenroll_params.py
new file mode 100644
index 00000000..f53a33a8
--- /dev/null
+++ b/src/finch/types/hris/benefits/individual_unenroll_params.py
@@ -0,0 +1,13 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from typing import List
+from typing_extensions import TypedDict
+
+__all__ = ["IndividualUnenrollParams"]
+
+
+class IndividualUnenrollParams(TypedDict, total=False):
+ individual_ids: List[str]
+ """Array of individual_ids to unenroll."""
diff --git a/src/finch/types/hris/benefits/unenrolled_individual.py b/src/finch/types/hris/benefits/unenrolled_individual.py
new file mode 100644
index 00000000..96eb5563
--- /dev/null
+++ b/src/finch/types/hris/benefits/unenrolled_individual.py
@@ -0,0 +1,27 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import Optional
+
+from ...._models import BaseModel
+
+__all__ = ["UnenrolledIndividual", "Body"]
+
+
+class Body(BaseModel):
+ finch_code: Optional[str]
+ """A descriptive identifier for the response."""
+
+ message: Optional[str]
+ """Short description in English that provides more information about the response."""
+
+ name: Optional[str]
+ """Identifier indicating whether the benefit was newly enrolled or updated."""
+
+
+class UnenrolledIndividual(BaseModel):
+ body: Optional[Body]
+
+ code: Optional[int]
+ """HTTP status code"""
+
+ individual_id: Optional[str]
diff --git a/src/finch/types/hris/benfit_contribution.py b/src/finch/types/hris/benfit_contribution.py
new file mode 100644
index 00000000..65e99cdb
--- /dev/null
+++ b/src/finch/types/hris/benfit_contribution.py
@@ -0,0 +1,16 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import Optional
+from typing_extensions import Literal
+
+from ..._models import BaseModel
+
+__all__ = ["BenfitContribution"]
+
+
+class BenfitContribution(BaseModel):
+ amount: Optional[int]
+ """Contribution amount in cents (if `fixed`) or basis points (if `percent`)."""
+
+ type: Optional[Literal["fixed", "percent"]]
+ """Contribution type."""
diff --git a/src/finch/types/hris/company.py b/src/finch/types/hris/company.py
new file mode 100644
index 00000000..ae98bba2
--- /dev/null
+++ b/src/finch/types/hris/company.py
@@ -0,0 +1,78 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import List, Optional
+from typing_extensions import Literal
+
+from ...types import location
+from ..._models import BaseModel
+
+__all__ = ["Company", "Entity", "Department", "DepartmentParent", "Account"]
+
+
+class Entity(BaseModel):
+ subtype: Optional[Literal["s_corporation", "c_corporation", "b_corporation"]]
+ """The tax payer subtype of the company."""
+
+ type: Optional[Literal["llc", "corporation", "sole_proprietor", "non_profit", "partnership", "cooperative"]]
+ """The tax payer type of the company."""
+
+
+class DepartmentParent(BaseModel):
+ name: Optional[str]
+ """The parent department's name."""
+
+
+class Department(BaseModel):
+ name: Optional[str]
+ """The department name."""
+
+ parent: Optional[DepartmentParent]
+ """The parent department, if present."""
+
+
+class Account(BaseModel):
+ account_name: Optional[str]
+ """The name of the bank associated in the payroll/HRIS system."""
+
+ account_number: Optional[str]
+ """10-12 digit number to specify the bank account"""
+
+ account_type: Optional[Literal["checking", "savings"]]
+ """The type of bank account."""
+
+ institution_name: Optional[str]
+ """Name of the banking institution."""
+
+ routing_number: Optional[str]
+ """A nine-digit code that's based on the U.S.
+
+ Bank location where your account was opened.
+ """
+
+
+class Company(BaseModel):
+ accounts: Optional[List[Account]]
+ """An array of bank account objects associated with the payroll/HRIS system."""
+
+ departments: Optional[List[Optional[Department]]]
+ """The array of company departments."""
+
+ ein: Optional[str]
+ """The employer identification number."""
+
+ entity: Optional[Entity]
+ """The entity type object."""
+
+ id: str
+ """A stable Finch `id` (UUID v4) for the company."""
+
+ legal_name: Optional[str]
+ """The legal name of the company."""
+
+ locations: Optional[List[Optional[location.Location]]]
+
+ primary_email: Optional[str]
+ """The email of the main administrator on the account."""
+
+ primary_phone_number: Optional[str]
+ """The phone number of the main administrator on the account. Format: `XXXXXXXXXX`"""
diff --git a/src/finch/types/hris/company_benefit.py b/src/finch/types/hris/company_benefit.py
new file mode 100644
index 00000000..aa60bff7
--- /dev/null
+++ b/src/finch/types/hris/company_benefit.py
@@ -0,0 +1,23 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import Optional
+
+from ..._models import BaseModel
+from ...types.hris import benefit_type, benefit_frequency, benfit_contribution
+
+__all__ = ["CompanyBenefit"]
+
+
+class CompanyBenefit(BaseModel):
+ benefit_id: str
+
+ company_contribution: Optional[benfit_contribution.BenfitContribution]
+
+ description: Optional[str]
+
+ employee_deduction: Optional[benfit_contribution.BenfitContribution]
+
+ frequency: Optional[benefit_frequency.BenefitFrequency]
+
+ type: Optional[benefit_type.BenefitType]
+ """Type of benefit."""
diff --git a/src/finch/types/hris/create_company_benefits_response.py b/src/finch/types/hris/create_company_benefits_response.py
new file mode 100644
index 00000000..473ff89c
--- /dev/null
+++ b/src/finch/types/hris/create_company_benefits_response.py
@@ -0,0 +1,9 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from ..._models import BaseModel
+
+__all__ = ["CreateCompanyBenefitsResponse"]
+
+
+class CreateCompanyBenefitsResponse(BaseModel):
+ benefit_id: str
diff --git a/src/finch/types/hris/directory_list_individuals_params.py b/src/finch/types/hris/directory_list_individuals_params.py
new file mode 100644
index 00000000..de07f63f
--- /dev/null
+++ b/src/finch/types/hris/directory_list_individuals_params.py
@@ -0,0 +1,15 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from typing_extensions import TypedDict
+
+__all__ = ["DirectoryListIndividualsParams"]
+
+
+class DirectoryListIndividualsParams(TypedDict, total=False):
+ limit: int
+ """Number of employees to return (defaults to all)"""
+
+ offset: int
+ """Index to start from (defaults to 0)"""
diff --git a/src/finch/types/hris/individual.py b/src/finch/types/hris/individual.py
new file mode 100644
index 00000000..12ddb0c3
--- /dev/null
+++ b/src/finch/types/hris/individual.py
@@ -0,0 +1,55 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import List, Optional
+from typing_extensions import Literal
+
+from ...types import location
+from ..._models import BaseModel
+
+__all__ = ["Individual", "Email", "PhoneNumber"]
+
+
+class Email(BaseModel):
+ data: Optional[str]
+
+ type: Optional[Literal["work", "personal"]]
+
+
+class PhoneNumber(BaseModel):
+ data: Optional[str]
+
+ type: Optional[Literal["work", "personal"]]
+
+
+class Individual(BaseModel):
+ dob: Optional[str]
+
+ emails: Optional[List[Email]]
+
+ first_name: Optional[str]
+ """The legal first name of the individual."""
+
+ gender: Optional[Literal["female", "male", "other", "decline_to_specify"]]
+ """The gender of the individual."""
+
+ id: Optional[str]
+ """A stable Finch `id` (UUID v4) for an individual in the company."""
+
+ last_name: Optional[str]
+ """The legal last name of the individual."""
+
+ middle_name: Optional[str]
+ """The legal middle name of the individual."""
+
+ phone_numbers: Optional[List[Optional[PhoneNumber]]]
+
+ preferred_name: Optional[str]
+ """The preferred name of the individual."""
+
+ residence: Optional[location.Location]
+
+ ssn: Optional[str]
+ """Note: This property is only available if enabled for your account.
+
+ Please reach out to your Finch representative if you would like access.
+ """
diff --git a/src/finch/types/hris/individual_in_directory.py b/src/finch/types/hris/individual_in_directory.py
new file mode 100644
index 00000000..98651bfb
--- /dev/null
+++ b/src/finch/types/hris/individual_in_directory.py
@@ -0,0 +1,40 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import Optional
+
+from ..._models import BaseModel
+
+__all__ = ["IndividualInDirectory", "Manager", "Department"]
+
+
+class Manager(BaseModel):
+ id: Optional[str]
+ """A stable Finch `id` (UUID v4) for an individual in the company."""
+
+
+class Department(BaseModel):
+ name: Optional[str]
+ """The name of the department."""
+
+
+class IndividualInDirectory(BaseModel):
+ department: Optional[Department]
+ """The department object."""
+
+ first_name: Optional[str]
+ """The legal first name of the individual."""
+
+ id: Optional[str]
+ """A stable Finch id (UUID v4) for an individual in the company."""
+
+ is_active: Optional[bool]
+ """`true` if the individual is an active employee or contractor at the company."""
+
+ last_name: Optional[str]
+ """The legal last name of the individual."""
+
+ manager: Optional[Manager]
+ """The manager object."""
+
+ middle_name: Optional[str]
+ """The legal middle name of the individual."""
diff --git a/src/finch/types/hris/individual_response.py b/src/finch/types/hris/individual_response.py
new file mode 100644
index 00000000..797e1523
--- /dev/null
+++ b/src/finch/types/hris/individual_response.py
@@ -0,0 +1,16 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import Optional
+
+from ..._models import BaseModel
+from ...types.hris import individual
+
+__all__ = ["IndividualResponse"]
+
+
+class IndividualResponse(BaseModel):
+ body: Optional[individual.Individual]
+
+ code: Optional[int]
+
+ individual_id: Optional[str]
diff --git a/src/finch/types/hris/individual_retrieve_many_params.py b/src/finch/types/hris/individual_retrieve_many_params.py
new file mode 100644
index 00000000..f937db83
--- /dev/null
+++ b/src/finch/types/hris/individual_retrieve_many_params.py
@@ -0,0 +1,22 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from typing import List, Optional
+from typing_extensions import TypedDict
+
+__all__ = ["IndividualRetrieveManyParams", "Request", "Options"]
+
+
+class Request(TypedDict, total=False):
+ individual_id: str
+
+
+class Options(TypedDict, total=False):
+ include: List[str]
+
+
+class IndividualRetrieveManyParams(TypedDict, total=False):
+ options: Optional[Options]
+
+ requests: List[Request]
diff --git a/src/finch/types/hris/individuals/__init__.py b/src/finch/types/hris/individuals/__init__.py
new file mode 100644
index 00000000..0b0b2394
--- /dev/null
+++ b/src/finch/types/hris/individuals/__init__.py
@@ -0,0 +1,9 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from .employment_data import EmploymentData as EmploymentData
+from .employment_data_response import EmploymentDataResponse as EmploymentDataResponse
+from .employment_data_retrieve_many_params import (
+ EmploymentDataRetrieveManyParams as EmploymentDataRetrieveManyParams,
+)
diff --git a/src/finch/types/hris/individuals/employment_data.py b/src/finch/types/hris/individuals/employment_data.py
new file mode 100644
index 00000000..e3363f13
--- /dev/null
+++ b/src/finch/types/hris/individuals/employment_data.py
@@ -0,0 +1,99 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import List, Optional
+from typing_extensions import Literal
+
+from pydantic import Field as FieldInfo
+
+from ....types import income, location
+from ...._models import BaseModel
+
+__all__ = ["EmploymentData", "Manager", "Department", "Employment"]
+
+
+class Manager(BaseModel):
+ id: Optional[str]
+ """A stable Finch `id` (UUID v4) for an individual in the company."""
+
+
+class Department(BaseModel):
+ name: Optional[str]
+ """The name of the department associated with the individual."""
+
+
+class Employment(BaseModel):
+ subtype: Optional[Literal["full_time", "intern", "part_time", "temp", "seasonal", "individual_contractor"]]
+ """The secondary employment type of the individual.
+
+ Options: `full_time`, `part_time`, `intern`, `temp`, `seasonal` and
+ `individual_contractor`.
+ """
+
+ type: Optional[Literal["employee", "contractor"]]
+ """The main employment type of the individual."""
+
+
+class EmploymentData(BaseModel):
+ class_code: Optional[str]
+ """Worker's compensation classification code for this employee"""
+
+ department: Optional[Department]
+ """The department object."""
+
+ employment: Optional[Employment]
+ """The employment object."""
+
+ end_date: Optional[str]
+
+ first_name: Optional[str]
+ """The legal first name of the individual."""
+
+ id: Optional[str]
+    """A stable Finch `id` (UUID v4) for an individual in the company."""
+
+ income_history: Optional[List[Optional[income.Income]]]
+ """The array of income history."""
+
+ income: Optional[income.Income]
+ """The employee's income as reported by the provider.
+
+ This may not always be annualized income, but may be in units of bi-weekly,
+ semi-monthly, daily, etc, depending on what information the provider returns.
+ """
+
+ is_active: Optional[bool]
+    """`true` if the individual is an active employee or contractor at the company."""
+
+ last_name: Optional[str]
+ """The legal last name of the individual."""
+
+ location: Optional[location.Location]
+
+ manager: Optional[Manager]
+ """The manager object representing the manager of the individual within the org."""
+
+ middle_name: Optional[str]
+ """The legal middle name of the individual."""
+
+ pay_group_ids: Optional[List[str]]
+ """Note: This property is only available if enabled for your account.
+
+ Please reach out to your Finch representative if you would like access.
+ """
+
+ start_date: Optional[str]
+
+ title: Optional[str]
+ """The current title of the individual."""
+
+ work_id: Optional[str]
+ """Note: This property is only available if enabled for your account.
+
+ Please reach out to your Finch representative if you would like access.
+ """
+
+ work_id2: Optional[str] = FieldInfo(alias="work_id_2")
+ """Note: This property is only available if enabled for your account.
+
+ Please reach out to your Finch representative if you would like access.
+ """
diff --git a/src/finch/types/hris/individuals/employment_data_response.py b/src/finch/types/hris/individuals/employment_data_response.py
new file mode 100644
index 00000000..de6369cd
--- /dev/null
+++ b/src/finch/types/hris/individuals/employment_data_response.py
@@ -0,0 +1,16 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import Optional
+
+from ...._models import BaseModel
+from ....types.hris.individuals import employment_data
+
+__all__ = ["EmploymentDataResponse"]
+
+
+class EmploymentDataResponse(BaseModel):
+ body: Optional[employment_data.EmploymentData]
+
+ code: Optional[int]
+
+ individual_id: Optional[str]
diff --git a/src/finch/types/hris/individuals/employment_data_retrieve_many_params.py b/src/finch/types/hris/individuals/employment_data_retrieve_many_params.py
new file mode 100644
index 00000000..2e099514
--- /dev/null
+++ b/src/finch/types/hris/individuals/employment_data_retrieve_many_params.py
@@ -0,0 +1,23 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from typing import List
+from typing_extensions import Required, TypedDict
+
+__all__ = ["EmploymentDataRetrieveManyParams", "Request"]
+
+
+class Request(TypedDict, total=False):
+ individual_id: Required[str]
+ """A stable Finch `id` (UUID v4) for an individual in the company.
+
+ There is no limit to the number of `individual_id` to send per request. It is
+    preferential to send all ids in a single request for Finch to optimize provider
+ rate-limits.
+ """
+
+
+class EmploymentDataRetrieveManyParams(TypedDict, total=False):
+ requests: Required[List[Request]]
+ """The array of batch requests."""
diff --git a/src/finch/types/hris/pay_statement.py b/src/finch/types/hris/pay_statement.py
new file mode 100644
index 00000000..43a776cc
--- /dev/null
+++ b/src/finch/types/hris/pay_statement.py
@@ -0,0 +1,124 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import List, Optional
+from typing_extensions import Literal
+
+from ...types import money
+from ..._models import BaseModel
+from ...types.hris import benefit_type
+
+__all__ = ["PayStatement", "Earning", "Tax", "EmployeeDeduction", "EmployerContribution"]
+
+
+class Earning(BaseModel):
+ amount: Optional[int]
+ """The earnings amount in cents."""
+
+ currency: Optional[str]
+ """The earnings currency code."""
+
+ hours: Optional[float]
+ """The number of hours associated with this earning.
+
+ (For salaried employees, this could be hours per pay period, `0` or `null`,
+ depending on the provider).
+ """
+
+ name: Optional[str]
+ """The exact name of the deduction from the pay statement."""
+
+ type: Optional[
+ Literal[
+ "salary",
+ "wage",
+ "reimbursement",
+ "overtime",
+ "severance",
+ "double_overtime",
+ "pto",
+ "sick",
+ "bonus",
+ "commission",
+ "tips",
+ "1099",
+ "other",
+ ]
+ ]
+ """The type of earning."""
+
+
+class Tax(BaseModel):
+ amount: Optional[int]
+ """The tax amount in cents."""
+
+ currency: Optional[str]
+ """The currency code."""
+
+ employer: Optional[bool]
+ """`true` if the amount is paid by the employers."""
+
+ name: Optional[str]
+ """The exact name of tax from the pay statement."""
+
+ type: Optional[Literal["state", "federal", "local", "fica"]]
+ """The type of taxes."""
+
+
+class EmployeeDeduction(BaseModel):
+ amount: Optional[int]
+ """The deduction amount in cents."""
+
+ currency: Optional[str]
+ """The deduction currency."""
+
+ name: Optional[str]
+ """The deduction name from the pay statement."""
+
+ pre_tax: Optional[bool]
+ """Boolean indicating if the deduction is pre-tax."""
+
+ type: Optional[benefit_type.BenefitType]
+ """Type of benefit."""
+
+
+class EmployerContribution(BaseModel):
+ amount: Optional[int]
+ """The contribution amount in cents."""
+
+ currency: Optional[str]
+ """The contribution currency."""
+
+ name: Optional[str]
+ """The contribution name from the pay statement."""
+
+ type: Optional[benefit_type.BenefitType]
+ """Type of benefit."""
+
+
+class PayStatement(BaseModel):
+ earnings: Optional[List[Optional[Earning]]]
+ """The array of earnings objects associated with this pay statement"""
+
+ employee_deductions: Optional[List[Optional[EmployeeDeduction]]]
+ """The array of deductions objects associated with this pay statement."""
+
+ employer_contributions: Optional[List[Optional[EmployerContribution]]]
+
+ gross_pay: Optional[money.Money]
+
+ individual_id: Optional[str]
+ """A stable Finch `id` (UUID v4) for an individual in the company"""
+
+ net_pay: Optional[money.Money]
+
+ payment_method: Optional[Literal["check", "direct_deposit"]]
+ """The payment method."""
+
+ taxes: Optional[List[Optional[Tax]]]
+ """The array of taxes objects associated with this pay statement."""
+
+ total_hours: Optional[int]
+ """The number of hours worked for this pay period"""
+
+ type: Optional[Literal["regular_payroll", "off_cycle_payroll", "one_time_payment"]]
+ """The type of the payment associated with the pay statement."""
diff --git a/src/finch/types/hris/pay_statement_response.py b/src/finch/types/hris/pay_statement_response.py
new file mode 100644
index 00000000..bae6937f
--- /dev/null
+++ b/src/finch/types/hris/pay_statement_response.py
@@ -0,0 +1,16 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import Optional
+
+from ..._models import BaseModel
+from ...types.hris import pay_statement_response_body
+
+__all__ = ["PayStatementResponse"]
+
+
+class PayStatementResponse(BaseModel):
+ body: Optional[pay_statement_response_body.PayStatementResponseBody]
+
+ code: Optional[int]
+
+ payment_id: Optional[str]
diff --git a/src/finch/types/hris/pay_statement_response_body.py b/src/finch/types/hris/pay_statement_response_body.py
new file mode 100644
index 00000000..6b1c3cdf
--- /dev/null
+++ b/src/finch/types/hris/pay_statement_response_body.py
@@ -0,0 +1,16 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import List, Optional
+
+from ...types import paging
+from ..._models import BaseModel
+from ...types.hris import pay_statement
+
+__all__ = ["PayStatementResponseBody"]
+
+
+class PayStatementResponseBody(BaseModel):
+ paging: Optional[paging.Paging]
+
+ pay_statements: Optional[List[pay_statement.PayStatement]]
+ """The array of pay statements for the current payment."""
diff --git a/src/finch/types/hris/pay_statement_retrieve_many_params.py b/src/finch/types/hris/pay_statement_retrieve_many_params.py
new file mode 100644
index 00000000..d166c486
--- /dev/null
+++ b/src/finch/types/hris/pay_statement_retrieve_many_params.py
@@ -0,0 +1,24 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from typing import List
+from typing_extensions import Required, TypedDict
+
+__all__ = ["PayStatementRetrieveManyParams", "Request"]
+
+
+class Request(TypedDict, total=False):
+ payment_id: Required[str]
+ """A stable Finch `id` (UUID v4) for a payment."""
+
+ limit: int
+ """Number of pay statements to return (defaults to all)."""
+
+ offset: int
+ """Index to start from."""
+
+
+class PayStatementRetrieveManyParams(TypedDict, total=False):
+ requests: Required[List[Request]]
+ """The array of batch requests."""
diff --git a/src/finch/types/hris/payment.py b/src/finch/types/hris/payment.py
new file mode 100644
index 00000000..9c7b0a83
--- /dev/null
+++ b/src/finch/types/hris/payment.py
@@ -0,0 +1,39 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import List, Optional
+
+from ...types import money
+from ..._models import BaseModel
+
+__all__ = ["Payment", "PayPeriod"]
+
+
+class PayPeriod(BaseModel):
+ end_date: Optional[str]
+
+ start_date: Optional[str]
+
+
+class Payment(BaseModel):
+ company_debit: Optional[money.Money]
+
+ debit_date: Optional[str]
+
+ employee_taxes: Optional[money.Money]
+
+ employer_taxes: Optional[money.Money]
+
+ gross_pay: Optional[money.Money]
+
+ id: Optional[str]
+ """The unique id for the payment."""
+
+ individual_ids: Optional[List[str]]
+ """Array of every individual on this payment."""
+
+ net_pay: Optional[money.Money]
+
+ pay_date: Optional[str]
+
+ pay_period: Optional[PayPeriod]
+ """The pay period object."""
diff --git a/src/finch/types/hris/payment_list_params.py b/src/finch/types/hris/payment_list_params.py
new file mode 100644
index 00000000..d3e69a9b
--- /dev/null
+++ b/src/finch/types/hris/payment_list_params.py
@@ -0,0 +1,25 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from typing import Union
+from datetime import date
+from typing_extensions import Required, Annotated, TypedDict
+
+from ..._utils import PropertyInfo
+
+__all__ = ["PaymentListParams"]
+
+
+class PaymentListParams(TypedDict, total=False):
+ end_date: Required[Annotated[Union[str, date], PropertyInfo(format="iso8601")]]
+ """
+ The end date to retrieve payments by a company (inclusive) in `YYYY-MM-DD`
+ format.
+ """
+
+ start_date: Required[Annotated[Union[str, date], PropertyInfo(format="iso8601")]]
+ """
+ The start date to retrieve payments by a company (inclusive) in `YYYY-MM-DD`
+ format.
+ """
diff --git a/src/finch/types/hris/supported_benefit.py b/src/finch/types/hris/supported_benefit.py
new file mode 100644
index 00000000..52136220
--- /dev/null
+++ b/src/finch/types/hris/supported_benefit.py
@@ -0,0 +1,47 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import List, Optional
+from typing_extensions import Literal
+
+from ..._models import BaseModel
+from ...types.hris import benefit_type
+
+__all__ = ["SupportedBenefit"]
+
+
+class SupportedBenefit(BaseModel):
+ annual_maximum: Optional[bool]
+ """Whether the provider supports an annual maximum for this benefit."""
+
+ catch_up: Optional[bool]
+ """Whether the provider supports catch up for this benefit.
+
+ This field will only be true for retirement benefits.
+ """
+
+ company_contribution: Optional[List[Literal["fixed", "percent"]]]
+ """Supported contribution types.
+
+ An empty array indicates contributions are not supported.
+ """
+
+ description: Optional[str]
+
+ employee_deduction: Optional[List[Literal["fixed", "percent"]]]
+ """Supported deduction types.
+
+ An empty array indicates deductions are not supported.
+ """
+
+ frequencies: Optional[List[Optional[Literal["one_time", "every_paycheck"]]]]
+ """The list of frequencies supported by the provider for this benefit"""
+
+ hsa_contribution_limit: Optional[List[Literal["individual", "family"]]]
+ """Whether the provider supports HSA contribution limits.
+
+ Empty if this feature is not supported for the benefit. This array only has
+ values for HSA benefits.
+ """
+
+ type: Optional[benefit_type.BenefitType]
+ """Type of benefit."""
diff --git a/src/finch/types/hris/update_company_benefit_response.py b/src/finch/types/hris/update_company_benefit_response.py
new file mode 100644
index 00000000..4cd064e4
--- /dev/null
+++ b/src/finch/types/hris/update_company_benefit_response.py
@@ -0,0 +1,9 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from ..._models import BaseModel
+
+__all__ = ["UpdateCompanyBenefitResponse"]
+
+
+class UpdateCompanyBenefitResponse(BaseModel):
+ benefit_id: str
diff --git a/src/finch/types/income.py b/src/finch/types/income.py
new file mode 100644
index 00000000..ca3cf90a
--- /dev/null
+++ b/src/finch/types/income.py
@@ -0,0 +1,28 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import Optional
+from typing_extensions import Literal
+
+from .._models import BaseModel
+
+__all__ = ["Income"]
+
+
+class Income(BaseModel):
+ amount: Optional[int]
+ """The income amount in cents."""
+
+ currency: Optional[str]
+ """The currency code."""
+
+ effective_date: Optional[str]
+ """The date the income amount went into effect."""
+
+ unit: Optional[
+ Literal["yearly", "quarterly", "monthly", "semi_monthly", "bi_weekly", "weekly", "daily", "hourly", "fixed"]
+ ]
+ """The income unit of payment.
+
+ Options: `yearly`, `quarterly`, `monthly`, `semi_monthly`, `bi_weekly`,
+ `weekly`, `daily`, `hourly`, and `fixed`.
+ """
diff --git a/src/finch/types/introspection.py b/src/finch/types/introspection.py
new file mode 100644
index 00000000..a8f26da6
--- /dev/null
+++ b/src/finch/types/introspection.py
@@ -0,0 +1,31 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import List
+
+from .._models import BaseModel
+
+__all__ = ["Introspection"]
+
+
+class Introspection(BaseModel):
+ client_id: str
+ """The client id of the application associated with the `access_token`."""
+
+ company_id: str
+ """The Finch uuid of the company associated with the `access_token`."""
+
+ manual: bool
+ """
+ Whether the connection associated with the `access_token` uses the Assisted
+ Connect Flow. (`true` if using Assisted Connect, `false` if connection is
+ automated)
+ """
+
+ payroll_provider_id: str
+ """The payroll provider associated with the `access_token`."""
+
+ products: List[str]
+ """An array of the authorized products associated with the `access_token`."""
+
+ username: str
+ """The account username used for login associated with the `access_token`."""
diff --git a/src/finch/types/location.py b/src/finch/types/location.py
new file mode 100644
index 00000000..4cca753c
--- /dev/null
+++ b/src/finch/types/location.py
@@ -0,0 +1,31 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import Optional
+
+from .._models import BaseModel
+
+__all__ = ["Location"]
+
+
+class Location(BaseModel):
+ city: Optional[str]
+ """City, district, suburb, town, or village."""
+
+ country: Optional[str]
+ """The 2-letter ISO 3166 country code."""
+
+ line1: Optional[str]
+ """Street address or PO box."""
+
+ line2: Optional[str]
+ """Apartment, suite, unit, or building."""
+
+ name: Optional[str]
+
+ postal_code: Optional[str]
+ """The postal code or zip code."""
+
+ source_id: Optional[str]
+
+ state: Optional[str]
+ """The state code."""
diff --git a/src/finch/types/money.py b/src/finch/types/money.py
new file mode 100644
index 00000000..dc71085f
--- /dev/null
+++ b/src/finch/types/money.py
@@ -0,0 +1,14 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import Optional
+
+from .._models import BaseModel
+
+__all__ = ["Money"]
+
+
+class Money(BaseModel):
+ amount: Optional[int]
+ """Amount for money object (in cents)"""
+
+ currency: Optional[str]
diff --git a/src/finch/types/paging.py b/src/finch/types/paging.py
new file mode 100644
index 00000000..b697a2b0
--- /dev/null
+++ b/src/finch/types/paging.py
@@ -0,0 +1,15 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import Optional
+
+from .._models import BaseModel
+
+__all__ = ["Paging"]
+
+
+class Paging(BaseModel):
+ count: Optional[int]
+ """The total number of elements for the entire query (not just the given page)"""
+
+ offset: Optional[int]
+ """The current start index of the returned list of elements"""
diff --git a/src/finch/types/provider.py b/src/finch/types/provider.py
new file mode 100644
index 00000000..00dc6f7d
--- /dev/null
+++ b/src/finch/types/provider.py
@@ -0,0 +1,36 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import List, Optional
+
+from .._models import BaseModel
+
+__all__ = ["Provider"]
+
+
+class Provider(BaseModel):
+ display_name: Optional[str]
+ """The display name of the payroll provider."""
+
+ icon: Optional[str]
+ """The url to the official icon of the payroll provider."""
+
+ id: Optional[str]
+ """The id of the payroll provider used in Connect."""
+
+ logo: Optional[str]
+ """The url to the official logo of the payroll provider."""
+
+ manual: Optional[bool]
+ """
+ Whether the Finch integration with this provider uses the Assisted Connect Flow
+ by default.
+ """
+
+    mfa_required: Optional[bool]
+    """Whether MFA is required for the provider."""
+
+ primary_color: Optional[str]
+ """The hex code for the primary color of the payroll provider."""
+
+ products: Optional[List[str]]
+ """The list of Finch products supported on this payroll provider."""
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 00000000..1016754e
--- /dev/null
+++ b/tests/__init__.py
@@ -0,0 +1 @@
+# File generated from our OpenAPI spec by Stainless.
diff --git a/tests/api_resources/__init__.py b/tests/api_resources/__init__.py
new file mode 100644
index 00000000..1016754e
--- /dev/null
+++ b/tests/api_resources/__init__.py
@@ -0,0 +1 @@
+# File generated from our OpenAPI spec by Stainless.
diff --git a/tests/api_resources/ats/__init__.py b/tests/api_resources/ats/__init__.py
new file mode 100644
index 00000000..1016754e
--- /dev/null
+++ b/tests/api_resources/ats/__init__.py
@@ -0,0 +1 @@
+# File generated from our OpenAPI spec by Stainless.
diff --git a/tests/api_resources/ats/test_applications.py b/tests/api_resources/ats/test_applications.py
new file mode 100644
index 00000000..623dbf24
--- /dev/null
+++ b/tests/api_resources/ats/test_applications.py
@@ -0,0 +1,67 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+import os
+
+import pytest
+
+from finch import Finch, AsyncFinch
+from tests.utils import assert_matches_type
+from finch.types.ats import Application
+from finch.pagination import SyncApplicationsPage, AsyncApplicationsPage
+
+base_url = os.environ.get("API_BASE_URL", "http://127.0.0.1:4010")
+access_token = os.environ.get("API_KEY", "something1234")
+
+
+class TestApplications:
+ strict_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ def test_method_retrieve(self, client: Finch) -> None:
+ application = client.ats.applications.retrieve(
+ "string",
+ )
+ assert_matches_type(Application, application, path=["response"])
+
+ @parametrize
+ def test_method_list(self, client: Finch) -> None:
+ application = client.ats.applications.list()
+ assert_matches_type(SyncApplicationsPage[Application], application, path=["response"])
+
+ @parametrize
+ def test_method_list_with_all_params(self, client: Finch) -> None:
+ application = client.ats.applications.list(
+ limit=0,
+ offset=0,
+ )
+ assert_matches_type(SyncApplicationsPage[Application], application, path=["response"])
+
+
+class TestAsyncApplications:
+ strict_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ async def test_method_retrieve(self, client: AsyncFinch) -> None:
+ application = await client.ats.applications.retrieve(
+ "string",
+ )
+ assert_matches_type(Application, application, path=["response"])
+
+ @parametrize
+ async def test_method_list(self, client: AsyncFinch) -> None:
+ application = await client.ats.applications.list()
+ assert_matches_type(AsyncApplicationsPage[Application], application, path=["response"])
+
+ @parametrize
+ async def test_method_list_with_all_params(self, client: AsyncFinch) -> None:
+ application = await client.ats.applications.list(
+ limit=0,
+ offset=0,
+ )
+ assert_matches_type(AsyncApplicationsPage[Application], application, path=["response"])
diff --git a/tests/api_resources/ats/test_candidates.py b/tests/api_resources/ats/test_candidates.py
new file mode 100644
index 00000000..9acb013d
--- /dev/null
+++ b/tests/api_resources/ats/test_candidates.py
@@ -0,0 +1,67 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+import os
+
+import pytest
+
+from finch import Finch, AsyncFinch
+from tests.utils import assert_matches_type
+from finch.types.ats import Candidate
+from finch.pagination import SyncCandidatesPage, AsyncCandidatesPage
+
+base_url = os.environ.get("API_BASE_URL", "http://127.0.0.1:4010")
+access_token = os.environ.get("API_KEY", "something1234")
+
+
+class TestCandidates:
+ strict_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ def test_method_retrieve(self, client: Finch) -> None:
+ candidate = client.ats.candidates.retrieve(
+ "string",
+ )
+ assert_matches_type(Candidate, candidate, path=["response"])
+
+ @parametrize
+ def test_method_list(self, client: Finch) -> None:
+ candidate = client.ats.candidates.list()
+ assert_matches_type(SyncCandidatesPage[Candidate], candidate, path=["response"])
+
+ @parametrize
+ def test_method_list_with_all_params(self, client: Finch) -> None:
+ candidate = client.ats.candidates.list(
+ limit=0,
+ offset=0,
+ )
+ assert_matches_type(SyncCandidatesPage[Candidate], candidate, path=["response"])
+
+
+class TestAsyncCandidates:
+ strict_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ async def test_method_retrieve(self, client: AsyncFinch) -> None:
+ candidate = await client.ats.candidates.retrieve(
+ "string",
+ )
+ assert_matches_type(Candidate, candidate, path=["response"])
+
+ @parametrize
+ async def test_method_list(self, client: AsyncFinch) -> None:
+ candidate = await client.ats.candidates.list()
+ assert_matches_type(AsyncCandidatesPage[Candidate], candidate, path=["response"])
+
+ @parametrize
+ async def test_method_list_with_all_params(self, client: AsyncFinch) -> None:
+ candidate = await client.ats.candidates.list(
+ limit=0,
+ offset=0,
+ )
+ assert_matches_type(AsyncCandidatesPage[Candidate], candidate, path=["response"])
diff --git a/tests/api_resources/ats/test_jobs.py b/tests/api_resources/ats/test_jobs.py
new file mode 100644
index 00000000..cbe6e7b7
--- /dev/null
+++ b/tests/api_resources/ats/test_jobs.py
@@ -0,0 +1,67 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+import os
+
+import pytest
+
+from finch import Finch, AsyncFinch
+from tests.utils import assert_matches_type
+from finch.types.ats import Job
+from finch.pagination import SyncJobsPage, AsyncJobsPage
+
+base_url = os.environ.get("API_BASE_URL", "http://127.0.0.1:4010")
+access_token = os.environ.get("API_KEY", "something1234")
+
+
+class TestJobs:
+ strict_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ def test_method_retrieve(self, client: Finch) -> None:
+ job = client.ats.jobs.retrieve(
+ "string",
+ )
+ assert_matches_type(Job, job, path=["response"])
+
+ @parametrize
+ def test_method_list(self, client: Finch) -> None:
+ job = client.ats.jobs.list()
+ assert_matches_type(SyncJobsPage[Job], job, path=["response"])
+
+ @parametrize
+ def test_method_list_with_all_params(self, client: Finch) -> None:
+ job = client.ats.jobs.list(
+ limit=0,
+ offset=0,
+ )
+ assert_matches_type(SyncJobsPage[Job], job, path=["response"])
+
+
+class TestAsyncJobs:
+ strict_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ async def test_method_retrieve(self, client: AsyncFinch) -> None:
+ job = await client.ats.jobs.retrieve(
+ "string",
+ )
+ assert_matches_type(Job, job, path=["response"])
+
+ @parametrize
+ async def test_method_list(self, client: AsyncFinch) -> None:
+ job = await client.ats.jobs.list()
+ assert_matches_type(AsyncJobsPage[Job], job, path=["response"])
+
+ @parametrize
+ async def test_method_list_with_all_params(self, client: AsyncFinch) -> None:
+ job = await client.ats.jobs.list(
+ limit=0,
+ offset=0,
+ )
+ assert_matches_type(AsyncJobsPage[Job], job, path=["response"])
diff --git a/tests/api_resources/ats/test_offers.py b/tests/api_resources/ats/test_offers.py
new file mode 100644
index 00000000..a18e4550
--- /dev/null
+++ b/tests/api_resources/ats/test_offers.py
@@ -0,0 +1,67 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+import os
+
+import pytest
+
+from finch import Finch, AsyncFinch
+from tests.utils import assert_matches_type
+from finch.types.ats import Offer
+from finch.pagination import SyncOffersPage, AsyncOffersPage
+
+base_url = os.environ.get("API_BASE_URL", "http://127.0.0.1:4010")
+access_token = os.environ.get("API_KEY", "something1234")
+
+
+class TestOffers:
+ strict_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ def test_method_retrieve(self, client: Finch) -> None:
+ offer = client.ats.offers.retrieve(
+ "string",
+ )
+ assert_matches_type(Offer, offer, path=["response"])
+
+ @parametrize
+ def test_method_list(self, client: Finch) -> None:
+ offer = client.ats.offers.list()
+ assert_matches_type(SyncOffersPage[Offer], offer, path=["response"])
+
+ @parametrize
+ def test_method_list_with_all_params(self, client: Finch) -> None:
+ offer = client.ats.offers.list(
+ limit=0,
+ offset=0,
+ )
+ assert_matches_type(SyncOffersPage[Offer], offer, path=["response"])
+
+
+class TestAsyncOffers:
+ strict_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ async def test_method_retrieve(self, client: AsyncFinch) -> None:
+ offer = await client.ats.offers.retrieve(
+ "string",
+ )
+ assert_matches_type(Offer, offer, path=["response"])
+
+ @parametrize
+ async def test_method_list(self, client: AsyncFinch) -> None:
+ offer = await client.ats.offers.list()
+ assert_matches_type(AsyncOffersPage[Offer], offer, path=["response"])
+
+ @parametrize
+ async def test_method_list_with_all_params(self, client: AsyncFinch) -> None:
+ offer = await client.ats.offers.list(
+ limit=0,
+ offset=0,
+ )
+ assert_matches_type(AsyncOffersPage[Offer], offer, path=["response"])
diff --git a/tests/api_resources/ats/test_stages.py b/tests/api_resources/ats/test_stages.py
new file mode 100644
index 00000000..b4a89e9d
--- /dev/null
+++ b/tests/api_resources/ats/test_stages.py
@@ -0,0 +1,37 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+import os
+
+import pytest
+
+from finch import Finch, AsyncFinch
+from tests.utils import assert_matches_type
+from finch.types.ats import Stage
+from finch.pagination import SyncSinglePage, AsyncSinglePage
+
+base_url = os.environ.get("API_BASE_URL", "http://127.0.0.1:4010")
+access_token = os.environ.get("API_KEY", "something1234")
+
+
+class TestStages:
+ strict_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ def test_method_list(self, client: Finch) -> None:
+ stage = client.ats.stages.list()
+ assert_matches_type(SyncSinglePage[Stage], stage, path=["response"])
+
+
+class TestAsyncStages:
+ strict_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ async def test_method_list(self, client: AsyncFinch) -> None:
+ stage = await client.ats.stages.list()
+ assert_matches_type(AsyncSinglePage[Stage], stage, path=["response"])
diff --git a/tests/api_resources/hris/__init__.py b/tests/api_resources/hris/__init__.py
new file mode 100644
index 00000000..1016754e
--- /dev/null
+++ b/tests/api_resources/hris/__init__.py
@@ -0,0 +1 @@
+# File generated from our OpenAPI spec by Stainless.
diff --git a/tests/api_resources/hris/benefits/__init__.py b/tests/api_resources/hris/benefits/__init__.py
new file mode 100644
index 00000000..1016754e
--- /dev/null
+++ b/tests/api_resources/hris/benefits/__init__.py
@@ -0,0 +1 @@
+# File generated from our OpenAPI spec by Stainless.
diff --git a/tests/api_resources/hris/benefits/test_individuals.py b/tests/api_resources/hris/benefits/test_individuals.py
new file mode 100644
index 00000000..9e75da35
--- /dev/null
+++ b/tests/api_resources/hris/benefits/test_individuals.py
@@ -0,0 +1,122 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+import os
+
+import pytest
+
+from finch import Finch, AsyncFinch
+from tests.utils import assert_matches_type
+from finch.pagination import SyncSinglePage, AsyncSinglePage
+from finch.types.hris.benefits import (
+ IndividualBenefit,
+ EnrolledIndividual,
+ UnenrolledIndividual,
+ IndividualEnrolledIDsResponse,
+)
+
+base_url = os.environ.get("API_BASE_URL", "http://127.0.0.1:4010")
+access_token = os.environ.get("API_KEY", "something1234")
+
+
+class TestIndividuals:
+ strict_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ def test_method_enroll_many(self, client: Finch) -> None:
+ individual = client.hris.benefits.individuals.enroll_many(
+ "string",
+ individuals=[{}, {}, {}],
+ )
+ assert_matches_type(SyncSinglePage[EnrolledIndividual], individual, path=["response"])
+
+ @parametrize
+ def test_method_enrolled_ids(self, client: Finch) -> None:
+ individual = client.hris.benefits.individuals.enrolled_ids(
+ "string",
+ )
+ assert_matches_type(IndividualEnrolledIDsResponse, individual, path=["response"])
+
+ @parametrize
+ def test_method_retrieve_many_benefits(self, client: Finch) -> None:
+ individual = client.hris.benefits.individuals.retrieve_many_benefits(
+ "string",
+ )
+ assert_matches_type(SyncSinglePage[IndividualBenefit], individual, path=["response"])
+
+ @parametrize
+ def test_method_retrieve_many_benefits_with_all_params(self, client: Finch) -> None:
+ individual = client.hris.benefits.individuals.retrieve_many_benefits(
+ "string",
+ individual_ids="d675d2b7-6d7b-41a8-b2d3-001eb3fb88f6,d02a6346-1f08-4312-a064-49ff3cafaa7a",
+ )
+ assert_matches_type(SyncSinglePage[IndividualBenefit], individual, path=["response"])
+
+ @parametrize
+ def test_method_unenroll(self, client: Finch) -> None:
+ individual = client.hris.benefits.individuals.unenroll(
+ "string",
+ )
+ assert_matches_type(SyncSinglePage[UnenrolledIndividual], individual, path=["response"])
+
+ @parametrize
+ def test_method_unenroll_with_all_params(self, client: Finch) -> None:
+ individual = client.hris.benefits.individuals.unenroll(
+ "string",
+ individual_ids=["string", "string", "string"],
+ )
+ assert_matches_type(SyncSinglePage[UnenrolledIndividual], individual, path=["response"])
+
+
+class TestAsyncIndividuals:
+ strict_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ async def test_method_enroll_many(self, client: AsyncFinch) -> None:
+ individual = await client.hris.benefits.individuals.enroll_many(
+ "string",
+ individuals=[{}, {}, {}],
+ )
+ assert_matches_type(AsyncSinglePage[EnrolledIndividual], individual, path=["response"])
+
+ @parametrize
+ async def test_method_enrolled_ids(self, client: AsyncFinch) -> None:
+ individual = await client.hris.benefits.individuals.enrolled_ids(
+ "string",
+ )
+ assert_matches_type(IndividualEnrolledIDsResponse, individual, path=["response"])
+
+ @parametrize
+ async def test_method_retrieve_many_benefits(self, client: AsyncFinch) -> None:
+ individual = await client.hris.benefits.individuals.retrieve_many_benefits(
+ "string",
+ )
+ assert_matches_type(AsyncSinglePage[IndividualBenefit], individual, path=["response"])
+
+ @parametrize
+ async def test_method_retrieve_many_benefits_with_all_params(self, client: AsyncFinch) -> None:
+ individual = await client.hris.benefits.individuals.retrieve_many_benefits(
+ "string",
+ individual_ids="d675d2b7-6d7b-41a8-b2d3-001eb3fb88f6,d02a6346-1f08-4312-a064-49ff3cafaa7a",
+ )
+ assert_matches_type(AsyncSinglePage[IndividualBenefit], individual, path=["response"])
+
+ @parametrize
+ async def test_method_unenroll(self, client: AsyncFinch) -> None:
+ individual = await client.hris.benefits.individuals.unenroll(
+ "string",
+ )
+ assert_matches_type(AsyncSinglePage[UnenrolledIndividual], individual, path=["response"])
+
+ @parametrize
+ async def test_method_unenroll_with_all_params(self, client: AsyncFinch) -> None:
+ individual = await client.hris.benefits.individuals.unenroll(
+ "string",
+ individual_ids=["string", "string", "string"],
+ )
+ assert_matches_type(AsyncSinglePage[UnenrolledIndividual], individual, path=["response"])
diff --git a/tests/api_resources/hris/individuals/__init__.py b/tests/api_resources/hris/individuals/__init__.py
new file mode 100644
index 00000000..1016754e
--- /dev/null
+++ b/tests/api_resources/hris/individuals/__init__.py
@@ -0,0 +1 @@
+# File generated from our OpenAPI spec by Stainless.
diff --git a/tests/api_resources/hris/individuals/test_employment_data.py b/tests/api_resources/hris/individuals/test_employment_data.py
new file mode 100644
index 00000000..355516b3
--- /dev/null
+++ b/tests/api_resources/hris/individuals/test_employment_data.py
@@ -0,0 +1,41 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+import os
+
+import pytest
+
+from finch import Finch, AsyncFinch
+from tests.utils import assert_matches_type
+from finch.pagination import SyncResponsesPage, AsyncResponsesPage
+from finch.types.hris.individuals import EmploymentDataResponse
+
+base_url = os.environ.get("API_BASE_URL", "http://127.0.0.1:4010")
+access_token = os.environ.get("API_KEY", "something1234")
+
+
+class TestEmploymentData:
+ strict_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ def test_method_retrieve_many(self, client: Finch) -> None:
+ employment_data = client.hris.individuals.employment_data.retrieve_many(
+ requests=[{"individual_id": "string"}, {"individual_id": "string"}, {"individual_id": "string"}],
+ )
+ assert_matches_type(SyncResponsesPage[EmploymentDataResponse], employment_data, path=["response"])
+
+
+class TestAsyncEmploymentData:
+ strict_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ async def test_method_retrieve_many(self, client: AsyncFinch) -> None:
+ employment_data = await client.hris.individuals.employment_data.retrieve_many(
+ requests=[{"individual_id": "string"}, {"individual_id": "string"}, {"individual_id": "string"}],
+ )
+ assert_matches_type(AsyncResponsesPage[EmploymentDataResponse], employment_data, path=["response"])
diff --git a/tests/api_resources/hris/test_benefits.py b/tests/api_resources/hris/test_benefits.py
new file mode 100644
index 00000000..4ed8a6b3
--- /dev/null
+++ b/tests/api_resources/hris/test_benefits.py
@@ -0,0 +1,124 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+import os
+
+import pytest
+
+from finch import Finch, AsyncFinch
+from tests.utils import assert_matches_type
+from finch.pagination import SyncSinglePage, AsyncSinglePage
+from finch.types.hris import (
+ CompanyBenefit,
+ SupportedBenefit,
+ UpdateCompanyBenefitResponse,
+ CreateCompanyBenefitsResponse,
+)
+
+base_url = os.environ.get("API_BASE_URL", "http://127.0.0.1:4010")
+access_token = os.environ.get("API_KEY", "something1234")
+
+
+class TestBenefits:
+ strict_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ def test_method_create(self, client: Finch) -> None:
+ benefit = client.hris.benefits.create()
+ assert_matches_type(CreateCompanyBenefitsResponse, benefit, path=["response"])
+
+ @parametrize
+ def test_method_create_with_all_params(self, client: Finch) -> None:
+ benefit = client.hris.benefits.create(
+ type="401k",
+ description="string",
+ frequency="one_time",
+ )
+ assert_matches_type(CreateCompanyBenefitsResponse, benefit, path=["response"])
+
+ @parametrize
+ def test_method_retrieve(self, client: Finch) -> None:
+ benefit = client.hris.benefits.retrieve(
+ "string",
+ )
+ assert_matches_type(CompanyBenefit, benefit, path=["response"])
+
+ @parametrize
+ def test_method_update(self, client: Finch) -> None:
+ benefit = client.hris.benefits.update(
+ "string",
+ )
+ assert_matches_type(UpdateCompanyBenefitResponse, benefit, path=["response"])
+
+ @parametrize
+ def test_method_update_with_all_params(self, client: Finch) -> None:
+ benefit = client.hris.benefits.update(
+ "string",
+ description="string",
+ )
+ assert_matches_type(UpdateCompanyBenefitResponse, benefit, path=["response"])
+
+ @parametrize
+ def test_method_list(self, client: Finch) -> None:
+ benefit = client.hris.benefits.list()
+ assert_matches_type(SyncSinglePage[CompanyBenefit], benefit, path=["response"])
+
+ @parametrize
+ def test_method_list_supported_benefits(self, client: Finch) -> None:
+ benefit = client.hris.benefits.list_supported_benefits()
+ assert_matches_type(SyncSinglePage[SupportedBenefit], benefit, path=["response"])
+
+
+class TestAsyncBenefits:
+ strict_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ async def test_method_create(self, client: AsyncFinch) -> None:
+ benefit = await client.hris.benefits.create()
+ assert_matches_type(CreateCompanyBenefitsResponse, benefit, path=["response"])
+
+ @parametrize
+ async def test_method_create_with_all_params(self, client: AsyncFinch) -> None:
+ benefit = await client.hris.benefits.create(
+ type="401k",
+ description="string",
+ frequency="one_time",
+ )
+ assert_matches_type(CreateCompanyBenefitsResponse, benefit, path=["response"])
+
+ @parametrize
+ async def test_method_retrieve(self, client: AsyncFinch) -> None:
+ benefit = await client.hris.benefits.retrieve(
+ "string",
+ )
+ assert_matches_type(CompanyBenefit, benefit, path=["response"])
+
+ @parametrize
+ async def test_method_update(self, client: AsyncFinch) -> None:
+ benefit = await client.hris.benefits.update(
+ "string",
+ )
+ assert_matches_type(UpdateCompanyBenefitResponse, benefit, path=["response"])
+
+ @parametrize
+ async def test_method_update_with_all_params(self, client: AsyncFinch) -> None:
+ benefit = await client.hris.benefits.update(
+ "string",
+ description="string",
+ )
+ assert_matches_type(UpdateCompanyBenefitResponse, benefit, path=["response"])
+
+ @parametrize
+ async def test_method_list(self, client: AsyncFinch) -> None:
+ benefit = await client.hris.benefits.list()
+ assert_matches_type(AsyncSinglePage[CompanyBenefit], benefit, path=["response"])
+
+ @parametrize
+ async def test_method_list_supported_benefits(self, client: AsyncFinch) -> None:
+ benefit = await client.hris.benefits.list_supported_benefits()
+ assert_matches_type(AsyncSinglePage[SupportedBenefit], benefit, path=["response"])
diff --git a/tests/api_resources/hris/test_company.py b/tests/api_resources/hris/test_company.py
new file mode 100644
index 00000000..ee65f80d
--- /dev/null
+++ b/tests/api_resources/hris/test_company.py
@@ -0,0 +1,36 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+import os
+
+import pytest
+
+from finch import Finch, AsyncFinch
+from tests.utils import assert_matches_type
+from finch.types.hris import Company
+
+base_url = os.environ.get("API_BASE_URL", "http://127.0.0.1:4010")
+access_token = os.environ.get("API_KEY", "something1234")
+
+
+class TestCompany:
+ strict_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ def test_method_retrieve(self, client: Finch) -> None:
+ company = client.hris.company.retrieve()
+ assert_matches_type(Company, company, path=["response"])
+
+
+class TestAsyncCompany:
+ strict_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ async def test_method_retrieve(self, client: AsyncFinch) -> None:
+ company = await client.hris.company.retrieve()
+ assert_matches_type(Company, company, path=["response"])
diff --git a/tests/api_resources/hris/test_directory.py b/tests/api_resources/hris/test_directory.py
new file mode 100644
index 00000000..baa0898e
--- /dev/null
+++ b/tests/api_resources/hris/test_directory.py
@@ -0,0 +1,53 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+import os
+
+import pytest
+
+from finch import Finch, AsyncFinch
+from tests.utils import assert_matches_type
+from finch.pagination import SyncIndividualsPage, AsyncIndividualsPage
+from finch.types.hris import IndividualInDirectory
+
+base_url = os.environ.get("API_BASE_URL", "http://127.0.0.1:4010")
+access_token = os.environ.get("API_KEY", "something1234")
+
+
+class TestDirectory:
+ strict_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ def test_method_list_individuals(self, client: Finch) -> None:
+ directory = client.hris.directory.list_individuals()
+ assert_matches_type(SyncIndividualsPage[IndividualInDirectory], directory, path=["response"])
+
+ @parametrize
+ def test_method_list_individuals_with_all_params(self, client: Finch) -> None:
+ directory = client.hris.directory.list_individuals(
+ limit=0,
+ offset=0,
+ )
+ assert_matches_type(SyncIndividualsPage[IndividualInDirectory], directory, path=["response"])
+
+
+class TestAsyncDirectory:
+ strict_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ async def test_method_list_individuals(self, client: AsyncFinch) -> None:
+ directory = await client.hris.directory.list_individuals()
+ assert_matches_type(AsyncIndividualsPage[IndividualInDirectory], directory, path=["response"])
+
+ @parametrize
+ async def test_method_list_individuals_with_all_params(self, client: AsyncFinch) -> None:
+ directory = await client.hris.directory.list_individuals(
+ limit=0,
+ offset=0,
+ )
+ assert_matches_type(AsyncIndividualsPage[IndividualInDirectory], directory, path=["response"])
diff --git a/tests/api_resources/hris/test_individuals.py b/tests/api_resources/hris/test_individuals.py
new file mode 100644
index 00000000..6bcb7217
--- /dev/null
+++ b/tests/api_resources/hris/test_individuals.py
@@ -0,0 +1,53 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+import os
+
+import pytest
+
+from finch import Finch, AsyncFinch
+from tests.utils import assert_matches_type
+from finch.pagination import SyncResponsesPage, AsyncResponsesPage
+from finch.types.hris import IndividualResponse
+
+base_url = os.environ.get("API_BASE_URL", "http://127.0.0.1:4010")
+access_token = os.environ.get("API_KEY", "something1234")
+
+
+class TestIndividuals:
+ strict_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ def test_method_retrieve_many(self, client: Finch) -> None:
+ individual = client.hris.individuals.retrieve_many()
+ assert_matches_type(SyncResponsesPage[IndividualResponse], individual, path=["response"])
+
+ @parametrize
+ def test_method_retrieve_many_with_all_params(self, client: Finch) -> None:
+ individual = client.hris.individuals.retrieve_many(
+ requests=[{"individual_id": "string"}, {"individual_id": "string"}, {"individual_id": "string"}],
+ options={"include": ["string", "string", "string"]},
+ )
+ assert_matches_type(SyncResponsesPage[IndividualResponse], individual, path=["response"])
+
+
+class TestAsyncIndividuals:
+ strict_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ async def test_method_retrieve_many(self, client: AsyncFinch) -> None:
+ individual = await client.hris.individuals.retrieve_many()
+ assert_matches_type(AsyncResponsesPage[IndividualResponse], individual, path=["response"])
+
+ @parametrize
+ async def test_method_retrieve_many_with_all_params(self, client: AsyncFinch) -> None:
+ individual = await client.hris.individuals.retrieve_many(
+ requests=[{"individual_id": "string"}, {"individual_id": "string"}, {"individual_id": "string"}],
+ options={"include": ["string", "string", "string"]},
+ )
+ assert_matches_type(AsyncResponsesPage[IndividualResponse], individual, path=["response"])
diff --git a/tests/api_resources/hris/test_pay_statements.py b/tests/api_resources/hris/test_pay_statements.py
new file mode 100644
index 00000000..58b0ca54
--- /dev/null
+++ b/tests/api_resources/hris/test_pay_statements.py
@@ -0,0 +1,49 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+import os
+
+import pytest
+
+from finch import Finch, AsyncFinch
+from tests.utils import assert_matches_type
+from finch.pagination import SyncResponsesPage, AsyncResponsesPage
+from finch.types.hris import PayStatementResponse
+
+base_url = os.environ.get("API_BASE_URL", "http://127.0.0.1:4010")
+access_token = os.environ.get("API_KEY", "something1234")
+
+
+class TestPayStatements:
+ strict_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ def test_method_retrieve_many(self, client: Finch) -> None:
+ pay_statement = client.hris.pay_statements.retrieve_many(
+ requests=[
+ {"payment_id": "e8b90071-0c11-471c-86e8-e303ef2f6782"},
+ {"payment_id": "e8b90071-0c11-471c-86e8-e303ef2f6782"},
+ {"payment_id": "e8b90071-0c11-471c-86e8-e303ef2f6782"},
+ ],
+ )
+ assert_matches_type(SyncResponsesPage[PayStatementResponse], pay_statement, path=["response"])
+
+
+class TestAsyncPayStatements:
+ strict_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ async def test_method_retrieve_many(self, client: AsyncFinch) -> None:
+ pay_statement = await client.hris.pay_statements.retrieve_many(
+ requests=[
+ {"payment_id": "e8b90071-0c11-471c-86e8-e303ef2f6782"},
+ {"payment_id": "e8b90071-0c11-471c-86e8-e303ef2f6782"},
+ {"payment_id": "e8b90071-0c11-471c-86e8-e303ef2f6782"},
+ ],
+ )
+ assert_matches_type(AsyncResponsesPage[PayStatementResponse], pay_statement, path=["response"])
diff --git a/tests/api_resources/hris/test_payments.py b/tests/api_resources/hris/test_payments.py
new file mode 100644
index 00000000..83e294c8
--- /dev/null
+++ b/tests/api_resources/hris/test_payments.py
@@ -0,0 +1,44 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+import os
+
+import pytest
+
+from finch import Finch, AsyncFinch
+from tests.utils import assert_matches_type
+from finch._utils import parse_date
+from finch.pagination import SyncSinglePage, AsyncSinglePage
+from finch.types.hris import Payment
+
+base_url = os.environ.get("API_BASE_URL", "http://127.0.0.1:4010")
+access_token = os.environ.get("API_KEY", "something1234")
+
+
+class TestPayments:
+ strict_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ def test_method_list(self, client: Finch) -> None:
+ payment = client.hris.payments.list(
+ start_date=parse_date("2021-01-01"),
+ end_date=parse_date("2021-01-01"),
+ )
+ assert_matches_type(SyncSinglePage[Payment], payment, path=["response"])
+
+
+class TestAsyncPayments:
+ strict_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ async def test_method_list(self, client: AsyncFinch) -> None:
+ payment = await client.hris.payments.list(
+ start_date=parse_date("2021-01-01"),
+ end_date=parse_date("2021-01-01"),
+ )
+ assert_matches_type(AsyncSinglePage[Payment], payment, path=["response"])
diff --git a/tests/api_resources/test_account.py b/tests/api_resources/test_account.py
new file mode 100644
index 00000000..428efc53
--- /dev/null
+++ b/tests/api_resources/test_account.py
@@ -0,0 +1,46 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+import os
+
+import pytest
+
+from finch import Finch, AsyncFinch
+from finch.types import Introspection, DisconnectResponse
+from tests.utils import assert_matches_type
+
+base_url = os.environ.get("API_BASE_URL", "http://127.0.0.1:4010")
+access_token = os.environ.get("API_KEY", "something1234")
+
+
+class TestAccount:
+ strict_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ def test_method_disconnect(self, client: Finch) -> None:
+ account = client.account.disconnect()
+ assert_matches_type(DisconnectResponse, account, path=["response"])
+
+ @parametrize
+ def test_method_introspect(self, client: Finch) -> None:
+ account = client.account.introspect()
+ assert_matches_type(Introspection, account, path=["response"])
+
+
+class TestAsyncAccount:
+ strict_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ async def test_method_disconnect(self, client: AsyncFinch) -> None:
+ account = await client.account.disconnect()
+ assert_matches_type(DisconnectResponse, account, path=["response"])
+
+ @parametrize
+ async def test_method_introspect(self, client: AsyncFinch) -> None:
+ account = await client.account.introspect()
+ assert_matches_type(Introspection, account, path=["response"])
diff --git a/tests/api_resources/test_ats.py b/tests/api_resources/test_ats.py
new file mode 100644
index 00000000..8c8a9926
--- /dev/null
+++ b/tests/api_resources/test_ats.py
@@ -0,0 +1,24 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+import os
+
+import pytest
+
+from finch import Finch, AsyncFinch
+
+base_url = os.environ.get("API_BASE_URL", "http://127.0.0.1:4010")
+access_token = os.environ.get("API_KEY", "something1234")
+
+
+class TestATS:
+ strict_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+
+class TestAsyncATS:
+ strict_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
diff --git a/tests/api_resources/test_hris.py b/tests/api_resources/test_hris.py
new file mode 100644
index 00000000..9e954a66
--- /dev/null
+++ b/tests/api_resources/test_hris.py
@@ -0,0 +1,24 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+import os
+
+import pytest
+
+from finch import Finch, AsyncFinch
+
+base_url = os.environ.get("API_BASE_URL", "http://127.0.0.1:4010")
+access_token = os.environ.get("API_KEY", "something1234")
+
+
+class TestHRIS:
+ strict_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+
+class TestAsyncHRIS:
+ strict_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
diff --git a/tests/api_resources/test_providers.py b/tests/api_resources/test_providers.py
new file mode 100644
index 00000000..3cf60a06
--- /dev/null
+++ b/tests/api_resources/test_providers.py
@@ -0,0 +1,37 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+import os
+
+import pytest
+
+from finch import Finch, AsyncFinch
+from finch.types import Provider
+from tests.utils import assert_matches_type
+from finch.pagination import SyncSinglePage, AsyncSinglePage
+
+base_url = os.environ.get("API_BASE_URL", "http://127.0.0.1:4010")
+access_token = os.environ.get("API_KEY", "something1234")
+
+
+class TestProviders:
+ strict_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ def test_method_list(self, client: Finch) -> None:
+ provider = client.providers.list()
+ assert_matches_type(SyncSinglePage[Provider], provider, path=["response"])
+
+
+class TestAsyncProviders:
+ strict_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+ @parametrize
+ async def test_method_list(self, client: AsyncFinch) -> None:
+ provider = await client.providers.list()
+ assert_matches_type(AsyncSinglePage[Provider], provider, path=["response"])
diff --git a/tests/api_resources/test_top_level.py b/tests/api_resources/test_top_level.py
new file mode 100644
index 00000000..abeff802
--- /dev/null
+++ b/tests/api_resources/test_top_level.py
@@ -0,0 +1,24 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+import os
+
+import pytest
+
+from finch import Finch, AsyncFinch
+
+base_url = os.environ.get("API_BASE_URL", "http://127.0.0.1:4010")
+access_token = os.environ.get("API_KEY", "something1234")
+
+
+class TestTopLevel:
+ strict_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+
+
+class TestAsyncTopLevel:
+ strict_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ loose_client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=False)
+ parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 00000000..4452b7a7
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,10 @@
+import asyncio
+
+import pytest
+
+pytest.register_assert_rewrite("tests.utils")
+
+
+@pytest.fixture(scope="session")
+def event_loop() -> asyncio.AbstractEventLoop:
+ return asyncio.new_event_loop()
diff --git a/tests/test_client.py b/tests/test_client.py
new file mode 100644
index 00000000..16bf5b78
--- /dev/null
+++ b/tests/test_client.py
@@ -0,0 +1,660 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+import os
+import json
+import inspect
+from typing import Any, Dict, Union, cast
+
+import httpx
+import pytest
+from respx import MockRouter
+
+from finch import Finch, AsyncFinch
+from finch._types import Omit
+from finch._models import BaseModel, FinalRequestOptions
+from finch._base_client import BaseClient, make_request_options
+
+base_url = os.environ.get("API_BASE_URL", "http://127.0.0.1:4010")
+access_token = os.environ.get("API_KEY", "something1234")
+
+
+def _get_params(client: BaseClient) -> dict[str, str]:
+ request = client._build_request(FinalRequestOptions(method="get", url="/foo"))
+ url = httpx.URL(request.url)
+ return dict(url.params)
+
+
+class TestFinch:
+ client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+
+ def test_raw_response(self) -> None:
+ response = self.client.get("/providers", cast_to=httpx.Response)
+ assert response.status_code == 200
+ assert isinstance(response, httpx.Response)
+
+ def test_copy(self) -> None:
+ copied = self.client.copy()
+ assert id(copied) != id(self.client)
+
+ copied = self.client.copy(access_token="my new access token")
+ assert copied.access_token == "my new access token"
+ assert self.client.access_token == access_token
+
+ def test_copy_default_options(self) -> None:
+        # options that have a default are overridden correctly
+ copied = self.client.copy(max_retries=7)
+ assert copied.max_retries == 7
+ assert self.client.max_retries == 2
+
+ copied2 = copied.copy(max_retries=6)
+ assert copied2.max_retries == 6
+ assert copied.max_retries == 7
+
+ # timeout
+ assert isinstance(self.client.timeout, httpx.Timeout)
+ copied = self.client.copy(timeout=None)
+ assert copied.timeout is None
+ assert isinstance(self.client.timeout, httpx.Timeout)
+
+ def test_copy_default_headers(self) -> None:
+ client = Finch(
+ base_url=base_url,
+ access_token=access_token,
+ _strict_response_validation=True,
+ default_headers={"X-Foo": "bar"},
+ )
+ assert client.default_headers["X-Foo"] == "bar"
+
+ # does not override the already given value when not specified
+ copied = client.copy()
+ assert copied.default_headers["X-Foo"] == "bar"
+
+ # merges already given headers
+ copied = client.copy(default_headers={"X-Bar": "stainless"})
+ assert copied.default_headers["X-Foo"] == "bar"
+ assert copied.default_headers["X-Bar"] == "stainless"
+
+ # uses new values for any already given headers
+ copied = client.copy(default_headers={"X-Foo": "stainless"})
+ assert copied.default_headers["X-Foo"] == "stainless"
+
+ # set_default_headers
+
+ # completely overrides already set values
+ copied = client.copy(set_default_headers={})
+ assert copied.default_headers.get("X-Foo") is None
+
+ copied = client.copy(set_default_headers={"X-Bar": "Robert"})
+ assert copied.default_headers["X-Bar"] == "Robert"
+
+ with pytest.raises(
+ ValueError,
+ match="`default_headers` and `set_default_headers` arguments are mutually exclusive",
+ ):
+ client.copy(set_default_headers={}, default_headers={"X-Foo": "Bar"})
+
+ def test_copy_default_query(self) -> None:
+ client = Finch(
+ base_url=base_url, access_token=access_token, _strict_response_validation=True, default_query={"foo": "bar"}
+ )
+ assert _get_params(client)["foo"] == "bar"
+
+ # does not override the already given value when not specified
+ copied = client.copy()
+ assert _get_params(copied)["foo"] == "bar"
+
+ # merges already given params
+ copied = client.copy(default_query={"bar": "stainless"})
+ params = _get_params(copied)
+ assert params["foo"] == "bar"
+ assert params["bar"] == "stainless"
+
+        # uses new values for any already given params
+ copied = client.copy(default_query={"foo": "stainless"})
+ assert _get_params(copied)["foo"] == "stainless"
+
+ # set_default_query
+
+ # completely overrides already set values
+ copied = client.copy(set_default_query={})
+ assert _get_params(copied) == {}
+
+ copied = client.copy(set_default_query={"bar": "Robert"})
+ assert _get_params(copied)["bar"] == "Robert"
+
+ with pytest.raises(
+ ValueError,
+ # TODO: update
+ match="`default_query` and `set_default_query` arguments are mutually exclusive",
+ ):
+ client.copy(set_default_query={}, default_query={"foo": "Bar"})
+
+ def test_copy_signature(self) -> None:
+ # ensure the same parameters that can be passed to the client are defined in the `.copy()` method
+ init_signature = inspect.signature(
+ # mypy doesn't like that we access the `__init__` property.
+ self.client.__init__, # type: ignore[misc]
+ )
+ copy_signature = inspect.signature(self.client.copy)
+ exclude_params = {"transport", "proxies", "_strict_response_validation"}
+
+ for name in init_signature.parameters.keys():
+ if name in exclude_params:
+ continue
+
+ copy_param = copy_signature.parameters.get(name)
+ assert copy_param is not None, f"copy() signature is missing the {name} param"
+
+ def test_default_headers_option(self) -> None:
+ client = Finch(
+ base_url=base_url,
+ access_token=access_token,
+ _strict_response_validation=True,
+ default_headers={"X-Foo": "bar"},
+ )
+ request = client._build_request(FinalRequestOptions(method="get", url="/foo"))
+ assert request.headers.get("x-foo") == "bar"
+ assert request.headers.get("x-stainless-lang") == "python"
+
+ client2 = Finch(
+ base_url=base_url,
+ access_token=access_token,
+ _strict_response_validation=True,
+ default_headers={
+ "X-Foo": "stainless",
+ "X-Stainless-Lang": "my-overriding-header",
+ },
+ )
+ request = client2._build_request(FinalRequestOptions(method="get", url="/foo"))
+ assert request.headers.get("x-foo") == "stainless"
+ assert request.headers.get("x-stainless-lang") == "my-overriding-header"
+
+ def test_validate_headers(self) -> None:
+ client = Finch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ request = client._build_request(FinalRequestOptions(method="get", url="/foo"))
+ assert request.headers.get("Authorization") == f"Bearer {access_token}"
+
+ client2 = Finch(base_url=base_url, access_token=None, _strict_response_validation=True)
+ with pytest.raises(
+ TypeError,
+ match="Could not resolve authentication method. Expected the access_token to be set. Or for the `Authorization` headers to be explicitly omitted",
+ ):
+ client2._build_request(FinalRequestOptions(method="get", url="/foo"))
+
+ request2 = client2._build_request(
+ FinalRequestOptions(method="get", url="/foo", headers={"Authorization": Omit()})
+ )
+ assert request2.headers.get("Authorization") is None
+
+ def test_default_query_option(self) -> None:
+ client = Finch(
+ base_url=base_url,
+ access_token=access_token,
+ _strict_response_validation=True,
+ default_query={"query_param": "bar"},
+ )
+ request = client._build_request(FinalRequestOptions(method="get", url="/foo"))
+ url = httpx.URL(request.url)
+ assert dict(url.params) == {"query_param": "bar"}
+
+ request = client._build_request(
+ FinalRequestOptions(
+ method="get",
+ url="/foo",
+ params={"foo": "baz", "query_param": "overriden"},
+ )
+ )
+ url = httpx.URL(request.url)
+ assert dict(url.params) == {"foo": "baz", "query_param": "overriden"}
+
+ def test_request_extra_json(self) -> None:
+ request = self.client._build_request(
+ FinalRequestOptions(
+ method="post",
+ url="/foo",
+ json_data={"foo": "bar"},
+ extra_json={"baz": False},
+ ),
+ )
+ data = json.loads(request.content.decode("utf-8"))
+ assert data == {"foo": "bar", "baz": False}
+
+ request = self.client._build_request(
+ FinalRequestOptions(
+ method="post",
+ url="/foo",
+ extra_json={"baz": False},
+ ),
+ )
+ data = json.loads(request.content.decode("utf-8"))
+ assert data == {"baz": False}
+
+ # `extra_json` takes priority over `json_data` when keys clash
+ request = self.client._build_request(
+ FinalRequestOptions(
+ method="post",
+ url="/foo",
+ json_data={"foo": "bar", "baz": True},
+ extra_json={"baz": None},
+ ),
+ )
+ data = json.loads(request.content.decode("utf-8"))
+ assert data == {"foo": "bar", "baz": None}
+
+ def test_request_extra_headers(self) -> None:
+ request = self.client._build_request(
+ FinalRequestOptions(
+ method="post",
+ url="/foo",
+ **make_request_options(extra_headers={"X-Foo": "Foo"}),
+ ),
+ )
+ assert request.headers.get("X-Foo") == "Foo"
+
+ # `extra_headers` takes priority over `default_headers` when keys clash
+ request = self.client.with_options(default_headers={"X-Bar": "true"})._build_request(
+ FinalRequestOptions(
+ method="post",
+ url="/foo",
+ **make_request_options(
+ extra_headers={"X-Bar": "false"},
+ ),
+ ),
+ )
+ assert request.headers.get("X-Bar") == "false"
+
+ def test_request_extra_query(self) -> None:
+ request = self.client._build_request(
+ FinalRequestOptions(
+ method="post",
+ url="/foo",
+ **make_request_options(
+ extra_query={"my_query_param": "Foo"},
+ ),
+ ),
+ )
+ params = cast(Dict[str, str], dict(request.url.params))
+ assert params == {"my_query_param": "Foo"}
+
+ # if both `query` and `extra_query` are given, they are merged
+ request = self.client._build_request(
+ FinalRequestOptions(
+ method="post",
+ url="/foo",
+ **make_request_options(
+ query={"bar": "1"},
+ extra_query={"foo": "2"},
+ ),
+ ),
+ )
+ params = cast(Dict[str, str], dict(request.url.params))
+ assert params == {"bar": "1", "foo": "2"}
+
+ # `extra_query` takes priority over `query` when keys clash
+ request = self.client._build_request(
+ FinalRequestOptions(
+ method="post",
+ url="/foo",
+ **make_request_options(
+ query={"foo": "1"},
+ extra_query={"foo": "2"},
+ ),
+ ),
+ )
+ params = cast(Dict[str, str], dict(request.url.params))
+ assert params == {"foo": "2"}
+
+ @pytest.mark.respx(base_url=base_url)
+ def test_basic_union_response(self, respx_mock: MockRouter) -> None:
+ class Model1(BaseModel):
+ name: str
+
+ class Model2(BaseModel):
+ foo: str
+
+ respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"}))
+
+ response = self.client.get("/foo", cast_to=cast(Any, Union[Model1, Model2]))
+ assert isinstance(response, Model2)
+ assert response.foo == "bar"
+
+ @pytest.mark.respx(base_url=base_url)
+ def test_union_response_different_types(self, respx_mock: MockRouter) -> None:
+ """Union of objects with the same field name using a different type"""
+
+ class Model1(BaseModel):
+ foo: int
+
+ class Model2(BaseModel):
+ foo: str
+
+ respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"}))
+
+ response = self.client.get("/foo", cast_to=cast(Any, Union[Model1, Model2]))
+ assert isinstance(response, Model2)
+ assert response.foo == "bar"
+
+ respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": 1}))
+
+ response = self.client.get("/foo", cast_to=cast(Any, Union[Model1, Model2]))
+ assert isinstance(response, Model1)
+ assert response.foo == 1
+
+
+class TestAsyncFinch:
+ client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+
+ async def test_raw_response(self) -> None:
+ response = await self.client.get("/providers", cast_to=httpx.Response)
+ assert response.status_code == 200
+ assert isinstance(response, httpx.Response)
+
+ def test_copy(self) -> None:
+ copied = self.client.copy()
+ assert id(copied) != id(self.client)
+
+ copied = self.client.copy(access_token="my new access token")
+ assert copied.access_token == "my new access token"
+ assert self.client.access_token == access_token
+
+ def test_copy_default_options(self) -> None:
+        # options that have a default are overridden correctly
+ copied = self.client.copy(max_retries=7)
+ assert copied.max_retries == 7
+ assert self.client.max_retries == 2
+
+ copied2 = copied.copy(max_retries=6)
+ assert copied2.max_retries == 6
+ assert copied.max_retries == 7
+
+ # timeout
+ assert isinstance(self.client.timeout, httpx.Timeout)
+ copied = self.client.copy(timeout=None)
+ assert copied.timeout is None
+ assert isinstance(self.client.timeout, httpx.Timeout)
+
+ def test_copy_default_headers(self) -> None:
+ client = AsyncFinch(
+ base_url=base_url,
+ access_token=access_token,
+ _strict_response_validation=True,
+ default_headers={"X-Foo": "bar"},
+ )
+ assert client.default_headers["X-Foo"] == "bar"
+
+ # does not override the already given value when not specified
+ copied = client.copy()
+ assert copied.default_headers["X-Foo"] == "bar"
+
+ # merges already given headers
+ copied = client.copy(default_headers={"X-Bar": "stainless"})
+ assert copied.default_headers["X-Foo"] == "bar"
+ assert copied.default_headers["X-Bar"] == "stainless"
+
+ # uses new values for any already given headers
+ copied = client.copy(default_headers={"X-Foo": "stainless"})
+ assert copied.default_headers["X-Foo"] == "stainless"
+
+ # set_default_headers
+
+ # completely overrides already set values
+ copied = client.copy(set_default_headers={})
+ assert copied.default_headers.get("X-Foo") is None
+
+ copied = client.copy(set_default_headers={"X-Bar": "Robert"})
+ assert copied.default_headers["X-Bar"] == "Robert"
+
+ with pytest.raises(
+ ValueError,
+ match="`default_headers` and `set_default_headers` arguments are mutually exclusive",
+ ):
+ client.copy(set_default_headers={}, default_headers={"X-Foo": "Bar"})
+
+ def test_copy_default_query(self) -> None:
+ client = AsyncFinch(
+ base_url=base_url, access_token=access_token, _strict_response_validation=True, default_query={"foo": "bar"}
+ )
+ assert _get_params(client)["foo"] == "bar"
+
+ # does not override the already given value when not specified
+ copied = client.copy()
+ assert _get_params(copied)["foo"] == "bar"
+
+ # merges already given params
+ copied = client.copy(default_query={"bar": "stainless"})
+ params = _get_params(copied)
+ assert params["foo"] == "bar"
+ assert params["bar"] == "stainless"
+
+        # uses new values for any already given params
+ copied = client.copy(default_query={"foo": "stainless"})
+ assert _get_params(copied)["foo"] == "stainless"
+
+ # set_default_query
+
+ # completely overrides already set values
+ copied = client.copy(set_default_query={})
+ assert _get_params(copied) == {}
+
+ copied = client.copy(set_default_query={"bar": "Robert"})
+ assert _get_params(copied)["bar"] == "Robert"
+
+ with pytest.raises(
+ ValueError,
+ # TODO: update
+ match="`default_query` and `set_default_query` arguments are mutually exclusive",
+ ):
+ client.copy(set_default_query={}, default_query={"foo": "Bar"})
+
+ def test_copy_signature(self) -> None:
+ # ensure the same parameters that can be passed to the client are defined in the `.copy()` method
+ init_signature = inspect.signature(
+ # mypy doesn't like that we access the `__init__` property.
+ self.client.__init__, # type: ignore[misc]
+ )
+ copy_signature = inspect.signature(self.client.copy)
+ exclude_params = {"transport", "proxies", "_strict_response_validation"}
+
+ for name in init_signature.parameters.keys():
+ if name in exclude_params:
+ continue
+
+ copy_param = copy_signature.parameters.get(name)
+ assert copy_param is not None, f"copy() signature is missing the {name} param"
+
+ def test_default_headers_option(self) -> None:
+ client = AsyncFinch(
+ base_url=base_url,
+ access_token=access_token,
+ _strict_response_validation=True,
+ default_headers={"X-Foo": "bar"},
+ )
+ request = client._build_request(FinalRequestOptions(method="get", url="/foo"))
+ assert request.headers.get("x-foo") == "bar"
+ assert request.headers.get("x-stainless-lang") == "python"
+
+ client2 = AsyncFinch(
+ base_url=base_url,
+ access_token=access_token,
+ _strict_response_validation=True,
+ default_headers={
+ "X-Foo": "stainless",
+ "X-Stainless-Lang": "my-overriding-header",
+ },
+ )
+ request = client2._build_request(FinalRequestOptions(method="get", url="/foo"))
+ assert request.headers.get("x-foo") == "stainless"
+ assert request.headers.get("x-stainless-lang") == "my-overriding-header"
+
+ def test_validate_headers(self) -> None:
+ client = AsyncFinch(base_url=base_url, access_token=access_token, _strict_response_validation=True)
+ request = client._build_request(FinalRequestOptions(method="get", url="/foo"))
+ assert request.headers.get("Authorization") == f"Bearer {access_token}"
+
+ client2 = AsyncFinch(base_url=base_url, access_token=None, _strict_response_validation=True)
+ with pytest.raises(
+ TypeError,
+ match="Could not resolve authentication method. Expected the access_token to be set. Or for the `Authorization` headers to be explicitly omitted",
+ ):
+ client2._build_request(FinalRequestOptions(method="get", url="/foo"))
+
+ request2 = client2._build_request(
+ FinalRequestOptions(method="get", url="/foo", headers={"Authorization": Omit()})
+ )
+ assert request2.headers.get("Authorization") is None
+
+ def test_default_query_option(self) -> None:
+ client = AsyncFinch(
+ base_url=base_url,
+ access_token=access_token,
+ _strict_response_validation=True,
+ default_query={"query_param": "bar"},
+ )
+ request = client._build_request(FinalRequestOptions(method="get", url="/foo"))
+ url = httpx.URL(request.url)
+ assert dict(url.params) == {"query_param": "bar"}
+
+ request = client._build_request(
+ FinalRequestOptions(
+ method="get",
+ url="/foo",
+ params={"foo": "baz", "query_param": "overriden"},
+ )
+ )
+ url = httpx.URL(request.url)
+ assert dict(url.params) == {"foo": "baz", "query_param": "overriden"}
+
+ def test_request_extra_json(self) -> None:
+ request = self.client._build_request(
+ FinalRequestOptions(
+ method="post",
+ url="/foo",
+ json_data={"foo": "bar"},
+ extra_json={"baz": False},
+ ),
+ )
+ data = json.loads(request.content.decode("utf-8"))
+ assert data == {"foo": "bar", "baz": False}
+
+ request = self.client._build_request(
+ FinalRequestOptions(
+ method="post",
+ url="/foo",
+ extra_json={"baz": False},
+ ),
+ )
+ data = json.loads(request.content.decode("utf-8"))
+ assert data == {"baz": False}
+
+ # `extra_json` takes priority over `json_data` when keys clash
+ request = self.client._build_request(
+ FinalRequestOptions(
+ method="post",
+ url="/foo",
+ json_data={"foo": "bar", "baz": True},
+ extra_json={"baz": None},
+ ),
+ )
+ data = json.loads(request.content.decode("utf-8"))
+ assert data == {"foo": "bar", "baz": None}
+
+ def test_request_extra_headers(self) -> None:
+ request = self.client._build_request(
+ FinalRequestOptions(
+ method="post",
+ url="/foo",
+ **make_request_options(extra_headers={"X-Foo": "Foo"}),
+ ),
+ )
+ assert request.headers.get("X-Foo") == "Foo"
+
+ # `extra_headers` takes priority over `default_headers` when keys clash
+ request = self.client.with_options(default_headers={"X-Bar": "true"})._build_request(
+ FinalRequestOptions(
+ method="post",
+ url="/foo",
+ **make_request_options(
+ extra_headers={"X-Bar": "false"},
+ ),
+ ),
+ )
+ assert request.headers.get("X-Bar") == "false"
+
+ def test_request_extra_query(self) -> None:
+ request = self.client._build_request(
+ FinalRequestOptions(
+ method="post",
+ url="/foo",
+ **make_request_options(
+ extra_query={"my_query_param": "Foo"},
+ ),
+ ),
+ )
+ params = cast(Dict[str, str], dict(request.url.params))
+ assert params == {"my_query_param": "Foo"}
+
+ # if both `query` and `extra_query` are given, they are merged
+ request = self.client._build_request(
+ FinalRequestOptions(
+ method="post",
+ url="/foo",
+ **make_request_options(
+ query={"bar": "1"},
+ extra_query={"foo": "2"},
+ ),
+ ),
+ )
+ params = cast(Dict[str, str], dict(request.url.params))
+ assert params == {"bar": "1", "foo": "2"}
+
+ # `extra_query` takes priority over `query` when keys clash
+ request = self.client._build_request(
+ FinalRequestOptions(
+ method="post",
+ url="/foo",
+ **make_request_options(
+ query={"foo": "1"},
+ extra_query={"foo": "2"},
+ ),
+ ),
+ )
+ params = cast(Dict[str, str], dict(request.url.params))
+ assert params == {"foo": "2"}
+
+ @pytest.mark.respx(base_url=base_url)
+ async def test_basic_union_response(self, respx_mock: MockRouter) -> None:
+ class Model1(BaseModel):
+ name: str
+
+ class Model2(BaseModel):
+ foo: str
+
+ respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"}))
+
+ response = await self.client.get("/foo", cast_to=cast(Any, Union[Model1, Model2]))
+ assert isinstance(response, Model2)
+ assert response.foo == "bar"
+
+ @pytest.mark.respx(base_url=base_url)
+ async def test_union_response_different_types(self, respx_mock: MockRouter) -> None:
+ """Union of objects with the same field name using a different type"""
+
+ class Model1(BaseModel):
+ foo: int
+
+ class Model2(BaseModel):
+ foo: str
+
+ respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"}))
+
+ response = await self.client.get("/foo", cast_to=cast(Any, Union[Model1, Model2]))
+ assert isinstance(response, Model2)
+ assert response.foo == "bar"
+
+ respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": 1}))
+
+ response = await self.client.get("/foo", cast_to=cast(Any, Union[Model1, Model2]))
+ assert isinstance(response, Model1)
+ assert response.foo == 1
diff --git a/tests/test_deepcopy.py b/tests/test_deepcopy.py
new file mode 100644
index 00000000..3fce55d9
--- /dev/null
+++ b/tests/test_deepcopy.py
@@ -0,0 +1,59 @@
+from finch._utils import deepcopy_minimal
+
+
+def assert_different_identities(obj1: object, obj2: object) -> None:
+ assert obj1 == obj2
+ assert id(obj1) != id(obj2)
+
+
+def test_simple_dict() -> None:
+ obj1 = {"foo": "bar"}
+ obj2 = deepcopy_minimal(obj1)
+ assert_different_identities(obj1, obj2)
+
+
+def test_nested_dict() -> None:
+ obj1 = {"foo": {"bar": True}}
+ obj2 = deepcopy_minimal(obj1)
+ assert_different_identities(obj1, obj2)
+ assert_different_identities(obj1["foo"], obj2["foo"])
+
+
+def test_complex_nested_dict() -> None:
+ obj1 = {"foo": {"bar": [{"hello": "world"}]}}
+ obj2 = deepcopy_minimal(obj1)
+ assert_different_identities(obj1, obj2)
+ assert_different_identities(obj1["foo"], obj2["foo"])
+ assert_different_identities(obj1["foo"]["bar"], obj2["foo"]["bar"])
+ assert_different_identities(obj1["foo"]["bar"][0], obj2["foo"]["bar"][0])
+
+
+def test_simple_list() -> None:
+ obj1 = ["a", "b", "c"]
+ obj2 = deepcopy_minimal(obj1)
+ assert_different_identities(obj1, obj2)
+
+
+def test_nested_list() -> None:
+ obj1 = ["a", [1, 2, 3]]
+ obj2 = deepcopy_minimal(obj1)
+ assert_different_identities(obj1, obj2)
+ assert_different_identities(obj1[1], obj2[1])
+
+
+class MyObject:
+ ...
+
+
+def test_ignores_other_types() -> None:
+ # custom classes
+ my_obj = MyObject()
+ obj1 = {"foo": my_obj}
+ obj2 = deepcopy_minimal(obj1)
+ assert_different_identities(obj1, obj2)
+ assert obj1["foo"] is my_obj
+
+ # tuples
+ obj3 = ("a", "b")
+ obj4 = deepcopy_minimal(obj3)
+ assert obj3 is obj4
diff --git a/tests/test_extract_files.py b/tests/test_extract_files.py
new file mode 100644
index 00000000..43cc2bee
--- /dev/null
+++ b/tests/test_extract_files.py
@@ -0,0 +1,64 @@
+from __future__ import annotations
+
+from typing import Sequence
+
+import pytest
+
+from finch._types import FileTypes
+from finch._utils import extract_files
+
+
+def test_removes_files_from_input() -> None:
+ query = {"foo": "bar"}
+ assert extract_files(query, paths=[]) == []
+ assert query == {"foo": "bar"}
+
+ query2 = {"foo": b"Bar", "hello": "world"}
+ assert extract_files(query2, paths=[["foo"]]) == [("foo", b"Bar")]
+ assert query2 == {"hello": "world"}
+
+ query3 = {"foo": {"foo": {"bar": b"Bar"}}, "hello": "world"}
+ assert extract_files(query3, paths=[["foo", "foo", "bar"]]) == [("foo[foo][bar]", b"Bar")]
+ assert query3 == {"foo": {"foo": {}}, "hello": "world"}
+
+ query4 = {"foo": {"bar": b"Bar", "baz": "foo"}, "hello": "world"}
+ assert extract_files(query4, paths=[["foo", "bar"]]) == [("foo[bar]", b"Bar")]
+ assert query4 == {"hello": "world", "foo": {"baz": "foo"}}
+
+
+def test_multiple_files() -> None:
+ query = {"documents": [{"file": b"My first file"}, {"file": b"My second file"}]}
+ assert extract_files(query, paths=[["documents", "", "file"]]) == [
+ ("documents[][file]", b"My first file"),
+ ("documents[][file]", b"My second file"),
+ ]
+ assert query == {"documents": [{}, {}]}
+
+
+@pytest.mark.parametrize(
+ "query,paths,expected",
+ [
+ [
+ {"foo": {"bar": "baz"}},
+ [["foo", "", "bar"]],
+ [],
+ ],
+ [
+ {"foo": ["bar", "baz"]},
+ [["foo", "bar"]],
+ [],
+ ],
+ [
+ {"foo": {"bar": "baz"}},
+ [["foo", "foo"]],
+ [],
+ ],
+ ],
+ ids=["dict expecting array", "array expecting dict", "unknown keys"],
+)
+def test_ignores_incorrect_paths(
+ query: dict[str, object],
+ paths: Sequence[Sequence[str]],
+ expected: list[tuple[str, FileTypes]],
+) -> None:
+ assert extract_files(query, paths=paths) == expected
diff --git a/tests/test_models.py b/tests/test_models.py
new file mode 100644
index 00000000..97cee5ba
--- /dev/null
+++ b/tests/test_models.py
@@ -0,0 +1,445 @@
+from typing import Any, Dict, List, Union, Optional, cast
+from datetime import datetime, timezone
+from typing_extensions import Literal
+
+import pytest
+from pydantic import Field
+
+from finch._models import BaseModel
+
+
+class BasicModel(BaseModel):
+ foo: str
+
+
+@pytest.mark.parametrize("value", ["hello", 1], ids=["correct type", "mismatched"])
+def test_basic(value: object) -> None:
+ m = BasicModel.construct(foo=value)
+ assert m.foo == value
+
+
+def test_directly_nested_model() -> None:
+ class NestedModel(BaseModel):
+ nested: BasicModel
+
+ m = NestedModel.construct(nested={"foo": "Foo!"})
+ assert m.nested.foo == "Foo!"
+
+ # mismatched types
+ m = NestedModel.construct(nested="hello!")
+ assert m.nested == "hello!"
+
+
+def test_optional_nested_model() -> None:
+ class NestedModel(BaseModel):
+ nested: Optional[BasicModel]
+
+ m1 = NestedModel.construct(nested=None)
+ assert m1.nested is None
+
+ m2 = NestedModel.construct(nested={"foo": "bar"})
+ assert m2.nested is not None
+ assert m2.nested.foo == "bar"
+
+ # mismatched types
+ m3 = NestedModel.construct(nested={"foo"})
+ assert isinstance(cast(Any, m3.nested), set)
+ assert m3.nested == {"foo"}
+
+
+def test_list_nested_model() -> None:
+ class NestedModel(BaseModel):
+ nested: List[BasicModel]
+
+ m = NestedModel.construct(nested=[{"foo": "bar"}, {"foo": "2"}])
+ assert m.nested is not None
+ assert isinstance(m.nested, list)
+ assert len(m.nested) == 2
+ assert m.nested[0].foo == "bar"
+ assert m.nested[1].foo == "2"
+
+ # mismatched types
+ m = NestedModel.construct(nested=True)
+ assert cast(Any, m.nested) is True
+
+ m = NestedModel.construct(nested=[False])
+ assert cast(Any, m.nested) == [False]
+
+
+def test_optional_list_nested_model() -> None:
+ class NestedModel(BaseModel):
+ nested: Optional[List[BasicModel]]
+
+ m1 = NestedModel.construct(nested=[{"foo": "bar"}, {"foo": "2"}])
+ assert m1.nested is not None
+ assert isinstance(m1.nested, list)
+ assert len(m1.nested) == 2
+ assert m1.nested[0].foo == "bar"
+ assert m1.nested[1].foo == "2"
+
+ m2 = NestedModel.construct(nested=None)
+ assert m2.nested is None
+
+ # mismatched types
+ m3 = NestedModel.construct(nested={1})
+ assert cast(Any, m3.nested) == {1}
+
+ m4 = NestedModel.construct(nested=[False])
+ assert cast(Any, m4.nested) == [False]
+
+
+def test_list_optional_items_nested_model() -> None:
+ class NestedModel(BaseModel):
+ nested: List[Optional[BasicModel]]
+
+ m = NestedModel.construct(nested=[None, {"foo": "bar"}])
+ assert m.nested is not None
+ assert isinstance(m.nested, list)
+ assert len(m.nested) == 2
+ assert m.nested[0] is None
+ assert m.nested[1] is not None
+ assert m.nested[1].foo == "bar"
+
+ # mismatched types
+ m3 = NestedModel.construct(nested="foo")
+ assert cast(Any, m3.nested) == "foo"
+
+ m4 = NestedModel.construct(nested=[False])
+ assert cast(Any, m4.nested) == [False]
+
+
+def test_list_mismatched_type() -> None:
+ class NestedModel(BaseModel):
+ nested: List[str]
+
+ m = NestedModel.construct(nested=False)
+ assert cast(Any, m.nested) is False
+
+
+def test_raw_dictionary() -> None:
+ class NestedModel(BaseModel):
+ nested: Dict[str, str]
+
+ m = NestedModel.construct(nested={"hello": "world"})
+ assert m.nested == {"hello": "world"}
+
+ # mismatched types
+ m = NestedModel.construct(nested=False)
+ assert cast(Any, m.nested) is False
+
+
+def test_nested_dictionary_model() -> None:
+ class NestedModel(BaseModel):
+ nested: Dict[str, BasicModel]
+
+ m = NestedModel.construct(nested={"hello": {"foo": "bar"}})
+ assert isinstance(m.nested, dict)
+ assert m.nested["hello"].foo == "bar"
+
+ # mismatched types
+ m = NestedModel.construct(nested={"hello": False})
+ assert cast(Any, m.nested["hello"]) is False
+
+
+def test_unknown_fields() -> None:
+ m1 = BasicModel.construct(foo="foo", unknown=1)
+ assert m1.foo == "foo"
+ assert cast(Any, m1).unknown == 1
+
+ m2 = BasicModel.construct(foo="foo", unknown={"foo_bar": True})
+ assert m2.foo == "foo"
+ assert cast(Any, m2).unknown == {"foo_bar": True}
+
+ assert m2.dict() == {"foo": "foo", "unknown": {"foo_bar": True}}
+
+
+def test_strict_validation_unknown_fields() -> None:
+ class Model(BaseModel):
+ foo: str
+
+ model = Model.parse_obj(dict(foo="hello!", user="Robert"))
+ assert model.foo == "hello!"
+ assert cast(Any, model).user == "Robert"
+
+ assert model.dict() == {"foo": "hello!", "user": "Robert"}
+
+
+def test_aliases() -> None:
+ class Model(BaseModel):
+ my_field: int = Field(alias="myField")
+
+ m = Model.construct(myField=1)
+ assert m.my_field == 1
+
+ # mismatched types
+ m = Model.construct(myField={"hello": False})
+ assert cast(Any, m.my_field) == {"hello": False}
+
+
+def test_repr() -> None:
+ model = BasicModel(foo="bar")
+ assert str(model) == "BasicModel(foo='bar')"
+ assert repr(model) == "BasicModel(foo='bar')"
+
+
+def test_repr_nested_model() -> None:
+ class Child(BaseModel):
+ name: str
+ age: int
+
+ class Parent(BaseModel):
+ name: str
+ child: Child
+
+ model = Parent(name="Robert", child=Child(name="Foo", age=5))
+ assert str(model) == "Parent(name='Robert', child=Child(name='Foo', age=5))"
+ assert repr(model) == "Parent(name='Robert', child=Child(name='Foo', age=5))"
+
+
+def test_optional_list() -> None:
+ class Submodel(BaseModel):
+ name: str
+
+ class Model(BaseModel):
+ items: Optional[List[Submodel]]
+
+ m = Model.construct(items=None)
+ assert m.items is None
+
+ m = Model.construct(items=[])
+ assert m.items == []
+
+ m = Model.construct(items=[{"name": "Robert"}])
+ assert m.items is not None
+ assert len(m.items) == 1
+ assert m.items[0].name == "Robert"
+
+
+def test_nested_union_of_models() -> None:
+ class Submodel1(BaseModel):
+ bar: bool
+
+ class Submodel2(BaseModel):
+ thing: str
+
+ class Model(BaseModel):
+ foo: Union[Submodel1, Submodel2]
+
+ m = Model.construct(foo={"thing": "hello"})
+ assert isinstance(m.foo, Submodel2)
+ assert m.foo.thing == "hello"
+
+
+def test_nested_union_of_mixed_types() -> None:
+ class Submodel1(BaseModel):
+ bar: bool
+
+ class Model(BaseModel):
+ foo: Union[Submodel1, Literal[True], Literal["CARD_HOLDER"]]
+
+ m = Model.construct(foo=True)
+ assert m.foo is True
+
+ m = Model.construct(foo="CARD_HOLDER")
+ assert m.foo == "CARD_HOLDER"
+
+ m = Model.construct(foo={"bar": False})
+ assert isinstance(m.foo, Submodel1)
+ assert m.foo.bar is False
+
+
+def test_nested_union_multiple_variants() -> None:
+ class Submodel1(BaseModel):
+ bar: bool
+
+ class Submodel2(BaseModel):
+ thing: str
+
+ class Submodel3(BaseModel):
+ foo: int
+
+ class Model(BaseModel):
+ foo: Union[Submodel1, Submodel2, None, Submodel3]
+
+ m = Model.construct(foo={"thing": "hello"})
+ assert isinstance(m.foo, Submodel2)
+ assert m.foo.thing == "hello"
+
+ m = Model.construct(foo=None)
+ assert m.foo is None
+
+ m = Model.construct()
+ assert m.foo is None
+
+ m = Model.construct(foo={"foo": "1"})
+ assert isinstance(m.foo, Submodel3)
+ assert m.foo.foo == 1
+
+
+def test_nested_union_invalid_data() -> None:
+ class Submodel1(BaseModel):
+ level: int
+
+ class Submodel2(BaseModel):
+ name: str
+
+ class Model(BaseModel):
+ foo: Union[Submodel1, Submodel2]
+
+ m = Model.construct(foo=True)
+ assert cast(bool, m.foo) is True
+
+ m = Model.construct(foo={"name": 3})
+ assert isinstance(m.foo, Submodel2)
+ assert m.foo.name == "3"
+
+
+def test_list_of_unions() -> None:
+ class Submodel1(BaseModel):
+ level: int
+
+ class Submodel2(BaseModel):
+ name: str
+
+ class Model(BaseModel):
+ items: List[Union[Submodel1, Submodel2]]
+
+ m = Model.construct(items=[{"level": 1}, {"name": "Robert"}])
+ assert len(m.items) == 2
+ assert isinstance(m.items[0], Submodel1)
+ assert m.items[0].level == 1
+ assert isinstance(m.items[1], Submodel2)
+ assert m.items[1].name == "Robert"
+
+ m = Model.construct(items=[{"level": -1}, 156])
+ assert len(m.items) == 2
+ assert isinstance(m.items[0], Submodel1)
+ assert m.items[0].level == -1
+ assert m.items[1] == 156
+
+
+def test_union_of_lists() -> None:
+ class SubModel1(BaseModel):
+ level: int
+
+ class SubModel2(BaseModel):
+ name: str
+
+ class Model(BaseModel):
+ items: Union[List[SubModel1], List[SubModel2]]
+
+ # with one valid entry
+ m = Model.construct(items=[{"name": "Robert"}])
+ assert len(m.items) == 1
+ assert isinstance(m.items[0], SubModel2)
+ assert m.items[0].name == "Robert"
+
+ # with two entries pointing to different types
+ m = Model.construct(items=[{"level": 1}, {"name": "Robert"}])
+ assert len(m.items) == 2
+ assert isinstance(m.items[0], SubModel1)
+ assert m.items[0].level == 1
+ assert isinstance(m.items[1], SubModel1)
+ assert cast(Any, m.items[1]).name == "Robert"
+
+ # with two entries pointing to *completely* different types
+ m = Model.construct(items=[{"level": -1}, 156])
+ assert len(m.items) == 2
+ assert isinstance(m.items[0], SubModel1)
+ assert m.items[0].level == -1
+ assert m.items[1] == 156
+
+
+def test_dict_of_union() -> None:
+ class SubModel1(BaseModel):
+ name: str
+
+ class SubModel2(BaseModel):
+ foo: str
+
+ class Model(BaseModel):
+ data: Dict[str, Union[SubModel1, SubModel2]]
+
+ m = Model.construct(data={"hello": {"name": "there"}, "foo": {"foo": "bar"}})
+ assert len(list(m.data.keys())) == 2
+ assert isinstance(m.data["hello"], SubModel1)
+ assert m.data["hello"].name == "there"
+ assert isinstance(m.data["foo"], SubModel2)
+ assert m.data["foo"].foo == "bar"
+
+ # TODO: test mismatched type
+
+
+def test_double_nested_union() -> None:
+ class SubModel1(BaseModel):
+ name: str
+
+ class SubModel2(BaseModel):
+ bar: str
+
+ class Model(BaseModel):
+ data: Dict[str, List[Union[SubModel1, SubModel2]]]
+
+ m = Model.construct(data={"foo": [{"bar": "baz"}, {"name": "Robert"}]})
+ assert len(m.data["foo"]) == 2
+
+ entry1 = m.data["foo"][0]
+ assert isinstance(entry1, SubModel2)
+ assert entry1.bar == "baz"
+
+ entry2 = m.data["foo"][1]
+ assert isinstance(entry2, SubModel1)
+ assert entry2.name == "Robert"
+
+ # TODO: test mismatched type
+
+
+def test_union_of_dict() -> None:
+ class SubModel1(BaseModel):
+ name: str
+
+ class SubModel2(BaseModel):
+ foo: str
+
+ class Model(BaseModel):
+ data: Union[Dict[str, SubModel1], Dict[str, SubModel2]]
+
+ m = Model.construct(data={"hello": {"name": "there"}, "foo": {"foo": "bar"}})
+ assert len(list(m.data.keys())) == 2
+ assert isinstance(m.data["hello"], SubModel1)
+ assert m.data["hello"].name == "there"
+ assert isinstance(m.data["foo"], SubModel1)
+ assert cast(Any, m.data["foo"]).foo == "bar"
+
+
+def test_iso8601_datetime() -> None:
+ class Model(BaseModel):
+ created_at: datetime
+
+ expected = datetime(2019, 12, 27, 18, 11, 19, 117000, tzinfo=timezone.utc)
+ expected_json = '{"created_at": "2019-12-27T18:11:19.117000+00:00"}'
+
+ model = Model.construct(created_at="2019-12-27T18:11:19.117Z")
+ assert model.created_at == expected
+ assert model.json() == expected_json
+
+ model = Model.parse_obj(dict(created_at="2019-12-27T18:11:19.117Z"))
+ assert model.created_at == expected
+ assert model.json() == expected_json
+
+
+def test_coerces_int() -> None:
+ class Model(BaseModel):
+ bar: int
+
+ assert Model.construct(bar=1).bar == 1
+ assert Model.construct(bar=10.9).bar == 10
+ assert Model.construct(bar="19").bar == 19
+ assert Model.construct(bar=False).bar == 0
+
+ # TODO: support this
+ # assert Model.construct(bar="True").bar == 1
+
+ # mismatched types are left as-is
+ m = Model.construct(bar={"foo": "bar"})
+ assert m.bar == {"foo": "bar"} # type: ignore[comparison-overlap]
diff --git a/tests/test_qs.py b/tests/test_qs.py
new file mode 100644
index 00000000..d9a25686
--- /dev/null
+++ b/tests/test_qs.py
@@ -0,0 +1,66 @@
+from typing import Any, cast
+from functools import partial
+from urllib.parse import unquote
+
+import pytest
+
+from finch._qs import Querystring, stringify
+
+
+def test_empty() -> None:
+ assert stringify({}) == ""
+ assert stringify({"a": {}}) == ""
+ assert stringify({"a": {"b": {"c": {}}}}) == ""
+
+
+def test_basic() -> None:
+ assert stringify({"a": 1}) == "a=1"
+ assert stringify({"a": "b"}) == "a=b"
+ assert stringify({"a": True}) == "a=true"
+ assert stringify({"a": False}) == "a=false"
+ assert stringify({"a": 1.23456}) == "a=1.23456"
+ assert stringify({"a": None}) == ""
+
+
+@pytest.mark.parametrize("method", ["class", "function"])
+def test_nested_dotted(method: str) -> None:
+ if method == "class":
+ serialise = Querystring(nested_format="dots").stringify
+ else:
+ serialise = partial(stringify, nested_format="dots")
+
+ assert unquote(serialise({"a": {"b": "c"}})) == "a.b=c"
+ assert unquote(serialise({"a": {"b": "c", "d": "e", "f": "g"}})) == "a.b=c&a.d=e&a.f=g"
+ assert unquote(serialise({"a": {"b": {"c": {"d": "e"}}}})) == "a.b.c.d=e"
+ assert unquote(serialise({"a": {"b": True}})) == "a.b=true"
+
+
+def test_nested_brackets() -> None:
+ assert unquote(stringify({"a": {"b": "c"}})) == "a[b]=c"
+ assert unquote(stringify({"a": {"b": "c", "d": "e", "f": "g"}})) == "a[b]=c&a[d]=e&a[f]=g"
+ assert unquote(stringify({"a": {"b": {"c": {"d": "e"}}}})) == "a[b][c][d]=e"
+ assert unquote(stringify({"a": {"b": True}})) == "a[b]=true"
+
+
+@pytest.mark.parametrize("method", ["class", "function"])
+def test_array_comma(method: str) -> None:
+ if method == "class":
+ serialise = Querystring(array_format="comma").stringify
+ else:
+ serialise = partial(stringify, array_format="comma")
+
+ assert unquote(serialise({"in": ["foo", "bar"]})) == "in=foo,bar"
+ assert unquote(serialise({"a": {"b": [True, False]}})) == "a[b]=true,false"
+ assert unquote(serialise({"a": {"b": [True, False, None, True]}})) == "a[b]=true,false,true"
+
+
+def test_array_repeat() -> None:
+ assert unquote(stringify({"in": ["foo", "bar"]})) == "in=foo&in=bar"
+ assert unquote(stringify({"a": {"b": [True, False]}})) == "a[b]=true&a[b]=false"
+ assert unquote(stringify({"a": {"b": [True, False, None, True]}})) == "a[b]=true&a[b]=false&a[b]=true"
+ assert unquote(stringify({"in": ["foo", {"b": {"c": ["d", "e"]}}]})) == "in=foo&in[b][c]=d&in[b][c]=e"
+
+
+def test_unknown_array_format() -> None:
+ with pytest.raises(NotImplementedError, match="Unknown array_format value: foo, choose from comma, repeat"):
+ stringify({"a": ["foo", "bar"]}, array_format=cast(Any, "foo"))
diff --git a/tests/test_required_args.py b/tests/test_required_args.py
new file mode 100644
index 00000000..e839289d
--- /dev/null
+++ b/tests/test_required_args.py
@@ -0,0 +1,111 @@
+from __future__ import annotations
+
+import pytest
+
+from finch._utils import required_args
+
+
+def test_too_many_positional_params() -> None:
+ @required_args(["a"])
+ def foo(a: str | None = None) -> str | None:
+ return a
+
+ with pytest.raises(TypeError, match=r"foo\(\) takes 1 argument\(s\) but 2 were given"):
+ foo("a", "b") # type: ignore
+
+
+def test_positional_param() -> None:
+ @required_args(["a"])
+ def foo(a: str | None = None) -> str | None:
+ return a
+
+ assert foo("a") == "a"
+ assert foo(None) is None
+ assert foo(a="b") == "b"
+
+ with pytest.raises(TypeError, match="Missing required argument: 'a'"):
+ foo()
+
+
+def test_keyword_only_param() -> None:
+ @required_args(["a"])
+ def foo(*, a: str | None = None) -> str | None:
+ return a
+
+ assert foo(a="a") == "a"
+ assert foo(a=None) is None
+ assert foo(a="b") == "b"
+
+ with pytest.raises(TypeError, match="Missing required argument: 'a'"):
+ foo()
+
+
+def test_multiple_params() -> None:
+ @required_args(["a", "b", "c"])
+ def foo(a: str = "", *, b: str = "", c: str = "") -> str | None:
+ return a + " " + b + " " + c
+
+ assert foo(a="a", b="b", c="c") == "a b c"
+
+ error_message = r"Missing required arguments.*"
+
+ with pytest.raises(TypeError, match=error_message):
+ foo()
+
+ with pytest.raises(TypeError, match=error_message):
+ foo(a="a")
+
+ with pytest.raises(TypeError, match=error_message):
+ foo(b="b")
+
+ with pytest.raises(TypeError, match=error_message):
+ foo(c="c")
+
+ with pytest.raises(TypeError, match=r"Missing required argument: 'a'"):
+ foo(b="a", c="c")
+
+ with pytest.raises(TypeError, match=r"Missing required argument: 'b'"):
+ foo("a", c="c")
+
+
+def test_multiple_variants() -> None:
+ @required_args(["a"], ["b"])
+ def foo(*, a: str | None = None, b: str | None = None) -> str | None:
+ return a if a is not None else b
+
+ assert foo(a="foo") == "foo"
+ assert foo(b="bar") == "bar"
+ assert foo(a=None) is None
+ assert foo(b=None) is None
+
+ # TODO: this error message could probably be improved
+ with pytest.raises(
+ TypeError,
+ match=r"Missing required arguments; Expected either \('a'\) or \('b'\) arguments to be given",
+ ):
+ foo()
+
+
+def test_multiple_params_multiple_variants() -> None:
+ @required_args(["a", "b"], ["c"])
+ def foo(*, a: str | None = None, b: str | None = None, c: str | None = None) -> str | None:
+ if a is not None:
+ return a
+ if b is not None:
+ return b
+ return c
+
+ error_message = r"Missing required arguments; Expected either \('a' and 'b'\) or \('c'\) arguments to be given"
+
+ with pytest.raises(TypeError, match=error_message):
+ foo(a="foo")
+
+ with pytest.raises(TypeError, match=error_message):
+ foo(b="bar")
+
+ with pytest.raises(TypeError, match=error_message):
+ foo()
+
+ assert foo(a=None, b="bar") == "bar"
+ assert foo(c=None) is None
+ assert foo(c="foo") == "foo"
diff --git a/tests/test_transform.py b/tests/test_transform.py
new file mode 100644
index 00000000..00ca4b72
--- /dev/null
+++ b/tests/test_transform.py
@@ -0,0 +1,179 @@
+from __future__ import annotations
+
+from typing import List, Union, Optional
+from datetime import date, datetime
+from typing_extensions import Required, Annotated, TypedDict
+
+from finch._utils import PropertyInfo, transform, parse_datetime
+
+
+class Foo1(TypedDict):
+ foo_bar: Annotated[str, PropertyInfo(alias="fooBar")]
+
+
+def test_top_level_alias() -> None:
+ assert transform({"foo_bar": "hello"}, expected_type=Foo1) == {"fooBar": "hello"}
+
+
+class Foo2(TypedDict):
+ bar: Bar2
+
+
+class Bar2(TypedDict):
+ this_thing: Annotated[int, PropertyInfo(alias="this__thing")]
+ baz: Annotated[Baz2, PropertyInfo(alias="Baz")]
+
+
+class Baz2(TypedDict):
+ my_baz: Annotated[str, PropertyInfo(alias="myBaz")]
+
+
+def test_recursive_typeddict() -> None:
+ assert transform({"bar": {"this_thing": 1}}, Foo2) == {"bar": {"this__thing": 1}}
+ assert transform({"bar": {"baz": {"my_baz": "foo"}}}, Foo2) == {"bar": {"Baz": {"myBaz": "foo"}}}
+
+
+class Foo3(TypedDict):
+ things: List[Bar3]
+
+
+class Bar3(TypedDict):
+ my_field: Annotated[str, PropertyInfo(alias="myField")]
+
+
+def test_list_of_typeddict() -> None:
+ result = transform({"things": [{"my_field": "foo"}, {"my_field": "foo2"}]}, expected_type=Foo3)
+ assert result == {"things": [{"myField": "foo"}, {"myField": "foo2"}]}
+
+
+class Foo4(TypedDict):
+ foo: Union[Bar4, Baz4]
+
+
+class Bar4(TypedDict):
+ foo_bar: Annotated[str, PropertyInfo(alias="fooBar")]
+
+
+class Baz4(TypedDict):
+ foo_baz: Annotated[str, PropertyInfo(alias="fooBaz")]
+
+
+def test_union_of_typeddict() -> None:
+ assert transform({"foo": {"foo_bar": "bar"}}, Foo4) == {"foo": {"fooBar": "bar"}}
+ assert transform({"foo": {"foo_baz": "baz"}}, Foo4) == {"foo": {"fooBaz": "baz"}}
+ assert transform({"foo": {"foo_baz": "baz", "foo_bar": "bar"}}, Foo4) == {"foo": {"fooBaz": "baz", "fooBar": "bar"}}
+
+
+class Foo5(TypedDict):
+ foo: Annotated[Union[Bar4, List[Baz4]], PropertyInfo(alias="FOO")]
+
+
+class Bar5(TypedDict):
+ foo_bar: Annotated[str, PropertyInfo(alias="fooBar")]
+
+
+class Baz5(TypedDict):
+ foo_baz: Annotated[str, PropertyInfo(alias="fooBaz")]
+
+
+def test_union_of_list() -> None:
+ assert transform({"foo": {"foo_bar": "bar"}}, Foo5) == {"FOO": {"fooBar": "bar"}}
+ assert transform(
+ {
+ "foo": [
+ {"foo_baz": "baz"},
+ {"foo_baz": "baz"},
+ ]
+ },
+ Foo5,
+ ) == {"FOO": [{"fooBaz": "baz"}, {"fooBaz": "baz"}]}
+
+
+class Foo6(TypedDict):
+ bar: Annotated[str, PropertyInfo(alias="Bar")]
+
+
+def test_includes_unknown_keys() -> None:
+ assert transform({"bar": "bar", "baz_": {"FOO": 1}}, Foo6) == {
+ "Bar": "bar",
+ "baz_": {"FOO": 1},
+ }
+
+
+class Foo7(TypedDict):
+ bar: Annotated[List[Bar7], PropertyInfo(alias="bAr")]
+ foo: Bar7
+
+
+class Bar7(TypedDict):
+ foo: str
+
+
+def test_ignores_invalid_input() -> None:
+ assert transform({"bar": ""}, Foo7) == {"bAr": ""}
+ assert transform({"foo": ""}, Foo7) == {"foo": ""}
+
+
+class DatetimeDict(TypedDict, total=False):
+ foo: Annotated[datetime, PropertyInfo(format="iso8601")]
+
+ bar: Annotated[Optional[datetime], PropertyInfo(format="iso8601")]
+
+ required: Required[Annotated[Optional[datetime], PropertyInfo(format="iso8601")]]
+
+ list_: Required[Annotated[Optional[List[datetime]], PropertyInfo(format="iso8601")]]
+
+ union: Annotated[Union[int, datetime], PropertyInfo(format="iso8601")]
+
+
+class DateDict(TypedDict, total=False):
+ foo: Annotated[date, PropertyInfo(format="iso8601")]
+
+
+def test_iso8601_format() -> None:
+ dt = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00")
+ assert transform({"foo": dt}, DatetimeDict) == {"foo": "2023-02-23T14:16:36.337692+00:00"} # type: ignore[comparison-overlap]
+
+ dt = dt.replace(tzinfo=None)
+ assert transform({"foo": dt}, DatetimeDict) == {"foo": "2023-02-23T14:16:36.337692"} # type: ignore[comparison-overlap]
+
+ assert transform({"foo": None}, DateDict) == {"foo": None} # type: ignore[comparison-overlap]
+ assert transform({"foo": date.fromisoformat("2023-02-23")}, DateDict) == {"foo": "2023-02-23"} # type: ignore[comparison-overlap]
+
+
+def test_optional_iso8601_format() -> None:
+ dt = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00")
+ assert transform({"bar": dt}, DatetimeDict) == {"bar": "2023-02-23T14:16:36.337692+00:00"} # type: ignore[comparison-overlap]
+
+ assert transform({"bar": None}, DatetimeDict) == {"bar": None}
+
+
+def test_required_iso8601_format() -> None:
+ dt = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00")
+ assert transform({"required": dt}, DatetimeDict) == {"required": "2023-02-23T14:16:36.337692+00:00"} # type: ignore[comparison-overlap]
+
+ assert transform({"required": None}, DatetimeDict) == {"required": None}
+
+
+def test_union_datetime() -> None:
+ dt = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00")
+ assert transform({"union": dt}, DatetimeDict) == { # type: ignore[comparison-overlap]
+ "union": "2023-02-23T14:16:36.337692+00:00"
+ }
+
+ assert transform({"union": "foo"}, DatetimeDict) == {"union": "foo"}
+
+
+def test_nested_list_iso8601_format() -> None:
+ dt1 = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00")
+ dt2 = parse_datetime("2022-01-15T06:34:23Z")
+ assert transform({"list_": [dt1, dt2]}, DatetimeDict) == { # type: ignore[comparison-overlap]
+ "list_": ["2023-02-23T14:16:36.337692+00:00", "2022-01-15T06:34:23+00:00"]
+ }
+
+
+def test_datetime_custom_format() -> None:
+ dt = parse_datetime("2022-01-15T06:34:23Z")
+
+ result = transform(dt, Annotated[datetime, PropertyInfo(format="custom", format_template="%H")])
+ assert result == "06" # type: ignore[comparison-overlap]
diff --git a/tests/utils.py b/tests/utils.py
new file mode 100644
index 00000000..986f79fa
--- /dev/null
+++ b/tests/utils.py
@@ -0,0 +1,82 @@
+from __future__ import annotations
+
+import traceback
+from typing import Any, TypeVar, cast
+from datetime import date, datetime
+from typing_extensions import Literal, get_args, get_origin, assert_type
+
+from finch._utils import is_dict, is_list, is_list_type, is_union_type
+from finch._models import BaseModel
+
+BaseModelT = TypeVar("BaseModelT", bound=BaseModel)
+
+
+def assert_matches_model(model: type[BaseModelT], value: BaseModelT, *, path: list[str]) -> bool:
+ for name, field in model.__fields__.items():
+ field_value = getattr(value, name)
+
+ if field.allow_none and field_value is None:
+ continue
+
+ assert_matches_type(field.outer_type_, field_value, path=[*path, name])
+
+ return True
+
+
+# Note: the `path` argument is only used to improve error messages when `--showlocals` is used
+def assert_matches_type(type_: Any, value: object, *, path: list[str]) -> None:
+ origin = get_origin(type_) or type_
+
+ if is_list_type(type_):
+ return _assert_list_type(type_, value)
+
+ if origin == str:
+ assert isinstance(value, str)
+ elif origin == int:
+ assert isinstance(value, int)
+ elif origin == bool:
+ assert isinstance(value, bool)
+ elif origin == float:
+ assert isinstance(value, float)
+ elif origin == datetime:
+ assert isinstance(value, datetime)
+ elif origin == date:
+ assert isinstance(value, date)
+ elif origin == object:
+ # nothing to do here, the expected type is unknown
+ pass
+ elif origin == Literal:
+ assert value in get_args(type_)
+ elif origin == dict:
+ assert is_dict(value)
+
+ args = get_args(type_)
+ key_type = args[0]
+ items_type = args[1]
+
+ for key, item in value.items():
+ assert_matches_type(key_type, key, path=[*path, ""])
+ assert_matches_type(items_type, item, path=[*path, ""])
+ elif is_union_type(type_):
+ for i, variant in enumerate(get_args(type_)):
+ try:
+ assert_matches_type(variant, value, path=[*path, f"variant {i}"])
+ return
+ except AssertionError:
+ traceback.print_exc()
+ continue
+
+ assert False, "Did not match any variants"
+ elif issubclass(origin, BaseModel):
+ assert isinstance(value, type_)
+ assert assert_matches_model(type_, cast(Any, value), path=path)
+ else:
+ assert None, f"Unhandled field type: {type_}"
+
+
+def _assert_list_type(type_: type[object], value: object) -> None:
+ assert is_list(value)
+
+ inner_type = get_args(type_)[0]
+ for entry in value:
+ assert_matches_type(inner_type, entry, path=["<list entry>"])